Mirror of https://github.com/actions/runner.git, synced 2025-12-10 12:36:23 +00:00

Compare commits: 188 commits
| SHA1 |
|---|
| 5effa808be |
| 88098a6705 |
| 2ee7717774 |
| c946435010 |
| 0953ffa62b |
| 66727f76c8 |
| 7ee333b5cd |
| 3b34e203dc |
| e808190dd2 |
| d2cb9d7685 |
| 5ba6a2c78d |
| fc3ca9bb92 |
| a94a19bb36 |
| a9be5f6557 |
| 3600f20cd3 |
| 81a00fff3e |
| 31474098ff |
| 7ff6ff6afa |
| 56529a1c2f |
| 510fadf71a |
| 007ac8138b |
| 1e12b8909a |
| 9ceb3d481a |
| 3bce2eb09c |
| 80bf68db81 |
| a2e32170fd |
| 35dda19491 |
| 36bdf50bc6 |
| 95e2158dc6 |
| 3ebaeb9f19 |
| 9d678cb270 |
| 27788491ea |
| 5ba7affea4 |
| ce92d7a6b5 |
| d23ca0ba7a |
| 9d1c81f018 |
| 7a8abe726a |
| a9135e61a0 |
| feafd3e1d7 |
| dc3b2d3a36 |
| a371309079 |
| 5dd6bde4ca |
| c196103e58 |
| d55070da3e |
| 8279ae9a70 |
| 2e3b03623f |
| c18c8746db |
| 6332a52d76 |
| 8bb588bb69 |
| 4510f69c73 |
| c7b8552edf |
| 0face6e3af |
| 306be41266 |
| 4e85b8f3b7 |
| 444332ca88 |
| e6eb9e381d |
| 3a76a2e291 |
| 9976cb92a0 |
| d900654c42 |
| 65e3ec86b4 |
| a7f205593a |
| 55f60a4ffc |
| ca13b25240 |
| b0c2734380 |
| 9e7b56f698 |
| 8c29e33e88 |
| 976217d6ec |
| 562eafab3a |
| 9015b95a72 |
| 7d4bbf46de |
| 7b608e3e92 |
| f028b4e2b0 |
| 38f816c2ae |
| bc1fe2cfe0 |
| 89a13db2c3 |
| d59092d973 |
| 855b90c3d4 |
| 48ac96307c |
| 2e50dffb37 |
| e7b0844772 |
| d5a5550649 |
| 3d0147d322 |
| bd1f245aac |
| 005f1c15b1 |
| da3cb5506f |
| 32d439070b |
| ec9f8f1682 |
| 0921af735a |
| 1cc3c08cf2 |
| f9dca15c63 |
| 0877d9a533 |
| d5e40c6a60 |
| 391bc35bb9 |
| e4267b8434 |
| 2709cbc0ea |
| 5e0cde8649 |
| cb2b323781 |
| 6c3958f365 |
| 9d7bd4706b |
| 5822a38c39 |
| d42c9da2d7 |
| 121deedeb5 |
| a0942ed345 |
| 7cef9a27ca |
| df7e16954e |
| 4e7d27a53c |
| 89d1418e48 |
| e728b8594d |
| de4490d06d |
| 2e800f857e |
| 312c7668a8 |
| eaf39bb058 |
| 5815819f24 |
| 1aea046932 |
| eda463601c |
| f994ae0542 |
| 3c5aef791c |
| c4626d0c3a |
| 416a7ac4b8 |
| 11435857e4 |
| 6f260012a3 |
| 4fc87ddfc6 |
| b45c1b9440 |
| 73307c0a30 |
| cd8e4ddba1 |
| abf59bdcb6 |
| 09cf59c1e0 |
| 7a65236022 |
| 462b5117c8 |
| 6922f3cb86 |
| 911135e66c |
| 01c9a8a8af |
| 33d2d2c328 |
| a246b3b29d |
| c7768d4a7b |
| 70729fb3c4 |
| 1470a3b6e2 |
| 2fadf430e4 |
| f798f5606b |
| 3f7a01af93 |
| d5c54f9819 |
| 9f78ad3b34 |
| 97883c8cd5 |
| c5fa9fb062 |
| b2dcdc21dc |
| c126b52fe5 |
| 117ec1fff9 |
| d5c7097d2c |
| f9baec4b32 |
| a20ad4e121 |
| 2bd0b1af0e |
| baa6ded3bc |
| 7817e1a976 |
| d90273a068 |
| 2cdde6cb16 |
| 1f52dfa636 |
| 83b5742278 |
| ba69b5bc93 |
| 0e8777ebda |
| a5f06b3ec2 |
| be325f26a6 |
| dec260920f |
| b0a1294ef5 |
| 3d70ef2da1 |
| e23d68f6e2 |
| dff1024cd3 |
| 9fc0686dc2 |
| ab001a7004 |
| 178a618e01 |
| dfaf6e06ee |
| b0a71481f0 |
| 88875ca1b0 |
| a5eb8cb5c4 |
| 41f4ca3414 |
| aa9f5bf070 |
| 2d6042421f |
| c8890d0f3f |
| 53fb6297cb |
| f9b5d626c5 |
| d34afb54b1 |
| e291ebc58a |
| 6bec1e3bb8 |
| 0cba42590f |
| 94e7560ccd |
| d80ab095a5 |
| 2efd6f70e2 |
| a6f144b014 |
| 5294a3ee06 |
.github/workflows/build.yml (vendored): 3 changed lines
```diff
@@ -1,9 +1,10 @@
 name: Runner CI

 on:
+  workflow_dispatch:
   push:
     branches:
-    - master
+    - main
     - releases/*
     paths-ignore:
     - '**.md'
```
.github/workflows/codeql.yml (vendored, new file): 36 lines
@@ -0,0 +1,36 @@
```yaml
name: "Code Scanning - Action"

on:
  push:
  pull_request:
  schedule:
    - cron: '0 0 * * 0'

jobs:
  CodeQL-Build:

    strategy:
      fail-fast: false

    # CodeQL runs on ubuntu-latest, windows-latest, and macos-latest
    runs-on: ubuntu-latest

    steps:
    - name: Checkout repository
      uses: actions/checkout@v2

    # Initializes the CodeQL tools for scanning.
    - name: Initialize CodeQL
      uses: github/codeql-action/init@v1
      # Override language selection by uncommenting this and choosing your languages
      # with:
      #   languages: go, javascript, csharp, python, cpp, java

    - name: Manual build
      run: |
        ./dev.sh layout Release linux-x64
      working-directory: src

    - name: Perform CodeQL Analysis
      uses: github/codeql-action/analyze@v1
```
.github/workflows/e2etest.yml (vendored, new file): 335 lines
@@ -0,0 +1,335 @@
```yaml
name: Runner E2E Test

on:
  workflow_dispatch:
  push:
    branches:
    - main
    - releases/*

jobs:
  init:
    name: Initialize workflow ☕
    runs-on: ubuntu-latest
    outputs:
      unique_runner_label: ${{steps.generator.outputs.runner_label}}
    steps:
    - name: Delete all runners
      uses: actions/github-script@v3
      with:
        debug: true
        script: |
          var runnersResp = await github.actions.listSelfHostedRunnersForRepo({
            owner: 'actions',
            repo: 'runner',
            per_page: '100'
          });
          for(var i=0; i<runnersResp.data.total_count; i++){
            core.debug(JSON.stringify(runnersResp.data.runners[i]))
            await github.actions.deleteSelfHostedRunnerFromRepo({
              owner: 'actions',
              repo: 'runner',
              runner_id: runnersResp.data.runners[i].id
            });
          }
        github-token: ${{secrets.PAT}}
    - name: Generate Unique Runner label
      id: generator
      run: |
        label=$(openssl rand -hex 16)
        echo ::set-output name=runner_label::$label

  build:
    name: Build runner packages 🏗 📦
    strategy:
      matrix:
        runtime: [ linux-x64, linux-arm64, linux-arm, win-x64, osx-x64 ]
        include:
        - runtime: linux-x64
          os: ubuntu-latest
          devScript: ./dev.sh

        - runtime: linux-arm64
          os: ubuntu-latest
          devScript: ./dev.sh

        - runtime: linux-arm
          os: ubuntu-latest
          devScript: ./dev.sh

        - runtime: osx-x64
          os: macOS-latest
          devScript: ./dev.sh

        - runtime: win-x64
          os: windows-latest
          devScript: ./dev

    runs-on: ${{ matrix.os }}
    steps:
    - uses: actions/checkout@v1

    # Build runner layout
    - name: Build & Layout Release
      run: |
        ${{ matrix.devScript }} layout Release ${{ matrix.runtime }}
      working-directory: src

    # Create runner package tar.gz/zip
    - name: Package Release
      run: |
        ${{ matrix.devScript }} package Release ${{ matrix.runtime }}
      working-directory: src

    # Upload runner package tar.gz/zip as artifact
    - name: Publish Artifact
      uses: actions/upload-artifact@v1
      with:
        name: runner-package-${{ matrix.runtime }}
        path: _package

  dispatch_workflow:
    name: Dispatch workflow to runners 🚨
    needs: [init, build]
    runs-on: ubuntu-latest
    steps:
    - name: Dispatch workflow
      timeout-minutes: 10
      uses: actions/github-script@v3
      with:
        debug: true
        script: |
          function sleep(ms) { return new Promise(resolve => setTimeout(resolve, ms)); }
          async function dispatchWorkflow(runner) {
            await github.actions.createWorkflowDispatch({
              owner: 'actions',
              repo: 'runner',
              workflow_id: 'runner-basic-e2e-test-case.yml',
              ref: 'main',
              inputs: {target_runner: runner}
            });
          }
          var runWin64 = false, runLinux64 = false, runOsx64 = false, runLinuxARM64 = false;
          while (true) {
            core.info(`------------- Waiting for runners to be configured --------------`)
            await sleep(10000);
            var runnersResp = await github.actions.listSelfHostedRunnersForRepo({owner: 'actions', repo: 'runner', per_page: '100'});
            for (var i = 0; i < runnersResp.data.total_count; i++) {
              core.debug(JSON.stringify(runnersResp.data.runners[i]))
              var labels = runnersResp.data.runners[i].labels;
              for (var j = 0; j < labels.length; j++) {
                core.debug(`Comparing: ${labels[j].name} to win-x64/linux-x64/osx-x64/linux-arm64-${{ needs.init.outputs.unique_runner_label }}`)
                if (labels[j].name == 'win-x64-${{needs.init.outputs.unique_runner_label}}' && runWin64 == false) {
                  core.info(`------------------- Windows runner is configured, queue Windows Run -------------------------`)
                  runWin64 = true;
                  await dispatchWorkflow('win-x64-${{needs.init.outputs.unique_runner_label}}');
                  break;
                } else if (labels[j].name == 'linux-x64-${{needs.init.outputs.unique_runner_label}}' && runLinux64 == false) {
                  core.info(`------------------- Linux runner is configured, queue Linux Run -------------------------`)
                  runLinux64 = true;
                  await dispatchWorkflow('linux-x64-${{needs.init.outputs.unique_runner_label}}');
                  break;
                } else if (labels[j].name == 'osx-x64-${{needs.init.outputs.unique_runner_label}}' && runOsx64 == false) {
                  core.info(`------------------- macOS runner is configured, queue macOS Run -------------------------`)
                  runOsx64 = true;
                  await dispatchWorkflow('osx-x64-${{needs.init.outputs.unique_runner_label}}');
                  break;
                } else if (labels[j].name == 'linux-arm64-${{needs.init.outputs.unique_runner_label}}' && runLinuxARM64 == false) {
                  core.info(`------------------- Linux ARM64 runner is configured, queue Linux ARM64 Run-------------------------`)
                  runLinuxARM64 = true;
                  await dispatchWorkflow('linux-arm64-${{needs.init.outputs.unique_runner_label}}');
                  break;
                }
              }
            }
            if (runWin64 && runLinux64 && runOsx64 && runLinuxARM64) {
              core.info(`--------------------- ALL runner are running jobs --------------------------`)
              break;
            } else {
              core.info(`---------- Windows running: ${runWin64} -- Linux running: ${runLinux64} -- macOS running: ${runOsx64} -- Linux ARM64 running: ${runLinuxARM64} -----------`)
            }
          }
        github-token: ${{secrets.PAT}}

  LinuxE2E:
    needs: [build, init]
    runs-on: ubuntu-latest
    steps:
    - name: Download Runner
      uses: actions/download-artifact@v2
      with:
        name: runner-package-linux-x64
    - name: Unzip Runner Package
      run: |
        tar -xzf *.tar.gz
    - name: Configure Runner
      env:
        unique_runner_name: linux-x64-${{needs.init.outputs.unique_runner_label}}
      run: |
        ./config.sh --url ${{github.event.repository.html_url}} --unattended --name $unique_runner_name --pat ${{secrets.PAT}} --labels $unique_runner_name --replace
    - name: Start Runner and Wait for Job
      timeout-minutes: 5
      run: |
        ./run.sh --once
    - name: Remove Runner
      if: always()
      continue-on-error: true
      run: |
        ./config.sh remove --pat ${{secrets.PAT}}
    - name: Upload Runner Logs
      if: always()
      uses: actions/upload-artifact@v2
      with:
        name: linux_x64_logs
        path: _diag

  macOSE2E:
    needs: [build, init]
    runs-on: macos-latest
    steps:
    - name: Download Runner
      uses: actions/download-artifact@v2
      with:
        name: runner-package-osx-x64
    - name: Unzip Runner Package
      run: |
        tar -xzf *.tar.gz
    - name: Configure Runner
      env:
        unique_runner_name: osx-x64-${{needs.init.outputs.unique_runner_label}}
      run: |
        ./config.sh --url ${{github.event.repository.html_url}} --unattended --name $unique_runner_name --pat ${{secrets.PAT}} --labels $unique_runner_name --replace
    - name: Start Runner and Wait for Job
      timeout-minutes: 5
      run: |
        ./run.sh --once
    - name: Remove Runner
      if: always()
      continue-on-error: true
      run: |
        ./config.sh remove --pat ${{secrets.PAT}}
    - name: Upload Runner Logs
      if: always()
      uses: actions/upload-artifact@v2
      with:
        name: osx_x64_logs
        path: _diag

  ARM64E2E:
    needs: [build, init]
    runs-on: ubuntu-latest
    steps:
    - name: Download Runner
      uses: actions/download-artifact@v2
      with:
        name: runner-package-linux-arm64
    - name: Unzip Runner Package
      run: |
        tar -xzf *.tar.gz
    - name: Prepare QEMU
      run: |
        docker run --rm --privileged multiarch/qemu-user-static:register --reset
    - name: Configure Runner
      uses: docker://multiarch/ubuntu-core:arm64-bionic
      with:
        args: 'bash -c "apt-get update && apt-get install -y curl && ./bin/installdependencies.sh && ./config.sh --unattended --name $unique_runner_name --url ${{github.event.repository.html_url}} --pat ${{secrets.PAT}} --labels $unique_runner_name --replace"'
      env:
        RUNNER_ALLOW_RUNASROOT: 1
        unique_runner_name: linux-arm64-${{needs.init.outputs.unique_runner_label}}

    - name: Start Runner and Wait for Job
      timeout-minutes: 5
      uses: docker://multiarch/ubuntu-core:arm64-bionic
      with:
        args: 'bash -c "apt-get update && apt-get install -y curl git && ./bin/installdependencies.sh && ./run.sh --once"'
      env:
        RUNNER_ALLOW_RUNASROOT: 1

    - name: Remove Runner
      if: always()
      continue-on-error: true
      uses: docker://multiarch/ubuntu-core:arm64-bionic
      with:
        args: 'bash -c "apt-get update && apt-get install -y curl && ./bin/installdependencies.sh && ./config.sh remove --pat ${{secrets.PAT}}"'
      env:
        RUNNER_ALLOW_RUNASROOT: 1

    - name: Upload Runner Logs
      if: always()
      uses: actions/upload-artifact@v2
      with:
        name: linux_arm64_logs
        path: _diag

  WindowsE2E:
    needs: [build, init]
    runs-on: windows-latest
    steps:
    - name: Download Runner
      uses: actions/download-artifact@v2
      with:
        name: runner-package-win-x64
    - name: Unzip Runner Package
      run: |
        Get-ChildItem *.zip | Expand-Archive -DestinationPath $PWD
    - name: Configure Runner
      shell: cmd
      run: |
        config.cmd --unattended --url ${{github.event.repository.html_url}} --name %unique_runner_name% --pat ${{secrets.PAT}} --labels %unique_runner_name% --replace
      env:
        unique_runner_name: win-x64-${{needs.init.outputs.unique_runner_label}}

    - name: Start Runner and Wait for Job
      shell: cmd
      timeout-minutes: 5
      run: |
        run.cmd --once
    - name: Remove Runner
      shell: cmd
      if: always()
      continue-on-error: true
      run: |
        config.cmd remove --pat ${{secrets.PAT}}
    - name: Upload Runner Logs
      if: always()
      uses: actions/upload-artifact@v2
      with:
        name: win_x64_logs
        path: _diag

  check:
    name: Check runner logs 🕵️♂️
    needs: [WindowsE2E, LinuxE2E, macOSE2E, ARM64E2E]
    runs-on: ubuntu-latest
    steps:
    - name: Download Linux Runner Logs
      uses: actions/download-artifact@v2
      with:
        name: linux_x64_logs
        path: linux_x64_logs
    - name: Download macOS Runner Logs
      uses: actions/download-artifact@v2
      with:
        name: osx_x64_logs
        path: osx_x64_logs
    - name: Download Linux ARM64 Runner Logs
      uses: actions/download-artifact@v2
      with:
        name: linux_arm64_logs
        path: linux_arm64_logs
    - name: Download Windows Runner Logs
      uses: actions/download-artifact@v2
      with:
        name: win_x64_logs
        path: win_x64_logs
    - name: Check Runner Logs
      run: |
        function failed()
        {
          local error=${1:-Undefined error}
          echo "Failed: $error" >&2
          exit 1
        }
        grep -R "completed with result: Succeeded" ./win_x64_logs || failed "Windows Runner fail to run the job, please check logs"
        grep -R "completed with result: Succeeded" ./linux_x64_logs || failed "Linux Runner fail to run the job, please check logs"
        grep -R "completed with result: Succeeded" ./osx_x64_logs || failed "macOS Runner fail to run the job, please check logs"
        grep -R "completed with result: Succeeded" ./linux_arm64_logs || failed "Linux ARM64 Runner fail to run the job, please check logs"
```
.github/workflows/release.yml (vendored): 3 changed lines
```diff
@@ -1,13 +1,14 @@
 name: Runner CD

 on:
+  workflow_dispatch:
   push:
     paths:
     - releaseVersion

 jobs:
   check:
-    if: startsWith(github.ref, 'refs/heads/releases/') || github.ref == 'refs/heads/master'
+    if: startsWith(github.ref, 'refs/heads/releases/') || github.ref == 'refs/heads/main'
     runs-on: ubuntu-latest
     steps:
     - uses: actions/checkout@v2
```
.github/workflows/runner-basic-e2e-test-case.yml (vendored, new file): 31 lines
@@ -0,0 +1,31 @@
```yaml
name: Runner Basics Test Case

on:
  workflow_dispatch:
    inputs:
      target_runner:
        description: 'Self-hosted runner will run the job'
        required: true

jobs:
  test:
    runs-on:
    - self-hosted
    - ${{github.event.inputs.target_runner}}

    name: Runner Basic Test 🛠
    steps:
    - uses: actions/checkout@v2
    - name: Run a one-line script
      run: echo Hello, world!
    - name: Run a multi-line script
      shell: bash
      run: |
        printenv|sort
        cat $GITHUB_EVENT_PATH
    - name: Validate GitHub Context
      shell: bash
      run: |
        declare -a context_vars=("GITHUB_ACTION" "GITHUB_ACTIONS" "GITHUB_REPOSITORY" "GITHUB_WORKSPACE" "GITHUB_SHA" "GITHUB_RUN_ID" "GITHUB_RUN_NUMBER")
        for var in ${context_vars[@]};
        do [ -z "${!var}" ] && echo "##[error]$var not found" && exit 1 || echo "$var: ${!var}"; done
```
CODEOWNERS (new file): 1 line
```
@@ -0,0 +1 @@
* @actions/actions-runtime
```
```diff
@@ -5,8 +5,9 @@
 # GitHub Actions Runner

 [](https://github.com/actions/runner/actions)
+[](https://github.com/actions/runner/actions)

-The runner is the application that runs a job from a GitHub Actions workflow. The runner can run on the [hosted machine pools](https://github.com/actions/virtual-environments) or run on [self-hosted environments](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/about-self-hosted-runners).
+The runner is the application that runs a job from a GitHub Actions workflow. It is used by GitHub Actions in the [hosted virtual environments](https://github.com/actions/virtual-environments), or you can [self-host the runner](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/about-self-hosted-runners) in your own environment.

 ## Get Started
```
```diff
@@ -22,7 +22,7 @@ These are described in detail below:
   - http://proxy.com
   - http://127.0.0.1:8080
   - http://user:password@proxy.com
-- `no_proxy` a comma seperated list of hosts that should not use the proxy. An optional port may be specified
+- `no_proxy` a comma separated list of hosts that should not use the proxy. An optional port may be specified
   - `google.com`
   - `yahoo.com:443`
   - `google.com,bing.com`
@@ -31,9 +31,9 @@ We won't use `http_proxy` for https traffic when `https_proxy` is not set, this
 Otherwise action authors and workflow users need to adjust to differences between the runner proxy convention, and tools used by their actions and scripts.

 Example:
-Customer set `http_proxy=http://127.0.0.1:8888` and configure the runner against `https://github.com/owner/repo`, with the `https_proxy` -> `http_proxy` fallback, the runner will connect to server without any problem. However, if user runs `git push` to `https://github.com/owner/repo`, `git` won't use the proxy since it require `https_proxy` to be set for any https traffic.
+Customer set `http_proxy=http://127.0.0.1:8888` and configure the runner against `https://github.com/owner/repo`, with the `https_proxy` -> `http_proxy` fallback, the runner will connect to the server without any problem. However, if a user runs `git push` to `https://github.com/owner/repo`, `git` won't use the proxy since it requires `https_proxy` to be set for any https traffic.

-> `golang`, `node.js` and other dev tools from the linux community use `http_proxy` for both http and https traffic base on my research.
+> `golang`, `node.js` and other dev tools from the linux community use `http_proxy` for both http and https traffic based on my research.

 A majority of our users are using Linux where these variables are commonly required to be set by various programs. By reading these values, we simplify the process for self hosted runners to set up proxy, and expose it in a way users are already familiar with.

@@ -43,7 +43,7 @@ We will support the lowercase and uppercase variants, with lowercase taking prio

 ### No Proxy Format

-While exact implementations are different per application on handle `no_proxy` env, most applications accept a comma separated list of hosts. Some accept wildcard characters (*). We are going to do exact case-insentive matches, and not support wildcards at this time.
+While exact implementations are different per application on handle `no_proxy` env, most applications accept a comma separated list of hosts. Some accept wildcard characters (*). We are going to do exact case-insensitive matches, and not support wildcards at this time.
 For example:
 - example.com will match example.com, foo.example.com, foo.bar.example.com
 - foo.example.com will match bar.foo.example.com and foo.example.com
@@ -57,5 +57,5 @@ We will not support IP addresses for `no_proxy`, only hostnames.
 3. The runner will read from the environmental variables during config and runtime and use the provided proxy if it exists
 4. Users may need to pass these environmental variables into other applications if they do not natively take these variables
 5. Action authors may need to update their workflows to react to the these environment variables
-6. We will document the way of setting environmental variables for runners using the environmental variables and how the runner uses them
+6. We will document the way of setting environmental variables for runners using the environment variables and how the runner uses them
 7. Like all other secrets, users will be able to relatively easily figure out proxy password if they can modify a workflow file running on a self hosted machine
```
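The matching rule described in the hunks above (exact, case-insensitive host or parent-domain matches, no wildcards, no IP addresses) can be illustrated with a small shell sketch. This is not the runner's implementation; the function name and the simplified port handling are assumptions made for the illustration only.

```bash
#!/usr/bin/env bash
# Hypothetical illustration of the no_proxy matching rule described above:
# exact, case-insensitive matches on the host or any of its parent domains.
# Ports in no_proxy entries (e.g. yahoo.com:443) are ignored here for brevity.
no_proxy_bypasses() {
  local host entry
  host=$(printf '%s' "$1" | tr '[:upper:]' '[:lower:]')
  IFS=',' read -ra entries <<< "${no_proxy:-}"
  for entry in "${entries[@]}"; do
    # trim whitespace, lowercase, drop an optional :port suffix, skip empties
    entry=$(printf '%s' "$entry" | tr '[:upper:]' '[:lower:]' | xargs)
    entry=${entry%%:*}
    [ -z "$entry" ] && continue
    if [ "$host" = "$entry" ] || [[ "$host" == *".$entry" ]]; then
      return 0   # bypass the proxy
    fi
  done
  return 1       # use the proxy
}

export no_proxy="example.com,YAHOO.com:443"
no_proxy_bypasses foo.bar.example.com && echo "bypass proxy" || echo "use proxy"   # bypass proxy
no_proxy_bypasses notexample.com      && echo "bypass proxy" || echo "use proxy"   # use proxy
```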
docs/adrs/0274-step-outcome-and-conclusion.md (new file): 62 lines
@@ -0,0 +1,62 @@
# ADR 0274: Step outcome and conclusion

**Date**: 2020-01-13

**Status**: Accepted

## Context

This ADR proposes adding `steps.<id>.outcome` and `steps.<id>.conclusion` to the steps context.

This allows a downstream step to run based on whether a previous step succeeded or failed.

Reminder: currently the steps context contains `steps.<id>.outputs`.

## Decision

For steps that have completed, populate `steps.<id>.outcome` and `steps.<id>.conclusion` with one of the following values:

- `success`
- `failure`
- `cancelled`
- `skipped`

When a continue-on-error step fails, the outcome will be `failure` even though the final conclusion is `success`.

### Example

```yaml
steps:

- id: experimental
  continue-on-error: true
  run: ./build.sh experimental

- if: ${{ steps.experimental.outcome == 'success' }}
  run: ./publish.sh experimental
```

### Terminology

The runs API uses the term `conclusion`.

Therefore we use a different term, `outcome`, for the value prior to continue-on-error.

The following is a snippet from the runs API response payload:

```json
"steps": [
  {
    "name": "Set up job",
    "status": "completed",
    "conclusion": "success",
    "number": 1,
    "started_at": "2020-01-09T11:06:16.000-05:00",
    "completed_at": "2020-01-09T11:06:18.000-05:00"
  },
```

## Consequences

- Update runner
- Update [docs](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/contexts-and-expression-syntax-for-github-actions#steps-context)
```diff
@@ -34,7 +34,7 @@ A way out for rare cases where scoping is a problem.

 `##[remove-matcher]owner`

-For the this to be usable, the `owner` needs to be discoverable. Therefore, debug print the owner on registration.
+For this to be usable, the `owner` needs to be discoverable. Therefore, debug print the owner on registration.

 ### Single line matcher

@@ -184,7 +184,7 @@ Solving this problem means:
 - Use the `github.workspace` (where the repo is cloned on disk)
 - Match against a repository to determine the relative path within the repo

-This is a place where we diverge from VSCode. VSCode task configuration are specific to the local workspace (workspace root is known or can be specified). We're solving a more generic problem, so we need more information - specifically the `fromPath` property - in order to accurately root the path.
+This is a place where we diverge from VSCode. VSCode task configurations are specific to the local workspace (workspace root is known or can be specified). We're solving a more generic problem, so we need more information - specifically the `fromPath` property - in order to accurately root the path.

 In order to avoid creating inaccurate hyperlinks on the error issues, the agent will verify the file exists and is in the main repository. Otherwise omit the file property from the error issue and debug trace what happened.

@@ -203,7 +203,7 @@ Problem matchers are unable to interpret severity strings other than `warning` a

 However some tools indicate error/warning in different ways. For example `flake8` uses codes like `E100`, `W200`, and `F300` (error, warning, fatal, respectively).

-Therefore, allow a property `severity`, sibling to `owner`, which identifies the default severity for the problem matcher. This allows two problem matchers are registered - one for warnings and one for errors.
+Therefore, allow a property `severity`, sibling to `owner`, which identifies the default severity for the problem matcher. This allows two problem matchers to be registered - one for warnings and one for errors.

 For example, given the following `flake8` output:
```
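To make the `severity` idea in the hunk above concrete, here is a hedged sketch of registering two flake8 matchers, one per default severity, from a workflow step. The owner names, the regular expression, and the file name are illustrative assumptions, not something taken from the ADR itself.

```bash
#!/usr/bin/env bash
# Hypothetical example: one matcher for flake8 E…/F… codes (errors) and one
# for W… codes (warnings), using the proposed `severity` property.
cat > flake8-matchers.json <<'EOF'
{
  "problemMatcher": [
    {
      "owner": "flake8-errors",
      "severity": "error",
      "pattern": [
        {
          "regexp": "^(.+):(\\d+):(\\d+): ([EF]\\d+) (.+)$",
          "file": 1,
          "line": 2,
          "column": 3,
          "code": 4,
          "message": 5
        }
      ]
    },
    {
      "owner": "flake8-warnings",
      "severity": "warning",
      "pattern": [
        {
          "regexp": "^(.+):(\\d+):(\\d+): (W\\d+) (.+)$",
          "file": 1,
          "line": 2,
          "column": 3,
          "code": 4,
          "message": 5
        }
      ]
    }
  ]
}
EOF

# Register both matchers, run the linter, then clean up by owner.
echo "::add-matcher::flake8-matchers.json"
flake8 . || true
echo "::remove-matcher owner=flake8-errors::"
echo "::remove-matcher owner=flake8-warnings::"
```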
```diff
@@ -84,7 +84,7 @@ powershell/pwsh
 - Users can always opt out by not using the builtins, and providing a shell option like: `pwsh -File {0}`, or `powershell -Command "& '{0}'"`, depending on need

 cmd
-- There doesnt seem to be a way to fully opt in to fail-fast behavior other than writing your script to check each error code and respond accordingly, so we cant actually provide that behavior by default, it will be completely up to the user to write this behavior into their script
+- There doesn't seem to be a way to fully opt in to fail-fast behavior other than writing your script to check each error code and respond accordingly, so we can't actually provide that behavior by default, it will be completely up to the user to write this behavior into their script
 - cmd.exe will exit (return the error code to the runner) with the errorlevel of the last program it executed. This is internally consistent with the previous default behavior (sh, pwsh) and is the cmd.exe default, so we keep that behavior

 ## Consequences
```
docs/adrs/0354-runner-machine-info.md (new file): 35 lines
@@ -0,0 +1,35 @@
# ADR 354: Expose runner machine info

**Date**: 2020-03-02

**Status**: Pending

## Context

- Provide a mechanism in the runner to include extra information in the `Set up job` step's log.
  Ex: Include OS/Software info from the Hosted image.

## Decision

The runner will look for a file `.setup_info` under the runner's root directory. The file can be a JSON with a simple schema.

```json
[
  {
    "group": "OS Detail",
    "detail": "........"
  },
  {
    "group": "Software Detail",
    "detail": "........"
  }
]
```

The runner will use `##[group]` and `##[endgroup]` to fold all detail info into an expandable group.

Both [virtual-environments](https://github.com/actions/virtual-environments) and self-hosted runners can use this mechanism to add extra logging info to the `Set up job` step's log.

## Consequences

1. Change the runner to best-effort read/parse the `.extra_setup_info` file under the runner root directory.
2. [virtual-environments](https://github.com/actions/virtual-environments) generates the file during image generation.
3. Change the MMS provisioner to properly copy the file to the runner root directory at runtime.
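As an illustration only, an image-generation or machine-setup script could drop a `.setup_info` file like the sketch below. The install path, the commands used to collect details, and the assumption that their output contains no quote characters are all assumptions for this example, not a documented contract.

```bash
#!/usr/bin/env bash
# Hypothetical sketch: write a .setup_info file into the runner root directory
# so the runner can fold the details into the "Set up job" log.
RUNNER_ROOT="${1:-/opt/actions-runner}"   # assumed install location

# Values are assumed to contain no double quotes, so they can be inlined as JSON strings.
os_detail="$(uname -srm)"
docker_detail="$(docker --version 2>/dev/null || echo 'docker not installed')"

cat > "${RUNNER_ROOT}/.setup_info" <<EOF
[
  {
    "group": "OS Detail",
    "detail": "${os_detail}"
  },
  {
    "group": "Software Detail",
    "detail": "${docker_detail}"
  }
]
EOF
```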
docs/adrs/0361-wrapper-action.md (new file): 75 lines
@@ -0,0 +1,75 @@
# ADR 361: Wrapper Action

**Date**: 2020-03-06

**Status**: Pending

## Context

In addition to an action's regular execution, action authors may want their action to have a chance to participate in:
- Job initialization
  My action will collect machine resource usage (CPU/RAM/Disk) during a workflow job execution; we need to start the perf recorder at the beginning of the job.
- Job cleanup
  My action will dirty the local workspace or machine environment during execution; we need to clean up these changes at the end of the job.
  Ex: `actions/checkout@v2` will write `github.token` into the local `.git/config` during execution; it has a post-job cleanup defined to undo the changes.

## Decision

### Add `pre` and `post` execution to action

Node Action Example:

```yaml
name: 'My action with pre'
description: 'My action with pre'
runs:
  using: 'node12'
  pre: 'setup.js'
  pre-if: 'success()' # Optional
  main: 'index.js'
  post: 'cleanup.js'
  post-if: 'success()' # Optional
```

Container Action Example:

```yaml
name: 'My action with pre'
description: 'My action with pre'
runs:
  using: 'docker'
  image: 'mycontainer:latest'
  pre-entrypoint: 'setup.sh'
  pre-if: 'success()' # Optional
  entrypoint: 'entrypoint.sh'
  post-entrypoint: 'cleanup.sh'
  post-if: 'success()' # Optional
```

Both `pre` and `post` will have their default `pre-if`/`post-if` set to `always()`.
Setting `pre-if` to `always()` makes sure that no matter how the `main` step's condition evaluates at runtime, the `pre` has already run.
`pre` steps execute in the order in which the steps are defined.
`pre` will always be added to the job steps list during job setup.
> An action referenced from a local repository (`./my-action`) won't get its `pre` set up correctly since the repository hasn't been checked out during job initialization.
> We can't use the GitHub API to download the repository since there is about a 3 minute delay between `git push` and the new commit becoming available to download using the GitHub API.

`post` will be pushed onto a `poststeps` stack lazily when the action's `pre` or `main` execution passes its `if` condition check and is about to run. You can't have an action that only contains a `post`. We will pop and run each `post` after all `pre` and `main` steps have finished.
> Currently `post` works for both repository actions (`org/repo@v1`) and local actions (`./my-action`)

Valid action:
- only has `main`
- has `pre` and `main`
- has `main` and `post`
- has `pre`, `main` and `post`

Invalid action:
- only has `pre`
- only has `post`
- has `pre` and `post`

Potential downsides of introducing `pre`:

- Extra magic wrt step order. Users should control the step order, especially when we introduce templates.
- Eliminates the possibility of lazily downloading the action tarball; since `pre` always runs by default, we have to download the tarball to check whether the action defined a `pre`.
- `pre` doesn't work with local actions; we suggested customers use local actions for testing their action changes (e.g. CI for their action) to avoid the delay between `git push` and the GitHub repo tarball download API.
- The condition on the `pre` can't be controlled using dynamic step outputs; `pre` executes too early.
docs/adrs/0397-runner-registration-labels.md (new file): 56 lines
@@ -0,0 +1,56 @@
# ADR 0397: Support adding custom labels during runner config

**Date**: 2020-03-30

**Status**: Approved

## Context

Since configuring self-hosted runners is commonly automated via scripts, the labels need to be able to be created during configuration. The runner currently registers the built-in labels (os, arch) during registration but does not accept labels via command line args to extend the set registered.

See Issue: https://github.com/actions/runner/issues/262

This is another version of [ADR275](https://github.com/actions/runner/pull/275)

## Decision

This ADR proposes that we add a `--labels` option to `config`, which could be used to add custom additional labels to the configured runner.

For example, to add a single extra label the operator could run:
```bash
./config.sh --labels mylabel
```
> Note: the current runner command line parsing and envvar override algorithm only supports a single argument (key).

This would add the label `mylabel` to the runner, and enable users to select the runner in their workflow using this label:
```yaml
runs-on: [self-hosted, mylabel]
```

To add multiple labels the operator could run:
```bash
./config.sh --labels mylabel,anotherlabel
```
> Note: the current runner command line parsing and envvar override algorithm only supports a single argument (key).

This would add the labels `mylabel` and `anotherlabel` to the runner, and enable users to select the runner in their workflow using these labels:
```yaml
runs-on: [self-hosted, mylabel, anotherlabel]
```

It would not be possible to remove labels from an existing runner using `config.sh`; instead, labels would have to be removed using the GitHub UI.

The labels argument will split on commas, then trim and discard empty strings. That effectively means don't use commas in unattended config label names. Alternatively we could choose to escape commas, but that's a nice-to-have.
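The splitting rule in the paragraph above can be approximated with a small shell sketch. The actual parsing lives in the runner's configuration code; the function below is only an illustration of the described behaviour.

```bash
#!/usr/bin/env bash
# Approximate the described rule: split on commas, trim whitespace, drop empties.
parse_labels() {
  local raw="$1" label
  IFS=',' read -ra parts <<< "$raw"
  for label in "${parts[@]}"; do
    label=$(printf '%s' "$label" | xargs)   # trim surrounding whitespace
    [ -n "$label" ] && printf '%s\n' "$label"
  done
}

parse_labels "mylabel, anotherlabel,,  gpu "
# mylabel
# anotherlabel
# gpu
```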
## Replace

If an existing runner exists and the option to replace is chosen (interactively or via unattended config as in this scenario), then the labels will be replaced / overwritten (not merged).

## Overriding built-in labels

Note that it is possible to register "built-in" hosted labels like `ubuntu-latest` and this is not considered an error. This is an effective way for the org / runner admin to dictate by policy through registration that this set of runners will be used, without having to edit all the workflow files now and in the future.

We will also not make other restrictions such as limiting explicitly adding os / arch labels and validating them. We will assume that explicit labels were added for a reason; not restricting offers the most flexibility and future proofing / compat.

## Consequences

The ability to add custom labels to a self-hosted runner would enable most scenarios where job runner selection based on runner capabilities or characteristics is required.
docs/adrs/0549-composite-run-steps.md (new file): 378 lines
@@ -0,0 +1,378 @@
# ADR 0549: Composite Run Steps

**Date**: 2020-06-17

**Status**: Accepted

## Context

Customers want to be able to compose actions from actions (ex: https://github.com/actions/runner/issues/438)

An important step towards meeting this goal is to build in functionality for actions where users can simply execute any number of steps.

### Guiding Principles

We don't want the workflow author to need to know how the internal workings of the action work. Users shouldn't know the internal workings of the composite action (for example, `default.shell` and `default.workingDir` should not be inherited from the workflow file to the action file). When deciding how to design certain parts of composite run steps, we want to think one logical step from the consumer.

A composite action is treated as **one** individual job step (this is known as encapsulation).

## Decision

**In this ADR, we only support running multiple run steps in an Action.** In doing so, we build in support for mapping and flowing the inputs, outputs, and env variables (ex: all nested steps should have access to their parents' input variables, and nested steps can overwrite the input variables).

### Composite Run Steps Features

This feature supports at the top action level:
- name
- description
- inputs
- runs
- outputs

This feature supports at the run step level:
- name
- id
- run
- env
- shell
- working-directory

This feature **does not support** at the run step level:
- timeout-minutes
- secrets
- conditionals (needs, if, etc.)
- continue-on-error

### Steps

Example `workflow.yml`

```yaml
jobs:
  build:
    runs-on: self-hosted
    steps:
    - id: step1
      uses: actions/setup-python@v1
    - id: step2
      uses: actions/setup-node@v2
    - uses: actions/checkout@v2
    - uses: user/composite@v1
    - name: workflow step 1
      run: echo hello world 3
    - name: workflow step 2
      run: echo hello world 4
```

Example `user/composite/action.yml`

```yaml
runs:
  using: "composite"
  steps:
    - run: pip install -r requirements.txt
      shell: bash
    - run: npm install
      shell: bash
```

Example Output

```yaml
[npm installation output]
[pip requirements output]
echo hello world 3
echo hello world 4
```

We add a token called "composite" which allows our Runner code to process composite actions. By invoking "using: composite", our Runner code then processes the "steps" attribute, converts this template code to a list of steps, and finally runs each run step sequentially. If any step fails and there are no `if` conditions defined, the whole composite action job fails.

### Defaults

We will not support "defaults" in a composite action.

### Shell and Working-directory

For each run step in a composite action, the action author can set the `shell` and `working-directory` attributes for that step. The shell attribute is **required** for each run step because the action author does not know what operating system the workflow author is using, so we need to explicitly prevent unknown behavior by making sure that each run step has an explicit shell **set by the action author.** On the other hand, `working-directory` is optional. Moreover, the composite action author can map in values from the `inputs` for its `shell` and `working-directory` attributes at the step level for an action.

For example,

`action.yml`

```yaml
inputs:
  shell_1:
    description: 'Your name'
    default: 'pwsh'
steps:
  - run: echo 1
    shell: ${{ inputs.shell_1 }}
```

Note, the workflow file and action file are treated as separate entities. **So, the workflow `defaults` will never change the `shell` and `working-directory` values in the run steps in a composite action.** Note, `defaults` in a workflow only apply to run steps, not "uses" steps (steps that use an action).

### Running Local Scripts

Example `workflow.yml`:
```yaml
jobs:
  build:
    runs-on: self-hosted
    steps:
    - uses: user/composite@v1
```

Example `user/composite/action.yml`:

```yaml
runs:
  using: "composite"
  steps:
    - run: chmod +x ${{ github.action_path }}/test/script2.sh
      shell: bash
    - run: chmod +x $GITHUB_ACTION_PATH/script.sh
      shell: bash
    - run: ${{ github.action_path }}/test/script2.sh
      shell: bash
    - run: $GITHUB_ACTION_PATH/script.sh
      shell: bash
```

Where `user/composite` has the file structure:
```
.
+-- action.yml
+-- script.sh
+-- test
|   +-- script2.sh
```

Users will be able to run scripts located in their action folder by first prepending the relative path and script name with `$GITHUB_ACTION_PATH` or `github.action_path`, which contains the path the composite action is downloaded to and where those "files" live. Note, you'll have to use `chmod` before running each script if you do not check your script files into your GitHub repo with the executable bit turned on.

### Inputs

Example `workflow.yml`:

```yaml
steps:
  - id: foo
    uses: user/composite@v1
    with:
      your_name: "Octocat"
```

Example `user/composite/action.yml`:

```yaml
inputs:
  your_name:
    description: 'Your name'
    default: 'Ethan'
runs:
  using: "composite"
  steps:
    - run: echo hello ${{ inputs.your_name }}
      shell: bash
```

Example Output:

```
hello Octocat
```

Each input variable in the composite action is only viewable in its own scope.

### Outputs

Example `workflow.yml`:

```yaml
...
steps:
  - id: foo
    uses: user/composite@v1
  - run: echo random-number ${{ steps.foo.outputs.random-number }}
    shell: bash
```

Example `user/composite/action.yml`:

```yaml
outputs:
  random-number:
    description: "Random number"
    value: ${{ steps.random-number-generator.outputs.random-id }}
runs:
  using: "composite"
  steps:
    - id: random-number-generator
      run: echo "::set-output name=random-id::$(echo $RANDOM)"
      shell: bash
```

Example Output:

```
::set-output name=my-output::43243
random-number 43243
```

Each of the output variables from the composite action is viewable from the workflow file that uses the composite action. In other words, every child action's output(s) are viewable only by its parent using dot notation (ex `steps.foo.outputs.random-number`).

Moreover, the output ids are only accessible within the scope where they were defined. Note that in the example above, our `workflow.yml` file should not have access to the output id (i.e. `random-id`). The reason why we are doing this is that we don't want to require the workflow author to know the internal workings of the composite action.

### Context

Similar to the workflow file, the composite action has access to the [same context objects](https://help.github.com/en/actions/reference/context-and-expression-syntax-for-github-actions#contexts) (ex: `github`, `env`, `strategy`).

### Environment

In the composite action, you'll only be able to use `::set-env::` to set environment variables, just like you could with other actions.

### Secrets

**We will not support "Secrets" in a composite action for now. This functionality will be focused on in a future ADR.**

We'll pass the secrets from the composite action's parents (ex: the workflow file) to the composite action. Secrets can be created in the composite action with the secrets context. In the actions yaml, we'll automatically mask the secret.

### If Condition

**If and needs conditions will not be supported in the composite run steps feature. They will be supported later on in a new feature.**

Old reasoning:

Example `workflow.yml`:

```yaml
steps:
  - run: exit 1
  - uses: user/composite@v1 # <--- this will run, as it's marked as always running
    if: always()
```

Example `user/composite/action.yml`:

```yaml
runs:
  using: "composite"
  steps:
    - run: echo "just succeeding"
      shell: bash
    - run: echo "I will run, as my current scope is succeeding"
      shell: bash
      if: success()
    - run: exit 1
      shell: bash
    - run: echo "I will not run, as my current scope is now failing"
      shell: bash
```

**We will not support "if Condition" in a composite action for now. This functionality will be focused on in a future ADR.**

See the paragraph below for a rudimentary approach (thank you to @cybojenix for the idea, example, and explanation for this approach):

The `if` statement in the parent (in the example above, this is the `workflow.yml`) shows whether or not we should run the composite action. So, our composite action will run since the `if` condition for running the composite action is `always()`.

**Note that the if condition on the parent does not propagate to the rest of its children though.**

In the child action (in this example, this is the `action.yml`), it starts with a clean slate (in other words, no imposing if conditions). Similar to the logic in the paragraph above, `echo "I will run, as my current scope is succeeding"` will run since the `if` condition checks that the previous steps **within this composite action** have not failed. `run: echo "I will not run, as my current scope is now failing"` will not run since the previous step resulted in an error and, by default, the if expression is set to `success()` if the if condition is not set for a step.

What if a step has `cancelled()`? We do the opposite of our approach above if `cancelled()` is used for any of our composite run steps. We will cancel any step that has this condition if the workflow is cancelled at all.

### Timeout-minutes

Example `workflow.yml`:

```yaml
steps:
  - id: bar
    uses: user/test@v1
    timeout-minutes: 50
```

Example `user/composite/action.yml`:

```yaml
runs:
  using: "composite"
  steps:
    - id: foo1
      run: echo test 1
      timeout-minutes: 10
      shell: bash
    - id: foo2
      run: echo test 2
      shell: bash
    - id: foo3
      run: echo test 3
      timeout-minutes: 10
      shell: bash
```

**We will not support "timeout-minutes" in a composite action for now. This functionality will be focused on in a future ADR.**

A composite action in its entirety is a job. You can set timeout-minutes for the whole composite action or for its steps, as long as the sum of the `timeout-minutes` for each composite action step that has the attribute `timeout-minutes` is less than or equal to the `timeout-minutes` for the composite action. There is no default timeout-minutes for each composite action step.

If the time taken for any of the steps in combination or individually exceeds the whole composite action's `timeout-minutes` attribute, the whole job will fail (1). If an individual step exceeds its own `timeout-minutes` attribute but the total time that has been used including this step is below the overall composite action `timeout-minutes`, the individual step will fail but the rest of the steps will run based on their own `timeout-minutes` attribute (they will still abide by condition (1) though).

For reference, in the example above, if the composite step `foo1` takes 11 minutes to run, that step will fail but the rest of the steps, `foo2` and `foo3`, will proceed as long as their total runtime with the previously failed `foo1` step is less than the composite action's `timeout-minutes` (50 minutes). If the composite step `foo2` takes 51 minutes to run, it will cause the whole composite action job to fail.

The rationale behind this is that users can configure their steps with the `if` condition to conditionally set how steps rely on each other. Due to the additional capabilities that are offered with combining `timeout-minutes` and/or `if`, we wanted the `timeout-minutes` condition to be as dumb as possible and not affect other steps.

[Usage limits still apply](https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions?query=if%28%29#usage-limits)

### Continue-on-error

Example `workflow.yml`:

```yaml
steps:
  - run: exit 1
  - id: bar
    uses: user/test@v1
    continue-on-error: false
  - id: foo
    run: echo "Hello World" <------- This step will not run
```

Example `user/composite/action.yml`:

```yaml
runs:
  using: "composite"
  steps:
    - run: exit 1
      continue-on-error: true
      shell: bash
    - run: echo "Hello World 2" <----- This step will run
      shell: bash
```

**We will not support "continue-on-error" in a composite action for now. This functionality will be focused on in a future ADR.**

If any of the steps fail in the composite action and `continue-on-error` is set to `false` for the whole composite action step in the workflow file, then the steps below it will not run. On the flip side, if `continue-on-error` is set to `true` for the whole composite action step in the workflow file, the next job step will run.

For the composite action steps, it follows the same logic as above. In this example, `"Hello World 2"` will be outputted because the previous step has `continue-on-error` set to `true`, although that previous step errored.

### Visualizing Composite Action in the GitHub Actions UI

We want all the composite action's steps to be condensed into the original composite action node.

Here is a visual representation of the [first example](#Steps)

```yaml
| composite_action_node |
| echo hello world 1 |
| echo hello world 2 |
| echo hello world 3 |
| echo hello world 4 |
```

## Consequences

This ADR lays the framework for eventually supporting nested Composite Actions within Composite Actions. This ADR allows users to run multiple run steps within a GitHub Composite Action with the support of inputs, outputs, environment, and context for use in any steps, as well as the if, timeout-minutes, and continue-on-error attributes for each Composite Action step.
docs/automate.md (new file): 57 lines
@@ -0,0 +1,57 @@
# Automate Configuring Self-Hosted Runners

## Export PAT

Before running any of these sample scripts, create a GitHub PAT and export it before running the script

```bash
export RUNNER_CFG_PAT=yourPAT
```

## Create running as a service

**Scenario**: Run on a machine or VM (not container) which automates:

- Resolving latest released runner
- Download and extract latest
- Acquire a registration token
- Configure the runner
- Run as a systemd (linux) or Launchd (osx) service

:point_right: [Sample script here](../scripts/create-latest-svc.sh) :point_left:

Run as a one-liner. NOTE: replace with yourorg/yourrepo (repo level) or just yourorg (org level)
```bash
curl -s https://raw.githubusercontent.com/actions/runner/automate/scripts/create-latest-svc.sh | bash -s yourorg/yourrepo
```

## Uninstall running as service

**Scenario**: Run on a machine or VM (not container) which automates:

- Stops and uninstalls the systemd (linux) or Launchd (osx) service
- Acquires a removal token
- Removes the runner

:point_right: [Sample script here](../scripts/remove-svc.sh) :point_left:

Repo level one-liner. NOTE: replace with yourorg/yourrepo (repo level) or just yourorg (org level)
```bash
curl -s https://raw.githubusercontent.com/actions/runner/automate/scripts/remove-svc.sh | bash -s yourorg/yourrepo
```

### Delete an offline runner

**Scenario**: Deletes a registered runner that is offline:

- Ensures the runner is offline
- Resolves id from name
- Deletes the runner

:point_right: [Sample script here](../scripts/delete.sh) :point_left:

Repo level one-liner. NOTE: replace with yourorg/yourrepo (repo level) or just yourorg (org level) and replace runnername
```bash
curl -s https://raw.githubusercontent.com/actions/runner/automate/scripts/delete.sh | bash -s yourorg/yourrepo runnername
```
docs/checks/actions.md (new file): 44 lines
@@ -0,0 +1,44 @@
|
||||
|
||||
# Actions Connection Check
|
||||
|
||||
## What is this check for?
|
||||
|
||||
Make sure the runner has access to actions service for GitHub.com or GitHub Enterprise Server
|
||||
|
||||
- For GitHub.com
|
||||
- The runner needs to access https://api.github.com for downloading actions.
|
||||
- The runner needs to access https://vstoken.actions.githubusercontent.com/_apis/.../ for requesting an access token.
|
||||
- The runner needs to access https://pipelines.actions.githubusercontent.com/_apis/.../ for receiving workflow jobs.
|
||||
- For GitHub Enterprise Server
|
||||
- The runner needs to access https://myGHES.com/api/v3 for downloading actions.
|
||||
- The runner needs to access https://myGHES.com/_services/vstoken/_apis/.../ for requesting an access token.
|
||||
- The runner needs to access https://myGHES.com/_services/pipelines/_apis/.../ for receiving workflow jobs.
|
||||
|
||||
## What is checked?
|
||||
|
||||
- DNS lookup for api.github.com or myGHES.com using dotnet
|
||||
- Ping api.github.com or myGHES.com using dotnet
|
||||
- Make an HTTP GET to https://api.github.com or https://myGHES.com/api/v3 using dotnet, and check that the response headers contain `X-GitHub-Request-Id`
|
||||
---
|
||||
- DNS lookup for vstoken.actions.githubusercontent.com using dotnet
|
||||
- Ping vstoken.actions.githubusercontent.com using dotnet
|
||||
- Make an HTTP GET to https://vstoken.actions.githubusercontent.com/_apis/health or https://myGHES.com/_services/vstoken/_apis/health using dotnet, and check that the response headers contain `x-vss-e2eid`
|
||||
---
|
||||
- DNS lookup for pipelines.actions.githubusercontent.com using dotnet
|
||||
- Ping pipelines.actions.githubusercontent.com using dotnet
|
||||
- Make an HTTP GET to https://pipelines.actions.githubusercontent.com/_apis/health or https://myGHES.com/_services/pipelines/_apis/health using dotnet, and check that the response headers contain `x-vss-e2eid`
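
If you want to spot-check the same endpoints by hand, the commands below roughly approximate the checks above (a manual sketch only, not the runner's own implementation; for GHES, substitute the `myGHES.com` URLs listed above):

```bash
# Manual approximation of the checks above (GitHub.com case)
nslookup api.github.com
curl -sS -o /dev/null -D - https://api.github.com | grep -i 'x-github-request-id'
curl -sS -o /dev/null -D - https://vstoken.actions.githubusercontent.com/_apis/health | grep -i 'x-vss-e2eid'
curl -sS -o /dev/null -D - https://pipelines.actions.githubusercontent.com/_apis/health | grep -i 'x-vss-e2eid'
```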
|
||||
|
||||
## How to fix the issue?
|
||||
|
||||
### 1. Check the common network issue
|
||||
|
||||
> Please check the [network doc](./network.md)
|
||||
|
||||
### 2. SSL certificate related issue
|
||||
|
||||
If you are seeing `System.Net.Http.HttpRequestException: The SSL connection could not be established, see inner exception.` in the log, it means the runner can't connect to the Actions service due to an SSL handshake failure.
|
||||
> Please check the [SSL cert doc](./sslcert.md)
|
||||
|
||||
## Still not working?
|
||||
|
||||
Contact GitHub customer service or log an issue at https://github.com/actions/runner if you think it's a runner issue.
|
||||
34
docs/checks/git.md
Normal file
@@ -0,0 +1,34 @@
|
||||
# Git Connection Check
|
||||
|
||||
## What is this check for?
|
||||
|
||||
Make sure `git` can access GitHub.com or your GitHub Enterprise Server.
|
||||
|
||||
|
||||
## What is checked?
|
||||
|
||||
The test is done by executing
|
||||
```bash
|
||||
# For GitHub.com
|
||||
git ls-remote --exit-code https://github.com/actions/checkout HEAD
|
||||
|
||||
# For GitHub Enterprise Server
|
||||
git ls-remote --exit-code https://ghes.me/actions/checkout HEAD
|
||||
```
|
||||
|
||||
The test also sets the environment variables `GIT_TRACE=1` and `GIT_CURL_VERBOSE=1` before running `git ls-remote`; this makes `git` produce a debug log that helps diagnose any potential issues.
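
To reproduce the same check by hand with the debug output enabled (GitHub.com shown; use your GHES URL otherwise):

```bash
# Same check, run manually with git's debug logging turned on
GIT_TRACE=1 GIT_CURL_VERBOSE=1 git ls-remote --exit-code https://github.com/actions/checkout HEAD
```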
|
||||
|
||||
## How to fix the issue?
|
||||
|
||||
### 1. Check the common network issue
|
||||
|
||||
> Please check the [network doc](./network.md)
|
||||
|
||||
### 2. SSL certificate related issue
|
||||
|
||||
If you are seeing `SSL Certificate problem:` in the log, it means `git` can't connect to the GitHub server due to an SSL handshake failure.
|
||||
> Please check the [SSL cert doc](./sslcert.md)
|
||||
|
||||
## Still not working?
|
||||
|
||||
Contact GitHub customer service or log an issue at https://github.com/actions/runner if you think it's a runner issue.
|
||||
26
docs/checks/internet.md
Normal file
@@ -0,0 +1,26 @@
|
||||
# Internet Connection Check
|
||||
|
||||
## What is this check for?
|
||||
|
||||
Make sure the runner has access to https://api.github.com
|
||||
|
||||
The runner needs to access https://api.github.com to download any actions from the marketplace.
|
||||
|
||||
Even if the runner is configured for GitHub Enterprise Server, it can still download actions from GitHub.com with [GitHub Connect](https://docs.github.com/en/enterprise-server@2.22/admin/github-actions/enabling-automatic-access-to-githubcom-actions-using-github-connect)
|
||||
|
||||
|
||||
## What is checked?
|
||||
|
||||
- DNS lookup for api.github.com using dotnet
|
||||
- Ping api.github.com using dotnet
|
||||
- Make an HTTP GET to https://api.github.com using dotnet, and check that the response headers contain `X-GitHub-Request-Id`
|
||||
|
||||
## How to fix the issue?
|
||||
|
||||
### 1. Check the common network issue
|
||||
|
||||
> Please check the [network doc](./network.md)
|
||||
|
||||
## Still not working?
|
||||
|
||||
Contact GitHub customer service or log an issue at https://github.com/actions/runner if you think it's a runner issue.
|
||||
29
docs/checks/network.md
Normal file
@@ -0,0 +1,29 @@
|
||||
## Common Network Related Issues
|
||||
|
||||
### Common things that can cause the runner to not work properly
|
||||
|
||||
- A bug in the runner or the dotnet framework that prevents the actions runner from making HTTP requests in a certain network environment.
- A proxy/firewall blocks certain HTTP methods, e.g. all POST and PUT calls, which the runner uses to upload logs.
- A proxy/firewall only allows requests with certain user-agents to pass through, and the actions runner's user-agent is not in the allow list.
- A proxy tries to decrypt and examine HTTPS traffic for security purposes, but causes the actions runner to fail the SSL handshake because the proxy's CA is not trusted.
- Firewall rules that block the actions runner from accessing certain hosts, e.g. `*.github.com`, `*.actions.githubusercontent.com`, etc.
|
||||
|
||||
|
||||
### Identify and solve these problems
|
||||
|
||||
The key is to figure out where the problem is: in the network environment, or in the actions runner?
|
||||
|
||||
Using a 3rd party tool to make the same requests the runner makes is a good starting point.
|
||||
|
||||
- Use `nslookup` to check DNS
|
||||
- Use `ping` to check Ping
|
||||
- Use `curl -v` to check the network stack, good for verifying default certificate/proxy settings.
|
||||
- Use `Invoke-WebRequest` from `pwsh` (`PowerShell Core`) to check the dotnet network stack, good for verifying bugs in the dotnet framework.
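
For example, the following commands exercise each layer for the GitHub.com case (a minimal sketch; substitute whatever host your runner is failing to reach):

```bash
# DNS, ICMP, curl's network stack, then dotnet's network stack via PowerShell Core
nslookup api.github.com
ping -c 4 api.github.com        # use `ping -n 4` on Windows
curl -v https://api.github.com
pwsh -Command "Invoke-WebRequest -Uri https://api.github.com"
```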
|
||||
|
||||
If the 3rd party tool experiences the same error as the runner, you might want to contact your network administrator for help.
|
||||
|
||||
Otherwise, contact GitHub customer support or log an issue at https://github.com/actions/runner
|
||||
30
docs/checks/nodejs.md
Normal file
@@ -0,0 +1,30 @@
|
||||
# Node.js Connection Check
|
||||
|
||||
## What is this check for?
|
||||
|
||||
Make sure the built-in node.js has access to GitHub.com or GitHub Enterprise Server.
|
||||
|
||||
The runner carries its own copy of the node.js executable under `<runner_root>/externals/node12/`.
|
||||
|
||||
All javascript-based Actions are executed by the built-in `node` at `<runner_root>/externals/node12/`.
|
||||
|
||||
> Not the `node` from `$PATH`
|
||||
|
||||
## What is checked?
|
||||
|
||||
- Make an HTTPS GET to https://api.github.com or https://myGHES.com/api/v3 using node.js, and make sure it gets a 200 response code.
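
A rough manual equivalent using the bundled node is sketched below; the `./externals/node12/bin/node` path is an assumption based on the layout above and may differ on your install (run it from the runner root):

```bash
# Ask the bundled node to GET the API endpoint and print the status code
# (use https://myGHES.com/api/v3 instead for GitHub Enterprise Server)
./externals/node12/bin/node -e "require('https').get('https://api.github.com', { headers: { 'User-Agent': 'actions-runner-check' } }, res => { console.log(res.statusCode); res.resume(); }).on('error', err => { console.error(err); process.exit(1); })"
```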
|
||||
|
||||
## How to fix the issue?
|
||||
|
||||
### 1. Check the common network issue
|
||||
|
||||
> Please check the [network doc](./network.md)
|
||||
|
||||
### 2. SSL certificate related issue
|
||||
|
||||
If you are seeing `Https request failed due to SSL cert issue` in the log, it means `node.js` can't connect to the GitHub server due to an SSL handshake failure.
|
||||
> Please check the [SSL cert doc](./sslcert.md)
|
||||
|
||||
## Still not working?
|
||||
|
||||
Contact GitHub customer service or log an issue at https://github.com/actions/runner if you think it's a runner issue.
|
||||
89
docs/checks/sslcert.md
Normal file
@@ -0,0 +1,89 @@
|
||||
## SSL Certificate Related Issues
|
||||
|
||||
You might run into an SSL certificate error when your GitHub Enterprise Server is using a self-signed SSL server certificate or a web proxy within your network is decrypting HTTPS traffic for a security audit.
|
||||
|
||||
As long as your certificate is generated properly, most of the issues should be fixed after you trust the certificate properly on the runner machine.
|
||||
|
||||
> Different OSes might have extra requirements on the SSL certificate.
> Ex: macOS requires `ExtendedKeyUsage` https://support.apple.com/en-us/HT210176
|
||||
|
||||
### Don't skip SSL cert validation
|
||||
|
||||
> !!! DO NOT SKIP SSL CERT VALIDATION !!!
|
||||
> !!! IT IS A BAD SECURITY PRACTICE !!!
|
||||
|
||||
### Download SSL certificate chain
|
||||
|
||||
Depending on how your SSL server certificate is configured, you might need to download the whole certificate chain from a machine that already trusts the SSL certificate's CA.
|
||||
|
||||
- Approach 1: Download the certificate chain using a browser (Chrome, Firefox, IE); you can search the web for more examples, [here is what I found](https://medium.com/@menakajain/export-download-ssl-certificate-from-server-site-url-bcfc41ea46a2)
|
||||
|
||||
- Approach 2: Download the certificate chain using OpenSSL (see the sketch after this list); you can search the web for more examples, [here is what I found](https://superuser.com/a/176721)
|
||||
|
||||
- Approach 3: Ask your network administrator or the owner of the CA certificate to send you a copy of it
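
As an illustration of Approach 2, something like the following dumps the chain the server presents (a sketch only; `myghes.example.com` is a placeholder for your server, and the root CA itself may still need to come from your administrator):

```bash
# Save every certificate presented by the server into chain.pem
openssl s_client -connect myghes.example.com:443 -showcerts </dev/null 2>/dev/null \
  | sed -n '/-----BEGIN CERTIFICATE-----/,/-----END CERTIFICATE-----/p' > chain.pem
```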
|
||||
|
||||
### Trust CA certificate for the Runner
|
||||
|
||||
The actions runner is a dotnet core application, so it follows how dotnet loads SSL CA certificates on each OS.
|
||||
|
||||
You can find the full documentation [here](https://docs.microsoft.com/en-us/dotnet/standard/security/cross-platform-cryptography#x509store)
|
||||
|
||||
In short:
|
||||
- Windows: Load from Windows certificate store.
|
||||
- Linux: Load from OpenSSL CA cert bundle.
|
||||
- macOS: Load from macOS KeyChain.
|
||||
|
||||
To let the runner trust your CA certificate, you will need to:
|
||||
1. Save your SSL certificate chain which includes the root CA and all intermediate CAs into a `.pem` file.
|
||||
2. Use `OpenSSL` to convert the `.pem` file to the proper format for your OS; here is a [doc with sample commands](https://www.sslshopper.com/ssl-converter.html)
|
||||
3. Trust CA on different OS:
|
||||
- Windows: https://docs.microsoft.com/en-us/skype-sdk/sdn/articles/installing-the-trusted-root-certificate
|
||||
- macOS: 
|
||||
- Linux: Refer to the distribution documentation
|
||||
1. RedHat: https://www.redhat.com/sysadmin/ca-certificates-cli
|
||||
2. Ubuntu: http://manpages.ubuntu.com/manpages/focal/man8/update-ca-certificates.8.html
|
||||
3. Google search: "trust ca certificate on [linux distribution]"
|
||||
4. If all of the above fail, set the environment variable `SSL_CERT_FILE` to the CA bundle `.pem` file you created.
|
||||
> To verify the cert is installed properly on Linux, you can try `curl -v https://sitewithsslissue.com` and `pwsh -Command \"Invoke-WebRequest -Uri https://sitewithsslissue.com\"`
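
A minimal sketch of the step 4 fallback, assuming your chain was saved as `/path/to/ca-chain.pem` (a placeholder path):

```bash
# Point the runner's dotnet network stack at the extra CA bundle, then start the runner
export SSL_CERT_FILE=/path/to/ca-chain.pem
./run.sh
```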
|
||||
|
||||
### Trust CA certificate for Git CLI
|
||||
|
||||
Git uses a different CA bundle file depending on your operating system.
- On Windows, Git packages the CA bundle file within the Git installation
- On Linux and macOS, Git uses the OpenSSL CA certificate bundle file
|
||||
|
||||
You can check where Git looks for the CA file by running:
|
||||
```bash
|
||||
export GIT_CURL_VERBOSE=1
|
||||
git ls-remote https://github.com/actions/runner HEAD
|
||||
```
|
||||
|
||||
You should see something like:
|
||||
```
|
||||
* Couldn't find host github.com in the .netrc file; using defaults
|
||||
* Trying 140.82.114.4...
|
||||
* TCP_NODELAY set
|
||||
* Connected to github.com (140.82.114.4) port 443 (#0)
|
||||
* ALPN, offering h2
|
||||
* ALPN, offering http/1.1
|
||||
* successfully set certificate verify locations:
|
||||
* CAfile: /etc/ssl/cert.pem
|
||||
CApath: none
|
||||
* SSL connection using TLSv1.2 / ECDHE-RSA-AES128-GCM-SHA256
|
||||
```
|
||||
This tells me `/etc/ssl/cert.pem` is where it reads trusted CA certificates.
|
||||
|
||||
To let Git trust your CA certificate, you will need to:
|
||||
1. Save your SSL certificate chain which includes the root CA and all intermediate CAs into a `.pem` file.
|
||||
2. Set the `http.sslCAInfo` Git config or the `GIT_SSL_CAINFO` environment variable to the full path of the `.pem` file ([Git doc](https://git-scm.com/docs/git-config#Documentation/git-config.txt-httpsslCAInfo))
|
||||
> I would recommend using `http.sslCAInfo` since it can be scoped to certain hosts that need the extra trusted CA.
|
||||
> Ex: `git config --global http.https://myghes.com/.sslCAInfo /extra/ca/cert.pem`
|
||||
> This will make Git use `/extra/ca/cert.pem` only when communicating with `https://myghes.com`, and keep using the default CA bundle for other hosts.
|
||||
|
||||
### Trust CA certificate for Node.js
|
||||
|
||||
Node.js compiles in a snapshot of the Mozilla CA store that is fixed at the release time of each Node.js version.
|
||||
|
||||
To let Node.js trust your CA certificate, you will need to:
|
||||
1. Save your SSL certificate chain which includes the root CA and all intermediate CAs into a `.pem` file.
|
||||
2. Set the environment variable `NODE_EXTRA_CA_CERTS` to point to the file, e.g. `export NODE_EXTRA_CA_CERTS=/full/path/to/cacert.pem` or `set NODE_EXTRA_CA_CERTS=C:\full\path\to\cacert.pem`
|
||||
@@ -14,7 +14,7 @@ Issues in this repository should be for the runner application. Note that the V
|
||||
|
||||
We ask that before significant effort is put into code changes, that we have agreement on taking the change before time is invested in code changes.
|
||||
|
||||
1. Create a feature request. Once agreed we will take the enhancment
|
||||
1. Create a feature request. Once agreed we will take the enhancement
|
||||
2. Create an ADR to agree on the details of the change.
|
||||
|
||||
An ADR is an Architectural Decision Record. This allows consensus on the direction forward and also serves as a record of the change and motivation. [Read more here](adrs/README.md)
|
||||
@@ -23,7 +23,7 @@ An ADR is an Architectural Decision Record. This allows consensus on the direct
|
||||
|
||||
### Required Dev Dependencies
|
||||
|
||||
 Git for Windows [Install Here](https://git-scm.com/downloads) (needed for dev sh script)
|
||||
  Git for Windows and Linux [Install Here](https://git-scm.com/downloads) (needed for dev sh script)
|
||||
|
||||
### To Build, Test, Layout
|
||||
|
||||
@@ -43,17 +43,31 @@ Sample developer flow:
|
||||
|
||||
```bash
|
||||
git clone https://github.com/actions/runner
|
||||
cd runner
|
||||
cd ./src
|
||||
./dev.(sh/cmd) layout # the runner that build from source is in {root}/_layout
|
||||
./dev.(sh/cmd) layout # the runner that built from source is in {root}/_layout
|
||||
<make code changes>
|
||||
./dev.(sh/cmd) build # {root}/_layout will get updated
|
||||
./dev.(sh/cmd) test # run all unit tests before git commit/push
|
||||
```
|
||||
|
||||
View logs:
|
||||
```bash
|
||||
cd runner/_layout/_diag
|
||||
ls
|
||||
cat (Runner/Worker)_TIMESTAMP.log # view your log file
|
||||
```
|
||||
|
||||
Run Runner:
|
||||
```bash
|
||||
cd runner/_layout
|
||||
./run.sh # run your custom runner
|
||||
```
|
||||
|
||||
### Editors
|
||||
|
||||
[Using Visual Studio Code](https://code.visualstudio.com/)
|
||||
[Using Visual Studio 2019](https://www.visualstudio.com/vs/)
|
||||
[Using Visual Studio](https://code.visualstudio.com/docs)
|
||||
|
||||
### Styling
|
||||
|
||||
|
||||
61
docs/design/auth.md
Normal file
@@ -0,0 +1,61 @@
|
||||
# Runner Authentication and Authorization
|
||||
|
||||
## Goals
|
||||
- Support runner installs in untrusted domains.
|
||||
- The account that configures or runs the runner process is not relevant for accessing GitHub resources.
|
||||
- Accessing GitHub resources is done with a per-job token which expires when job completes.
|
||||
- The token is granted to trusted parts of the system including the runner, actions and script steps specified by the workflow author as trusted.
|
||||
- All OAuth tokens that come from the Token Service that the runner uses to access Actions Service resources are the same. It's just the scope and expiration of the token that may vary.
|
||||
|
||||
## Configuration
|
||||
|
||||
Configuring a self-hosted runner is [covered here in the documentation](https://help.github.com/en/actions/hosting-your-own-runners/adding-self-hosted-runners).
|
||||
|
||||
Configuration is done with the user being authenticated via a time-limited, GitHub runner registration token.
|
||||
|
||||
*Your credentials are never used for registering the runner with the service.*
|
||||
|
||||

|
||||
|
||||
During configuration, an RSA public/private key pair is created and the private key is stored in a file on disk. On Windows, the content is protected with DPAPI (machine-level encryption, so the runner is only valid on that machine) and on Linux/OSX with `chmod` permissions.
|
||||
|
||||
Using your credentials, the runner is registered with the service by sending it the public key; the service adds the runner to the pool and stores the public key, and the Token Service generates a `clientId` associated with it.
|
||||
|
||||
## Start and Listen
|
||||
|
||||
After configuring the runner, the runner can be started interactively (`./run.cmd` or `./run.sh`) or as a service.
|
||||
|
||||

|
||||
|
||||
On start, the runner listener process loads the RSA private key (on Windows decrypting with machine key DPAPI), and asks the Token Service for an OAuth token which is signed with the RSA private key.
|
||||
The server then responds with an OAuth token that grants permission to access the message queue (HTTP long poll), allowing the runner to acquire the messages it will eventually run.
|
||||
|
||||
## Run a workflow
|
||||
|
||||
When a workflow is run, its labels are evaluated, it is matched to a runner and a message is placed in a queue of messages for that runner.
|
||||
The runner then starts listening for jobs via the message queue HTTP long poll.
|
||||
The message is encrypted with the runner's public key, stored during runner configuration.
|
||||
|
||||

|
||||
|
||||
A workflow is queued as a result of a triggered [event](https://help.github.com/en/actions/reference/events-that-trigger-workflows). Workflows can be scheduled to [run at specific UTC times](https://help.github.com/en/actions/reference/events-that-trigger-workflows#scheduled-events-schedule) using POSIX `cron` syntax.
|
||||
An [OAuth token](http://self-issued.info/docs/draft-ietf-oauth-json-web-token.html) is generated, granting limited access to the host in Actions Service associated with the github.com repository/organization.
|
||||
The lifetime of the OAuth token is the lifetime of the run or at most the [job timeout (default: 6 hours)](https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions#jobsjob_idtimeout-minutes), plus 10 additional minutes.
|
||||
|
||||
## Accessing GitHub resources
|
||||
|
||||
The job message sent to the runner contains the OAuth token to talk back to the Actions Service.
|
||||
The runner listener parent process will spawn a runner worker process for that job and send it the job message over IPC.
|
||||
The token is never persisted.
|
||||
|
||||
Each action is run as a unique subprocess.
|
||||
The encrypted access token will be provided as an environment variable in each action subprocess.
|
||||
The token is registered with the runner as a secret and scrubbed from the logs as they are written.
|
||||
|
||||
Authentication in a workflow run to github.com can be accomplished by using the [`GITHUB_TOKEN`](https://help.github.com/en/actions/configuring-and-managing-workflows/authenticating-with-the-github_token#about-the-github_token-secret) secret. This token expires after 60 minutes. Please note that this token is different from the OAuth token that the runner uses to talk to the Actions Service.
|
||||
|
||||
## Hosted runner authentication
|
||||
|
||||
Hosted runner authentication differs from self-hosted authentication in that runners do not undergo a registration process, but instead, the hosted runners get the OAuth token directly by reading the `.credentials` file. The scope of this particular token is limited for a given workflow job execution, and the token is revoked as soon as the job is finished.
|
||||
|
||||

|
||||
BIN
docs/res/hosted-config-start.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 31 KiB |
BIN
docs/res/macOStrustCA.gif
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 14 MiB |
52
docs/res/runner-auth-diags.txt
Normal file
@@ -0,0 +1,52 @@
|
||||
# Markup used to generate the runner auth diagrams: https://websequencediagrams.com
|
||||
|
||||
title Runner Configuration (self-hosted only)
|
||||
|
||||
note left of Runner: GitHub repo URL as input
|
||||
Runner->github.com: Retrieve Actions Service access using runner registration token
|
||||
github.com->Runner: Access token for Actions Service
|
||||
note left of Runner: Generate RSA key pair
|
||||
note left of Runner: Store encrypted RSA private key on disk
|
||||
Runner->Actions Service: Register runner using Actions Service access token
|
||||
note right of Runner: Runner name, RSA public key sent
|
||||
note right of Actions Service: Public key stored
|
||||
Actions Service->Token Service: Register runner as an app along with the RSA public key
|
||||
note right of Token Service: Public key stored
|
||||
Token Service->Actions Service: Client Id for the runner application
|
||||
Actions Service->Runner: Client Id and Token Endpoint URL
|
||||
note left of Runner: Store runner configuration info into .runner file
|
||||
note left of Runner: Store Token registration info into .credentials file
|
||||
|
||||
title Runner Start and Running (self-hosted only)
|
||||
|
||||
Runner.Listener->Runner.Listener: Start
|
||||
note left of Runner.Listener: Load config info from .runner
|
||||
note left of Runner.Listener: Load token registration from .credentials
|
||||
Runner.Listener->Token Service: Exchange OAuth token (happens every 50 mins)
|
||||
note right of Runner.Listener: Construct JWT token, use Client Id signed by RSA private key
|
||||
note left of Actions Service: Find corresponding RSA public key, use Client Id\nVerify JWT token's signature
|
||||
Token Service->Runner.Listener: OAuth token with limited permission and valid for 50 mins
|
||||
Runner.Listener->Actions Service: Connect to Actions Service with OAuth token
|
||||
Actions Service->Runner.Listener: Workflow job
|
||||
|
||||
title Running workflow
|
||||
|
||||
Runner.Listener->Service (Message Queue): Get message
|
||||
note right of Runner.Listener: Authenticate with exchanged OAuth token
|
||||
Event->Actions Service: Queue workflow
|
||||
Actions Service->Actions Service: Generate OAuth token per job
|
||||
Actions Service->Actions Service: Build job message with the OAuth token
|
||||
Actions Service->Actions Service: Encrypt job message with the target runner's public key
|
||||
Actions Service->Service (Message Queue): Send encrypted job message to runner
|
||||
Service (Message Queue)->Runner.Listener: Send job
|
||||
note right of Runner.Listener: Decrypt message with runner's private key
|
||||
Runner.Listener->Runner.Worker: Create worker process per job and run the job
|
||||
|
||||
title Runner Configuration, Start and Running (hosted only)
|
||||
|
||||
Machine Management Service->Runner.Listener: Construct .runner configuration file, store token in .credentials
|
||||
Runner.Listener->Runner.Listener: Start
|
||||
note left of Runner.Listener: Load config info from .runner
|
||||
note left of Runner.Listener: Load OAuth token from .credentials
|
||||
Runner.Listener->Actions Service: Connect to Actions Service with OAuth token in .credentials
|
||||
Actions Service->Runner.Listener: Workflow job
|
||||
BIN
docs/res/self-hosted-config.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 98 KiB |
BIN
docs/res/self-hosted-start.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 43 KiB |
BIN
docs/res/workflow-run.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 46 KiB |
@@ -15,16 +15,16 @@ x64
|
||||
- openSUSE 15+
|
||||
- SUSE Enterprise Linux (SLES) 12 SP2+
|
||||
|
||||
## Install .Net Core 3.x Linux Dependencies
|
||||
## Install .Net Core 5 Linux Dependencies
|
||||
|
||||
The `./config.sh` will check .Net Core 3.x dependencies during runner configuration.
|
||||
The `./config.sh` will check .Net Core 5 dependencies during runner configuration.
|
||||
You might see something like this which indicate a dependency's missing.
|
||||
```bash
|
||||
./config.sh
|
||||
libunwind.so.8 => not found
|
||||
libunwind-x86_64.so.8 => not found
|
||||
Dependencies is missing for Dotnet Core 3.0
|
||||
Execute ./bin/installdependencies.sh to install any missing Dotnet Core 3.0 dependencies.
|
||||
Dependencies is missing for Dotnet 5
|
||||
Execute ./bin/installdependencies.sh to install any missing Dotnet 5 dependencies.
|
||||
```
|
||||
You can easily correct the problem by executing `./bin/installdependencies.sh`.
|
||||
The `installdependencies.sh` script should install all required dependencies on all supported Linux versions
|
||||
@@ -40,7 +40,7 @@ Debian based OS (Debian, Ubuntu, Linux Mint)
|
||||
- libssl1.1, libssl1.0.2 or libssl1.0.0
|
||||
- libicu63, libicu60, libicu57 or libicu55
|
||||
|
||||
Fedora based OS (Fedora, Redhat, Centos, Oracle Linux 7)
|
||||
Fedora based OS (Fedora, Red Hat Enterprise Linux, CentOS, Oracle Linux 7)
|
||||
|
||||
- lttng-ust
|
||||
- openssl-libs
|
||||
|
||||
@@ -1,36 +1,22 @@
|
||||
## Features
|
||||
- Expose whether debug is on/off via RUNNER_DEBUG. (#253)
|
||||
- Upload log on runner when worker get killed due to cancellation timeout. (#255)
|
||||
- Update config.sh/cmd --help documentation (#282)
|
||||
- Set http_proxy and related env vars for job/service containers (#304)
|
||||
- Set both http_proxy and HTTP_PROXY env for runner/worker processes. (#298)
|
||||
|
||||
## Bugs
|
||||
- Verify runner Windows service has started successfully after configuration (#236)
|
||||
- Detect source file path in L0 without using env. (#257)
|
||||
- Handle escaped '%' in commands data section (#200)
|
||||
- Allow container to be null/empty during matrix expansion (#266)
|
||||
- Translate problem matcher file to host path (#272)
|
||||
- Change hashFiles() expression function to use @actions/glob. (#268)
|
||||
- Default post-job action's condition to always(). (#293)
|
||||
- Support action.yaml file as action's entry file (#288)
|
||||
- Trace javascript action exit code to debug instead of user logs (#290)
|
||||
- Change prompt message when removing a runner to lines up with GitHub.com UI (#303)
|
||||
- Include step.env as part of env context. (#300)
|
||||
- Update Base64 Encoders to deal with suffixes (#284)
|
||||
- Downgrade runner to .NET 3 to address an issue with broken pipes in Ubuntu (#928)
|
||||
- Fixed an issue where FIPS Cryptography broke back-compat scenarios (#928)
|
||||
|
||||
## Misc
|
||||
- Move .sln file under ./src (#238)
|
||||
- Treat warnings as errors during compile (#249)
|
||||
- Updated dotnet install scripts (#928)
|
||||
|
||||
## Windows x64
|
||||
We recommend configuring the runner in a root folder of the Windows drive (e.g. "C:\actions-runner"). This will help avoid issues related to service identity folder permissions and long file path restrictions on Windows
|
||||
```
|
||||
// Create a folder under the drive root
|
||||
We recommend configuring the runner in a root folder of the Windows drive (e.g. "C:\actions-runner"). This will help avoid issues related to service identity folder permissions and long file path restrictions on Windows.
|
||||
|
||||
The following snippet needs to be run in `powershell`:
|
||||
``` powershell
|
||||
# Create a folder under the drive root
|
||||
mkdir \actions-runner ; cd \actions-runner
|
||||
// Download the latest runner package
|
||||
# Download the latest runner package
|
||||
Invoke-WebRequest -Uri https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-win-x64-<RUNNER_VERSION>.zip -OutFile actions-runner-win-x64-<RUNNER_VERSION>.zip
|
||||
// Extract the installer
|
||||
# Extract the installer
|
||||
Add-Type -AssemblyName System.IO.Compression.FileSystem ;
|
||||
[System.IO.Compression.ZipFile]::ExtractToDirectory("$PWD\actions-runner-win-x64-<RUNNER_VERSION>.zip", "$PWD")
|
||||
```
|
||||
@@ -38,44 +24,44 @@ Add-Type -AssemblyName System.IO.Compression.FileSystem ;
|
||||
## OSX
|
||||
|
||||
``` bash
|
||||
// Create a folder
|
||||
# Create a folder
|
||||
mkdir actions-runner && cd actions-runner
|
||||
// Download the latest runner package
|
||||
# Download the latest runner package
|
||||
curl -O -L https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-osx-x64-<RUNNER_VERSION>.tar.gz
|
||||
// Extract the installer
|
||||
# Extract the installer
|
||||
tar xzf ./actions-runner-osx-x64-<RUNNER_VERSION>.tar.gz
|
||||
```
|
||||
|
||||
## Linux x64
|
||||
|
||||
``` bash
|
||||
// Create a folder
|
||||
# Create a folder
|
||||
mkdir actions-runner && cd actions-runner
|
||||
// Download the latest runner package
|
||||
# Download the latest runner package
|
||||
curl -O -L https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-linux-x64-<RUNNER_VERSION>.tar.gz
|
||||
// Extract the installer
|
||||
# Extract the installer
|
||||
tar xzf ./actions-runner-linux-x64-<RUNNER_VERSION>.tar.gz
|
||||
```
|
||||
|
||||
## Linux arm64 (Pre-release)
|
||||
|
||||
``` bash
|
||||
// Create a folder
|
||||
# Create a folder
|
||||
mkdir actions-runner && cd actions-runner
|
||||
// Download the latest runner package
|
||||
# Download the latest runner package
|
||||
curl -O -L https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-linux-arm64-<RUNNER_VERSION>.tar.gz
|
||||
// Extract the installer
|
||||
# Extract the installer
|
||||
tar xzf ./actions-runner-linux-arm64-<RUNNER_VERSION>.tar.gz
|
||||
```
|
||||
|
||||
## Linux arm (Pre-release)
|
||||
|
||||
``` bash
|
||||
// Create a folder
|
||||
# Create a folder
|
||||
mkdir actions-runner && cd actions-runner
|
||||
// Download the latest runner package
|
||||
# Download the latest runner package
|
||||
curl -O -L https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-linux-arm-<RUNNER_VERSION>.tar.gz
|
||||
// Extract the installer
|
||||
# Extract the installer
|
||||
tar xzf ./actions-runner-linux-arm-<RUNNER_VERSION>.tar.gz
|
||||
```
|
||||
|
||||
|
||||
@@ -1 +1 @@
|
||||
2.164.0
|
||||
2.276.1
|
||||
|
||||
4
scripts/README.md
Normal file
@@ -0,0 +1,4 @@
|
||||
# Sample scripts for self-hosted runners
|
||||
|
||||
Here are some examples to work from if you'd like to automate your use of self-hosted runners.
|
||||
See the docs [here](../docs/automate.md).
|
||||
149
scripts/create-latest-svc.sh
Executable file
@@ -0,0 +1,149 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
#
|
||||
# Downloads the latest released (not pre-release) runner
|
||||
# Configures as a service
|
||||
#
|
||||
# Examples:
|
||||
# RUNNER_CFG_PAT=<yourPAT> ./create-latest-svc.sh myuser/myrepo my.ghe.deployment.net
|
||||
# RUNNER_CFG_PAT=<yourPAT> ./create-latest-svc.sh myorg my.ghe.deployment.net
|
||||
#
|
||||
# Usage:
|
||||
# export RUNNER_CFG_PAT=<yourPAT>
|
||||
# ./create-latest-svc scope [ghe_domain] [name] [user] [labels]
|
||||
#
|
||||
# scope required repo (:owner/:repo) or org (:organization)
|
||||
# ghe_domain optional the fully qualified domain name of your GitHub Enterprise Server deployment
|
||||
# name optional defaults to hostname
|
||||
# user optional user svc will run as. defaults to current
|
||||
# labels optional list of labels (split by comma) applied on the runner
|
||||
#
|
||||
# Notes:
|
||||
#   PATs passed via envvars are more secure than command line arguments
|
||||
# Should be used on VMs and not containers
|
||||
# Works on OSX and Linux
|
||||
# Assumes x64 arch
|
||||
#
|
||||
|
||||
runner_scope=${1}
|
||||
ghe_hostname=${2}
|
||||
runner_name=${3:-$(hostname)}
|
||||
svc_user=${4:-$USER}
|
||||
labels=${5}
|
||||
|
||||
echo "Configuring runner @ ${runner_scope}"
|
||||
sudo echo
|
||||
|
||||
#---------------------------------------
|
||||
# Validate Environment
|
||||
#---------------------------------------
|
||||
runner_plat=linux
|
||||
[ ! -z "$(which sw_vers)" ] && runner_plat=osx;
|
||||
|
||||
function fatal()
|
||||
{
|
||||
echo "error: $1" >&2
|
||||
exit 1
|
||||
}
|
||||
|
||||
if [ -z "${runner_scope}" ]; then fatal "supply scope as argument 1"; fi
|
||||
if [ -z "${RUNNER_CFG_PAT}" ]; then fatal "RUNNER_CFG_PAT must be set before calling"; fi
|
||||
|
||||
which curl || fatal "curl required. Please install in PATH with apt-get, brew, etc"
|
||||
which jq || fatal "jq required. Please install in PATH with apt-get, brew, etc"
|
||||
|
||||
# bail early if there's already a runner there. also sudo early
|
||||
if [ -d ./runner ]; then
|
||||
fatal "Runner already exists. Use a different directory or delete ./runner"
|
||||
fi
|
||||
|
||||
sudo -u ${svc_user} mkdir runner
|
||||
|
||||
# TODO: validate not in a container
|
||||
# TODO: validate systemd or osx svc installer
|
||||
|
||||
#--------------------------------------
|
||||
# Get a config token
|
||||
#--------------------------------------
|
||||
echo
|
||||
echo "Generating a registration token..."
|
||||
|
||||
base_api_url="https://api.github.com"
|
||||
if [ -n "${ghe_hostname}" ]; then
|
||||
base_api_url="https://${ghe_hostname}/api/v3"
|
||||
fi
|
||||
|
||||
# if the scope has a slash, it's a repo runner
|
||||
orgs_or_repos="orgs"
|
||||
if [[ "$runner_scope" == *\/* ]]; then
|
||||
orgs_or_repos="repos"
|
||||
fi
|
||||
|
||||
export RUNNER_TOKEN=$(curl -s -X POST ${base_api_url}/${orgs_or_repos}/${runner_scope}/actions/runners/registration-token -H "accept: application/vnd.github.everest-preview+json" -H "authorization: token ${RUNNER_CFG_PAT}" | jq -r '.token')
|
||||
|
||||
if [ "null" == "$RUNNER_TOKEN" -o -z "$RUNNER_TOKEN" ]; then fatal "Failed to get a token"; fi
|
||||
|
||||
#---------------------------------------
|
||||
# Download latest released and extract
|
||||
#---------------------------------------
|
||||
echo
|
||||
echo "Downloading latest runner ..."
|
||||
|
||||
# For the GHES Alpha, download the runner from github.com
|
||||
latest_version_label=$(curl -s -X GET 'https://api.github.com/repos/actions/runner/releases/latest' | jq -r '.tag_name')
|
||||
latest_version=$(echo ${latest_version_label:1})
|
||||
runner_file="actions-runner-${runner_plat}-x64-${latest_version}.tar.gz"
|
||||
|
||||
if [ -f "${runner_file}" ]; then
|
||||
echo "${runner_file} exists. skipping download."
|
||||
else
|
||||
runner_url="https://github.com/actions/runner/releases/download/${latest_version_label}/${runner_file}"
|
||||
|
||||
echo "Downloading ${latest_version_label} for ${runner_plat} ..."
|
||||
echo $runner_url
|
||||
|
||||
curl -O -L ${runner_url}
|
||||
fi
|
||||
|
||||
ls -la *.tar.gz
|
||||
|
||||
#---------------------------------------------------
|
||||
# extract to runner directory in this directory
|
||||
#---------------------------------------------------
|
||||
echo
|
||||
echo "Extracting ${runner_file} to ./runner"
|
||||
|
||||
tar xzf "./${runner_file}" -C runner
|
||||
|
||||
# make sure the service user owns the extracted runner files
|
||||
sudo chown -R $svc_user ./runner
|
||||
|
||||
pushd ./runner
|
||||
|
||||
#---------------------------------------
|
||||
# Unattend config
|
||||
#---------------------------------------
|
||||
runner_url="https://github.com/${runner_scope}"
|
||||
if [ -n "${ghe_hostname}" ]; then
|
||||
runner_url="https://${ghe_hostname}/${runner_scope}"
|
||||
fi
|
||||
|
||||
echo
|
||||
echo "Configuring ${runner_name} @ $runner_url"
|
||||
echo "./config.sh --unattended --url $runner_url --token *** --name $runner_name --labels $labels"
|
||||
sudo -E -u ${svc_user} ./config.sh --unattended --url $runner_url --token $RUNNER_TOKEN --name $runner_name --labels $labels
|
||||
|
||||
#---------------------------------------
|
||||
# Configuring as a service
|
||||
#---------------------------------------
|
||||
echo
|
||||
echo "Configuring as a service ..."
|
||||
prefix=""
|
||||
if [ "${runner_plat}" == "linux" ]; then
|
||||
prefix="sudo "
|
||||
fi
|
||||
|
||||
${prefix}./svc.sh install ${svc_user}
|
||||
${prefix}./svc.sh start
|
||||
83
scripts/delete.sh
Executable file
@@ -0,0 +1,83 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
#
|
||||
# Force deletes a runner from the service
|
||||
# The caller should have already ensured the runner is gone and/or stopped
|
||||
#
|
||||
# Examples:
|
||||
# RUNNER_CFG_PAT=<yourPAT> ./delete.sh myuser/myrepo myname
|
||||
# RUNNER_CFG_PAT=<yourPAT> ./delete.sh myorg myname
|
||||
#
|
||||
# Usage:
|
||||
# export RUNNER_CFG_PAT=<yourPAT>
|
||||
# ./delete.sh scope name
|
||||
#
|
||||
# scope required repo (:owner/:repo) or org (:organization)
|
||||
# name       required   name of the runner to delete
|
||||
#
|
||||
# Notes:
|
||||
#   PATs passed via envvars are more secure than command line arguments
|
||||
# Works on OSX and Linux
|
||||
# Assumes x64 arch
|
||||
#
|
||||
|
||||
runner_scope=${1}
|
||||
runner_name=${2}
|
||||
|
||||
echo "Deleting runner ${runner_name} @ ${runner_scope}"
|
||||
|
||||
function fatal()
|
||||
{
|
||||
echo "error: $1" >&2
|
||||
exit 1
|
||||
}
|
||||
|
||||
if [ -z "${runner_scope}" ]; then fatal "supply scope as argument 1"; fi
|
||||
if [ -z "${runner_name}" ]; then fatal "supply name as argument 2"; fi
|
||||
if [ -z "${RUNNER_CFG_PAT}" ]; then fatal "RUNNER_CFG_PAT must be set before calling"; fi
|
||||
|
||||
which curl || fatal "curl required. Please install in PATH with apt-get, brew, etc"
|
||||
which jq || fatal "jq required. Please install in PATH with apt-get, brew, etc"
|
||||
|
||||
base_api_url="https://api.github.com/orgs"
|
||||
if [[ "$runner_scope" == *\/* ]]; then
|
||||
base_api_url="https://api.github.com/repos"
|
||||
fi
|
||||
|
||||
|
||||
#--------------------------------------
|
||||
# Ensure offline
|
||||
#--------------------------------------
|
||||
runner_status=$(curl -s -X GET ${base_api_url}/${runner_scope}/actions/runners?per_page=100 -H "accept: application/vnd.github.everest-preview+json" -H "authorization: token ${RUNNER_CFG_PAT}" \
|
||||
| jq -M -j ".runners | .[] | [select(.name == \"${runner_name}\")] | .[0].status")
|
||||
|
||||
if [ -z "${runner_status}" ]; then
|
||||
fatal "Could not find runner with name ${runner_name}"
|
||||
fi
|
||||
|
||||
echo "Status: ${runner_status}"
|
||||
|
||||
if [ "${runner_status}" != "offline" ]; then
|
||||
fatal "Runner should be offline before removing"
|
||||
fi
|
||||
|
||||
#--------------------------------------
|
||||
# Get id of runner to remove
|
||||
#--------------------------------------
|
||||
runner_id=$(curl -s -X GET ${base_api_url}/${runner_scope}/actions/runners?per_page=100 -H "accept: application/vnd.github.everest-preview+json" -H "authorization: token ${RUNNER_CFG_PAT}" \
|
||||
| jq -M -j ".runners | .[] | [select(.name == \"${runner_name}\")] | .[0].id")
|
||||
|
||||
if [ -z "${runner_id}" ]; then
|
||||
fatal "Could not find runner with name ${runner_name}"
|
||||
fi
|
||||
|
||||
echo "Removing id ${runner_id}"
|
||||
|
||||
#--------------------------------------
|
||||
# Remove the runner
|
||||
#--------------------------------------
|
||||
curl -s -X DELETE ${base_api_url}/${runner_scope}/actions/runners/${runner_id} -H "authorization: token ${RUNNER_CFG_PAT}"
|
||||
|
||||
echo "Done."
|
||||
76
scripts/remove-svc.sh
Executable file
@@ -0,0 +1,76 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
#
|
||||
# Removes a runner running as a service
|
||||
# Must be run on the machine where the service is run
|
||||
#
|
||||
# Examples:
|
||||
# RUNNER_CFG_PAT=<yourPAT> ./remove-svc.sh myuser/myrepo
|
||||
# RUNNER_CFG_PAT=<yourPAT> ./remove-svc.sh myorg
|
||||
#
|
||||
# Usage:
|
||||
# export RUNNER_CFG_PAT=<yourPAT>
|
||||
# ./remove-svc scope name
|
||||
#
|
||||
# scope required repo (:owner/:repo) or org (:organization)
|
||||
# name optional defaults to hostname. name to uninstall and remove
|
||||
#
|
||||
# Notes:
|
||||
#   PATs passed via envvars are more secure than command line arguments
|
||||
# Should be used on VMs and not containers
|
||||
# Works on OSX and Linux
|
||||
# Assumes x64 arch
|
||||
#
|
||||
|
||||
runner_scope=${1}
|
||||
runner_name=${2:-$(hostname)}
|
||||
|
||||
echo "Uninstalling runner ${runner_name} @ ${runner_scope}"
|
||||
sudo echo
|
||||
|
||||
function fatal()
|
||||
{
|
||||
echo "error: $1" >&2
|
||||
exit 1
|
||||
}
|
||||
|
||||
if [ -z "${runner_scope}" ]; then fatal "supply scope as argument 1"; fi
|
||||
if [ -z "${RUNNER_CFG_PAT}" ]; then fatal "RUNNER_CFG_PAT must be set before calling"; fi
|
||||
|
||||
which curl || fatal "curl required. Please install in PATH with apt-get, brew, etc"
|
||||
which jq || fatal "jq required. Please install in PATH with apt-get, brew, etc"
|
||||
|
||||
runner_plat=linux
|
||||
[ ! -z "$(which sw_vers)" ] && runner_plat=osx;
|
||||
|
||||
#--------------------------------------
|
||||
# Get a remove token
|
||||
#--------------------------------------
|
||||
echo
|
||||
echo "Generating a removal token..."
|
||||
|
||||
# if the scope has a slash, it's a repo runner
|
||||
base_api_url="https://api.github.com/orgs"
|
||||
if [[ "$runner_scope" == *\/* ]]; then
|
||||
base_api_url="https://api.github.com/repos"
|
||||
fi
|
||||
|
||||
export REMOVE_TOKEN=$(curl -s -X POST ${base_api_url}/${runner_scope}/actions/runners/remove-token -H "accept: application/vnd.github.everest-preview+json" -H "authorization: token ${RUNNER_CFG_PAT}" | jq -r '.token')
|
||||
|
||||
if [ -z "$REMOVE_TOKEN" ]; then fatal "Failed to get a token"; fi
|
||||
|
||||
#---------------------------------------
|
||||
# Stop and uninstall the service
|
||||
#---------------------------------------
|
||||
echo
|
||||
echo "Uninstall the service ..."
|
||||
pushd ./runner
|
||||
prefix=""
|
||||
if [ "${runner_plat}" == "linux" ]; then
|
||||
prefix="sudo "
|
||||
fi
|
||||
${prefix}./svc.sh stop
|
||||
${prefix}./svc.sh uninstall
|
||||
${prefix}./config.sh remove --token $REMOVE_TOKEN
|
||||
10
src/.editorconfig
Normal file
@@ -0,0 +1,10 @@
|
||||
[*.cs]
|
||||
charset = utf-8
|
||||
insert_final_newline = true
|
||||
|
||||
csharp_new_line_before_else = true
|
||||
csharp_new_line_before_catch = true
|
||||
csharp_new_line_before_finally = true
|
||||
csharp_new_line_before_open_brace = all
|
||||
|
||||
csharp_space_after_keywords_in_control_flow_statements = true
|
||||
@@ -1,4 +1,4 @@
|
||||
|
||||
|
||||
Microsoft Visual Studio Solution File, Format Version 12.00
|
||||
# Visual Studio Version 16
|
||||
VisualStudioVersion = 16.0.29411.138
|
||||
@@ -21,6 +21,11 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Sdk", "Sdk\Sdk.csproj", "{D
|
||||
EndProject
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Test", "Test\Test.csproj", "{C932061F-F6A1-4F1E-B854-A6C6B30DC3EF}"
|
||||
EndProject
|
||||
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution Items", "{EFB254FC-7927-445E-BA64-6676ADB309E9}"
|
||||
ProjectSection(SolutionItems) = preProject
|
||||
.editorconfig = .editorconfig
|
||||
EndProjectSection
|
||||
EndProject
|
||||
Global
|
||||
GlobalSection(SolutionConfigurationPlatforms) = preSolution
|
||||
Debug|Any CPU = Debug|Any CPU
|
||||
|
||||
484
src/Misc/dotnet-install.ps1
vendored
@@ -23,8 +23,6 @@
|
||||
Default: latest
|
||||
Represents a build version on specific channel. Possible values:
|
||||
- latest - most latest build on specific channel
|
||||
- coherent - most latest coherent build on specific channel
|
||||
coherent applies only to SDK downloads
|
||||
- 3-part version in a format A.B.C - represents specific version of build
|
||||
examples: 2.0.0-preview2-006120, 1.1.0
|
||||
.PARAMETER InstallDir
|
||||
@@ -69,6 +67,8 @@
|
||||
.PARAMETER ProxyUseDefaultCredentials
|
||||
Default: false
|
||||
Use default credentials, when using proxy address.
|
||||
.PARAMETER ProxyBypassList
|
||||
If set with ProxyAddress, will provide the list of comma separated urls that will bypass the proxy
|
||||
.PARAMETER SkipNonVersionedFiles
|
||||
Default: false
|
||||
Skips installing non-versioned files if they already exist, such as dotnet.exe.
|
||||
@@ -96,6 +96,7 @@ param(
|
||||
[string]$FeedCredential,
|
||||
[string]$ProxyAddress,
|
||||
[switch]$ProxyUseDefaultCredentials,
|
||||
[string[]]$ProxyBypassList=@(),
|
||||
[switch]$SkipNonVersionedFiles,
|
||||
[switch]$NoCdn
|
||||
)
|
||||
@@ -119,12 +120,46 @@ $VersionRegEx="/\d+\.\d+[^/]+/"
|
||||
$OverrideNonVersionedFiles = !$SkipNonVersionedFiles
|
||||
|
||||
function Say($str) {
|
||||
try {
|
||||
Write-Host "dotnet-install: $str"
|
||||
}
|
||||
catch {
|
||||
# Some platforms cannot utilize Write-Host (Azure Functions, for instance). Fall back to Write-Output
|
||||
Write-Output "dotnet-install: $str"
|
||||
}
|
||||
}
|
||||
|
||||
function Say-Warning($str) {
|
||||
try {
|
||||
Write-Warning "dotnet-install: $str"
|
||||
}
|
||||
catch {
|
||||
# Some platforms cannot utilize Write-Warning (Azure Functions, for instance). Fall back to Write-Output
|
||||
Write-Output "dotnet-install: Warning: $str"
|
||||
}
|
||||
}
|
||||
|
||||
# Writes a line with error style settings.
|
||||
# Use this function to show a human-readable comment along with an exception.
|
||||
function Say-Error($str) {
|
||||
try {
|
||||
# Write-Error is quite oververbose for the purpose of the function, let's write one line with error style settings.
|
||||
$Host.UI.WriteErrorLine("dotnet-install: $str")
|
||||
}
|
||||
catch {
|
||||
Write-Output "dotnet-install: Error: $str"
|
||||
}
|
||||
}
|
||||
|
||||
function Say-Verbose($str) {
|
||||
try {
|
||||
Write-Verbose "dotnet-install: $str"
|
||||
}
|
||||
catch {
|
||||
# Some platforms cannot utilize Write-Verbose (Azure Functions, for instance). Fall back to Write-Output
|
||||
Write-Output "dotnet-install: $str"
|
||||
}
|
||||
}
|
||||
|
||||
function Say-Invocation($Invocation) {
|
||||
$command = $Invocation.MyCommand;
|
||||
@@ -137,7 +172,7 @@ function Invoke-With-Retry([ScriptBlock]$ScriptBlock, [int]$MaxAttempts = 3, [in
|
||||
|
||||
while ($true) {
|
||||
try {
|
||||
return $ScriptBlock.Invoke()
|
||||
return & $ScriptBlock
|
||||
}
|
||||
catch {
|
||||
$Attempts++
|
||||
@@ -154,7 +189,16 @@ function Invoke-With-Retry([ScriptBlock]$ScriptBlock, [int]$MaxAttempts = 3, [in
|
||||
function Get-Machine-Architecture() {
|
||||
Say-Invocation $MyInvocation
|
||||
|
||||
# possible values: amd64, x64, x86, arm64, arm
|
||||
# On PS x86, PROCESSOR_ARCHITECTURE reports x86 even on x64 systems.
|
||||
# To get the correct architecture, we need to use PROCESSOR_ARCHITEW6432.
|
||||
# PS x64 doesn't define this, so we fall back to PROCESSOR_ARCHITECTURE.
|
||||
# Possible values: amd64, x64, x86, arm64, arm
|
||||
|
||||
if( $ENV:PROCESSOR_ARCHITEW6432 -ne $null )
|
||||
{
|
||||
return $ENV:PROCESSOR_ARCHITEW6432
|
||||
}
|
||||
|
||||
return $ENV:PROCESSOR_ARCHITECTURE
|
||||
}
|
||||
|
||||
@@ -167,7 +211,7 @@ function Get-CLIArchitecture-From-Architecture([string]$Architecture) {
|
||||
{ $_ -eq "x86" } { return "x86" }
|
||||
{ $_ -eq "arm" } { return "arm" }
|
||||
{ $_ -eq "arm64" } { return "arm64" }
|
||||
default { throw "Architecture not supported. If you think this is a bug, report it at https://github.com/dotnet/sdk/issues" }
|
||||
default { throw "Architecture '$Architecture' not supported. If you think this is a bug, report it at https://github.com/dotnet/install-scripts/issues" }
|
||||
}
|
||||
}
|
||||
|
||||
@@ -228,7 +272,11 @@ function GetHTTPResponse([Uri] $Uri)
|
||||
|
||||
if($ProxyAddress) {
|
||||
$HttpClientHandler = New-Object System.Net.Http.HttpClientHandler
|
||||
$HttpClientHandler.Proxy = New-Object System.Net.WebProxy -Property @{Address=$ProxyAddress;UseDefaultCredentials=$ProxyUseDefaultCredentials}
|
||||
$HttpClientHandler.Proxy = New-Object System.Net.WebProxy -Property @{
|
||||
Address=$ProxyAddress;
|
||||
UseDefaultCredentials=$ProxyUseDefaultCredentials;
|
||||
BypassList = $ProxyBypassList;
|
||||
}
|
||||
$HttpClient = New-Object System.Net.Http.HttpClient -ArgumentList $HttpClientHandler
|
||||
}
|
||||
else {
|
||||
@@ -238,19 +286,42 @@ function GetHTTPResponse([Uri] $Uri)
|
||||
# Default timeout for HttpClient is 100s. For a 50 MB download this assumes 500 KB/s average, any less will time out
|
||||
# 20 minutes allows it to work over much slower connections.
|
||||
$HttpClient.Timeout = New-TimeSpan -Minutes 20
|
||||
$Response = $HttpClient.GetAsync("${Uri}${FeedCredential}").Result
|
||||
if (($Response -eq $null) -or (-not ($Response.IsSuccessStatusCode))) {
|
||||
$Task = $HttpClient.GetAsync("${Uri}${FeedCredential}").ConfigureAwait("false");
|
||||
$Response = $Task.GetAwaiter().GetResult();
|
||||
|
||||
if (($null -eq $Response) -or (-not ($Response.IsSuccessStatusCode))) {
|
||||
# The feed credential is potentially sensitive info. Do not log FeedCredential to console output.
|
||||
$ErrorMsg = "Failed to download $Uri."
|
||||
if ($Response -ne $null) {
|
||||
$ErrorMsg += " $Response"
|
||||
$DownloadException = [System.Exception] "Unable to download $Uri."
|
||||
|
||||
if ($null -ne $Response) {
|
||||
$DownloadException.Data["StatusCode"] = [int] $Response.StatusCode
|
||||
$DownloadException.Data["ErrorMessage"] = "Unable to download $Uri. Returned HTTP status code: " + $DownloadException.Data["StatusCode"]
|
||||
}
|
||||
|
||||
throw $ErrorMsg
|
||||
throw $DownloadException
|
||||
}
|
||||
|
||||
return $Response
|
||||
}
|
||||
catch [System.Net.Http.HttpRequestException] {
|
||||
$DownloadException = [System.Exception] "Unable to download $Uri."
|
||||
|
||||
# Pick up the exception message and inner exceptions' messages if they exist
|
||||
$CurrentException = $PSItem.Exception
|
||||
$ErrorMsg = $CurrentException.Message + "`r`n"
|
||||
while ($CurrentException.InnerException) {
|
||||
$CurrentException = $CurrentException.InnerException
|
||||
$ErrorMsg += $CurrentException.Message + "`r`n"
|
||||
}
|
||||
|
||||
# Check if there is an issue concerning TLS.
|
||||
if ($ErrorMsg -like "*SSL/TLS*") {
|
||||
$ErrorMsg += "Ensure that TLS 1.2 or higher is enabled to use this script.`r`n"
|
||||
}
|
||||
|
||||
$DownloadException.Data["ErrorMessage"] = $ErrorMsg
|
||||
throw $DownloadException
|
||||
}
|
||||
finally {
|
||||
if ($HttpClient -ne $null) {
|
||||
$HttpClient.Dispose()
|
||||
@@ -259,7 +330,7 @@ function GetHTTPResponse([Uri] $Uri)
|
||||
})
|
||||
}
|
||||
|
||||
function Get-Latest-Version-Info([string]$AzureFeed, [string]$Channel, [bool]$Coherent) {
|
||||
function Get-Latest-Version-Info([string]$AzureFeed, [string]$Channel) {
|
||||
Say-Invocation $MyInvocation
|
||||
|
||||
$VersionFileUrl = $null
|
||||
@@ -274,13 +345,8 @@ function Get-Latest-Version-Info([string]$AzureFeed, [string]$Channel, [bool]$Co
|
||||
$VersionFileUrl = "$UncachedFeed/Runtime/$Channel/latest.version"
|
||||
}
|
||||
elseif (-not $Runtime) {
|
||||
if ($Coherent) {
|
||||
$VersionFileUrl = "$UncachedFeed/Sdk/$Channel/latest.coherent.version"
|
||||
}
|
||||
else {
|
||||
$VersionFileUrl = "$UncachedFeed/Sdk/$Channel/latest.version"
|
||||
}
|
||||
}
|
||||
else {
|
||||
throw "Invalid value for `$Runtime"
|
||||
}
|
||||
@@ -288,7 +354,8 @@ function Get-Latest-Version-Info([string]$AzureFeed, [string]$Channel, [bool]$Co
|
||||
$Response = GetHTTPResponse -Uri $VersionFileUrl
|
||||
}
|
||||
catch {
|
||||
throw "Could not resolve version information."
|
||||
Say-Error "Could not resolve version information."
|
||||
throw
|
||||
}
|
||||
$StringContent = $Response.Content.ReadAsStringAsync().Result
|
||||
|
||||
@@ -314,7 +381,8 @@ function Parse-Jsonfile-For-Version([string]$JSonFile) {
|
||||
$JSonContent = Get-Content($JSonFile) -Raw | ConvertFrom-Json | Select-Object -expand "sdk" -ErrorAction SilentlyContinue
|
||||
}
|
||||
catch {
|
||||
throw "Json file unreadable: '$JSonFile'"
|
||||
Say-Error "Json file unreadable: '$JSonFile'"
|
||||
throw
|
||||
}
|
||||
if ($JSonContent) {
|
||||
try {
|
||||
@@ -327,7 +395,8 @@ function Parse-Jsonfile-For-Version([string]$JSonFile) {
|
||||
}
|
||||
}
|
||||
catch {
|
||||
throw "Unable to parse the SDK node in '$JSonFile'"
|
||||
Say-Error "Unable to parse the SDK node in '$JSonFile'"
|
||||
throw
|
||||
}
|
||||
}
|
||||
else {
|
||||
@@ -343,16 +412,12 @@ function Get-Specific-Version-From-Version([string]$AzureFeed, [string]$Channel,
|
||||
Say-Invocation $MyInvocation
|
||||
|
||||
if (-not $JSonFile) {
|
||||
switch ($Version.ToLower()) {
|
||||
{ $_ -eq "latest" } {
|
||||
$LatestVersionInfo = Get-Latest-Version-Info -AzureFeed $AzureFeed -Channel $Channel -Coherent $False
|
||||
if ($Version.ToLower() -eq "latest") {
|
||||
$LatestVersionInfo = Get-Latest-Version-Info -AzureFeed $AzureFeed -Channel $Channel
|
||||
return $LatestVersionInfo.Version
|
||||
}
|
||||
{ $_ -eq "coherent" } {
|
||||
$LatestVersionInfo = Get-Latest-Version-Info -AzureFeed $AzureFeed -Channel $Channel -Coherent $True
|
||||
return $LatestVersionInfo.Version
|
||||
}
|
||||
default { return $Version }
|
||||
else {
|
||||
return $Version
|
||||
}
|
||||
}
|
||||
else {
|
||||
@@ -363,17 +428,20 @@ function Get-Specific-Version-From-Version([string]$AzureFeed, [string]$Channel,
|
||||
function Get-Download-Link([string]$AzureFeed, [string]$SpecificVersion, [string]$CLIArchitecture) {
|
||||
Say-Invocation $MyInvocation
|
||||
|
||||
# If anything fails in this lookup it will default to $SpecificVersion
|
||||
$SpecificProductVersion = Get-Product-Version -AzureFeed $AzureFeed -SpecificVersion $SpecificVersion
|
||||
|
||||
if ($Runtime -eq "dotnet") {
|
||||
$PayloadURL = "$AzureFeed/Runtime/$SpecificVersion/dotnet-runtime-$SpecificVersion-win-$CLIArchitecture.zip"
|
||||
$PayloadURL = "$AzureFeed/Runtime/$SpecificVersion/dotnet-runtime-$SpecificProductVersion-win-$CLIArchitecture.zip"
|
||||
}
|
||||
elseif ($Runtime -eq "aspnetcore") {
|
||||
$PayloadURL = "$AzureFeed/aspnetcore/Runtime/$SpecificVersion/aspnetcore-runtime-$SpecificVersion-win-$CLIArchitecture.zip"
|
||||
$PayloadURL = "$AzureFeed/aspnetcore/Runtime/$SpecificVersion/aspnetcore-runtime-$SpecificProductVersion-win-$CLIArchitecture.zip"
|
||||
}
|
||||
elseif ($Runtime -eq "windowsdesktop") {
|
||||
$PayloadURL = "$AzureFeed/Runtime/$SpecificVersion/windowsdesktop-runtime-$SpecificVersion-win-$CLIArchitecture.zip"
|
||||
$PayloadURL = "$AzureFeed/Runtime/$SpecificVersion/windowsdesktop-runtime-$SpecificProductVersion-win-$CLIArchitecture.zip"
|
||||
}
|
||||
elseif (-not $Runtime) {
|
||||
$PayloadURL = "$AzureFeed/Sdk/$SpecificVersion/dotnet-sdk-$SpecificVersion-win-$CLIArchitecture.zip"
|
||||
$PayloadURL = "$AzureFeed/Sdk/$SpecificVersion/dotnet-sdk-$SpecificProductVersion-win-$CLIArchitecture.zip"
|
||||
}
|
||||
else {
|
||||
throw "Invalid value for `$Runtime"
|
||||
@@ -381,7 +449,7 @@ function Get-Download-Link([string]$AzureFeed, [string]$SpecificVersion, [string
|
||||
|
||||
Say-Verbose "Constructed primary named payload URL: $PayloadURL"
|
||||
|
||||
return $PayloadURL
|
||||
return $PayloadURL, $SpecificProductVersion
|
||||
}
|
||||
|
||||
function Get-LegacyDownload-Link([string]$AzureFeed, [string]$SpecificVersion, [string]$CLIArchitecture) {
|
||||
@@ -402,6 +470,51 @@ function Get-LegacyDownload-Link([string]$AzureFeed, [string]$SpecificVersion, [
|
||||
return $PayloadURL
|
||||
}
|
||||
|
||||
function Get-Product-Version([string]$AzureFeed, [string]$SpecificVersion) {
|
||||
Say-Invocation $MyInvocation
|
||||
|
||||
if ($Runtime -eq "dotnet") {
|
||||
$ProductVersionTxtURL = "$AzureFeed/Runtime/$SpecificVersion/productVersion.txt"
|
||||
}
|
||||
elseif ($Runtime -eq "aspnetcore") {
|
||||
$ProductVersionTxtURL = "$AzureFeed/aspnetcore/Runtime/$SpecificVersion/productVersion.txt"
|
||||
}
|
||||
elseif ($Runtime -eq "windowsdesktop") {
|
||||
$ProductVersionTxtURL = "$AzureFeed/Runtime/$SpecificVersion/productVersion.txt"
|
||||
}
|
||||
elseif (-not $Runtime) {
|
||||
$ProductVersionTxtURL = "$AzureFeed/Sdk/$SpecificVersion/productVersion.txt"
|
||||
}
|
||||
else {
|
||||
throw "Invalid value '$Runtime' specified for `$Runtime"
|
||||
}
|
||||
|
||||
Say-Verbose "Checking for existence of $ProductVersionTxtURL"
|
||||
|
||||
try {
|
||||
$productVersionResponse = GetHTTPResponse($productVersionTxtUrl)
|
||||
|
||||
if ($productVersionResponse.StatusCode -eq 200) {
|
||||
$productVersion = $productVersionResponse.Content.ReadAsStringAsync().Result.Trim()
|
||||
if ($productVersion -ne $SpecificVersion)
|
||||
{
|
||||
Say "Using alternate version $productVersion found in $ProductVersionTxtURL"
|
||||
}
|
||||
|
||||
return $productVersion
|
||||
}
|
||||
else {
|
||||
Say-Verbose "Got StatusCode $($productVersionResponse.StatusCode) trying to get productVersion.txt at $productVersionTxtUrl, so using default value of $SpecificVersion"
|
||||
$productVersion = $SpecificVersion
|
||||
}
|
||||
} catch {
|
||||
Say-Verbose "Could not read productVersion.txt at $productVersionTxtUrl, so using default value of $SpecificVersion (Exception: '$($_.Exception.Message)' )"
|
||||
$productVersion = $SpecificVersion
|
||||
}
|
||||
|
||||
return $productVersion
|
||||
}
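Get-Product-Version only consults the productVersion.txt that sits next to the payload on the feed; when the file is missing or unreadable it silently falls back to the requested version. A quick way to see what that lookup returns is to probe the same URL by hand. This is a sketch written as a plain curl call so it can be run from any shell; the feed host and version used here are assumptions, not values taken from the script:

    # hand-run sketch of the productVersion.txt probe; host and version are placeholders
    AZURE_FEED="https://dotnetcli.azureedge.net/dotnet"   # assumed default feed
    SPECIFIC_VERSION="5.0.1"
    curl -s --fail "$AZURE_FEED/Runtime/$SPECIFIC_VERSION/productVersion.txt" \
      || echo "$SPECIFIC_VERSION"    # fall back to the requested version, as the script does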
|
||||
|
||||
function Get-User-Share-Path() {
|
||||
Say-Invocation $MyInvocation
|
||||
|
||||
@@ -539,6 +652,23 @@ function DownloadFile($Source, [string]$OutPath) {
|
||||
}
|
||||
}
|
||||
|
||||
function SafeRemoveFile($Path) {
|
||||
try {
|
||||
if (Test-Path $Path) {
|
||||
Remove-Item $Path
|
||||
Say-Verbose "The temporary file `"$Path`" was removed."
|
||||
}
|
||||
else
|
||||
{
|
||||
Say-Verbose "The temporary file `"$Path`" does not exist, therefore is not removed."
|
||||
}
|
||||
}
|
||||
catch
|
||||
{
|
||||
Say-Warning "Failed to remove the temporary file: `"$Path`", remove it manually."
|
||||
}
|
||||
}
|
||||
|
||||
function Prepend-Sdk-InstallRoot-To-Path([string]$InstallRoot, [string]$BinFolderRelativePath) {
|
||||
$BinPath = Get-Absolute-Path $(Join-Path -Path $InstallRoot -ChildPath $BinFolderRelativePath)
|
||||
if (-Not $NoPath) {
|
||||
@@ -555,9 +685,14 @@ function Prepend-Sdk-InstallRoot-To-Path([string]$InstallRoot, [string]$BinFolde
|
||||
}
|
||||
}
|
||||
|
||||
Say "Note that the intended use of this script is for Continuous Integration (CI) scenarios, where:"
|
||||
Say "- The SDK needs to be installed without user interaction and without admin rights."
|
||||
Say "- The SDK installation doesn't need to persist across multiple CI runs."
|
||||
Say "To set up a development environment or to run apps, use installers rather than this script. Visit https://dotnet.microsoft.com/download to get the installer.`r`n"
|
||||
|
||||
$CLIArchitecture = Get-CLIArchitecture-From-Architecture $Architecture
|
||||
$SpecificVersion = Get-Specific-Version-From-Version -AzureFeed $AzureFeed -Channel $Channel -Version $Version -JSonFile $JSonFile
|
||||
$DownloadLink = Get-Download-Link -AzureFeed $AzureFeed -SpecificVersion $SpecificVersion -CLIArchitecture $CLIArchitecture
|
||||
$DownloadLink, $EffectiveVersion = Get-Download-Link -AzureFeed $AzureFeed -SpecificVersion $SpecificVersion -CLIArchitecture $CLIArchitecture
|
||||
$LegacyDownloadLink = Get-LegacyDownload-Link -AzureFeed $AzureFeed -SpecificVersion $SpecificVersion -CLIArchitecture $CLIArchitecture
|
||||
|
||||
$InstallRoot = Resolve-Installation-Path $InstallDir
|
||||
@@ -583,7 +718,12 @@ if ($DryRun) {
|
||||
}
|
||||
}
|
||||
Say "Repeatable invocation: $RepeatableCommand"
|
||||
exit 0
|
||||
if ($SpecificVersion -ne $EffectiveVersion)
|
||||
{
|
||||
Say "NOTE: Due to finding a version manifest with this runtime, it would actually install with version '$EffectiveVersion'"
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
if ($Runtime -eq "dotnet") {
|
||||
@@ -606,12 +746,18 @@ else {
|
||||
throw "Invalid value for `$Runtime"
|
||||
}
|
||||
|
||||
if ($SpecificVersion -ne $EffectiveVersion)
|
||||
{
|
||||
Say "Performing installation checks for effective version: $EffectiveVersion"
|
||||
$SpecificVersion = $EffectiveVersion
|
||||
}
|
||||
|
||||
# Check if the SDK version is already installed.
|
||||
$isAssetInstalled = Is-Dotnet-Package-Installed -InstallRoot $InstallRoot -RelativePathToPackage $dotnetPackageRelativePath -SpecificVersion $SpecificVersion
|
||||
if ($isAssetInstalled) {
|
||||
Say "$assetName version $SpecificVersion is already installed."
|
||||
Prepend-Sdk-InstallRoot-To-Path -InstallRoot $InstallRoot -BinFolderRelativePath $BinFolderRelativePath
|
||||
exit 0
|
||||
return
|
||||
}
|
||||
|
||||
New-Item -ItemType Directory -Force -Path $InstallRoot | Out-Null
|
||||
@@ -619,30 +765,69 @@ New-Item -ItemType Directory -Force -Path $InstallRoot | Out-Null
|
||||
$installDrive = $((Get-Item $InstallRoot).PSDrive.Name);
|
||||
$diskInfo = Get-PSDrive -Name $installDrive
|
||||
if ($diskInfo.Free / 1MB -le 100) {
|
||||
Say "There is not enough disk space on drive ${installDrive}:"
|
||||
exit 0
|
||||
throw "There is not enough disk space on drive ${installDrive}:"
|
||||
}
|
||||
|
||||
$ZipPath = [System.IO.Path]::combine([System.IO.Path]::GetTempPath(), [System.IO.Path]::GetRandomFileName())
|
||||
Say-Verbose "Zip path: $ZipPath"
|
||||
|
||||
$DownloadFailed = $false
|
||||
Say "Downloading link: $DownloadLink"
|
||||
|
||||
$PrimaryDownloadStatusCode = 0
|
||||
$LegacyDownloadStatusCode = 0
|
||||
|
||||
$PrimaryDownloadFailedMsg = ""
|
||||
$LegacyDownloadFailedMsg = ""
|
||||
|
||||
Say "Downloading primary link $DownloadLink"
|
||||
try {
|
||||
DownloadFile -Source $DownloadLink -OutPath $ZipPath
|
||||
}
|
||||
catch {
|
||||
Say "Cannot download: $DownloadLink"
|
||||
if ($PSItem.Exception.Data.Contains("StatusCode")) {
|
||||
$PrimaryDownloadStatusCode = $PSItem.Exception.Data["StatusCode"]
|
||||
}
|
||||
|
||||
if ($PSItem.Exception.Data.Contains("ErrorMessage")) {
|
||||
$PrimaryDownloadFailedMsg = $PSItem.Exception.Data["ErrorMessage"]
|
||||
} else {
|
||||
$PrimaryDownloadFailedMsg = $PSItem.Exception.Message
|
||||
}
|
||||
|
||||
if ($PrimaryDownloadStatusCode -eq 404) {
|
||||
Say "The resource at $DownloadLink is not available."
|
||||
} else {
|
||||
Say $PSItem.Exception.Message
|
||||
}
|
||||
|
||||
SafeRemoveFile -Path $ZipPath
|
||||
|
||||
if ($LegacyDownloadLink) {
|
||||
$DownloadLink = $LegacyDownloadLink
|
||||
$ZipPath = [System.IO.Path]::combine([System.IO.Path]::GetTempPath(), [System.IO.Path]::GetRandomFileName())
|
||||
Say-Verbose "Legacy zip path: $ZipPath"
|
||||
Say "Downloading legacy link: $DownloadLink"
|
||||
Say "Downloading legacy link $DownloadLink"
|
||||
try {
|
||||
DownloadFile -Source $DownloadLink -OutPath $ZipPath
|
||||
}
|
||||
catch {
|
||||
Say "Cannot download: $DownloadLink"
|
||||
if ($PSItem.Exception.Data.Contains("StatusCode")) {
|
||||
$LegacyDownloadStatusCode = $PSItem.Exception.Data["StatusCode"]
|
||||
}
|
||||
|
||||
if ($PSItem.Exception.Data.Contains("ErrorMessage")) {
|
||||
$LegacyDownloadFailedMsg = $PSItem.Exception.Data["ErrorMessage"]
|
||||
} else {
|
||||
$LegacyDownloadFailedMsg = $PSItem.Exception.Message
|
||||
}
|
||||
|
||||
if ($LegacyDownloadStatusCode -eq 404) {
|
||||
Say "The resource at $DownloadLink is not available."
|
||||
} else {
|
||||
Say $PSItem.Exception.Message
|
||||
}
|
||||
|
||||
SafeRemoveFile -Path $ZipPath
|
||||
$DownloadFailed = $true
|
||||
}
|
||||
}
|
||||
@@ -652,7 +837,19 @@ catch {
|
||||
}
|
||||
|
||||
if ($DownloadFailed) {
|
||||
throw "Could not find/download: `"$assetName`" with version = $SpecificVersion`nRefer to: https://aka.ms/dotnet-os-lifecycle for information on .NET Core support"
|
||||
if (($PrimaryDownloadStatusCode -eq 404) -and ((-not $LegacyDownloadLink) -or ($LegacyDownloadStatusCode -eq 404))) {
|
||||
throw "Could not find `"$assetName`" with version = $SpecificVersion`nRefer to: https://aka.ms/dotnet-os-lifecycle for information on .NET Core support"
|
||||
} else {
|
||||
# 404-NotFound is an expected response if it goes from only one of the links, do not show that error.
|
||||
# If primary path is available (not 404-NotFound) then show the primary error else show the legacy error.
|
||||
if ($PrimaryDownloadStatusCode -ne 404) {
|
||||
throw "Could not download `"$assetName`" with version = $SpecificVersion`r`n$PrimaryDownloadFailedMsg"
|
||||
}
|
||||
if (($LegacyDownloadLink) -and ($LegacyDownloadStatusCode -ne 404)) {
|
||||
throw "Could not download `"$assetName`" with version = $SpecificVersion`r`n$LegacyDownloadFailedMsg"
|
||||
}
|
||||
throw "Could not download `"$assetName`" with version = $SpecificVersion"
|
||||
}
|
||||
}
|
||||
|
||||
Say "Extracting zip from $DownloadLink"
|
||||
@@ -674,13 +871,208 @@ if (!$isAssetInstalled) {
|
||||
$isAssetInstalled = Is-Dotnet-Package-Installed -InstallRoot $InstallRoot -RelativePathToPackage $dotnetPackageRelativePath -SpecificVersion $SpecificVersion
|
||||
}
|
||||
|
||||
# Version verification failed. More likely something is wrong either with the downloaded content or with the verification algorithm.
|
||||
if (!$isAssetInstalled) {
|
||||
Say-Error "Failed to verify the version of installed `"$assetName`".`nInstallation source: $DownloadLink.`nInstallation location: $InstallRoot.`nReport the bug at https://github.com/dotnet/install-scripts/issues."
|
||||
throw "`"$assetName`" with version = $SpecificVersion failed to install with an unknown error."
|
||||
}
|
||||
|
||||
Remove-Item $ZipPath
|
||||
SafeRemoveFile -Path $ZipPath
|
||||
|
||||
Prepend-Sdk-InstallRoot-To-Path -InstallRoot $InstallRoot -BinFolderRelativePath $BinFolderRelativePath
|
||||
|
||||
Say "Note that the script does not resolve dependencies during installation."
|
||||
Say "To check the list of dependencies, go to https://docs.microsoft.com/dotnet/core/install/windows#dependencies"
|
||||
Say "Installation finished"
|
||||
exit 0
|
||||
# SIG # Begin signature block
|
||||
# MIIjkgYJKoZIhvcNAQcCoIIjgzCCI38CAQExDzANBglghkgBZQMEAgEFADB5Bgor
|
||||
# BgEEAYI3AgEEoGswaTA0BgorBgEEAYI3AgEeMCYCAwEAAAQQH8w7YFlLCE63JNLG
|
||||
# KX7zUQIBAAIBAAIBAAIBAAIBADAxMA0GCWCGSAFlAwQCAQUABCD2c707qnCLOLIC
|
||||
# n6Mu5Gr4+Xp68foyZlGlTycnycc5l6CCDYEwggX/MIID56ADAgECAhMzAAABh3IX
|
||||
# chVZQMcJAAAAAAGHMA0GCSqGSIb3DQEBCwUAMH4xCzAJBgNVBAYTAlVTMRMwEQYD
|
||||
# VQQIEwpXYXNoaW5ndG9uMRAwDgYDVQQHEwdSZWRtb25kMR4wHAYDVQQKExVNaWNy
|
||||
# b3NvZnQgQ29ycG9yYXRpb24xKDAmBgNVBAMTH01pY3Jvc29mdCBDb2RlIFNpZ25p
|
||||
# bmcgUENBIDIwMTEwHhcNMjAwMzA0MTgzOTQ3WhcNMjEwMzAzMTgzOTQ3WjB0MQsw
|
||||
# CQYDVQQGEwJVUzETMBEGA1UECBMKV2FzaGluZ3RvbjEQMA4GA1UEBxMHUmVkbW9u
|
||||
# ZDEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMR4wHAYDVQQDExVNaWNy
|
||||
# b3NvZnQgQ29ycG9yYXRpb24wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB
|
||||
# AQDOt8kLc7P3T7MKIhouYHewMFmnq8Ayu7FOhZCQabVwBp2VS4WyB2Qe4TQBT8aB
|
||||
# znANDEPjHKNdPT8Xz5cNali6XHefS8i/WXtF0vSsP8NEv6mBHuA2p1fw2wB/F0dH
|
||||
# sJ3GfZ5c0sPJjklsiYqPw59xJ54kM91IOgiO2OUzjNAljPibjCWfH7UzQ1TPHc4d
|
||||
# weils8GEIrbBRb7IWwiObL12jWT4Yh71NQgvJ9Fn6+UhD9x2uk3dLj84vwt1NuFQ
|
||||
# itKJxIV0fVsRNR3abQVOLqpDugbr0SzNL6o8xzOHL5OXiGGwg6ekiXA1/2XXY7yV
|
||||
# Fc39tledDtZjSjNbex1zzwSXAgMBAAGjggF+MIIBejAfBgNVHSUEGDAWBgorBgEE
|
||||
# AYI3TAgBBggrBgEFBQcDAzAdBgNVHQ4EFgQUhov4ZyO96axkJdMjpzu2zVXOJcsw
|
||||
# UAYDVR0RBEkwR6RFMEMxKTAnBgNVBAsTIE1pY3Jvc29mdCBPcGVyYXRpb25zIFB1
|
||||
# ZXJ0byBSaWNvMRYwFAYDVQQFEw0yMzAwMTIrNDU4Mzg1MB8GA1UdIwQYMBaAFEhu
|
||||
# ZOVQBdOCqhc3NyK1bajKdQKVMFQGA1UdHwRNMEswSaBHoEWGQ2h0dHA6Ly93d3cu
|
||||
# bWljcm9zb2Z0LmNvbS9wa2lvcHMvY3JsL01pY0NvZFNpZ1BDQTIwMTFfMjAxMS0w
|
||||
# Ny0wOC5jcmwwYQYIKwYBBQUHAQEEVTBTMFEGCCsGAQUFBzAChkVodHRwOi8vd3d3
|
||||
# Lm1pY3Jvc29mdC5jb20vcGtpb3BzL2NlcnRzL01pY0NvZFNpZ1BDQTIwMTFfMjAx
|
||||
# MS0wNy0wOC5jcnQwDAYDVR0TAQH/BAIwADANBgkqhkiG9w0BAQsFAAOCAgEAixmy
|
||||
# S6E6vprWD9KFNIB9G5zyMuIjZAOuUJ1EK/Vlg6Fb3ZHXjjUwATKIcXbFuFC6Wr4K
|
||||
# NrU4DY/sBVqmab5AC/je3bpUpjtxpEyqUqtPc30wEg/rO9vmKmqKoLPT37svc2NV
|
||||
# BmGNl+85qO4fV/w7Cx7J0Bbqk19KcRNdjt6eKoTnTPHBHlVHQIHZpMxacbFOAkJr
|
||||
# qAVkYZdz7ikNXTxV+GRb36tC4ByMNxE2DF7vFdvaiZP0CVZ5ByJ2gAhXMdK9+usx
|
||||
# zVk913qKde1OAuWdv+rndqkAIm8fUlRnr4saSCg7cIbUwCCf116wUJ7EuJDg0vHe
|
||||
# yhnCeHnBbyH3RZkHEi2ofmfgnFISJZDdMAeVZGVOh20Jp50XBzqokpPzeZ6zc1/g
|
||||
# yILNyiVgE+RPkjnUQshd1f1PMgn3tns2Cz7bJiVUaqEO3n9qRFgy5JuLae6UweGf
|
||||
# AeOo3dgLZxikKzYs3hDMaEtJq8IP71cX7QXe6lnMmXU/Hdfz2p897Zd+kU+vZvKI
|
||||
# 3cwLfuVQgK2RZ2z+Kc3K3dRPz2rXycK5XCuRZmvGab/WbrZiC7wJQapgBodltMI5
|
||||
# GMdFrBg9IeF7/rP4EqVQXeKtevTlZXjpuNhhjuR+2DMt/dWufjXpiW91bo3aH6Ea
|
||||
# jOALXmoxgltCp1K7hrS6gmsvj94cLRf50QQ4U8Qwggd6MIIFYqADAgECAgphDpDS
|
||||
# AAAAAAADMA0GCSqGSIb3DQEBCwUAMIGIMQswCQYDVQQGEwJVUzETMBEGA1UECBMK
|
||||
# V2FzaGluZ3RvbjEQMA4GA1UEBxMHUmVkbW9uZDEeMBwGA1UEChMVTWljcm9zb2Z0
|
||||
# IENvcnBvcmF0aW9uMTIwMAYDVQQDEylNaWNyb3NvZnQgUm9vdCBDZXJ0aWZpY2F0
|
||||
# ZSBBdXRob3JpdHkgMjAxMTAeFw0xMTA3MDgyMDU5MDlaFw0yNjA3MDgyMTA5MDla
|
||||
# MH4xCzAJBgNVBAYTAlVTMRMwEQYDVQQIEwpXYXNoaW5ndG9uMRAwDgYDVQQHEwdS
|
||||
# ZWRtb25kMR4wHAYDVQQKExVNaWNyb3NvZnQgQ29ycG9yYXRpb24xKDAmBgNVBAMT
|
||||
# H01pY3Jvc29mdCBDb2RlIFNpZ25pbmcgUENBIDIwMTEwggIiMA0GCSqGSIb3DQEB
|
||||
# AQUAA4ICDwAwggIKAoICAQCr8PpyEBwurdhuqoIQTTS68rZYIZ9CGypr6VpQqrgG
|
||||
# OBoESbp/wwwe3TdrxhLYC/A4wpkGsMg51QEUMULTiQ15ZId+lGAkbK+eSZzpaF7S
|
||||
# 35tTsgosw6/ZqSuuegmv15ZZymAaBelmdugyUiYSL+erCFDPs0S3XdjELgN1q2jz
|
||||
# y23zOlyhFvRGuuA4ZKxuZDV4pqBjDy3TQJP4494HDdVceaVJKecNvqATd76UPe/7
|
||||
# 4ytaEB9NViiienLgEjq3SV7Y7e1DkYPZe7J7hhvZPrGMXeiJT4Qa8qEvWeSQOy2u
|
||||
# M1jFtz7+MtOzAz2xsq+SOH7SnYAs9U5WkSE1JcM5bmR/U7qcD60ZI4TL9LoDho33
|
||||
# X/DQUr+MlIe8wCF0JV8YKLbMJyg4JZg5SjbPfLGSrhwjp6lm7GEfauEoSZ1fiOIl
|
||||
# XdMhSz5SxLVXPyQD8NF6Wy/VI+NwXQ9RRnez+ADhvKwCgl/bwBWzvRvUVUvnOaEP
|
||||
# 6SNJvBi4RHxF5MHDcnrgcuck379GmcXvwhxX24ON7E1JMKerjt/sW5+v/N2wZuLB
|
||||
# l4F77dbtS+dJKacTKKanfWeA5opieF+yL4TXV5xcv3coKPHtbcMojyyPQDdPweGF
|
||||
# RInECUzF1KVDL3SV9274eCBYLBNdYJWaPk8zhNqwiBfenk70lrC8RqBsmNLg1oiM
|
||||
# CwIDAQABo4IB7TCCAekwEAYJKwYBBAGCNxUBBAMCAQAwHQYDVR0OBBYEFEhuZOVQ
|
||||
# BdOCqhc3NyK1bajKdQKVMBkGCSsGAQQBgjcUAgQMHgoAUwB1AGIAQwBBMAsGA1Ud
|
||||
# DwQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFHItOgIxkEO5FAVO
|
||||
# 4eqnxzHRI4k0MFoGA1UdHwRTMFEwT6BNoEuGSWh0dHA6Ly9jcmwubWljcm9zb2Z0
|
||||
# LmNvbS9wa2kvY3JsL3Byb2R1Y3RzL01pY1Jvb0NlckF1dDIwMTFfMjAxMV8wM18y
|
||||
# Mi5jcmwwXgYIKwYBBQUHAQEEUjBQME4GCCsGAQUFBzAChkJodHRwOi8vd3d3Lm1p
|
||||
# Y3Jvc29mdC5jb20vcGtpL2NlcnRzL01pY1Jvb0NlckF1dDIwMTFfMjAxMV8wM18y
|
||||
# Mi5jcnQwgZ8GA1UdIASBlzCBlDCBkQYJKwYBBAGCNy4DMIGDMD8GCCsGAQUFBwIB
|
||||
# FjNodHRwOi8vd3d3Lm1pY3Jvc29mdC5jb20vcGtpb3BzL2RvY3MvcHJpbWFyeWNw
|
||||
# cy5odG0wQAYIKwYBBQUHAgIwNB4yIB0ATABlAGcAYQBsAF8AcABvAGwAaQBjAHkA
|
||||
# XwBzAHQAYQB0AGUAbQBlAG4AdAAuIB0wDQYJKoZIhvcNAQELBQADggIBAGfyhqWY
|
||||
# 4FR5Gi7T2HRnIpsLlhHhY5KZQpZ90nkMkMFlXy4sPvjDctFtg/6+P+gKyju/R6mj
|
||||
# 82nbY78iNaWXXWWEkH2LRlBV2AySfNIaSxzzPEKLUtCw/WvjPgcuKZvmPRul1LUd
|
||||
# d5Q54ulkyUQ9eHoj8xN9ppB0g430yyYCRirCihC7pKkFDJvtaPpoLpWgKj8qa1hJ
|
||||
# Yx8JaW5amJbkg/TAj/NGK978O9C9Ne9uJa7lryft0N3zDq+ZKJeYTQ49C/IIidYf
|
||||
# wzIY4vDFLc5bnrRJOQrGCsLGra7lstnbFYhRRVg4MnEnGn+x9Cf43iw6IGmYslmJ
|
||||
# aG5vp7d0w0AFBqYBKig+gj8TTWYLwLNN9eGPfxxvFX1Fp3blQCplo8NdUmKGwx1j
|
||||
# NpeG39rz+PIWoZon4c2ll9DuXWNB41sHnIc+BncG0QaxdR8UvmFhtfDcxhsEvt9B
|
||||
# xw4o7t5lL+yX9qFcltgA1qFGvVnzl6UJS0gQmYAf0AApxbGbpT9Fdx41xtKiop96
|
||||
# eiL6SJUfq/tHI4D1nvi/a7dLl+LrdXga7Oo3mXkYS//WsyNodeav+vyL6wuA6mk7
|
||||
# r/ww7QRMjt/fdW1jkT3RnVZOT7+AVyKheBEyIXrvQQqxP/uozKRdwaGIm1dxVk5I
|
||||
# RcBCyZt2WwqASGv9eZ/BvW1taslScxMNelDNMYIVZzCCFWMCAQEwgZUwfjELMAkG
|
||||
# A1UEBhMCVVMxEzARBgNVBAgTCldhc2hpbmd0b24xEDAOBgNVBAcTB1JlZG1vbmQx
|
||||
# HjAcBgNVBAoTFU1pY3Jvc29mdCBDb3Jwb3JhdGlvbjEoMCYGA1UEAxMfTWljcm9z
|
||||
# b2Z0IENvZGUgU2lnbmluZyBQQ0EgMjAxMQITMwAAAYdyF3IVWUDHCQAAAAABhzAN
|
||||
# BglghkgBZQMEAgEFAKCBrjAZBgkqhkiG9w0BCQMxDAYKKwYBBAGCNwIBBDAcBgor
|
||||
# BgEEAYI3AgELMQ4wDAYKKwYBBAGCNwIBFTAvBgkqhkiG9w0BCQQxIgQgE/MRhWyu
|
||||
# Zg+EA2WKcxYC31nHVCTE6guHppZppc70RtkwQgYKKwYBBAGCNwIBDDE0MDKgFIAS
|
||||
# AE0AaQBjAHIAbwBzAG8AZgB0oRqAGGh0dHA6Ly93d3cubWljcm9zb2Z0LmNvbTAN
|
||||
# BgkqhkiG9w0BAQEFAASCAQBvcYCjRDXUYEIz9j2j0r4GFI2Y3g/CoNxDDBaeQ+gV
|
||||
# khO0fK0oLh18RbV271Mg6SF7X7+mXB5MnL68voVQDqHnsCYrIAuMF/AEpv9YuDDp
|
||||
# ZRJuqN7Vwg3HM02l/FyATBIMgf/V79aYzJL3jjtt9bRIyxk6aPU4XcwMeA4usnUQ
|
||||
# rMhIiQz07DgfSrcQWe4AvGFAIvqTAKE4P944EZWWVnWI/10rvatEAefqJZX3XljW
|
||||
# sK/6NY/0MyAyiILOuXbvVS0YFbHaR2qd1jUXbrY79fS+H4Ts6qnbufOkHQvmcDxs
|
||||
# 801wKLHumMdPTtMVzfVMCwPvrHP0wtzsFlmCcKjBbGpvoYIS8TCCEu0GCisGAQQB
|
||||
# gjcDAwExghLdMIIS2QYJKoZIhvcNAQcCoIISyjCCEsYCAQMxDzANBglghkgBZQME
|
||||
# AgEFADCCAVUGCyqGSIb3DQEJEAEEoIIBRASCAUAwggE8AgEBBgorBgEEAYRZCgMB
|
||||
# MDEwDQYJYIZIAWUDBAIBBQAEINdeoXtuzW+Dihw6n+VdG+91si0f6TvWhJXaPtvW
|
||||
# oF4cAgZfu+i3IT8YEzIwMjAxMjE3MDYzMDM2LjU0M1owBIACAfSggdSkgdEwgc4x
|
||||
# CzAJBgNVBAYTAlVTMRMwEQYDVQQIEwpXYXNoaW5ndG9uMRAwDgYDVQQHEwdSZWRt
|
||||
# b25kMR4wHAYDVQQKExVNaWNyb3NvZnQgQ29ycG9yYXRpb24xKTAnBgNVBAsTIE1p
|
||||
# Y3Jvc29mdCBPcGVyYXRpb25zIFB1ZXJ0byBSaWNvMSYwJAYDVQQLEx1UaGFsZXMg
|
||||
# VFNTIEVTTjo4OTdBLUUzNTYtMTcwMTElMCMGA1UEAxMcTWljcm9zb2Z0IFRpbWUt
|
||||
# U3RhbXAgU2VydmljZaCCDkQwggT1MIID3aADAgECAhMzAAABLCKvRZd1+RvuAAAA
|
||||
# AAEsMA0GCSqGSIb3DQEBCwUAMHwxCzAJBgNVBAYTAlVTMRMwEQYDVQQIEwpXYXNo
|
||||
# aW5ndG9uMRAwDgYDVQQHEwdSZWRtb25kMR4wHAYDVQQKExVNaWNyb3NvZnQgQ29y
|
||||
# cG9yYXRpb24xJjAkBgNVBAMTHU1pY3Jvc29mdCBUaW1lLVN0YW1wIFBDQSAyMDEw
|
||||
# MB4XDTE5MTIxOTAxMTUwM1oXDTIxMDMxNzAxMTUwM1owgc4xCzAJBgNVBAYTAlVT
|
||||
# MRMwEQYDVQQIEwpXYXNoaW5ndG9uMRAwDgYDVQQHEwdSZWRtb25kMR4wHAYDVQQK
|
||||
# ExVNaWNyb3NvZnQgQ29ycG9yYXRpb24xKTAnBgNVBAsTIE1pY3Jvc29mdCBPcGVy
|
||||
# YXRpb25zIFB1ZXJ0byBSaWNvMSYwJAYDVQQLEx1UaGFsZXMgVFNTIEVTTjo4OTdB
|
||||
# LUUzNTYtMTcwMTElMCMGA1UEAxMcTWljcm9zb2Z0IFRpbWUtU3RhbXAgU2Vydmlj
|
||||
# ZTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAPK1zgSSq+MxAYo3qpCt
|
||||
# QDxSMPPJy6mm/wfEJNjNUnYtLFBwl1BUS5trEk/t41ldxITKehs+ABxYqo4Qxsg3
|
||||
# Gy1ugKiwHAnYiiekfC+ZhptNFgtnDZIn45zC0AlVr/6UfLtsLcHCh1XElLUHfEC0
|
||||
# nBuQcM/SpYo9e3l1qY5NdMgDGxCsmCKdiZfYXIu+U0UYIBhdzmSHnB3fxZOBVcr5
|
||||
# htFHEBBNt/rFJlm/A4yb8oBsp+Uf0p5QwmO/bCcdqB15JpylOhZmWs0sUfJKlK9E
|
||||
# rAhBwGki2eIRFKsQBdkXS9PWpF1w2gIJRvSkDEaCf+lbGTPdSzHSbfREWOF9wY3i
|
||||
# Yj8CAwEAAaOCARswggEXMB0GA1UdDgQWBBRRahZSGfrCQhCyIyGH9DkiaW7L0zAf
|
||||
# BgNVHSMEGDAWgBTVYzpcijGQ80N7fEYbxTNoWoVtVTBWBgNVHR8ETzBNMEugSaBH
|
||||
# hkVodHRwOi8vY3JsLm1pY3Jvc29mdC5jb20vcGtpL2NybC9wcm9kdWN0cy9NaWNU
|
||||
# aW1TdGFQQ0FfMjAxMC0wNy0wMS5jcmwwWgYIKwYBBQUHAQEETjBMMEoGCCsGAQUF
|
||||
# BzAChj5odHRwOi8vd3d3Lm1pY3Jvc29mdC5jb20vcGtpL2NlcnRzL01pY1RpbVN0
|
||||
# YVBDQV8yMDEwLTA3LTAxLmNydDAMBgNVHRMBAf8EAjAAMBMGA1UdJQQMMAoGCCsG
|
||||
# AQUFBwMIMA0GCSqGSIb3DQEBCwUAA4IBAQBPFxHIwi4vAH49w9Svmz6K3tM55RlW
|
||||
# 5pPeULXdut2Rqy6Ys0+VpZsbuaEoxs6Z1C3hMbkiqZFxxyltxJpuHTyGTg61zfNI
|
||||
# F5n6RsYF3s7IElDXNfZznF1/2iWc6uRPZK8rxxUJ/7emYXZCYwuUY0XjsCpP9pbR
|
||||
# RKeJi6r5arSyI+NfKxvgoM21JNt1BcdlXuAecdd/k8UjxCscffanoK2n6LFw1PcZ
|
||||
# lEO7NId7o+soM2C0QY5BYdghpn7uqopB6ixyFIIkDXFub+1E7GmAEwfU6VwEHL7y
|
||||
# 9rNE8bd+JrQs+yAtkkHy9FmXg/PsGq1daVzX1So7CJ6nyphpuHSN3VfTMIIGcTCC
|
||||
# BFmgAwIBAgIKYQmBKgAAAAAAAjANBgkqhkiG9w0BAQsFADCBiDELMAkGA1UEBhMC
|
||||
# VVMxEzARBgNVBAgTCldhc2hpbmd0b24xEDAOBgNVBAcTB1JlZG1vbmQxHjAcBgNV
|
||||
# BAoTFU1pY3Jvc29mdCBDb3Jwb3JhdGlvbjEyMDAGA1UEAxMpTWljcm9zb2Z0IFJv
|
||||
# b3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTAwHhcNMTAwNzAxMjEzNjU1WhcN
|
||||
# MjUwNzAxMjE0NjU1WjB8MQswCQYDVQQGEwJVUzETMBEGA1UECBMKV2FzaGluZ3Rv
|
||||
# bjEQMA4GA1UEBxMHUmVkbW9uZDEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0
|
||||
# aW9uMSYwJAYDVQQDEx1NaWNyb3NvZnQgVGltZS1TdGFtcCBQQ0EgMjAxMDCCASIw
|
||||
# DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKkdDbx3EYo6IOz8E5f1+n9plGt0
|
||||
# VBDVpQoAgoX77XxoSyxfxcPlYcJ2tz5mK1vwFVMnBDEfQRsalR3OCROOfGEwWbEw
|
||||
# RA/xYIiEVEMM1024OAizQt2TrNZzMFcmgqNFDdDq9UeBzb8kYDJYYEbyWEeGMoQe
|
||||
# dGFnkV+BVLHPk0ySwcSmXdFhE24oxhr5hoC732H8RsEnHSRnEnIaIYqvS2SJUGKx
|
||||
# Xf13Hz3wV3WsvYpCTUBR0Q+cBj5nf/VmwAOWRH7v0Ev9buWayrGo8noqCjHw2k4G
|
||||
# kbaICDXoeByw6ZnNPOcvRLqn9NxkvaQBwSAJk3jN/LzAyURdXhacAQVPIk0CAwEA
|
||||
# AaOCAeYwggHiMBAGCSsGAQQBgjcVAQQDAgEAMB0GA1UdDgQWBBTVYzpcijGQ80N7
|
||||
# fEYbxTNoWoVtVTAZBgkrBgEEAYI3FAIEDB4KAFMAdQBiAEMAQTALBgNVHQ8EBAMC
|
||||
# AYYwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBTV9lbLj+iiXGJo0T2UkFvX
|
||||
# zpoYxDBWBgNVHR8ETzBNMEugSaBHhkVodHRwOi8vY3JsLm1pY3Jvc29mdC5jb20v
|
||||
# cGtpL2NybC9wcm9kdWN0cy9NaWNSb29DZXJBdXRfMjAxMC0wNi0yMy5jcmwwWgYI
|
||||
# KwYBBQUHAQEETjBMMEoGCCsGAQUFBzAChj5odHRwOi8vd3d3Lm1pY3Jvc29mdC5j
|
||||
# b20vcGtpL2NlcnRzL01pY1Jvb0NlckF1dF8yMDEwLTA2LTIzLmNydDCBoAYDVR0g
|
||||
# AQH/BIGVMIGSMIGPBgkrBgEEAYI3LgMwgYEwPQYIKwYBBQUHAgEWMWh0dHA6Ly93
|
||||
# d3cubWljcm9zb2Z0LmNvbS9QS0kvZG9jcy9DUFMvZGVmYXVsdC5odG0wQAYIKwYB
|
||||
# BQUHAgIwNB4yIB0ATABlAGcAYQBsAF8AUABvAGwAaQBjAHkAXwBTAHQAYQB0AGUA
|
||||
# bQBlAG4AdAAuIB0wDQYJKoZIhvcNAQELBQADggIBAAfmiFEN4sbgmD+BcQM9naOh
|
||||
# IW+z66bM9TG+zwXiqf76V20ZMLPCxWbJat/15/B4vceoniXj+bzta1RXCCtRgkQS
|
||||
# +7lTjMz0YBKKdsxAQEGb3FwX/1z5Xhc1mCRWS3TvQhDIr79/xn/yN31aPxzymXlK
|
||||
# kVIArzgPF/UveYFl2am1a+THzvbKegBvSzBEJCI8z+0DpZaPWSm8tv0E4XCfMkon
|
||||
# /VWvL/625Y4zu2JfmttXQOnxzplmkIz/amJ/3cVKC5Em4jnsGUpxY517IW3DnKOi
|
||||
# PPp/fZZqkHimbdLhnPkd/DjYlPTGpQqWhqS9nhquBEKDuLWAmyI4ILUl5WTs9/S/
|
||||
# fmNZJQ96LjlXdqJxqgaKD4kWumGnEcua2A5HmoDF0M2n0O99g/DhO3EJ3110mCII
|
||||
# YdqwUB5vvfHhAN/nMQekkzr3ZUd46PioSKv33nJ+YWtvd6mBy6cJrDm77MbL2IK0
|
||||
# cs0d9LiFAR6A+xuJKlQ5slvayA1VmXqHczsI5pgt6o3gMy4SKfXAL1QnIffIrE7a
|
||||
# KLixqduWsqdCosnPGUFN4Ib5KpqjEWYw07t0MkvfY3v1mYovG8chr1m1rtxEPJdQ
|
||||
# cdeh0sVV42neV8HR3jDA/czmTfsNv11P6Z0eGTgvvM9YBS7vDaBQNdrvCScc1bN+
|
||||
# NR4Iuto229Nfj950iEkSoYIC0jCCAjsCAQEwgfyhgdSkgdEwgc4xCzAJBgNVBAYT
|
||||
# AlVTMRMwEQYDVQQIEwpXYXNoaW5ndG9uMRAwDgYDVQQHEwdSZWRtb25kMR4wHAYD
|
||||
# VQQKExVNaWNyb3NvZnQgQ29ycG9yYXRpb24xKTAnBgNVBAsTIE1pY3Jvc29mdCBP
|
||||
# cGVyYXRpb25zIFB1ZXJ0byBSaWNvMSYwJAYDVQQLEx1UaGFsZXMgVFNTIEVTTjo4
|
||||
# OTdBLUUzNTYtMTcwMTElMCMGA1UEAxMcTWljcm9zb2Z0IFRpbWUtU3RhbXAgU2Vy
|
||||
# dmljZaIjCgEBMAcGBSsOAwIaAxUADE5OKSMoNx/mYxYWap1RTOohbJ2ggYMwgYCk
|
||||
# fjB8MQswCQYDVQQGEwJVUzETMBEGA1UECBMKV2FzaGluZ3RvbjEQMA4GA1UEBxMH
|
||||
# UmVkbW9uZDEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMSYwJAYDVQQD
|
||||
# Ex1NaWNyb3NvZnQgVGltZS1TdGFtcCBQQ0EgMjAxMDANBgkqhkiG9w0BAQUFAAIF
|
||||
# AOOFYaowIhgPMjAyMDEyMTcwODQ4NDJaGA8yMDIwMTIxODA4NDg0MlowdzA9Bgor
|
||||
# BgEEAYRZCgQBMS8wLTAKAgUA44VhqgIBADAKAgEAAgIoWgIB/zAHAgEAAgISJTAK
|
||||
# AgUA44azKgIBADA2BgorBgEEAYRZCgQCMSgwJjAMBgorBgEEAYRZCgMCoAowCAIB
|
||||
# AAIDB6EgoQowCAIBAAIDAYagMA0GCSqGSIb3DQEBBQUAA4GBAB53NDoDDF4vqFWY
|
||||
# fwUnSvAy3z0CtqSFeA9RzDKGklPRwVkya5DtmVBDTZUbVQ2ST9hvRAVxhktfyVBZ
|
||||
# ewapGJsvwMhg7nnEqBOumt6TvueIZpbs+p5z//3+iFYGkT3YFQI0Gd2JkvgBxfs5
|
||||
# +GptO6JKtiyA+zkKijxqXZvMqMxBMYIDDTCCAwkCAQEwgZMwfDELMAkGA1UEBhMC
|
||||
# VVMxEzARBgNVBAgTCldhc2hpbmd0b24xEDAOBgNVBAcTB1JlZG1vbmQxHjAcBgNV
|
||||
# BAoTFU1pY3Jvc29mdCBDb3Jwb3JhdGlvbjEmMCQGA1UEAxMdTWljcm9zb2Z0IFRp
|
||||
# bWUtU3RhbXAgUENBIDIwMTACEzMAAAEsIq9Fl3X5G+4AAAAAASwwDQYJYIZIAWUD
|
||||
# BAIBBQCgggFKMBoGCSqGSIb3DQEJAzENBgsqhkiG9w0BCRABBDAvBgkqhkiG9w0B
|
||||
# CQQxIgQg3wEUtEvxwCp3aAFB2vGXOOqg/AXHyXZh9P9J+0uArDMwgfoGCyqGSIb3
|
||||
# DQEJEAIvMYHqMIHnMIHkMIG9BCBbn/0uFFh42hTM5XOoKdXevBaiSxmYK9Ilcn9n
|
||||
# u5ZH4TCBmDCBgKR+MHwxCzAJBgNVBAYTAlVTMRMwEQYDVQQIEwpXYXNoaW5ndG9u
|
||||
# MRAwDgYDVQQHEwdSZWRtb25kMR4wHAYDVQQKExVNaWNyb3NvZnQgQ29ycG9yYXRp
|
||||
# b24xJjAkBgNVBAMTHU1pY3Jvc29mdCBUaW1lLVN0YW1wIFBDQSAyMDEwAhMzAAAB
|
||||
# LCKvRZd1+RvuAAAAAAEsMCIEINBRtGID6jvA2ptfwIuPyG7qPcLRYb9YrJ8aKfVg
|
||||
# TulFMA0GCSqGSIb3DQEBCwUABIIBACQQpFGWW6JmH5MTKwhaE/8+gyzI2bT8XJnA
|
||||
# t8k7PHFvEGA7whgp9eNgW+wWJm1gnsmswjx2l7FW4DLg9lghM8FK77JRCg7CJfse
|
||||
# dSbnTv81/4VhSXOAO0jMP2dALP7DF59vQmlDh50u8/Wu61ActMOt6cArkoUhBRXO
|
||||
# LnqOQCOEEku5Xy2ES9g9eUfLUvTvlWo6HiAq+cJnNV08QRBOnGWRxdwy8YJ5vwNW
|
||||
# Pwx0ZG3rTvMtGzOaW6Ve5O36H2ynoEdzCmpakeDaF2sZ86/LNERKyIXiykV/Uig1
|
||||
# SZh2VLY/Yni9SCVHbYgvTOCh5ZZE5eOi6BwLf0T4xl5alHUx+AA=
|
||||
# SIG # End signature block
|
||||
|
||||
299 src/Misc/dotnet-install.sh vendored
@@ -40,7 +40,7 @@ if [ -t 1 ] && command -v tput > /dev/null; then
|
||||
fi
|
||||
|
||||
say_warning() {
|
||||
printf "%b\n" "${yellow:-}dotnet_install: Warning: $1${normal:-}"
|
||||
printf "%b\n" "${yellow:-}dotnet_install: Warning: $1${normal:-}" >&3
|
||||
}
|
||||
|
||||
say_err() {
|
||||
@@ -183,6 +183,9 @@ get_current_os_name() {
|
||||
elif is_musl_based_distro; then
|
||||
echo "linux-musl"
|
||||
return 0
|
||||
elif [ "$linux_platform_name" = "linux-musl" ]; then
|
||||
echo "linux-musl"
|
||||
return 0
|
||||
else
|
||||
echo "linux"
|
||||
return 0
|
||||
@@ -241,42 +244,6 @@ check_min_reqs() {
|
||||
return 0
|
||||
}
|
||||
|
||||
check_pre_reqs() {
|
||||
eval $invocation
|
||||
|
||||
if [ "${DOTNET_INSTALL_SKIP_PREREQS:-}" = "1" ]; then
|
||||
return 0
|
||||
fi
|
||||
|
||||
if [ "$(uname)" = "Linux" ]; then
|
||||
if is_musl_based_distro; then
|
||||
if ! command -v scanelf > /dev/null; then
|
||||
say_warning "scanelf not found, please install pax-utils package."
|
||||
return 0
|
||||
fi
|
||||
LDCONFIG_COMMAND="scanelf --ldpath -BF '%f'"
|
||||
[ -z "$($LDCONFIG_COMMAND 2>/dev/null | grep libintl)" ] && say_warning "Unable to locate libintl. Probable prerequisite missing; install libintl (or gettext)."
|
||||
else
|
||||
if [ ! -x "$(command -v ldconfig)" ]; then
|
||||
say_verbose "ldconfig is not in PATH, trying /sbin/ldconfig."
|
||||
LDCONFIG_COMMAND="/sbin/ldconfig"
|
||||
else
|
||||
LDCONFIG_COMMAND="ldconfig"
|
||||
fi
|
||||
local librarypath=${LD_LIBRARY_PATH:-}
|
||||
LDCONFIG_COMMAND="$LDCONFIG_COMMAND -NXv ${librarypath//:/ }"
|
||||
fi
|
||||
|
||||
[ -z "$($LDCONFIG_COMMAND 2>/dev/null | grep zlib)" ] && say_warning "Unable to locate zlib. Probable prerequisite missing; install zlib."
|
||||
[ -z "$($LDCONFIG_COMMAND 2>/dev/null | grep ssl)" ] && say_warning "Unable to locate libssl. Probable prerequisite missing; install libssl."
|
||||
[ -z "$($LDCONFIG_COMMAND 2>/dev/null | grep libicu)" ] && say_warning "Unable to locate libicu. Probable prerequisite missing; install libicu."
|
||||
[ -z "$($LDCONFIG_COMMAND 2>/dev/null | grep lttng)" ] && say_warning "Unable to locate liblttng. Probable prerequisite missing; install libcurl."
|
||||
[ -z "$($LDCONFIG_COMMAND 2>/dev/null | grep libcurl)" ] && say_warning "Unable to locate libcurl. Probable prerequisite missing; install libcurl."
|
||||
fi
|
||||
|
||||
return 0
|
||||
}
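The removed check_pre_reqs block above probes for the native libraries .NET needs by listing what the loader can see: scanelf on musl-based distros, ldconfig (with a /sbin fallback) elsewhere. A minimal hand-runnable sketch of the glibc path, using only the library names and the fallback taken from the block above:

    # minimal sketch of the glibc-side prerequisite probe above
    LDCONFIG_COMMAND="ldconfig"
    command -v ldconfig > /dev/null || LDCONFIG_COMMAND="/sbin/ldconfig"   # same fallback as the script
    for lib in zlib ssl libicu lttng libcurl; do
        if [ -z "$($LDCONFIG_COMMAND -NXv 2>/dev/null | grep "$lib")" ]; then
            echo "warning: unable to locate $lib; probable prerequisite missing"
        fi
    done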
|
||||
|
||||
# args:
|
||||
# input - $1
|
||||
to_lowercase() {
|
||||
@@ -332,7 +299,7 @@ get_machine_architecture() {
|
||||
if command -v uname > /dev/null; then
|
||||
CPUName=$(uname -m)
|
||||
case $CPUName in
|
||||
armv7l)
|
||||
armv*l)
|
||||
echo "arm"
|
||||
return 0
|
||||
;;
|
||||
@@ -373,10 +340,34 @@ get_normalized_architecture_from_architecture() {
|
||||
;;
|
||||
esac
|
||||
|
||||
say_err "Architecture \`$architecture\` not supported. If you think this is a bug, report it at https://github.com/dotnet/sdk/issues"
|
||||
say_err "Architecture \`$architecture\` not supported. If you think this is a bug, report it at https://github.com/dotnet/install-scripts/issues"
|
||||
return 1
|
||||
}
|
||||
|
||||
# args:
|
||||
# user_defined_os - $1
|
||||
get_normalized_os() {
|
||||
eval $invocation
|
||||
|
||||
local osname="$(to_lowercase "$1")"
|
||||
if [ ! -z "$osname" ]; then
|
||||
case "$osname" in
|
||||
osx | freebsd | rhel.6 | linux-musl | linux)
|
||||
echo "$osname"
|
||||
return 0
|
||||
;;
|
||||
*)
|
||||
say_err "'$user_defined_os' is not a supported value for --os option, supported values are: osx, linux, linux-musl, freebsd, rhel.6. If you think this is a bug, report it at https://github.com/dotnet/install-scripts/issues."
|
||||
return 1
|
||||
;;
|
||||
esac
|
||||
else
|
||||
osname="$(get_current_os_name)" || return 1
|
||||
fi
|
||||
echo "$osname"
|
||||
return 0
|
||||
}
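get_normalized_os accepts an explicit OS token from the new --os option and validates it against the supported set before falling back to auto-detection. A hedged invocation sketch; the version and install directory are placeholders:

    # placeholder values; --os accepts: osx, linux, linux-musl, freebsd, rhel.6
    ./dotnet-install.sh --version latest --os linux-musl --install-dir "$HOME/.dotnet"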
|
||||
|
||||
# The version text returned from the feeds is a 1-line or 2-line string:
|
||||
# For the SDK and the dotnet runtime (2 lines):
|
||||
# Line 1: # commit_hash
|
||||
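Per the comment above, the feed's latest.version file is one or two lines, with a commit hash first and the resolved version last for SDK and dotnet runtime downloads. A sketch of inspecting such a file by hand; the feed URL and channel are assumptions, and keeping only the last line mirrors what the script's get_version_from_version_info helper appears to do:

    # feed URL and channel are placeholders
    version_info="$(curl -sSL "https://dotnetcli.azureedge.net/dotnet/Sdk/5.0/latest.version")"
    echo "$version_info" | tail -n 1    # drop the commit-hash line, keep the version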
@@ -418,14 +409,12 @@ is_dotnet_package_installed() {
|
||||
# azure_feed - $1
|
||||
# channel - $2
|
||||
# normalized_architecture - $3
|
||||
# coherent - $4
|
||||
get_latest_version_info() {
|
||||
eval $invocation
|
||||
|
||||
local azure_feed="$1"
|
||||
local channel="$2"
|
||||
local normalized_architecture="$3"
|
||||
local coherent="$4"
|
||||
|
||||
local version_file_url=null
|
||||
if [[ "$runtime" == "dotnet" ]]; then
|
||||
@@ -433,11 +422,7 @@ get_latest_version_info() {
|
||||
elif [[ "$runtime" == "aspnetcore" ]]; then
|
||||
version_file_url="$uncached_feed/aspnetcore/Runtime/$channel/latest.version"
|
||||
elif [ -z "$runtime" ]; then
|
||||
if [ "$coherent" = true ]; then
|
||||
version_file_url="$uncached_feed/Sdk/$channel/latest.coherent.version"
|
||||
else
|
||||
version_file_url="$uncached_feed/Sdk/$channel/latest.version"
|
||||
fi
|
||||
else
|
||||
say_err "Invalid value for \$runtime"
|
||||
return 1
|
||||
@@ -468,7 +453,6 @@ parse_jsonfile_for_version() {
|
||||
sdk_list=$(echo $sdk_section | awk -F"[{}]" '{print $2}')
|
||||
sdk_list=${sdk_list//[\" ]/}
|
||||
sdk_list=${sdk_list//,/$'\n'}
|
||||
sdk_list="$(echo -e "${sdk_list}" | tr -d '[[:space:]]')"
|
||||
|
||||
local version_info=""
|
||||
while read -r line; do
|
||||
@@ -505,26 +489,16 @@ get_specific_version_from_version() {
|
||||
local json_file="$5"
|
||||
|
||||
if [ -z "$json_file" ]; then
|
||||
case "$version" in
|
||||
latest)
|
||||
if [[ "$version" == "latest" ]]; then
|
||||
local version_info
|
||||
version_info="$(get_latest_version_info "$azure_feed" "$channel" "$normalized_architecture" false)" || return 1
|
||||
say_verbose "get_specific_version_from_version: version_info=$version_info"
|
||||
echo "$version_info" | get_version_from_version_info
|
||||
return 0
|
||||
;;
|
||||
coherent)
|
||||
local version_info
|
||||
version_info="$(get_latest_version_info "$azure_feed" "$channel" "$normalized_architecture" true)" || return 1
|
||||
say_verbose "get_specific_version_from_version: version_info=$version_info"
|
||||
echo "$version_info" | get_version_from_version_info
|
||||
return 0
|
||||
;;
|
||||
*)
|
||||
else
|
||||
echo "$version"
|
||||
return 0
|
||||
;;
|
||||
esac
|
||||
fi
|
||||
else
|
||||
local version_info
|
||||
version_info="$(parse_jsonfile_for_version "$json_file")" || return 1
|
||||
@@ -538,6 +512,7 @@ get_specific_version_from_version() {
|
||||
# channel - $2
|
||||
# normalized_architecture - $3
|
||||
# specific_version - $4
|
||||
# normalized_os - $5
|
||||
construct_download_link() {
|
||||
eval $invocation
|
||||
|
||||
@@ -545,17 +520,16 @@ construct_download_link() {
|
||||
local channel="$2"
|
||||
local normalized_architecture="$3"
|
||||
local specific_version="${4//[$'\t\r\n']}"
|
||||
|
||||
local osname
|
||||
osname="$(get_current_os_name)" || return 1
|
||||
local specific_product_version="$(get_specific_product_version "$1" "$4")"
|
||||
local osname="$5"
|
||||
|
||||
local download_link=null
|
||||
if [[ "$runtime" == "dotnet" ]]; then
|
||||
download_link="$azure_feed/Runtime/$specific_version/dotnet-runtime-$specific_version-$osname-$normalized_architecture.tar.gz"
|
||||
download_link="$azure_feed/Runtime/$specific_version/dotnet-runtime-$specific_product_version-$osname-$normalized_architecture.tar.gz"
|
||||
elif [[ "$runtime" == "aspnetcore" ]]; then
|
||||
download_link="$azure_feed/aspnetcore/Runtime/$specific_version/aspnetcore-runtime-$specific_version-$osname-$normalized_architecture.tar.gz"
|
||||
download_link="$azure_feed/aspnetcore/Runtime/$specific_version/aspnetcore-runtime-$specific_product_version-$osname-$normalized_architecture.tar.gz"
|
||||
elif [ -z "$runtime" ]; then
|
||||
download_link="$azure_feed/Sdk/$specific_version/dotnet-sdk-$specific_version-$osname-$normalized_architecture.tar.gz"
|
||||
download_link="$azure_feed/Sdk/$specific_version/dotnet-sdk-$specific_product_version-$osname-$normalized_architecture.tar.gz"
|
||||
else
|
||||
return 1
|
||||
fi
|
||||
@@ -564,6 +538,50 @@ construct_download_link() {
|
||||
return 0
|
||||
}
|
||||
|
||||
# args:
|
||||
# azure_feed - $1
|
||||
# specific_version - $2
|
||||
get_specific_product_version() {
|
||||
# If we find a 'productVersion.txt' at the root of any folder, we'll use its contents
|
||||
# to resolve the version of what's in the folder, superseding the specified version.
|
||||
eval $invocation
|
||||
|
||||
local azure_feed="$1"
|
||||
local specific_version="${2//[$'\t\r\n']}"
|
||||
local specific_product_version=$specific_version
|
||||
|
||||
local download_link=null
|
||||
if [[ "$runtime" == "dotnet" ]]; then
|
||||
download_link="$azure_feed/Runtime/$specific_version/productVersion.txt${feed_credential}"
|
||||
elif [[ "$runtime" == "aspnetcore" ]]; then
|
||||
download_link="$azure_feed/aspnetcore/Runtime/$specific_version/productVersion.txt${feed_credential}"
|
||||
elif [ -z "$runtime" ]; then
|
||||
download_link="$azure_feed/Sdk/$specific_version/productVersion.txt${feed_credential}"
|
||||
else
|
||||
return 1
|
||||
fi
|
||||
|
||||
if machine_has "curl"
|
||||
then
|
||||
specific_product_version=$(curl -s --fail "$download_link")
|
||||
if [ $? -ne 0 ]
|
||||
then
|
||||
specific_product_version=$specific_version
|
||||
fi
|
||||
elif machine_has "wget"
|
||||
then
|
||||
specific_product_version=$(wget -qO- "$download_link")
|
||||
if [ $? -ne 0 ]
|
||||
then
|
||||
specific_product_version=$specific_version
|
||||
fi
|
||||
fi
|
||||
specific_product_version="${specific_product_version//[$'\t\r\n']}"
|
||||
|
||||
echo "$specific_product_version"
|
||||
return 0
|
||||
}
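The value returned by get_specific_product_version is what ends up in the payload file name, while the folder on the feed keeps the originally requested version. An illustrative trace; the feed URL and both version strings below are hypothetical:

    # hypothetical values to show how the two versions combine in the payload URL
    azure_feed="https://dotnetcli.azureedge.net/dotnet"        # assumed default feed
    specific_version="3.1.4"                                   # folder name on the feed
    specific_product_version="3.1.4-servicing.20221.3"         # made-up productVersion.txt content
    echo "$azure_feed/Runtime/$specific_version/dotnet-runtime-$specific_product_version-linux-x64.tar.gz"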
|
||||
|
||||
# args:
|
||||
# azure_feed - $1
|
||||
# channel - $2
|
||||
@@ -684,11 +702,31 @@ extract_dotnet_package() {
|
||||
find "$temp_out_path" -type f | grep -Ev "$folders_with_version_regex" | copy_files_or_dirs_from_list "$temp_out_path" "$out_path" "$override_non_versioned_files"
|
||||
|
||||
rm -rf "$temp_out_path"
|
||||
rm -f "$zip_path" && say_verbose "Temporary zip file $zip_path was removed"
|
||||
|
||||
if [ "$failed" = true ]; then
|
||||
say_err "Extraction failed"
|
||||
return 1
|
||||
fi
|
||||
return 0
|
||||
}
|
||||
|
||||
get_http_header_curl() {
|
||||
eval $invocation
|
||||
local remote_path="$1"
|
||||
remote_path_with_credential="${remote_path}${feed_credential}"
|
||||
curl_options="-I -sSL --retry 5 --retry-delay 2 --connect-timeout 15 "
|
||||
curl $curl_options "$remote_path_with_credential" || return 1
|
||||
return 0
|
||||
}
|
||||
|
||||
get_http_header_wget() {
|
||||
eval $invocation
|
||||
local remote_path="$1"
|
||||
remote_path_with_credential="${remote_path}${feed_credential}"
|
||||
wget_options="-q -S --spider --tries 5 --waitretry 2 --connect-timeout 15 "
|
||||
wget $wget_options "$remote_path_with_credential" 2>&1 || return 1
|
||||
return 0
|
||||
}
|
||||
|
||||
# args:
|
||||
@@ -720,43 +758,56 @@ download() {
|
||||
return 0
|
||||
}
|
||||
|
||||
# Updates global variables $http_code and $download_error_msg
|
||||
downloadcurl() {
|
||||
eval $invocation
|
||||
local remote_path="$1"
|
||||
local out_path="${2:-}"
|
||||
|
||||
# Append feed_credential as late as possible before calling curl to avoid logging feed_credential
|
||||
remote_path="${remote_path}${feed_credential}"
|
||||
|
||||
local remote_path_with_credential="${remote_path}${feed_credential}"
|
||||
local curl_options="--retry 20 --retry-delay 2 --connect-timeout 15 -sSL -f --create-dirs "
|
||||
local failed=false
|
||||
if [ -z "$out_path" ]; then
|
||||
curl --retry 10 -sSL -f --create-dirs "$remote_path" || failed=true
|
||||
curl $curl_options "$remote_path_with_credential" || failed=true
|
||||
else
|
||||
curl --retry 10 -sSL -f --create-dirs -o "$out_path" "$remote_path" || failed=true
|
||||
curl $curl_options -o "$out_path" "$remote_path_with_credential" || failed=true
|
||||
fi
|
||||
if [ "$failed" = true ]; then
|
||||
say_verbose "Curl download failed"
|
||||
local response=$(get_http_header_curl $remote_path_with_credential)
|
||||
http_code=$( echo "$response" | awk '/^HTTP/{print $2}' | tail -1 )
|
||||
download_error_msg="Unable to download $remote_path."
|
||||
if [[ $http_code != 2* ]]; then
|
||||
download_error_msg+=" Returned HTTP status code: $http_code."
|
||||
fi
|
||||
say_verbose "$download_error_msg"
|
||||
return 1
|
||||
fi
|
||||
return 0
|
||||
}
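When the download fails, downloadcurl re-requests only the headers and extracts the status code with awk to populate the $http_code and $download_error_msg globals. The same failure path, condensed into a standalone snippet; the URL is a placeholder:

    # standalone sketch of the failure path above; the URL is a placeholder
    remote_path="https://example.invalid/dotnet-sdk-0.0.0-linux-x64.tar.gz"
    response="$(curl -I -sSL --retry 5 --retry-delay 2 --connect-timeout 15 "$remote_path")"
    http_code="$(echo "$response" | awk '/^HTTP/{print $2}' | tail -1)"
    download_error_msg="Unable to download $remote_path."
    case "$http_code" in 2*) ;; *) download_error_msg+=" Returned HTTP status code: $http_code." ;; esac
    echo "$download_error_msg"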
|
||||
|
||||
|
||||
# Updates global variables $http_code and $download_error_msg
|
||||
downloadwget() {
|
||||
eval $invocation
|
||||
local remote_path="$1"
|
||||
local out_path="${2:-}"
|
||||
|
||||
# Append feed_credential as late as possible before calling wget to avoid logging feed_credential
|
||||
remote_path="${remote_path}${feed_credential}"
|
||||
|
||||
local remote_path_with_credential="${remote_path}${feed_credential}"
|
||||
local wget_options="--tries 20 --waitretry 2 --connect-timeout 15 "
|
||||
local failed=false
|
||||
if [ -z "$out_path" ]; then
|
||||
wget -q --tries 10 -O - "$remote_path" || failed=true
|
||||
wget -q $wget_options -O - "$remote_path_with_credential" || failed=true
|
||||
else
|
||||
wget --tries 10 -O "$out_path" "$remote_path" || failed=true
|
||||
wget $wget_options -O "$out_path" "$remote_path_with_credential" || failed=true
|
||||
fi
|
||||
if [ "$failed" = true ]; then
|
||||
say_verbose "Wget download failed"
|
||||
local response=$(get_http_header_wget $remote_path_with_credential)
|
||||
http_code=$( echo "$response" | awk '/^ HTTP/{print $2}' | tail -1 )
|
||||
download_error_msg="Unable to download $remote_path."
|
||||
if [[ $http_code != 2* ]]; then
|
||||
download_error_msg+=" Returned HTTP status code: $http_code."
|
||||
fi
|
||||
say_verbose "$download_error_msg"
|
||||
return 1
|
||||
fi
|
||||
return 0
|
||||
@@ -769,14 +820,18 @@ calculate_vars() {
|
||||
normalized_architecture="$(get_normalized_architecture_from_architecture "$architecture")"
|
||||
say_verbose "normalized_architecture=$normalized_architecture"
|
||||
|
||||
normalized_os="$(get_normalized_os "$user_defined_os")"
|
||||
say_verbose "normalized_os=$normalized_os"
|
||||
|
||||
specific_version="$(get_specific_version_from_version "$azure_feed" "$channel" "$normalized_architecture" "$version" "$json_file")"
|
||||
specific_product_version="$(get_specific_product_version "$azure_feed" "$specific_version")"
|
||||
say_verbose "specific_version=$specific_version"
|
||||
if [ -z "$specific_version" ]; then
|
||||
say_err "Could not resolve version information."
|
||||
return 1
|
||||
fi
|
||||
|
||||
download_link="$(construct_download_link "$azure_feed" "$channel" "$normalized_architecture" "$specific_version")"
|
||||
download_link="$(construct_download_link "$azure_feed" "$channel" "$normalized_architecture" "$specific_version" "$normalized_os")"
|
||||
say_verbose "Constructed primary named payload URL: $download_link"
|
||||
|
||||
legacy_download_link="$(construct_legacy_download_link "$azure_feed" "$channel" "$normalized_architecture" "$specific_version")" || valid_legacy_download_link=false
|
||||
@@ -821,38 +876,76 @@ install_dotnet() {
|
||||
zip_path="$(mktemp "$temporary_file_template")"
|
||||
say_verbose "Zip path: $zip_path"
|
||||
|
||||
say "Downloading link: $download_link"
|
||||
|
||||
# Failures are normal in the non-legacy case for ultimately legacy downloads.
|
||||
# Do not output to stderr, since output to stderr is considered an error.
|
||||
say "Downloading primary link $download_link"
|
||||
|
||||
# The download function will set variables $http_code and $download_error_msg in case of failure.
|
||||
http_code=""; download_error_msg=""
|
||||
download "$download_link" "$zip_path" 2>&1 || download_failed=true
|
||||
primary_path_http_code="$http_code"; primary_path_download_error_msg="$download_error_msg"
|
||||
|
||||
# if the download fails, download the legacy_download_link
|
||||
if [ "$download_failed" = true ]; then
|
||||
say "Cannot download: $download_link"
|
||||
|
||||
case $primary_path_http_code in
|
||||
404)
|
||||
say "The resource at $download_link is not available."
|
||||
;;
|
||||
*)
|
||||
say "$primary_path_download_error_msg"
|
||||
;;
|
||||
esac
|
||||
rm -f "$zip_path" 2>&1 && say_verbose "Temporary zip file $zip_path was removed"
|
||||
if [ "$valid_legacy_download_link" = true ]; then
|
||||
download_failed=false
|
||||
download_link="$legacy_download_link"
|
||||
zip_path="$(mktemp "$temporary_file_template")"
|
||||
say_verbose "Legacy zip path: $zip_path"
|
||||
say "Downloading legacy link: $download_link"
|
||||
|
||||
say "Downloading legacy link $download_link"
|
||||
|
||||
# The download function will set variables $http_code and $download_error_msg in case of failure.
|
||||
http_code=""; download_error_msg=""
|
||||
download "$download_link" "$zip_path" 2>&1 || download_failed=true
|
||||
legacy_path_http_code="$http_code"; legacy_path_download_error_msg="$download_error_msg"
|
||||
|
||||
if [ "$download_failed" = true ]; then
|
||||
say "Cannot download: $download_link"
|
||||
case $legacy_path_http_code in
|
||||
404)
|
||||
say "The resource at $download_link is not available."
|
||||
;;
|
||||
*)
|
||||
say "$legacy_path_download_error_msg"
|
||||
;;
|
||||
esac
|
||||
rm -f "$zip_path" 2>&1 && say_verbose "Temporary zip file $zip_path was removed"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ "$download_failed" = true ]; then
|
||||
say_err "Could not find/download: \`$asset_name\` with version = $specific_version"
|
||||
if [[ "$primary_path_http_code" = "404" && ( "$valid_legacy_download_link" = false || "$legacy_path_http_code" = "404") ]]; then
|
||||
say_err "Could not find \`$asset_name\` with version = $specific_version"
|
||||
say_err "Refer to: https://aka.ms/dotnet-os-lifecycle for information on .NET Core support"
|
||||
else
|
||||
say_err "Could not download: \`$asset_name\` with version = $specific_version"
|
||||
# 404-NotFound is an expected response if it goes from only one of the links, do not show that error.
|
||||
# If primary path is available (not 404-NotFound) then show the primary error else show the legacy error.
|
||||
if [ "$primary_path_http_code" != "404" ]; then
|
||||
say_err "$primary_path_download_error_msg"
|
||||
return 1
|
||||
fi
|
||||
if [[ "$valid_legacy_download_link" = true && "$legacy_path_http_code" != "404" ]]; then
|
||||
say_err "$legacy_path_download_error_msg"
|
||||
return 1
|
||||
fi
|
||||
fi
|
||||
return 1
|
||||
fi
|
||||
|
||||
say "Extracting zip from $download_link"
|
||||
extract_dotnet_package "$zip_path" "$install_root"
|
||||
extract_dotnet_package "$zip_path" "$install_root" || return 1
|
||||
|
||||
# Check if the SDK version is installed; if not, fail the installation.
|
||||
# if the version contains "RTM" or "servicing"; check if a 'release-type' SDK version is installed.
|
||||
@@ -868,12 +961,14 @@ install_dotnet() {
|
||||
fi
|
||||
|
||||
# Check if the standard SDK version is installed.
|
||||
say_verbose "Checking installation: version = $specific_version"
|
||||
if is_dotnet_package_installed "$install_root" "$asset_relative_path" "$specific_version"; then
|
||||
say_verbose "Checking installation: version = $specific_product_version"
|
||||
if is_dotnet_package_installed "$install_root" "$asset_relative_path" "$specific_product_version"; then
|
||||
return 0
|
||||
fi
|
||||
|
||||
say_err "\`$asset_name\` with version = $specific_version failed to install with an unknown error."
|
||||
# Version verification failed. More likely something is wrong either with the downloaded content or with the verification algorithm.
|
||||
say_err "Failed to verify the version of installed \`$asset_name\`.\nInstallation source: $download_link.\nInstallation location: $install_root.\nReport the bug at https://github.com/dotnet/install-scripts/issues."
|
||||
say_err "\`$asset_name\` with version = $specific_product_version failed to install with an unknown error."
|
||||
return 1
|
||||
}
|
||||
|
||||
@@ -899,6 +994,7 @@ runtime=""
|
||||
runtime_id=""
|
||||
override_non_versioned_files=true
|
||||
non_dynamic_parameters=""
|
||||
user_defined_os=""
|
||||
|
||||
while [ $# -ne 0 ]
|
||||
do
|
||||
@@ -920,6 +1016,10 @@ do
|
||||
shift
|
||||
architecture="$1"
|
||||
;;
|
||||
--os|-[Oo][SS])
|
||||
shift
|
||||
user_defined_os="$1"
|
||||
;;
|
||||
--shared-runtime|-[Ss]hared[Rr]untime)
|
||||
say_warning "The --shared-runtime flag is obsolete and may be removed in a future version of this script. The recommended usage is to specify '--runtime dotnet'."
|
||||
if [ -z "$runtime" ]; then
|
||||
@@ -971,6 +1071,7 @@ do
|
||||
shift
|
||||
runtime_id="$1"
|
||||
non_dynamic_parameters+=" $name "\""$1"\"""
|
||||
say_warning "Use of --runtime-id is obsolete and should be limited to the versions below 2.1. To override architecture, use --architecture option instead. To override OS, use --os option instead."
|
||||
;;
|
||||
--jsonfile|-[Jj][Ss]on[Ff]ile)
|
||||
shift
|
||||
@@ -1003,8 +1104,6 @@ do
|
||||
echo " -Version"
|
||||
echo " Possible values:"
|
||||
echo " - latest - most latest build on specific channel"
|
||||
echo " - coherent - most latest coherent build on specific channel"
|
||||
echo " coherent applies only to SDK downloads"
|
||||
echo " - 3-part version in a format A.B.C - represents specific version of build"
|
||||
echo " examples: 2.0.0-preview2-006120; 1.1.0"
|
||||
echo " -i,--install-dir <DIR> Install under specified location (see Install Location below)"
|
||||
@@ -1012,6 +1111,11 @@ do
|
||||
echo " --architecture <ARCHITECTURE> Architecture of dotnet binaries to be installed, Defaults to \`$architecture\`."
|
||||
echo " --arch,-Architecture,-Arch"
|
||||
echo " Possible values: x64, arm, and arm64"
|
||||
echo " --os <system> Specifies operating system to be used when selecting the installer."
|
||||
echo " Overrides the OS determination approach used by the script. Supported values: osx, linux, linux-musl, freebsd, rhel.6."
|
||||
echo " In case any other value is provided, the platform will be determined by the script based on machine configuration."
|
||||
echo " Not supported for legacy links. Use --runtime-id to specify platform for legacy links."
|
||||
echo " Refer to: https://aka.ms/dotnet-os-lifecycle for more information."
|
||||
echo " --runtime <RUNTIME> Installs a shared runtime only, without the SDK."
|
||||
echo " -Runtime"
|
||||
echo " Possible values:"
|
||||
@@ -1028,14 +1132,15 @@ do
|
||||
echo " --no-cdn,-NoCdn Disable downloading from the Azure CDN, and use the uncached feed directly."
|
||||
echo " --jsonfile <JSONFILE> Determines the SDK version from a user specified global.json file."
|
||||
echo " Note: global.json must have a value for 'SDK:Version'"
|
||||
echo " --runtime-id Installs the .NET Tools for the given platform (use linux-x64 for portable linux)."
|
||||
echo " -RuntimeId"
|
||||
echo " -?,--?,-h,--help,-Help Shows this help message"
|
||||
echo ""
|
||||
echo "Obsolete parameters:"
|
||||
echo " --shared-runtime The recommended alternative is '--runtime dotnet'."
|
||||
echo " This parameter is obsolete and may be removed in a future version of this script."
|
||||
echo " Installs just the shared runtime bits, not the entire SDK."
|
||||
echo " --runtime-id Installs the .NET Tools for the given platform (use linux-x64 for portable linux)."
|
||||
echo " -RuntimeId" The parameter is obsolete and may be removed in a future version of this script. Should be used only for versions below 2.1.
|
||||
echo " For primary links to override OS or/and architecture, use --os and --architecture option instead."
|
||||
echo ""
|
||||
echo "Install Location:"
|
||||
echo " Location is chosen in following order:"
|
||||
@@ -1057,6 +1162,11 @@ if [ "$no_cdn" = true ]; then
|
||||
azure_feed="$uncached_feed"
|
||||
fi
|
||||
|
||||
say "Note that the intended use of this script is for Continuous Integration (CI) scenarios, where:"
|
||||
say "- The SDK needs to be installed without user interaction and without admin rights."
|
||||
say "- The SDK installation doesn't need to persist across multiple CI runs."
|
||||
say "To set up a development environment or to run apps, use installers rather than this script. Visit https://dotnet.microsoft.com/download to get the installer.\n"
|
||||
|
||||
check_min_reqs
|
||||
calculate_vars
|
||||
script_name=$(basename "$0")
|
||||
@@ -1067,7 +1177,7 @@ if [ "$dry_run" = true ]; then
|
||||
if [ "$valid_legacy_download_link" = true ]; then
|
||||
say "Legacy named payload URL: $legacy_download_link"
|
||||
fi
|
||||
repeatable_command="./$script_name --version "\""$specific_version"\"" --install-dir "\""$install_root"\"" --architecture "\""$normalized_architecture"\"""
|
||||
repeatable_command="./$script_name --version "\""$specific_version"\"" --install-dir "\""$install_root"\"" --architecture "\""$normalized_architecture"\"" --os "\""$normalized_os"\"""
|
||||
if [[ "$runtime" == "dotnet" ]]; then
|
||||
repeatable_command+=" --runtime "\""dotnet"\"""
|
||||
elif [[ "$runtime" == "aspnetcore" ]]; then
|
||||
@@ -1078,7 +1188,6 @@ if [ "$dry_run" = true ]; then
|
||||
exit 0
|
||||
fi
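With --os folded into the repeatable command, a dry run now echoes an invocation that pins architecture and OS alongside the resolved version. Roughly what to expect; the wording and values below are illustrative, not captured output:

    ./dotnet-install.sh --version latest --dry-run
    # Repeatable invocation: ./dotnet-install.sh --version "3.1.404" --install-dir "/home/user/.dotnet" --architecture "x64" --os "linux"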
|
||||
|
||||
check_pre_reqs
|
||||
install_dotnet
|
||||
|
||||
bin_path="$(get_absolute_path "$(combine_paths "$install_root" "$bin_folder_relative_path")")"
|
||||
@@ -1089,4 +1198,6 @@ else
|
||||
say "Binaries of dotnet can be found in $bin_path"
|
||||
fi
|
||||
|
||||
say "Note that the script does not resolve dependencies during installation."
|
||||
say "To check the list of dependencies, go to https://docs.microsoft.com/dotnet/core/install, select your operating system and check the \"Dependencies\" section."
|
||||
say "Installation finished successfully."
|
||||
|
||||
1087 src/Misc/expressionFunc/hashFiles/package-lock.json generated
File diff suppressed because it is too large.
@@ -27,7 +27,7 @@
|
||||
"@types/node": "^12.7.12",
|
||||
"@typescript-eslint/parser": "^2.8.0",
|
||||
"@zeit/ncc": "^0.20.5",
|
||||
"eslint": "^5.16.0",
|
||||
"eslint": "^6.8.0",
|
||||
"eslint-plugin-github": "^2.0.0",
|
||||
"prettier": "^1.19.1",
|
||||
"typescript": "^3.6.4"
|
||||
|
||||
@@ -23,5 +23,7 @@
|
||||
<key>ACTIONS_RUNNER_SVC</key>
|
||||
<string>1</string>
|
||||
</dict>
|
||||
<key>ProcessType</key>
|
||||
<string>Interactive</string>
|
||||
</dict>
|
||||
</plist>
|
||||
|
||||
115 src/Misc/layoutbin/checkScripts/downloadCert.js Normal file
@@ -0,0 +1,115 @@
|
||||
const https = require('https')
|
||||
const fs = require('fs')
|
||||
const http = require('http')
|
||||
const hostname = process.env['HOSTNAME'] || ''
|
||||
const port = process.env['PORT'] || ''
|
||||
const path = process.env['PATH'] || ''
|
||||
const pat = process.env['PAT'] || ''
|
||||
const proxyHost = process.env['PROXYHOST'] || ''
|
||||
const proxyPort = process.env['PROXYPORT'] || ''
|
||||
const proxyUsername = process.env['PROXYUSERNAME'] || ''
|
||||
const proxyPassword = process.env['PROXYPASSWORD'] || ''
|
||||
|
||||
process.env['NODE_TLS_REJECT_UNAUTHORIZED'] = '0'
|
||||
|
||||
if (proxyHost === '') {
|
||||
const options = {
|
||||
hostname: hostname,
|
||||
port: port,
|
||||
path: path,
|
||||
method: 'GET',
|
||||
headers: {
|
||||
'User-Agent': 'GitHubActionsRunnerCheck/1.0',
|
||||
'Authorization': `token ${pat}`
|
||||
},
|
||||
}
|
||||
const req = https.request(options, res => {
|
||||
console.log(`statusCode: ${res.statusCode}`)
|
||||
console.log(`headers: ${JSON.stringify(res.headers)}`)
|
||||
let cert = socket.getPeerCertificate(true)
|
||||
let certPEM = ''
|
||||
let fingerprints = {}
|
||||
while (cert != null && fingerprints[cert.fingerprint] != '1') {
|
||||
fingerprints[cert.fingerprint] = '1'
|
||||
certPEM = certPEM + '-----BEGIN CERTIFICATE-----\n'
|
||||
let certEncoded = cert.raw.toString('base64')
|
||||
for (let i = 0; i < certEncoded.length; i++) {
|
||||
certPEM = certPEM + certEncoded[i]
|
||||
if (i != certEncoded.length - 1 && (i + 1) % 64 == 0) {
|
||||
certPEM = certPEM + '\n'
|
||||
}
|
||||
}
|
||||
certPEM = certPEM + '\n-----END CERTIFICATE-----\n'
|
||||
cert = cert.issuerCertificate
|
||||
}
|
||||
console.log(certPEM)
|
||||
fs.writeFileSync('./download_ca_cert.pem', certPEM)
|
||||
res.on('data', d => {
|
||||
process.stdout.write(d)
|
||||
})
|
||||
})
|
||||
req.on('error', error => {
|
||||
console.error(error)
|
||||
})
|
||||
req.end()
|
||||
}
|
||||
else {
|
||||
const auth = 'Basic ' + Buffer.from(proxyUsername + ':' + proxyPassword).toString('base64')
|
||||
|
||||
const options = {
|
||||
host: proxyHost,
|
||||
port: proxyPort,
|
||||
method: 'CONNECT',
|
||||
path: `${hostname}:${port}`,
|
||||
}
|
||||
|
||||
if (proxyUsername != '' || proxyPassword != '') {
|
||||
options.headers = {
|
||||
'Proxy-Authorization': auth,
|
||||
}
|
||||
}
|
||||
|
||||
http.request(options).on('connect', (res, socket) => {
|
||||
if (res.statusCode != 200) {
|
||||
throw new Error(`Proxy returns code: ${res.statusCode}`)
|
||||
}
|
||||
|
||||
https.get({
|
||||
host: hostname,
|
||||
port: port,
|
||||
socket: socket,
|
||||
agent: false,
|
||||
path: '/',
|
||||
headers: {
|
||||
'User-Agent': 'GitHubActionsRunnerCheck/1.0',
|
||||
'Authorization': `token ${pat}`
|
||||
}
|
||||
}, (res) => {
|
||||
let cert = res.socket.getPeerCertificate(true)
|
||||
let certPEM = ''
|
||||
let fingerprints = {}
|
||||
while (cert != null && fingerprints[cert.fingerprint] != '1') {
|
||||
fingerprints[cert.fingerprint] = '1'
|
||||
certPEM = certPEM + '-----BEGIN CERTIFICATE-----\n'
|
||||
let certEncoded = cert.raw.toString('base64')
|
||||
for (let i = 0; i < certEncoded.length; i++) {
|
||||
certPEM = certPEM + certEncoded[i]
|
||||
if (i != certEncoded.length - 1 && (i + 1) % 64 == 0) {
|
||||
certPEM = certPEM + '\n'
|
||||
}
|
||||
}
|
||||
certPEM = certPEM + '\n-----END CERTIFICATE-----\n'
|
||||
cert = cert.issuerCertificate
|
||||
}
|
||||
console.log(certPEM)
|
||||
fs.writeFileSync('./download_ca_cert.pem', certPEM)
|
||||
console.log(`statusCode: ${res.statusCode}`)
|
||||
console.log(`headers: ${JSON.stringify(res.headers)}`)
|
||||
res.on('data', d => {
|
||||
process.stdout.write(d)
|
||||
})
|
||||
})
|
||||
}).on('error', (err) => {
|
||||
console.error('error', err)
|
||||
}).end()
|
||||
}
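downloadCert.js reads its target, token, and optional proxy settings from environment variables and writes the server's certificate chain to ./download_ca_cert.pem. Note that the direct (no-proxy) branch above references a socket variable that is never defined, so the proxied path is the one sketched here; every value is a placeholder, and node is resolved to an absolute path up front because the script reuses the PATH variable for the request path:

    # placeholder values; the script reads HOSTNAME, PORT, PATH, PAT and the PROXY* variables
    env HOSTNAME=github.example.com PORT=443 PATH=/ PAT="$RUNNER_PAT" \
        PROXYHOST=proxy.example.com PROXYPORT=8080 \
        "$(command -v node)" downloadCert.js
    # the captured chain is written to ./download_ca_cert.pem in the current directory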
|
||||
75 src/Misc/layoutbin/checkScripts/makeWebRequest.js Normal file
@@ -0,0 +1,75 @@
const https = require('https')
const http = require('http')
const hostname = process.env['HOSTNAME'] || ''
const port = process.env['PORT'] || ''
const path = process.env['PATH'] || ''
const pat = process.env['PAT'] || ''
const proxyHost = process.env['PROXYHOST'] || ''
const proxyPort = process.env['PROXYPORT'] || ''
const proxyUsername = process.env['PROXYUSERNAME'] || ''
const proxyPassword = process.env['PROXYPASSWORD'] || ''

if (proxyHost === '') {
const options = {
hostname: hostname,
port: port,
path: path,
method: 'GET',
headers: {
'User-Agent': 'GitHubActionsRunnerCheck/1.0',
'Authorization': `token ${pat}`,
}
}
const req = https.request(options, res => {
console.log(`statusCode: ${res.statusCode}`)
console.log(`headers: ${JSON.stringify(res.headers)}`)

res.on('data', d => {
process.stdout.write(d)
})
})
req.on('error', error => {
console.error(error)
})
req.end()
}
else {
const proxyAuth = 'Basic ' + Buffer.from(proxyUsername + ':' + proxyPassword).toString('base64')
const options = {
hostname: proxyHost,
port: proxyPort,
method: 'CONNECT',
path: `${hostname}:${port}`
}

if (proxyUsername != '' || proxyPassword != '') {
options.headers = {
'Proxy-Authorization': proxyAuth,
}
}
http.request(options).on('connect', (res, socket) => {
if (res.statusCode != 200) {
throw new Error(`Proxy returns code: ${res.statusCode}`)
}
https.get({
host: hostname,
port: port,
socket: socket,
agent: false,
path: path,
headers: {
'User-Agent': 'GitHubActionsRunnerCheck/1.0',
'Authorization': `token ${pat}`,
}
}, (res) => {
console.log(`statusCode: ${res.statusCode}`)
console.log(`headers: ${JSON.stringify(res.headers)}`)

res.on('data', d => {
process.stdout.write(d)
})
})
}).on('error', (err) => {
console.error('error', err)
}).end()
}
@@ -1,6 +1,7 @@
#!/bin/bash

SVC_NAME="{{SvcNameVar}}"
SVC_NAME=${SVC_NAME// /_}
SVC_DESCRIPTION="{{SvcDescription}}"

user_id=`id -u`
@@ -9,19 +9,19 @@ fi
|
||||
|
||||
# Determine OS type
|
||||
# Debian based OS (Debian, Ubuntu, Linux Mint) has /etc/debian_version
|
||||
# Fedora based OS (Fedora, Redhat, Centos, Oracle Linux 7) has /etc/redhat-release
|
||||
# Fedora based OS (Fedora, Red Hat Enterprise Linux, CentOS, Oracle Linux 7) has /etc/redhat-release
|
||||
# SUSE based OS (OpenSUSE, SUSE Enterprise) has ID_LIKE=suse in /etc/os-release
|
||||
|
||||
function print_errormessage()
|
||||
{
|
||||
echo "Can't install dotnet core dependencies."
|
||||
echo "Can't install dotnet 5 dependencies."
|
||||
echo "You can manually install all required dependencies based on following documentation"
|
||||
echo "https://docs.microsoft.com/en-us/dotnet/core/linux-prerequisites?tabs=netcore2x"
|
||||
}
|
||||
|
||||
function print_rhel6message()
|
||||
{
|
||||
echo "We did our best effort to install dotnet core dependencies"
|
||||
echo "We did our best effort to install dotnet 5 dependencies"
|
||||
echo "However, there are some dependencies which require manual installation"
|
||||
echo "You can install all remaining required dependencies based on the following documentation"
|
||||
echo "https://github.com/dotnet/core/blob/master/Documentation/build-and-install-rhel6-prerequisites.md"
|
||||
@@ -29,7 +29,7 @@ function print_rhel6message()
|
||||
|
||||
function print_rhel6errormessage()
|
||||
{
|
||||
echo "We couldn't install dotnet core dependencies"
|
||||
echo "We couldn't install dotnet 5 dependencies"
|
||||
echo "You can manually install all required dependencies based on following documentation"
|
||||
echo "https://docs.microsoft.com/en-us/dotnet/core/linux-prerequisites?tabs=netcore2x"
|
||||
echo "In addition, there are some dependencies which require manual installation. Please follow this documentation"
|
||||
@@ -49,79 +49,77 @@ then
|
||||
cat /etc/debian_version
|
||||
echo "------------------------------"
|
||||
|
||||
# prefer apt over apt-get
|
||||
command -v apt
|
||||
if [ $? -eq 0 ]
|
||||
then
|
||||
apt update && apt install -y liblttng-ust0 libkrb5-3 zlib1g
|
||||
if [ $? -ne 0 ]
|
||||
then
|
||||
echo "'apt' failed with exit code '$?'"
|
||||
print_errormessage
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# libissl version prefer: libssl1.1 -> libssl1.0.2 -> libssl1.0.0
|
||||
apt install -y libssl1.1$ || apt install -y libssl1.0.2$ || apt install -y libssl1.0.0$
|
||||
if [ $? -ne 0 ]
|
||||
then
|
||||
echo "'apt' failed with exit code '$?'"
|
||||
print_errormessage
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# libicu version prefer: libicu63 -> libicu60 -> libicu57 -> libicu55 -> libicu52
|
||||
apt install -y libicu63 || apt install -y libicu60 || apt install -y libicu57 || apt install -y libicu55 || apt install -y libicu52
|
||||
if [ $? -ne 0 ]
|
||||
then
|
||||
echo "'apt' failed with exit code '$?'"
|
||||
print_errormessage
|
||||
exit 1
|
||||
fi
|
||||
else
|
||||
# prefer apt-get over apt
|
||||
command -v apt-get
|
||||
if [ $? -eq 0 ]
|
||||
then
|
||||
apt-get update && apt-get install -y liblttng-ust0 libkrb5-3 zlib1g
|
||||
if [ $? -ne 0 ]
|
||||
then
|
||||
echo "'apt-get' failed with exit code '$?'"
|
||||
print_errormessage
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# libissl version prefer: libssl1.1 -> libssl1.0.2 -> libssl1.0.0
|
||||
apt-get install -y libssl1.1$ || apt-get install -y libssl1.0.2$ || apt install -y libssl1.0.0$
|
||||
if [ $? -ne 0 ]
|
||||
then
|
||||
echo "'apt-get' failed with exit code '$?'"
|
||||
print_errormessage
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# libicu version prefer: libicu63 -> libicu60 -> libicu57 -> libicu55 -> libicu52
|
||||
apt-get install -y libicu63 || apt-get install -y libicu60 || apt install -y libicu57 || apt install -y libicu55 || apt install -y libicu52
|
||||
if [ $? -ne 0 ]
|
||||
then
|
||||
echo "'apt-get' failed with exit code '$?'"
|
||||
print_errormessage
|
||||
exit 1
|
||||
fi
|
||||
apt_get=apt-get
|
||||
else
|
||||
echo "Can not find 'apt' or 'apt-get'"
|
||||
command -v apt
|
||||
if [ $? -eq 0 ]
|
||||
then
|
||||
apt_get=apt
|
||||
else
|
||||
echo "Found neither 'apt-get' nor 'apt'"
|
||||
print_errormessage
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
$apt_get update && $apt_get install -y liblttng-ust0 libkrb5-3 zlib1g
|
||||
if [ $? -ne 0 ]
|
||||
then
|
||||
echo "'$apt_get' failed with exit code '$?'"
|
||||
print_errormessage
|
||||
exit 1
|
||||
fi
|
||||
|
||||
apt_get_with_fallbacks() {
|
||||
$apt_get install -y $1
|
||||
fail=$?
|
||||
if [ $fail -eq 0 ]
|
||||
then
|
||||
if [ "${1#"${1%?}"}" = '$' ]; then
|
||||
dpkg -l "${1%?}" > /dev/null 2> /dev/null
|
||||
fail=$?
|
||||
fi
|
||||
fi
|
||||
if [ $fail -ne 0 ]
|
||||
then
|
||||
shift
|
||||
if [ -n "$1" ]
|
||||
then
|
||||
apt_get_with_fallbacks "$@"
|
||||
fi
|
||||
fi
|
||||
}
|
||||
|
||||
# libssl version prefer: libssl1.1 -> libssl1.0.2 -> libssl1.0.0
|
||||
apt_get_with_fallbacks libssl1.1$ libssl1.0.2$ libssl1.0.0$
|
||||
if [ $? -ne 0 ]
|
||||
then
|
||||
echo "'$apt_get' failed with exit code '$?'"
|
||||
print_errormessage
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# libicu version prefer: libicu66 -> libicu63 -> libicu60 -> libicu57 -> libicu55 -> libicu52
|
||||
apt_get_with_fallbacks libicu66 libicu63 libicu60 libicu57 libicu55 libicu52
|
||||
if [ $? -ne 0 ]
|
||||
then
|
||||
echo "'$apt_get' failed with exit code '$?'"
|
||||
print_errormessage
|
||||
exit 1
|
||||
fi
|
||||
elif [ -e /etc/redhat-release ]
|
||||
then
|
||||
echo "The current OS is Fedora based"
|
||||
echo "--------Redhat Version--------"
|
||||
echo "--Fedora/RHEL/CentOS Version--"
|
||||
cat /etc/redhat-release
|
||||
echo "------------------------------"
|
||||
|
||||
# use dnf on fedora
|
||||
# use yum on centos and redhat
|
||||
# use yum on centos and rhel
|
||||
if [ -e /etc/fedora-release ]
|
||||
then
|
||||
command -v dnf
|
||||
@@ -191,7 +189,7 @@ then
|
||||
redhatRelease=$(</etc/redhat-release)
|
||||
if [[ $redhatRelease == "CentOS release 6."* || $redhatRelease == "Red Hat Enterprise Linux Server release 6."* ]]
|
||||
then
|
||||
echo "The current OS is Red Hat Enterprise Linux 6 or Centos 6"
|
||||
echo "The current OS is Red Hat Enterprise Linux 6 or CentOS 6"
|
||||
|
||||
# Install known dependencies, as a best effort.
|
||||
# The remaining dependencies are covered by the GitHub doc that will be shown by `print_rhel6message`
|
||||
|
||||
13
src/Misc/layoutbin/macos-run-invoker.js
Normal file
@@ -0,0 +1,13 @@
const { spawn } = require('child_process');
// argv[0] = node
// argv[1] = macos-run-invoker.js
var shell = process.argv[2];
var args = process.argv.slice(3);
console.log(`::debug::macos-run-invoker: ${shell}`);
console.log(`::debug::macos-run-invoker: ${JSON.stringify(args)}`);
var launch = spawn(shell, args, { stdio: 'inherit' });
launch.on('exit', function (code) {
if (code !== 0) {
process.exit(code);
}
});
@@ -1,6 +1,7 @@
#!/bin/bash

SVC_NAME="{{SvcNameVar}}"
SVC_NAME=${SVC_NAME// /_}
SVC_DESCRIPTION="{{SvcDescription}}"

SVC_CMD=$1
@@ -63,8 +64,21 @@ function install()
|
||||
sed "s/{{User}}/${run_as_user}/g; s/{{Description}}/$(echo ${SVC_DESCRIPTION} | sed -e 's/[\/&]/\\&/g')/g; s/{{RunnerRoot}}/$(echo ${RUNNER_ROOT} | sed -e 's/[\/&]/\\&/g')/g;" "${TEMPLATE_PATH}" > "${TEMP_PATH}" || failed "failed to create replacement temp file"
|
||||
mv "${TEMP_PATH}" "${UNIT_PATH}" || failed "failed to copy unit file"
|
||||
|
||||
# Recent Fedora based Linux (CentOS/Redhat) has SELinux enabled by default
|
||||
# We need to restore security context on the unit file we added otherwise SystemD have no access to it.
|
||||
command -v getenforce > /dev/null
|
||||
if [ $? -eq 0 ]
|
||||
then
|
||||
selinuxEnabled=$(getenforce)
|
||||
if [[ $selinuxEnabled == "Enforcing" ]]
|
||||
then
|
||||
# SELinux is enabled, we will need to Restore SELinux Context for the service file
|
||||
restorecon -r -v "${UNIT_PATH}" || failed "failed to restore SELinux context on ${UNIT_PATH}"
|
||||
fi
|
||||
fi
|
||||
|
||||
# unit file should not be executable and world writable
|
||||
chmod 664 ${UNIT_PATH} || failed "failed to set permissions on ${UNIT_PATH}"
|
||||
chmod 664 "${UNIT_PATH}" || failed "failed to set permissions on ${UNIT_PATH}"
|
||||
systemctl daemon-reload || failed "failed to reload daemons"
|
||||
|
||||
# Since we started with sudo, runsvc.sh will be owned by root. Change this to current login user.
|
||||
|
||||
4
src/Misc/layoutbin/update.sh.template
Normal file → Executable file
@@ -28,13 +28,13 @@ date "+[%F %T-%4N] Waiting for $runnerprocessname ($runnerpid) to complete" >> "
while [ -e /proc/$runnerpid ]
do
date "+[%F %T-%4N] Process $runnerpid still running" >> "$logfile" 2>&1
ping -c 2 127.0.0.1 >nul
sleep 2
done
date "+[%F %T-%4N] Process $runnerpid finished running" >> "$logfile" 2>&1

# start re-organize folders
date "+[%F %T-%4N] Sleep 1 more second to make sure process exited" >> "$logfile" 2>&1
ping -c 2 127.0.0.1 >nul
sleep 1

# the folder structure under runner root will be
# ./bin -> bin.2.100.0 (junction folder)
@@ -8,7 +8,7 @@ if [ $user_id -eq 0 -a -z "$RUNNER_ALLOW_RUNASROOT" ]; then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Check dotnet core 3.0 dependencies for Linux
|
||||
# Check dotnet 5 dependencies for Linux
|
||||
if [[ (`uname` == "Linux") ]]
|
||||
then
|
||||
command -v ldd > /dev/null
|
||||
@@ -18,24 +18,26 @@ then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
message="Execute sudo ./bin/installdependencies.sh to install any missing Dotnet 5 dependencies."
|
||||
|
||||
ldd ./bin/libcoreclr.so | grep 'not found'
|
||||
if [ $? -eq 0 ]; then
|
||||
echo "Dependencies is missing for Dotnet Core 3.0"
|
||||
echo "Execute ./bin/installdependencies.sh to install any missing Dotnet Core 3.0 dependencies."
|
||||
echo "Dependencies is missing for Dotnet 5"
|
||||
echo $message
|
||||
exit 1
|
||||
fi
|
||||
|
||||
ldd ./bin/System.Security.Cryptography.Native.OpenSsl.so | grep 'not found'
|
||||
ldd ./bin/libSystem.Security.Cryptography.Native.OpenSsl.so | grep 'not found'
|
||||
if [ $? -eq 0 ]; then
|
||||
echo "Dependencies is missing for Dotnet Core 3.0"
|
||||
echo "Execute ./bin/installdependencies.sh to install any missing Dotnet Core 3.0 dependencies."
|
||||
echo "Dependencies is missing for Dotnet 5"
|
||||
echo $message
|
||||
exit 1
|
||||
fi
|
||||
|
||||
ldd ./bin/System.IO.Compression.Native.so | grep 'not found'
|
||||
ldd ./bin/libSystem.IO.Compression.Native.so | grep 'not found'
|
||||
if [ $? -eq 0 ]; then
|
||||
echo "Dependencies is missing for Dotnet Core 3.0"
|
||||
echo "Execute ./bin/installdependencies.sh to install any missing Dotnet Core 3.0 dependencies."
|
||||
echo "Dependencies is missing for Dotnet 5"
|
||||
echo $message
|
||||
exit 1
|
||||
fi
|
||||
|
||||
@@ -52,8 +54,8 @@ then
|
||||
libpath=${LD_LIBRARY_PATH:-}
|
||||
$LDCONFIG_COMMAND -NXv ${libpath//:/ } 2>&1 | grep libicu >/dev/null 2>&1
|
||||
if [ $? -ne 0 ]; then
|
||||
echo "Libicu's dependencies is missing for Dotnet Core 3.0"
|
||||
echo "Execute ./bin/installdependencies.sh to install any missing Dotnet Core 3.0 dependencies."
|
||||
echo "Libicu's dependencies is missing for Dotnet 5"
|
||||
echo $message
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
@@ -67,7 +69,7 @@ while [ -h "$SOURCE" ]; do # resolve $SOURCE until the file is no longer a symli
|
||||
[[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
|
||||
done
|
||||
DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
|
||||
cd $DIR
|
||||
cd "$DIR"
|
||||
|
||||
source ./env.sh
|
||||
|
||||
|
||||
@@ -6,6 +6,7 @@ varCheckList=(
'ANT_HOME'
'M2_HOME'
'ANDROID_HOME'
'ANDROID_SDK_ROOT'
'GRADLE_HOME'
'NVM_BIN'
'NVM_PATH'
@@ -15,6 +15,9 @@ namespace GitHub.Runner.Common
|
||||
[DataContract]
|
||||
public sealed class RunnerSettings
|
||||
{
|
||||
[DataMember(Name = "IsHostedServer", EmitDefaultValue = false)]
|
||||
private bool? _isHostedServer;
|
||||
|
||||
[DataMember(EmitDefaultValue = false)]
|
||||
public int AgentId { get; set; }
|
||||
|
||||
@@ -42,6 +45,21 @@ namespace GitHub.Runner.Common
|
||||
[DataMember(EmitDefaultValue = false)]
|
||||
public string MonitorSocketAddress { get; set; }
|
||||
|
||||
[IgnoreDataMember]
|
||||
public bool IsHostedServer
|
||||
{
|
||||
get
|
||||
{
|
||||
// Old runners do not have this property. Hosted runners likely don't have this property either.
|
||||
return _isHostedServer ?? true;
|
||||
}
|
||||
|
||||
set
|
||||
{
|
||||
_isHostedServer = value;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
// Computed property for convenience. Can either return:
|
||||
// 1. If runner was configured at the repo level, returns something like: "myorg/myrepo"
|
||||
@@ -69,6 +87,15 @@ namespace GitHub.Runner.Common
|
||||
return repoOrOrgName;
|
||||
}
|
||||
}
|
||||
|
||||
[OnSerializing]
|
||||
private void OnSerializing(StreamingContext context)
|
||||
{
|
||||
if (_isHostedServer.HasValue && _isHostedServer.Value)
|
||||
{
|
||||
_isHostedServer = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
[ServiceLocator(Default = typeof(ConfigurationStore))]
|
||||
@@ -78,10 +105,12 @@ namespace GitHub.Runner.Common
|
||||
bool IsServiceConfigured();
|
||||
bool HasCredentials();
|
||||
CredentialData GetCredentials();
|
||||
CredentialData GetMigratedCredentials();
|
||||
RunnerSettings GetSettings();
|
||||
void SaveCredential(CredentialData credential);
|
||||
void SaveSettings(RunnerSettings settings);
|
||||
void DeleteCredential();
|
||||
void DeleteMigratedCredential();
|
||||
void DeleteSettings();
|
||||
}
|
||||
|
||||
@@ -90,9 +119,11 @@ namespace GitHub.Runner.Common
|
||||
private string _binPath;
|
||||
private string _configFilePath;
|
||||
private string _credFilePath;
|
||||
private string _migratedCredFilePath;
|
||||
private string _serviceConfigFilePath;
|
||||
|
||||
private CredentialData _creds;
|
||||
private CredentialData _migratedCreds;
|
||||
private RunnerSettings _settings;
|
||||
|
||||
public override void Initialize(IHostContext hostContext)
|
||||
@@ -114,6 +145,9 @@ namespace GitHub.Runner.Common
|
||||
_credFilePath = hostContext.GetConfigFile(WellKnownConfigFile.Credentials);
|
||||
Trace.Info("CredFilePath: {0}", _credFilePath);
|
||||
|
||||
_migratedCredFilePath = hostContext.GetConfigFile(WellKnownConfigFile.MigratedCredentials);
|
||||
Trace.Info("MigratedCredFilePath: {0}", _migratedCredFilePath);
|
||||
|
||||
_serviceConfigFilePath = hostContext.GetConfigFile(WellKnownConfigFile.Service);
|
||||
Trace.Info("ServiceConfigFilePath: {0}", _serviceConfigFilePath);
|
||||
}
|
||||
@@ -123,7 +157,7 @@ namespace GitHub.Runner.Common
|
||||
public bool HasCredentials()
|
||||
{
|
||||
Trace.Info("HasCredentials()");
|
||||
bool credsStored = (new FileInfo(_credFilePath)).Exists;
|
||||
bool credsStored = (new FileInfo(_credFilePath)).Exists || (new FileInfo(_migratedCredFilePath)).Exists;
|
||||
Trace.Info("stored {0}", credsStored);
|
||||
return credsStored;
|
||||
}
|
||||
@@ -154,6 +188,16 @@ namespace GitHub.Runner.Common
|
||||
return _creds;
|
||||
}
|
||||
|
||||
public CredentialData GetMigratedCredentials()
|
||||
{
|
||||
if (_migratedCreds == null && File.Exists(_migratedCredFilePath))
|
||||
{
|
||||
_migratedCreds = IOUtil.LoadObject<CredentialData>(_migratedCredFilePath);
|
||||
}
|
||||
|
||||
return _migratedCreds;
|
||||
}
|
||||
|
||||
public RunnerSettings GetSettings()
|
||||
{
|
||||
if (_settings == null)
|
||||
@@ -206,6 +250,12 @@ namespace GitHub.Runner.Common
|
||||
public void DeleteCredential()
|
||||
{
|
||||
IOUtil.Delete(_credFilePath, default(CancellationToken));
|
||||
IOUtil.Delete(_migratedCredFilePath, default(CancellationToken));
|
||||
}
|
||||
|
||||
public void DeleteMigratedCredential()
|
||||
{
|
||||
IOUtil.Delete(_migratedCredFilePath, default(CancellationToken));
|
||||
}
|
||||
|
||||
public void DeleteSettings()
|
||||
|
||||
@@ -19,11 +19,13 @@ namespace GitHub.Runner.Common
|
||||
{
|
||||
Runner,
|
||||
Credentials,
|
||||
MigratedCredentials,
|
||||
RSACredentials,
|
||||
Service,
|
||||
CredentialStore,
|
||||
Certificates,
|
||||
Options,
|
||||
SetupInfo,
|
||||
}
|
||||
|
||||
public static class Constants
|
||||
@@ -85,9 +87,10 @@ namespace GitHub.Runner.Common
|
||||
public static class Args
|
||||
{
|
||||
public static readonly string Auth = "auth";
|
||||
public static readonly string Labels = "labels";
|
||||
public static readonly string MonitorSocketAddress = "monitorsocketaddress";
|
||||
public static readonly string Name = "name";
|
||||
public static readonly string Pool = "pool";
|
||||
public static readonly string RunnerGroup = "runnergroup";
|
||||
public static readonly string StartupType = "startuptype";
|
||||
public static readonly string Url = "url";
|
||||
public static readonly string UserName = "username";
|
||||
@@ -96,9 +99,11 @@ namespace GitHub.Runner.Common
|
||||
|
||||
// Secret args. Must be added to the "Secrets" getter as well.
|
||||
public static readonly string Token = "token";
|
||||
public static readonly string PAT = "pat";
|
||||
public static readonly string WindowsLogonPassword = "windowslogonpassword";
|
||||
public static string[] Secrets => new[]
|
||||
{
|
||||
PAT,
|
||||
Token,
|
||||
WindowsLogonPassword,
|
||||
};
|
||||
@@ -116,6 +121,7 @@ namespace GitHub.Runner.Common
|
||||
//validFlags array as well present in the CommandSettings.cs
|
||||
public static class Flags
|
||||
{
|
||||
public static readonly string Check = "check";
|
||||
public static readonly string Commit = "commit";
|
||||
public static readonly string Help = "help";
|
||||
public static readonly string Replace = "replace";
|
||||
@@ -134,6 +140,23 @@ namespace GitHub.Runner.Common
|
||||
public const int RunnerUpdating = 3;
|
||||
public const int RunOnceRunnerUpdating = 4;
|
||||
}
|
||||
|
||||
public static class Features
|
||||
{
|
||||
public static readonly string DiskSpaceWarning = "runner.diskspace.warning";
|
||||
}
|
||||
|
||||
public static readonly string InternalTelemetryIssueDataKey = "_internal_telemetry";
|
||||
public static readonly string WorkerCrash = "WORKER_CRASH";
|
||||
public static readonly string LowDiskSpace = "LOW_DISK_SPACE";
|
||||
public static readonly string UnsupportedCommand = "UNSUPPORTED_COMMAND";
|
||||
public static readonly string UnsupportedCommandMessageDisabled = "The `{0}` command is disabled. Please upgrade to using Environment Files or opt into unsecure command execution by setting the `ACTIONS_ALLOW_UNSECURE_COMMANDS` environment variable to `true`. For more information see: https://github.blog/changelog/2020-10-01-github-actions-deprecating-set-env-and-add-path-commands/";
|
||||
}
|
||||
|
||||
public static class RunnerEvent
|
||||
{
|
||||
public static readonly string Register = "register";
|
||||
public static readonly string Remove = "remove";
|
||||
}
|
||||
|
||||
public static class Pipeline
|
||||
@@ -186,6 +209,7 @@ namespace GitHub.Runner.Common
|
||||
//
|
||||
// Keep alphabetical
|
||||
//
|
||||
public static readonly string AllowUnsupportedCommands = "ACTIONS_ALLOW_UNSECURE_COMMANDS";
|
||||
public static readonly string RunnerDebug = "ACTIONS_RUNNER_DEBUG";
|
||||
public static readonly string StepDebug = "ACTIONS_STEP_DEBUG";
|
||||
}
|
||||
|
||||
@@ -56,6 +56,16 @@ namespace GitHub.Runner.Common
|
||||
Add<T>(extensions, "GitHub.Runner.Worker.EndGroupCommandExtension, Runner.Worker");
|
||||
Add<T>(extensions, "GitHub.Runner.Worker.EchoCommandExtension, Runner.Worker");
|
||||
break;
|
||||
case "GitHub.Runner.Worker.IFileCommandExtension":
|
||||
Add<T>(extensions, "GitHub.Runner.Worker.AddPathFileCommand, Runner.Worker");
|
||||
Add<T>(extensions, "GitHub.Runner.Worker.SetEnvFileCommand, Runner.Worker");
|
||||
break;
|
||||
case "GitHub.Runner.Listener.Check.ICheckExtension":
|
||||
Add<T>(extensions, "GitHub.Runner.Listener.Check.InternetCheck, Runner.Listener");
|
||||
Add<T>(extensions, "GitHub.Runner.Listener.Check.ActionsCheck, Runner.Listener");
|
||||
Add<T>(extensions, "GitHub.Runner.Listener.Check.GitCheck, Runner.Listener");
|
||||
Add<T>(extensions, "GitHub.Runner.Listener.Check.NodeJsCheck, Runner.Listener");
|
||||
break;
|
||||
default:
|
||||
// This should never happen.
|
||||
throw new NotSupportedException($"Unexpected extension type: '{typeof(T).FullName}'");
|
||||
|
||||
@@ -1,19 +1,18 @@
|
||||
using GitHub.Runner.Common.Util;
|
||||
using System;
|
||||
using System;
|
||||
using System.Collections.Concurrent;
|
||||
using System.Collections.Generic;
|
||||
using System.Diagnostics;
|
||||
using System.Diagnostics.Tracing;
|
||||
using System.Globalization;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Net.Http;
|
||||
using System.Net.Http.Headers;
|
||||
using System.Reflection;
|
||||
using System.Runtime.Loader;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using System.Diagnostics;
|
||||
using System.Net.Http;
|
||||
using System.Diagnostics.Tracing;
|
||||
using GitHub.DistributedTask.Logging;
|
||||
using System.Net.Http.Headers;
|
||||
using GitHub.Runner.Sdk;
|
||||
|
||||
namespace GitHub.Runner.Common
|
||||
@@ -24,7 +23,7 @@ namespace GitHub.Runner.Common
|
||||
CancellationToken RunnerShutdownToken { get; }
|
||||
ShutdownReason RunnerShutdownReason { get; }
|
||||
ISecretMasker SecretMasker { get; }
|
||||
ProductInfoHeaderValue UserAgent { get; }
|
||||
List<ProductInfoHeaderValue> UserAgents { get; }
|
||||
RunnerWebProxy WebProxy { get; }
|
||||
string GetDirectory(WellKnownDirectory directory);
|
||||
string GetConfigFile(WellKnownConfigFile configFile);
|
||||
@@ -54,7 +53,7 @@ namespace GitHub.Runner.Common
|
||||
private readonly ConcurrentDictionary<Type, object> _serviceInstances = new ConcurrentDictionary<Type, object>();
|
||||
private readonly ConcurrentDictionary<Type, Type> _serviceTypes = new ConcurrentDictionary<Type, Type>();
|
||||
private readonly ISecretMasker _secretMasker = new SecretMasker();
|
||||
private readonly ProductInfoHeaderValue _userAgent = new ProductInfoHeaderValue($"GitHubActionsRunner-{BuildConstants.RunnerPackage.PackageName}", BuildConstants.RunnerPackage.Version);
|
||||
private readonly List<ProductInfoHeaderValue> _userAgents = new List<ProductInfoHeaderValue>() { new ProductInfoHeaderValue($"GitHubActionsRunner-{BuildConstants.RunnerPackage.PackageName}", BuildConstants.RunnerPackage.Version) };
|
||||
private CancellationTokenSource _runnerShutdownTokenSource = new CancellationTokenSource();
|
||||
private object _perfLock = new object();
|
||||
private Tracing _trace;
|
||||
@@ -72,7 +71,7 @@ namespace GitHub.Runner.Common
|
||||
public CancellationToken RunnerShutdownToken => _runnerShutdownTokenSource.Token;
|
||||
public ShutdownReason RunnerShutdownReason { get; private set; }
|
||||
public ISecretMasker SecretMasker => _secretMasker;
|
||||
public ProductInfoHeaderValue UserAgent => _userAgent;
|
||||
public List<ProductInfoHeaderValue> UserAgents => _userAgents;
|
||||
public RunnerWebProxy WebProxy => _webProxy;
|
||||
public HostContext(string hostType, string logFile = null)
|
||||
{
|
||||
@@ -89,6 +88,7 @@ namespace GitHub.Runner.Common
|
||||
this.SecretMasker.AddValueEncoder(ValueEncoders.JsonStringEscape);
|
||||
this.SecretMasker.AddValueEncoder(ValueEncoders.UriDataEscape);
|
||||
this.SecretMasker.AddValueEncoder(ValueEncoders.XmlDataEscape);
|
||||
this.SecretMasker.AddValueEncoder(ValueEncoders.TrimDoubleQuotes);
|
||||
|
||||
// Create the trace manager.
|
||||
if (string.IsNullOrEmpty(logFile))
|
||||
@@ -189,6 +189,17 @@ namespace GitHub.Runner.Common
|
||||
{
|
||||
_trace.Info($"No proxy settings were found based on environmental variables (http_proxy/https_proxy/HTTP_PROXY/HTTPS_PROXY)");
|
||||
}
|
||||
|
||||
var credFile = GetConfigFile(WellKnownConfigFile.Credentials);
|
||||
if (File.Exists(credFile))
|
||||
{
|
||||
var credData = IOUtil.LoadObject<CredentialData>(credFile);
|
||||
if (credData != null &&
|
||||
credData.Data.TryGetValue("clientId", out var clientId))
|
||||
{
|
||||
_userAgents.Add(new ProductInfoHeaderValue($"RunnerId", clientId));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public string GetDirectory(WellKnownDirectory directory)
|
||||
@@ -281,6 +292,12 @@ namespace GitHub.Runner.Common
|
||||
".credentials");
|
||||
break;
|
||||
|
||||
case WellKnownConfigFile.MigratedCredentials:
|
||||
path = Path.Combine(
|
||||
GetDirectory(WellKnownDirectory.Root),
|
||||
".credentials_migrated");
|
||||
break;
|
||||
|
||||
case WellKnownConfigFile.RSACredentials:
|
||||
path = Path.Combine(
|
||||
GetDirectory(WellKnownDirectory.Root),
|
||||
@@ -316,6 +333,13 @@ namespace GitHub.Runner.Common
|
||||
GetDirectory(WellKnownDirectory.Root),
|
||||
".options");
|
||||
break;
|
||||
|
||||
case WellKnownConfigFile.SetupInfo:
|
||||
path = Path.Combine(
|
||||
GetDirectory(WellKnownDirectory.Root),
|
||||
".setup_info");
|
||||
break;
|
||||
|
||||
default:
|
||||
throw new NotSupportedException($"Unexpected well known config file: '{configFile}'");
|
||||
}
|
||||
@@ -590,9 +614,8 @@ namespace GitHub.Runner.Common
|
||||
{
|
||||
public static HttpClientHandler CreateHttpClientHandler(this IHostContext context)
|
||||
{
|
||||
HttpClientHandler clientHandler = new HttpClientHandler();
|
||||
clientHandler.Proxy = context.WebProxy;
|
||||
return clientHandler;
|
||||
var handlerFactory = context.GetService<IHttpClientHandlerFactory>();
|
||||
return handlerFactory.CreateClientHandler(context.WebProxy);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
19
src/Runner.Common/HttpClientHandlerFactory.cs
Normal file
@@ -0,0 +1,19 @@
using System.Net.Http;
using GitHub.Runner.Sdk;

namespace GitHub.Runner.Common
{
[ServiceLocator(Default = typeof(HttpClientHandlerFactory))]
public interface IHttpClientHandlerFactory : IRunnerService
{
HttpClientHandler CreateClientHandler(RunnerWebProxy webProxy);
}

public class HttpClientHandlerFactory : RunnerService, IHttpClientHandlerFactory
{
public HttpClientHandler CreateClientHandler(RunnerWebProxy webProxy)
{
return new HttpClientHandler() { Proxy = webProxy };
}
}
}
@@ -16,12 +16,14 @@ namespace GitHub.Runner.Common
|
||||
// logging and console
|
||||
Task<TaskLog> AppendLogContentAsync(Guid scopeIdentifier, string hubName, Guid planId, int logId, Stream uploadStream, CancellationToken cancellationToken);
|
||||
Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, CancellationToken cancellationToken);
|
||||
Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long startLine, CancellationToken cancellationToken);
|
||||
Task<TaskAttachment> CreateAttachmentAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, String type, String name, Stream uploadStream, CancellationToken cancellationToken);
|
||||
Task<TaskLog> CreateLogAsync(Guid scopeIdentifier, string hubName, Guid planId, TaskLog log, CancellationToken cancellationToken);
|
||||
Task<Timeline> CreateTimelineAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, CancellationToken cancellationToken);
|
||||
Task<List<TimelineRecord>> UpdateTimelineRecordsAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, IEnumerable<TimelineRecord> records, CancellationToken cancellationToken);
|
||||
Task RaisePlanEventAsync<T>(Guid scopeIdentifier, string hubName, Guid planId, T eventData, CancellationToken cancellationToken) where T : JobEvent;
|
||||
Task<Timeline> GetTimelineAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, CancellationToken cancellationToken);
|
||||
Task<ActionDownloadInfoCollection> ResolveActionDownloadInfoAsync(Guid scopeIdentifier, string hubName, Guid planId, ActionReferenceList actions, CancellationToken cancellationToken);
|
||||
}
|
||||
|
||||
public sealed class JobServer : RunnerService, IJobServer
|
||||
@@ -78,6 +80,12 @@ namespace GitHub.Runner.Common
|
||||
return _taskClient.AppendTimelineRecordFeedAsync(scopeIdentifier, hubName, planId, timelineId, timelineRecordId, stepId, lines, cancellationToken: cancellationToken);
|
||||
}
|
||||
|
||||
public Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long startLine, CancellationToken cancellationToken)
|
||||
{
|
||||
CheckConnection();
|
||||
return _taskClient.AppendTimelineRecordFeedAsync(scopeIdentifier, hubName, planId, timelineId, timelineRecordId, stepId, lines, startLine, cancellationToken: cancellationToken);
|
||||
}
|
||||
|
||||
public Task<TaskAttachment> CreateAttachmentAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, string type, string name, Stream uploadStream, CancellationToken cancellationToken)
|
||||
{
|
||||
CheckConnection();
|
||||
@@ -113,5 +121,14 @@ namespace GitHub.Runner.Common
|
||||
CheckConnection();
|
||||
return _taskClient.GetTimelineAsync(scopeIdentifier, hubName, planId, timelineId, includeRecords: true, cancellationToken: cancellationToken);
|
||||
}
|
||||
|
||||
//-----------------------------------------------------------------
|
||||
// Action download info
|
||||
//-----------------------------------------------------------------
|
||||
public Task<ActionDownloadInfoCollection> ResolveActionDownloadInfoAsync(Guid scopeIdentifier, string hubName, Guid planId, ActionReferenceList actions, CancellationToken cancellationToken)
|
||||
{
|
||||
CheckConnection();
|
||||
return _taskClient.ResolveActionDownloadInfoAsync(scopeIdentifier, hubName, planId, actions, cancellationToken: cancellationToken);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -18,7 +18,7 @@ namespace GitHub.Runner.Common
|
||||
event EventHandler<ThrottlingEventArgs> JobServerQueueThrottling;
|
||||
Task ShutdownAsync();
|
||||
void Start(Pipelines.AgentJobRequestMessage jobRequest);
|
||||
void QueueWebConsoleLine(Guid stepRecordId, string line);
|
||||
void QueueWebConsoleLine(Guid stepRecordId, string line, long? lineNumber = null);
|
||||
void QueueFileUpload(Guid timelineId, Guid timelineRecordId, string type, string name, string path, bool deleteSource);
|
||||
void QueueTimelineRecordUpdate(Guid timelineId, TimelineRecord timelineRecord);
|
||||
}
|
||||
@@ -155,10 +155,10 @@ namespace GitHub.Runner.Common
|
||||
Trace.Info("All queue process tasks have been stopped, and all queues are drained.");
|
||||
}
|
||||
|
||||
public void QueueWebConsoleLine(Guid stepRecordId, string line)
|
||||
public void QueueWebConsoleLine(Guid stepRecordId, string line, long? lineNumber)
|
||||
{
|
||||
Trace.Verbose("Enqueue web console line queue: {0}", line);
|
||||
_webConsoleLineQueue.Enqueue(new ConsoleLineInfo(stepRecordId, line));
|
||||
_webConsoleLineQueue.Enqueue(new ConsoleLineInfo(stepRecordId, line, lineNumber));
|
||||
}
|
||||
|
||||
public void QueueFileUpload(Guid timelineId, Guid timelineRecordId, string type, string name, string path, bool deleteSource)
|
||||
@@ -214,7 +214,7 @@ namespace GitHub.Runner.Common
|
||||
}
|
||||
|
||||
// Group consolelines by timeline record of each step
|
||||
Dictionary<Guid, List<string>> stepsConsoleLines = new Dictionary<Guid, List<string>>();
|
||||
Dictionary<Guid, List<TimelineRecordLogLine>> stepsConsoleLines = new Dictionary<Guid, List<TimelineRecordLogLine>>();
|
||||
List<Guid> stepRecordIds = new List<Guid>(); // We need to keep lines in order
|
||||
int linesCounter = 0;
|
||||
ConsoleLineInfo lineInfo;
|
||||
@@ -222,7 +222,7 @@ namespace GitHub.Runner.Common
|
||||
{
|
||||
if (!stepsConsoleLines.ContainsKey(lineInfo.StepRecordId))
|
||||
{
|
||||
stepsConsoleLines[lineInfo.StepRecordId] = new List<string>();
|
||||
stepsConsoleLines[lineInfo.StepRecordId] = new List<TimelineRecordLogLine>();
|
||||
stepRecordIds.Add(lineInfo.StepRecordId);
|
||||
}
|
||||
|
||||
@@ -232,7 +232,7 @@ namespace GitHub.Runner.Common
|
||||
lineInfo.Line = $"{lineInfo.Line.Substring(0, 1024)}...";
|
||||
}
|
||||
|
||||
stepsConsoleLines[lineInfo.StepRecordId].Add(lineInfo.Line);
|
||||
stepsConsoleLines[lineInfo.StepRecordId].Add(new TimelineRecordLogLine(lineInfo.Line, lineInfo.LineNumber));
|
||||
linesCounter++;
|
||||
|
||||
// process at most about 500 lines of web console line during regular timer dequeue task.
|
||||
@@ -247,13 +247,13 @@ namespace GitHub.Runner.Common
|
||||
{
|
||||
// Split consolelines into batch, each batch will container at most 100 lines.
|
||||
int batchCounter = 0;
|
||||
List<List<string>> batchedLines = new List<List<string>>();
|
||||
List<List<TimelineRecordLogLine>> batchedLines = new List<List<TimelineRecordLogLine>>();
|
||||
foreach (var line in stepsConsoleLines[stepRecordId])
|
||||
{
|
||||
var currentBatch = batchedLines.ElementAtOrDefault(batchCounter);
|
||||
if (currentBatch == null)
|
||||
{
|
||||
batchedLines.Add(new List<string>());
|
||||
batchedLines.Add(new List<TimelineRecordLogLine>());
|
||||
currentBatch = batchedLines.ElementAt(batchCounter);
|
||||
}
|
||||
|
||||
@@ -275,7 +275,6 @@ namespace GitHub.Runner.Common
|
||||
{
|
||||
Trace.Info($"Skip {batchedLines.Count - 2} batches web console lines for last run");
|
||||
batchedLines = batchedLines.TakeLast(2).ToList();
|
||||
batchedLines[0].Insert(0, "...");
|
||||
}
|
||||
|
||||
int errorCount = 0;
|
||||
@@ -284,7 +283,15 @@ namespace GitHub.Runner.Common
|
||||
try
|
||||
{
|
||||
// we will not requeue failed batch, since the web console lines are time sensitive.
|
||||
await _jobServer.AppendTimelineRecordFeedAsync(_scopeIdentifier, _hubName, _planId, _jobTimelineId, _jobTimelineRecordId, stepRecordId, batch, default(CancellationToken));
|
||||
if (batch[0].LineNumber.HasValue)
|
||||
{
|
||||
await _jobServer.AppendTimelineRecordFeedAsync(_scopeIdentifier, _hubName, _planId, _jobTimelineId, _jobTimelineRecordId, stepRecordId, batch.Select(logLine => logLine.Line).ToList(), batch[0].LineNumber.Value, default(CancellationToken));
|
||||
}
|
||||
else
|
||||
{
|
||||
await _jobServer.AppendTimelineRecordFeedAsync(_scopeIdentifier, _hubName, _planId, _jobTimelineId, _jobTimelineRecordId, stepRecordId, batch.Select(logLine => logLine.Line).ToList(), default(CancellationToken));
|
||||
}
|
||||
|
||||
if (_firstConsoleOutputs)
|
||||
{
|
||||
HostContext.WritePerfCounter($"WorkerJobServerQueueAppendFirstConsoleOutput_{_planId.ToString()}");
|
||||
@@ -653,13 +660,15 @@ namespace GitHub.Runner.Common
|
||||
|
||||
internal class ConsoleLineInfo
|
||||
{
|
||||
public ConsoleLineInfo(Guid recordId, string line)
|
||||
public ConsoleLineInfo(Guid recordId, string line, long? lineNumber)
|
||||
{
|
||||
this.StepRecordId = recordId;
|
||||
this.Line = line;
|
||||
this.LineNumber = lineNumber;
|
||||
}
|
||||
|
||||
public Guid StepRecordId { get; set; }
|
||||
public string Line { get; set; }
|
||||
public long? LineNumber { get; set; }
|
||||
}
|
||||
}
|
||||
|
||||
@@ -41,7 +41,7 @@ namespace GitHub.Runner.Common
|
||||
|
||||
// job request
|
||||
Task<TaskAgentJobRequest> GetAgentRequestAsync(int poolId, long requestId, CancellationToken cancellationToken);
|
||||
Task<TaskAgentJobRequest> RenewAgentRequestAsync(int poolId, long requestId, Guid lockToken, CancellationToken cancellationToken);
|
||||
Task<TaskAgentJobRequest> RenewAgentRequestAsync(int poolId, long requestId, Guid lockToken, string orchestrationId, CancellationToken cancellationToken);
|
||||
Task<TaskAgentJobRequest> FinishAgentRequestAsync(int poolId, long requestId, Guid lockToken, DateTime finishTime, TaskResult result, CancellationToken cancellationToken);
|
||||
|
||||
// agent package
|
||||
@@ -296,10 +296,10 @@ namespace GitHub.Runner.Common
|
||||
// JobRequest
|
||||
//-----------------------------------------------------------------
|
||||
|
||||
public Task<TaskAgentJobRequest> RenewAgentRequestAsync(int poolId, long requestId, Guid lockToken, CancellationToken cancellationToken = default(CancellationToken))
|
||||
public Task<TaskAgentJobRequest> RenewAgentRequestAsync(int poolId, long requestId, Guid lockToken, string orchestrationId = null, CancellationToken cancellationToken = default(CancellationToken))
|
||||
{
|
||||
CheckConnection(RunnerConnectionType.JobRequest);
|
||||
return _requestTaskAgentClient.RenewAgentRequestAsync(poolId, requestId, lockToken, cancellationToken: cancellationToken);
|
||||
return _requestTaskAgentClient.RenewAgentRequestAsync(poolId, requestId, lockToken, orchestrationId: orchestrationId, cancellationToken: cancellationToken);
|
||||
}
|
||||
|
||||
public Task<TaskAgentJobRequest> FinishAgentRequestAsync(int poolId, long requestId, Guid lockToken, DateTime finishTime, TaskResult result, CancellationToken cancellationToken = default(CancellationToken))
|
||||
@@ -334,5 +334,20 @@ namespace GitHub.Runner.Common
|
||||
CheckConnection(RunnerConnectionType.Generic);
|
||||
return _genericTaskAgentClient.UpdateAgentUpdateStateAsync(agentPoolId, agentId, currentState);
|
||||
}
|
||||
|
||||
//-----------------------------------------------------------------
|
||||
// Runner Auth Url
|
||||
//-----------------------------------------------------------------
|
||||
public Task<string> GetRunnerAuthUrlAsync(int runnerPoolId, int runnerId)
|
||||
{
|
||||
CheckConnection(RunnerConnectionType.MessageQueue);
|
||||
return _messageTaskAgentClient.GetAgentAuthUrlAsync(runnerPoolId, runnerId);
|
||||
}
|
||||
|
||||
public Task ReportRunnerAuthUrlErrorAsync(int runnerPoolId, int runnerId, string error)
|
||||
{
|
||||
CheckConnection(RunnerConnectionType.MessageQueue);
|
||||
return _messageTaskAgentClient.ReportAgentAuthUrlMigrationErrorAsync(runnerPoolId, runnerId, error);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -102,7 +102,8 @@ namespace GitHub.Runner.Common
|
||||
Console.Write(message);
|
||||
Console.ResetColor();
|
||||
}
|
||||
else {
|
||||
else
|
||||
{
|
||||
Console.Write(message);
|
||||
}
|
||||
}
|
||||
@@ -126,7 +127,8 @@ namespace GitHub.Runner.Common
|
||||
Console.WriteLine(line);
|
||||
Console.ResetColor();
|
||||
}
|
||||
else {
|
||||
else
|
||||
{
|
||||
Console.WriteLine(line);
|
||||
}
|
||||
}
|
||||
|
||||
51
src/Runner.Common/Util/EncodingUtil.cs
Normal file
@@ -0,0 +1,51 @@
using System;
using System.Threading;
using System.Threading.Tasks;
using GitHub.Runner.Sdk;
using GitHub.Runner.Common;

namespace GitHub.Runner.Common.Util
{
public static class EncodingUtil
{
public static async Task SetEncoding(IHostContext hostContext, Tracing trace, CancellationToken cancellationToken)
{
#if OS_WINDOWS
try
{
if (Console.InputEncoding.CodePage != 65001)
{
using (var p = hostContext.CreateService<IProcessInvoker>())
{
// Use UTF8 code page
int exitCode = await p.ExecuteAsync(workingDirectory: hostContext.GetDirectory(WellKnownDirectory.Work),
fileName: WhichUtil.Which("chcp", true, trace),
arguments: "65001",
environment: null,
requireExitCodeZero: false,
outputEncoding: null,
killProcessOnCancel: false,
redirectStandardIn: null,
inheritConsoleHandler: true,
cancellationToken: cancellationToken);
if (exitCode == 0)
{
trace.Info("Successfully returned to code page 65001 (UTF8)");
}
else
{
trace.Warning($"'chcp 65001' failed with exit code {exitCode}");
}
}
}
}
catch (Exception ex)
{
trace.Warning($"'chcp 65001' failed with exception {ex.Message}");
}
#endif
// Dummy variable to prevent compiler error CS1998: "This async method lacks 'await' operators and will run synchronously..."
await Task.CompletedTask;
}
}
}
90
src/Runner.Listener/Checks/ActionsCheck.cs
Normal file
@@ -0,0 +1,90 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading.Tasks;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;

namespace GitHub.Runner.Listener.Check
{
public sealed class ActionsCheck : RunnerService, ICheckExtension
{
private string _logFile = null;

public int Order => 2;

public string CheckName => "GitHub Actions Connection";

public string CheckDescription => "Make sure the actions runner have access to the GitHub Actions Service.";

public string CheckLog => _logFile;

public string HelpLink => "https://github.com/actions/runner/blob/main/docs/checks/actions.md";

public Type ExtensionType => typeof(ICheckExtension);

public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);
_logFile = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Diag), StringUtil.Format("{0}_{1:yyyyMMdd-HHmmss}-utc.log", nameof(ActionsCheck), DateTime.UtcNow));
}

// runner access to actions service
public async Task<bool> RunCheck(string url, string pat)
{
await File.AppendAllLinesAsync(_logFile, HostContext.WarnLog());
await File.AppendAllLinesAsync(_logFile, HostContext.CheckProxy());

var checkTasks = new List<Task<CheckResult>>();
string githubApiUrl = null;
string actionsTokenServiceUrl = null;
string actionsPipelinesServiceUrl = null;
var urlBuilder = new UriBuilder(url);
if (UrlUtil.IsHostedServer(urlBuilder))
{
urlBuilder.Host = $"api.{urlBuilder.Host}";
urlBuilder.Path = "";
githubApiUrl = urlBuilder.Uri.AbsoluteUri;
actionsTokenServiceUrl = "https://vstoken.actions.githubusercontent.com/_apis/health";
actionsPipelinesServiceUrl = "https://pipelines.actions.githubusercontent.com/_apis/health";
}
else
{
urlBuilder.Path = "api/v3";
githubApiUrl = urlBuilder.Uri.AbsoluteUri;
urlBuilder.Path = "_services/vstoken/_apis/health";
actionsTokenServiceUrl = urlBuilder.Uri.AbsoluteUri;
urlBuilder.Path = "_services/pipelines/_apis/health";
actionsPipelinesServiceUrl = urlBuilder.Uri.AbsoluteUri;
}

// check github api
checkTasks.Add(CheckUtil.CheckDns(githubApiUrl));
checkTasks.Add(CheckUtil.CheckPing(githubApiUrl));
checkTasks.Add(HostContext.CheckHttpsRequests(githubApiUrl, pat, expectedHeader: "X-GitHub-Request-Id"));

// check actions token service
checkTasks.Add(CheckUtil.CheckDns(actionsTokenServiceUrl));
checkTasks.Add(CheckUtil.CheckPing(actionsTokenServiceUrl));
checkTasks.Add(HostContext.CheckHttpsRequests(actionsTokenServiceUrl, pat, expectedHeader: "x-vss-e2eid"));

// check actions pipelines service
checkTasks.Add(CheckUtil.CheckDns(actionsPipelinesServiceUrl));
checkTasks.Add(CheckUtil.CheckPing(actionsPipelinesServiceUrl));
checkTasks.Add(HostContext.CheckHttpsRequests(actionsPipelinesServiceUrl, pat, expectedHeader: "x-vss-e2eid"));

var result = true;
while (checkTasks.Count > 0)
{
var finishedCheckTask = await Task.WhenAny<CheckResult>(checkTasks);
var finishedCheck = await finishedCheckTask;
result = result && finishedCheck.Pass;
await File.AppendAllLinesAsync(_logFile, finishedCheck.Logs);
checkTasks.Remove(finishedCheckTask);
}

await Task.WhenAll(checkTasks);
return result;
}
}
}
351
src/Runner.Listener/Checks/CheckUtil.cs
Normal file
@@ -0,0 +1,351 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Diagnostics.Tracing;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Net;
|
||||
using System.Net.Http;
|
||||
using System.Net.Http.Headers;
|
||||
using System.Net.NetworkInformation;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Services.Common;
|
||||
|
||||
namespace GitHub.Runner.Listener.Check
|
||||
{
|
||||
public static class CheckUtil
|
||||
{
|
||||
public static List<string> WarnLog(this IHostContext hostContext)
|
||||
{
|
||||
var logs = new List<string>();
|
||||
logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
logs.Add($"{DateTime.UtcNow.ToString("O")} **** !!! WARNING !!! ");
|
||||
logs.Add($"{DateTime.UtcNow.ToString("O")} **** DO NOT share the log in public place! The log may contains secrets in plain text. ");
|
||||
logs.Add($"{DateTime.UtcNow.ToString("O")} **** !!! WARNING !!! ");
|
||||
logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
return logs;
|
||||
}
|
||||
|
||||
public static List<string> CheckProxy(this IHostContext hostContext)
|
||||
{
|
||||
var logs = new List<string>();
|
||||
if (!string.IsNullOrEmpty(hostContext.WebProxy.HttpProxyAddress) ||
|
||||
!string.IsNullOrEmpty(hostContext.WebProxy.HttpsProxyAddress))
|
||||
{
|
||||
logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
logs.Add($"{DateTime.UtcNow.ToString("O")} **** Runner is behind web proxy {hostContext.WebProxy.HttpsProxyAddress ?? hostContext.WebProxy.HttpProxyAddress} ");
|
||||
logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
}
|
||||
|
||||
return logs;
|
||||
}
|
||||
|
||||
public static async Task<CheckResult> CheckDns(string targetUrl)
|
||||
{
|
||||
var result = new CheckResult();
|
||||
var url = new Uri(targetUrl);
|
||||
try
|
||||
{
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** Try DNS lookup for {url.Host} ");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
IPHostEntry host = await Dns.GetHostEntryAsync(url.Host);
|
||||
foreach (var address in host.AddressList)
|
||||
{
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Resolved DNS for {url.Host} to '{address}'");
|
||||
}
|
||||
|
||||
result.Pass = true;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
result.Pass = false;
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** Resolved DNS for {url.Host} failed with error: {ex}");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
public static async Task<CheckResult> CheckPing(string targetUrl)
|
||||
{
|
||||
var result = new CheckResult();
|
||||
var url = new Uri(targetUrl);
|
||||
try
|
||||
{
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** Try ping {url.Host} ");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
using (var ping = new Ping())
|
||||
{
|
||||
var reply = await ping.SendPingAsync(url.Host);
|
||||
if (reply.Status == IPStatus.Success)
|
||||
{
|
||||
result.Pass = true;
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Ping {url.Host} ({reply.Address}) succeed within to '{reply.RoundtripTime} ms'");
|
||||
}
|
||||
else
|
||||
{
|
||||
result.Pass = false;
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Ping {url.Host} ({reply.Address}) failed with '{reply.Status}'");
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
result.Pass = false;
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** Ping api.github.com failed with error: {ex}");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
public static async Task<CheckResult> CheckHttpsRequests(this IHostContext hostContext, string url, string pat, string expectedHeader)
|
||||
{
|
||||
var result = new CheckResult();
|
||||
try
|
||||
{
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** Send HTTPS Request to {url} ");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
using (var _ = new HttpEventSourceListener(result.Logs))
|
||||
using (var httpClientHandler = hostContext.CreateHttpClientHandler())
|
||||
using (var httpClient = new HttpClient(httpClientHandler))
|
||||
{
|
||||
httpClient.DefaultRequestHeaders.UserAgent.AddRange(hostContext.UserAgents);
|
||||
if (!string.IsNullOrEmpty(pat))
|
||||
{
|
||||
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("token", pat);
|
||||
}
|
||||
|
||||
var response = await httpClient.GetAsync(url);
|
||||
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Http status code: {response.StatusCode}");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Http response headers: {response.Headers}");
|
||||
|
||||
var responseContent = await response.Content.ReadAsStringAsync();
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Http response body: {responseContent}");
|
||||
if (response.IsSuccessStatusCode)
|
||||
{
|
||||
if (response.Headers.Contains(expectedHeader))
|
||||
{
|
||||
result.Pass = true;
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Http request 'GET' to {url} succeed");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ");
|
||||
}
|
||||
else
|
||||
{
|
||||
result.Pass = false;
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Http request 'GET' to {url} succeed but doesn't have expected HTTP Header.");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ");
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
result.Pass = false;
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Http request 'GET' to {url} failed with {response.StatusCode}");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ");
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
result.Pass = false;
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** Https request 'GET' to {url} failed with error: {ex}");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
public static async Task<CheckResult> DownloadExtraCA(this IHostContext hostContext, string url, string pat)
|
||||
{
|
||||
var result = new CheckResult();
|
||||
try
|
||||
{
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** Download SSL Certificate from {url} ");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
|
||||
var uri = new Uri(url);
|
||||
var env = new Dictionary<string, string>()
|
||||
{
|
||||
{ "HOSTNAME", uri.Host },
|
||||
{ "PORT", uri.IsDefaultPort ? (uri.Scheme.ToLowerInvariant() == "https" ? "443" : "80") : uri.Port.ToString() },
|
||||
{ "PATH", uri.AbsolutePath },
|
||||
{ "PAT", pat }
|
||||
};
|
||||
|
||||
var proxy = hostContext.WebProxy.GetProxy(uri);
|
||||
if (proxy != null)
|
||||
{
|
||||
env["PROXYHOST"] = proxy.Host;
|
||||
env["PROXYPORT"] = proxy.IsDefaultPort ? (proxy.Scheme.ToLowerInvariant() == "https" ? "443" : "80") : proxy.Port.ToString();
|
||||
if (hostContext.WebProxy.HttpProxyUsername != null ||
|
||||
hostContext.WebProxy.HttpsProxyUsername != null)
|
||||
{
|
||||
env["PROXYUSERNAME"] = hostContext.WebProxy.HttpProxyUsername ?? hostContext.WebProxy.HttpsProxyUsername;
|
||||
env["PROXYPASSWORD"] = hostContext.WebProxy.HttpProxyPassword ?? hostContext.WebProxy.HttpsProxyPassword;
|
||||
}
|
||||
else
|
||||
{
|
||||
env["PROXYUSERNAME"] = "";
|
||||
env["PROXYPASSWORD"] = "";
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
env["PROXYHOST"] = "";
|
||||
env["PROXYPORT"] = "";
|
||||
env["PROXYUSERNAME"] = "";
|
||||
env["PROXYPASSWORD"] = "";
|
||||
}
|
||||
|
||||
using (var processInvoker = hostContext.CreateService<IProcessInvoker>())
|
||||
{
|
||||
processInvoker.OutputDataReceived += new EventHandler<ProcessDataReceivedEventArgs>((sender, args) =>
|
||||
{
|
||||
if (!string.IsNullOrEmpty(args.Data))
|
||||
{
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} [STDOUT] {args.Data}");
|
||||
}
|
||||
});
|
||||
|
||||
processInvoker.ErrorDataReceived += new EventHandler<ProcessDataReceivedEventArgs>((sender, args) =>
|
||||
{
|
||||
if (!string.IsNullOrEmpty(args.Data))
|
||||
{
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} [STDERR] {args.Data}");
|
||||
}
|
||||
});
|
||||
|
||||
var downloadCertScript = Path.Combine(hostContext.GetDirectory(WellKnownDirectory.Bin), "checkScripts", "downloadCert");
|
||||
var node12 = Path.Combine(hostContext.GetDirectory(WellKnownDirectory.Externals), "node12", "bin", $"node{IOUtil.ExeExtension}");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Run '{node12} \"{downloadCertScript}\"' ");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} {StringUtil.ConvertToJson(env)}");
|
||||
await processInvoker.ExecuteAsync(
|
||||
hostContext.GetDirectory(WellKnownDirectory.Root),
|
||||
node12,
|
||||
$"\"{downloadCertScript}\"",
|
||||
env,
|
||||
true,
|
||||
CancellationToken.None);
|
||||
}
|
||||
|
||||
result.Pass = true;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
result.Pass = false;
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** Download SSL Certificate from '{url}' failed with error: {ex}");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
}
|
||||
|
||||
return result;
|
||||
}
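For reference, a hedged sketch of the environment contract assumed above: the downloadCert helper under externals/node12 only receives these variables, and the later Git/Node.js rechecks expect the fetched bundle at <runner root>/download_ca_cert.pem (see GIT_SSL_CAINFO and NODE_EXTRA_CA_CERTS further down). All values here are hypothetical.
// Hypothetical example of what DownloadExtraCA passes to the helper script:
//   HOSTNAME=ghes.example.com   PORT=443         PATH=/           PAT=<token>
//   PROXYHOST=proxy.example     PROXYPORT=8080   PROXYUSERNAME=   PROXYPASSWORD=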
|
||||
}
|
||||
|
||||
// EventSource listener for dotnet debug trace for HTTP and SSL
|
||||
public sealed class HttpEventSourceListener : EventListener
|
||||
{
|
||||
private readonly List<string> _logs;
|
||||
private readonly object _lock = new object();
|
||||
private readonly Dictionary<string, HashSet<string>> _ignoredEvent = new Dictionary<string, HashSet<string>>
|
||||
{
|
||||
{
|
||||
"Private.InternalDiagnostics.System.Net.Http",
|
||||
new HashSet<string>
|
||||
{
|
||||
"Info",
|
||||
"Associate"
|
||||
}
|
||||
},
|
||||
{
|
||||
"Private.InternalDiagnostics.System.Net.Security",
|
||||
new HashSet<string>
|
||||
{
|
||||
"Info",
|
||||
"SslStreamCtor",
|
||||
"SecureChannelCtor",
|
||||
"NoDelegateNoClientCert",
|
||||
"CertsAfterFiltering",
|
||||
"UsingCachedCredential",
|
||||
"SspiSelectedCipherSuite"
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
public HttpEventSourceListener(List<string> logs)
|
||||
{
|
||||
_logs = logs;
|
||||
if (Environment.GetEnvironmentVariable("ACTIONS_RUNNER_TRACE_ALL_HTTP_EVENT") == "1")
|
||||
{
|
||||
_ignoredEvent.Clear();
|
||||
}
|
||||
}
|
||||
|
||||
protected override void OnEventSourceCreated(EventSource eventSource)
|
||||
{
|
||||
base.OnEventSourceCreated(eventSource);
|
||||
|
||||
if (eventSource.Name == "Private.InternalDiagnostics.System.Net.Http" ||
|
||||
eventSource.Name == "Private.InternalDiagnostics.System.Net.Security")
|
||||
{
|
||||
EnableEvents(eventSource, EventLevel.Verbose, EventKeywords.All);
|
||||
}
|
||||
}
|
||||
|
||||
protected override void OnEventWritten(EventWrittenEventArgs eventData)
|
||||
{
|
||||
base.OnEventWritten(eventData);
|
||||
lock (_lock)
|
||||
{
|
||||
if (_ignoredEvent.TryGetValue(eventData.EventSource.Name, out var ignored) &&
|
||||
ignored.Contains(eventData.EventName))
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
_logs.Add($"{DateTime.UtcNow.ToString("O")} [START {eventData.EventSource.Name} - {eventData.EventName}]");
|
||||
_logs.AddRange(eventData.Payload.Select(x => string.Join(Environment.NewLine, x.ToString().Split(Environment.NewLine).Select(y => $"{DateTime.UtcNow.ToString("O")} {y}"))));
|
||||
_logs.Add($"{DateTime.UtcNow.ToString("O")} [END {eventData.EventSource.Name} - {eventData.EventName}]");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
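A minimal usage sketch of the listener above (illustrative only, inside an async method): while it is registered, the two Private.InternalDiagnostics event sources are traced verbosely into the supplied list, so wrapping any HttpClient call captures the HTTP/SSL handshake details.
var diagLogs = new List<string>();
using (var listener = new HttpEventSourceListener(diagLogs))
using (var client = new HttpClient())
{
    // Every request issued while the listener is alive is traced into diagLogs.
    var response = await client.GetAsync("https://api.github.com");
    diagLogs.Add($"{DateTime.UtcNow.ToString("O")} Status: {response.StatusCode}");
}
// diagLogs can then be appended to the check's log file.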
|
||||
171
src/Runner.Listener/Checks/GitCheck.cs
Normal file
@@ -0,0 +1,171 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Net;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Sdk;
|
||||
|
||||
namespace GitHub.Runner.Listener.Check
|
||||
{
|
||||
public sealed class GitCheck : RunnerService, ICheckExtension
|
||||
{
|
||||
private string _logFile = null;
|
||||
private string _gitPath = null;
|
||||
|
||||
public int Order => 3;
|
||||
|
||||
public string CheckName => "Git Certificate/Proxy Validation";
|
||||
|
||||
public string CheckDescription => "Make sure the git CLI can access GitHub.com or the GitHub Enterprise Server.";
|
||||
|
||||
public string CheckLog => _logFile;
|
||||
|
||||
public string HelpLink => "https://github.com/actions/runner/blob/main/docs/checks/git.md";
|
||||
|
||||
public Type ExtensionType => typeof(ICheckExtension);
|
||||
|
||||
public override void Initialize(IHostContext hostContext)
|
||||
{
|
||||
base.Initialize(hostContext);
|
||||
_logFile = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Diag), StringUtil.Format("{0}_{1:yyyyMMdd-HHmmss}-utc.log", nameof(GitCheck), DateTime.UtcNow));
|
||||
_gitPath = WhichUtil.Which("git");
|
||||
}
|
||||
|
||||
// git access to ghes/gh
|
||||
public async Task<bool> RunCheck(string url, string pat)
|
||||
{
|
||||
await File.AppendAllLinesAsync(_logFile, HostContext.WarnLog());
|
||||
await File.AppendAllLinesAsync(_logFile, HostContext.CheckProxy());
|
||||
|
||||
if (string.IsNullOrEmpty(_gitPath))
|
||||
{
|
||||
await File.AppendAllLinesAsync(_logFile, new[] { $"{DateTime.UtcNow.ToString("O")} Can't verify git with GitHub.com or GitHub Enterprise Server since git is not installed." });
|
||||
return false;
|
||||
}
|
||||
|
||||
var checkGit = await CheckGit(url, pat);
|
||||
var result = checkGit.Pass;
|
||||
await File.AppendAllLinesAsync(_logFile, checkGit.Logs);
|
||||
|
||||
// Try to fix SSL errors by providing an extra CA certificate.
|
||||
if (checkGit.SslError)
|
||||
{
|
||||
await File.AppendAllLinesAsync(_logFile, new[] { $"{DateTime.UtcNow.ToString("O")} Try fix SSL error by providing extra CA certificate." });
|
||||
var downloadCert = await HostContext.DownloadExtraCA(url, pat);
|
||||
await File.AppendAllLinesAsync(_logFile, downloadCert.Logs);
|
||||
|
||||
if (downloadCert.Pass)
|
||||
{
|
||||
var recheckGit = await CheckGit(url, pat, extraCA: true);
|
||||
await File.AppendAllLinesAsync(_logFile, recheckGit.Logs);
|
||||
if (recheckGit.Pass)
|
||||
{
|
||||
await File.AppendAllLinesAsync(_logFile, new[] { $"{DateTime.UtcNow.ToString("O")} Fixed SSL error by providing extra CA certs." });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
private async Task<CheckResult> CheckGit(string url, string pat, bool extraCA = false)
|
||||
{
|
||||
var result = new CheckResult();
|
||||
try
|
||||
{
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** Validate server cert and proxy configuration with Git ");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
var repoUrlBuilder = new UriBuilder(url);
|
||||
repoUrlBuilder.Path = "actions/checkout";
|
||||
repoUrlBuilder.UserName = "gh";
|
||||
repoUrlBuilder.Password = pat;
|
||||
|
||||
var gitProxy = "";
|
||||
var proxy = HostContext.WebProxy.GetProxy(repoUrlBuilder.Uri);
|
||||
if (proxy != null)
|
||||
{
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Runner is behind http proxy '{proxy.AbsoluteUri}'");
|
||||
if (HostContext.WebProxy.HttpProxyUsername != null ||
|
||||
HostContext.WebProxy.HttpsProxyUsername != null)
|
||||
{
|
||||
var proxyUrlWithCred = UrlUtil.GetCredentialEmbeddedUrl(
|
||||
proxy,
|
||||
HostContext.WebProxy.HttpProxyUsername ?? HostContext.WebProxy.HttpsProxyUsername,
|
||||
HostContext.WebProxy.HttpProxyPassword ?? HostContext.WebProxy.HttpsProxyPassword);
|
||||
gitProxy = $"-c http.proxy={proxyUrlWithCred}";
|
||||
}
|
||||
else
|
||||
{
|
||||
gitProxy = $"-c http.proxy={proxy.AbsoluteUri}";
|
||||
}
|
||||
}
|
||||
|
||||
using (var processInvoker = HostContext.CreateService<IProcessInvoker>())
|
||||
{
|
||||
processInvoker.OutputDataReceived += new EventHandler<ProcessDataReceivedEventArgs>((sender, args) =>
|
||||
{
|
||||
if (!string.IsNullOrEmpty(args.Data))
|
||||
{
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} {args.Data}");
|
||||
}
|
||||
});
|
||||
|
||||
processInvoker.ErrorDataReceived += new EventHandler<ProcessDataReceivedEventArgs>((sender, args) =>
|
||||
{
|
||||
if (!string.IsNullOrEmpty(args.Data))
|
||||
{
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} {args.Data}");
|
||||
}
|
||||
});
|
||||
|
||||
var gitArgs = $"{gitProxy} ls-remote --exit-code {repoUrlBuilder.Uri.AbsoluteUri} HEAD";
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Run 'git {gitArgs}' ");
|
||||
|
||||
var env = new Dictionary<string, string>
|
||||
{
|
||||
{ "GIT_TRACE", "1" },
|
||||
{ "GIT_CURL_VERBOSE", "1" }
|
||||
};
|
||||
|
||||
if (extraCA)
|
||||
{
|
||||
env["GIT_SSL_CAINFO"] = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), "download_ca_cert.pem");
|
||||
}
|
||||
|
||||
await processInvoker.ExecuteAsync(
|
||||
HostContext.GetDirectory(WellKnownDirectory.Root),
|
||||
_gitPath,
|
||||
gitArgs,
|
||||
env,
|
||||
true,
|
||||
CancellationToken.None);
|
||||
}
|
||||
|
||||
result.Pass = true;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
result.Pass = false;
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** git ls-remote failed with error: {ex}");
|
||||
if (result.Logs.Any(x => x.Contains("SSL Certificate problem", StringComparison.OrdinalIgnoreCase)))
|
||||
{
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** git ls-remote failed due to SSL cert issue.");
|
||||
result.SslError = true;
|
||||
}
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
}
|
||||
}
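Putting the pieces of CheckGit together, a hedged sketch of the effective invocation (host, proxy, and token are hypothetical):
// Built by CheckGit above; values are illustrative only.
var repoUrl = new UriBuilder("https://ghes.example.com") { Path = "actions/checkout", UserName = "gh", Password = "<PAT>" };
// With an authenticated proxy and, on recheck, the downloaded CA bundle, the runner effectively executes:
//   GIT_TRACE=1 GIT_CURL_VERBOSE=1 [GIT_SSL_CAINFO=<root>/download_ca_cert.pem]
//   git -c http.proxy=http://user:pass@proxy.example:8080 ls-remote --exit-code https://gh:<PAT>@ghes.example.com/actions/checkout HEAD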
|
||||
30
src/Runner.Listener/Checks/ICheckExtension.cs
Normal file
@@ -0,0 +1,30 @@
|
||||
using System.Collections.Generic;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.Runner.Common;
|
||||
|
||||
namespace GitHub.Runner.Listener.Check
|
||||
{
|
||||
public interface ICheckExtension : IExtension
|
||||
{
|
||||
int Order { get; }
|
||||
string CheckName { get; }
|
||||
string CheckDescription { get; }
|
||||
string CheckLog { get; }
|
||||
string HelpLink { get; }
|
||||
Task<bool> RunCheck(string url, string pat);
|
||||
}
|
||||
|
||||
public class CheckResult
|
||||
{
|
||||
public CheckResult()
|
||||
{
|
||||
Logs = new List<string>();
|
||||
}
|
||||
|
||||
public bool Pass { get; set; }
|
||||
|
||||
public bool SslError { get; set; }
|
||||
|
||||
public List<string> Logs { get; set; }
|
||||
}
|
||||
}
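A hedged sketch of how a host could drive these extensions; the real --check wiring lives elsewhere in the listener, and the extension lookup below is an assumption.
// Run every registered check in order and report where its detailed log went.
var checks = HostContext.GetService<IExtensionManager>()
                        .GetExtensions<ICheckExtension>()
                        .OrderBy(x => x.Order);
foreach (var check in checks)
{
    var passed = await check.RunCheck(url, pat);
    Console.WriteLine($"{check.CheckName}: {(passed ? "PASS" : "FAIL")} (log: {check.CheckLog}, help: {check.HelpLink})");
}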
|
||||
59
src/Runner.Listener/Checks/InternetCheck.cs
Normal file
@@ -0,0 +1,59 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Sdk;
|
||||
|
||||
namespace GitHub.Runner.Listener.Check
|
||||
{
|
||||
public sealed class InternetCheck : RunnerService, ICheckExtension
|
||||
{
|
||||
private string _logFile = null;
|
||||
|
||||
public int Order => 1;
|
||||
|
||||
public string CheckName => "Internet Connection";
|
||||
|
||||
public string CheckDescription => "Make sure the actions runner has access to the public internet.";
|
||||
|
||||
public string CheckLog => _logFile;
|
||||
|
||||
public string HelpLink => "https://github.com/actions/runner/blob/main/docs/checks/internet.md";
|
||||
|
||||
public Type ExtensionType => typeof(ICheckExtension);
|
||||
|
||||
public override void Initialize(IHostContext hostContext)
|
||||
{
|
||||
base.Initialize(hostContext);
|
||||
_logFile = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Diag), StringUtil.Format("{0}_{1:yyyyMMdd-HHmmss}-utc.log", nameof(InternetCheck), DateTime.UtcNow));
|
||||
}
|
||||
|
||||
// check runner access to api.github.com
|
||||
public async Task<bool> RunCheck(string url, string pat)
|
||||
{
|
||||
await File.AppendAllLinesAsync(_logFile, HostContext.WarnLog());
|
||||
await File.AppendAllLinesAsync(_logFile, HostContext.CheckProxy());
|
||||
|
||||
var checkTasks = new List<Task<CheckResult>>();
|
||||
checkTasks.Add(CheckUtil.CheckDns("https://api.github.com"));
|
||||
checkTasks.Add(CheckUtil.CheckPing("https://api.github.com"));
|
||||
|
||||
// We don't need to pass a PAT since it might be a token for GHES.
|
||||
checkTasks.Add(HostContext.CheckHttpsRequests("https://api.github.com", pat: null, expectedHeader: "X-GitHub-Request-Id"));
|
||||
|
||||
var result = true;
|
||||
while (checkTasks.Count > 0)
|
||||
{
|
||||
var finishedCheckTask = await Task.WhenAny<CheckResult>(checkTasks);
|
||||
var finishedCheck = await finishedCheckTask;
|
||||
result = result && finishedCheck.Pass;
|
||||
await File.AppendAllLinesAsync(_logFile, finishedCheck.Logs);
|
||||
checkTasks.Remove(finishedCheckTask);
|
||||
}
|
||||
|
||||
await Task.WhenAll(checkTasks);
|
||||
return result;
|
||||
}
|
||||
}
|
||||
}
|
||||
181
src/Runner.Listener/Checks/NodeJsCheck.cs
Normal file
@@ -0,0 +1,181 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Net;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Sdk;
|
||||
|
||||
namespace GitHub.Runner.Listener.Check
|
||||
{
|
||||
public sealed class NodeJsCheck : RunnerService, ICheckExtension
|
||||
{
|
||||
private string _logFile = null;
|
||||
|
||||
public int Order => 4;
|
||||
|
||||
public string CheckName => "Node.js Certificate/Proxy Validation";
|
||||
|
||||
public string CheckDescription => "Make sure Node.js has access to GitHub.com or the GitHub Enterprise Server.";
|
||||
|
||||
public string CheckLog => _logFile;
|
||||
|
||||
public string HelpLink => "https://github.com/actions/runner/blob/main/docs/checks/nodejs.md";
|
||||
|
||||
public Type ExtensionType => typeof(ICheckExtension);
|
||||
|
||||
public override void Initialize(IHostContext hostContext)
|
||||
{
|
||||
base.Initialize(hostContext);
|
||||
_logFile = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Diag), StringUtil.Format("{0}_{1:yyyyMMdd-HHmmss}-utc.log", nameof(NodeJsCheck), DateTime.UtcNow));
|
||||
}
|
||||
|
||||
// node access to ghes/gh
|
||||
public async Task<bool> RunCheck(string url, string pat)
|
||||
{
|
||||
await File.AppendAllLinesAsync(_logFile, HostContext.WarnLog());
|
||||
await File.AppendAllLinesAsync(_logFile, HostContext.CheckProxy());
|
||||
|
||||
// Request to github.com or ghes server
|
||||
var urlBuilder = new UriBuilder(url);
|
||||
if (UrlUtil.IsHostedServer(urlBuilder))
|
||||
{
|
||||
urlBuilder.Host = $"api.{urlBuilder.Host}";
|
||||
urlBuilder.Path = "";
|
||||
}
|
||||
else
|
||||
{
|
||||
urlBuilder.Path = "api/v3";
|
||||
}
|
||||
|
||||
var checkNode = await CheckNodeJs(urlBuilder.Uri.AbsoluteUri, pat);
|
||||
var result = checkNode.Pass;
|
||||
await File.AppendAllLinesAsync(_logFile, checkNode.Logs);
|
||||
|
||||
// Try to fix SSL errors by providing an extra CA certificate.
|
||||
if (checkNode.SslError)
|
||||
{
|
||||
var downloadCert = await HostContext.DownloadExtraCA(urlBuilder.Uri.AbsoluteUri, pat);
|
||||
await File.AppendAllLinesAsync(_logFile, downloadCert.Logs);
|
||||
|
||||
if (downloadCert.Pass)
|
||||
{
|
||||
var recheckNode = await CheckNodeJs(urlBuilder.Uri.AbsoluteUri, pat, extraCA: true);
|
||||
await File.AppendAllLinesAsync(_logFile, recheckNode.Logs);
|
||||
if (recheckNode.Pass)
|
||||
{
|
||||
await File.AppendAllLinesAsync(_logFile, new[] { $"{DateTime.UtcNow.ToString("O")} Fixed SSL error by providing extra CA certs." });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
private async Task<CheckResult> CheckNodeJs(string url, string pat, bool extraCA = false)
|
||||
{
|
||||
var result = new CheckResult();
|
||||
try
|
||||
{
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** Make Http request to {url} using node.js ");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
|
||||
// Request to github.com or ghes server
|
||||
Uri requestUrl = new Uri(url);
|
||||
var env = new Dictionary<string, string>()
|
||||
{
|
||||
{ "HOSTNAME", requestUrl.Host },
|
||||
{ "PORT", requestUrl.IsDefaultPort ? (requestUrl.Scheme.ToLowerInvariant() == "https" ? "443" : "80") : requestUrl.Port.ToString() },
|
||||
{ "PATH", requestUrl.AbsolutePath },
|
||||
{ "PAT", pat }
|
||||
};
|
||||
|
||||
var proxy = HostContext.WebProxy.GetProxy(requestUrl);
|
||||
if (proxy != null)
|
||||
{
|
||||
env["PROXYHOST"] = proxy.Host;
|
||||
env["PROXYPORT"] = proxy.IsDefaultPort ? (proxy.Scheme.ToLowerInvariant() == "https" ? "443" : "80") : proxy.Port.ToString();
|
||||
if (HostContext.WebProxy.HttpProxyUsername != null ||
|
||||
HostContext.WebProxy.HttpsProxyUsername != null)
|
||||
{
|
||||
env["PROXYUSERNAME"] = HostContext.WebProxy.HttpProxyUsername ?? HostContext.WebProxy.HttpsProxyUsername;
|
||||
env["PROXYPASSWORD"] = HostContext.WebProxy.HttpProxyPassword ?? HostContext.WebProxy.HttpsProxyPassword;
|
||||
}
|
||||
else
|
||||
{
|
||||
env["PROXYUSERNAME"] = "";
|
||||
env["PROXYPASSWORD"] = "";
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
env["PROXYHOST"] = "";
|
||||
env["PROXYPORT"] = "";
|
||||
env["PROXYUSERNAME"] = "";
|
||||
env["PROXYPASSWORD"] = "";
|
||||
}
|
||||
|
||||
if (extraCA)
|
||||
{
|
||||
env["NODE_EXTRA_CA_CERTS"] = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), "download_ca_cert.pem");
|
||||
}
|
||||
|
||||
using (var processInvoker = HostContext.CreateService<IProcessInvoker>())
|
||||
{
|
||||
processInvoker.OutputDataReceived += new EventHandler<ProcessDataReceivedEventArgs>((sender, args) =>
|
||||
{
|
||||
if (!string.IsNullOrEmpty(args.Data))
|
||||
{
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} [STDOUT] {args.Data}");
|
||||
}
|
||||
});
|
||||
|
||||
processInvoker.ErrorDataReceived += new EventHandler<ProcessDataReceivedEventArgs>((sender, args) =>
|
||||
{
|
||||
if (!string.IsNullOrEmpty(args.Data))
|
||||
{
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} [STDERR] {args.Data}");
|
||||
}
|
||||
});
|
||||
|
||||
var makeWebRequestScript = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Bin), "checkScripts", "makeWebRequest.js");
|
||||
var node12 = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Externals), "node12", "bin", $"node{IOUtil.ExeExtension}");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Run '{node12} \"{makeWebRequestScript}\"' ");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} {StringUtil.ConvertToJson(env)}");
|
||||
await processInvoker.ExecuteAsync(
|
||||
HostContext.GetDirectory(WellKnownDirectory.Root),
|
||||
node12,
|
||||
$"\"{makeWebRequestScript}\"",
|
||||
env,
|
||||
true,
|
||||
CancellationToken.None);
|
||||
}
|
||||
|
||||
result.Pass = true;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
result.Pass = false;
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** Make https request to {url} using node.js failed with error: {ex}");
|
||||
if (result.Logs.Any(x => x.Contains("UNABLE_TO_VERIFY_LEAF_SIGNATURE") ||
|
||||
x.Contains("UNABLE_TO_GET_ISSUER_CERT_LOCALLY") ||
|
||||
x.Contains("SELF_SIGNED_CERT_IN_CHAIN")))
|
||||
{
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** Https request failed due to SSL cert issue.");
|
||||
result.SslError = true;
|
||||
}
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
}
|
||||
}
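For clarity, a small sketch of the URL rewrite RunCheck performs before probing (host names are hypothetical):
var b = new UriBuilder("https://github.com/my-org/my-repo");
if (UrlUtil.IsHostedServer(b)) { b.Host = $"api.{b.Host}"; b.Path = ""; }   // hosted -> https://api.github.com/
else { b.Path = "api/v3"; }                                                 // GHES keeps its host and probes /api/v3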
|
||||
@@ -27,6 +27,7 @@ namespace GitHub.Runner.Listener
|
||||
|
||||
private readonly string[] validFlags =
|
||||
{
|
||||
Constants.Runner.CommandLine.Flags.Check,
|
||||
Constants.Runner.CommandLine.Flags.Commit,
|
||||
Constants.Runner.CommandLine.Flags.Help,
|
||||
Constants.Runner.CommandLine.Flags.Replace,
|
||||
@@ -39,9 +40,11 @@ namespace GitHub.Runner.Listener
|
||||
private readonly string[] validArgs =
|
||||
{
|
||||
Constants.Runner.CommandLine.Args.Auth,
|
||||
Constants.Runner.CommandLine.Args.Labels,
|
||||
Constants.Runner.CommandLine.Args.MonitorSocketAddress,
|
||||
Constants.Runner.CommandLine.Args.Name,
|
||||
Constants.Runner.CommandLine.Args.Pool,
|
||||
Constants.Runner.CommandLine.Args.PAT,
|
||||
Constants.Runner.CommandLine.Args.RunnerGroup,
|
||||
Constants.Runner.CommandLine.Args.StartupType,
|
||||
Constants.Runner.CommandLine.Args.Token,
|
||||
Constants.Runner.CommandLine.Args.Url,
|
||||
@@ -58,6 +61,7 @@ namespace GitHub.Runner.Listener
|
||||
public bool Warmup => TestCommand(Constants.Runner.CommandLine.Commands.Warmup);
|
||||
|
||||
// Flags.
|
||||
public bool Check => TestFlag(Constants.Runner.CommandLine.Flags.Check);
|
||||
public bool Commit => TestFlag(Constants.Runner.CommandLine.Flags.Commit);
|
||||
public bool Help => TestFlag(Constants.Runner.CommandLine.Flags.Help);
|
||||
public bool Unattended => TestFlag(Constants.Runner.CommandLine.Flags.Unattended);
|
||||
@@ -168,6 +172,15 @@ namespace GitHub.Runner.Listener
|
||||
validator: Validators.NonEmptyValidator);
|
||||
}
|
||||
|
||||
public string GetRunnerGroupName(string defaultPoolName = null)
|
||||
{
|
||||
return GetArgOrPrompt(
|
||||
name: Constants.Runner.CommandLine.Args.RunnerGroup,
|
||||
description: "Enter the name of the runner group to add this runner to:",
|
||||
defaultValue: defaultPoolName ?? "default",
|
||||
validator: Validators.NonEmptyValidator);
|
||||
}
|
||||
|
||||
public string GetToken()
|
||||
{
|
||||
return GetArgOrPrompt(
|
||||
@@ -177,6 +190,22 @@ namespace GitHub.Runner.Listener
|
||||
validator: Validators.NonEmptyValidator);
|
||||
}
|
||||
|
||||
public string GetGitHubPersonalAccessToken(bool required = false)
|
||||
{
|
||||
if (required)
|
||||
{
|
||||
return GetArgOrPrompt(
|
||||
name: Constants.Runner.CommandLine.Args.PAT,
|
||||
description: "What is your GitHub personal access token?",
|
||||
defaultValue: string.Empty,
|
||||
validator: Validators.NonEmptyValidator);
|
||||
}
|
||||
else
|
||||
{
|
||||
return GetArg(name: Constants.Runner.CommandLine.Args.PAT);
|
||||
}
|
||||
}
|
||||
|
||||
public string GetRunnerRegisterToken()
|
||||
{
|
||||
return GetArgOrPrompt(
|
||||
@@ -249,6 +278,24 @@ namespace GitHub.Runner.Listener
|
||||
return GetArg(Constants.Runner.CommandLine.Args.StartupType);
|
||||
}
|
||||
|
||||
public ISet<string> GetLabels()
|
||||
{
|
||||
var labelSet = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
|
||||
string labels = GetArgOrPrompt(
|
||||
name: Constants.Runner.CommandLine.Args.Labels,
|
||||
description: $"This runner will have the following labels: 'self-hosted', '{VarUtil.OS}', '{VarUtil.OSArchitecture}' \nEnter any additional labels (ex. label-1,label-2):",
|
||||
defaultValue: string.Empty,
|
||||
validator: Validators.LabelsValidator,
|
||||
isOptional: true);
|
||||
|
||||
if (!string.IsNullOrEmpty(labels))
|
||||
{
|
||||
labelSet = labels.Split(',').Where(x => !string.IsNullOrEmpty(x)).ToHashSet<string>(StringComparer.OrdinalIgnoreCase);
|
||||
}
|
||||
|
||||
return labelSet;
|
||||
}
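For example, input of "gpu,linux-x64,GPU,,label-1" at the prompt yields the set { "gpu", "linux-x64", "label-1" }: entries are split on ',', empty items are dropped, and duplicates collapse case-insensitively.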
|
||||
|
||||
//
|
||||
// Private helpers.
|
||||
//
|
||||
@@ -280,7 +327,8 @@ namespace GitHub.Runner.Listener
|
||||
string name,
|
||||
string description,
|
||||
string defaultValue,
|
||||
Func<string, bool> validator)
|
||||
Func<string, bool> validator,
|
||||
bool isOptional = false)
|
||||
{
|
||||
// Check for the arg in the command line parser.
|
||||
ArgUtil.NotNull(validator, nameof(validator));
|
||||
@@ -311,7 +359,8 @@ namespace GitHub.Runner.Listener
|
||||
secret: Constants.Runner.CommandLine.Args.Secrets.Any(x => string.Equals(x, name, StringComparison.OrdinalIgnoreCase)),
|
||||
defaultValue: defaultValue,
|
||||
validator: validator,
|
||||
unattended: Unattended);
|
||||
unattended: Unattended,
|
||||
isOptional: isOptional);
|
||||
}
|
||||
|
||||
private string GetEnvArg(string name)
|
||||
|
||||
@@ -1,19 +1,18 @@
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Common.Util;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Services.Common;
|
||||
using GitHub.Services.OAuth;
|
||||
using GitHub.Services.WebApi;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Security.Cryptography;
|
||||
using System.Threading.Tasks;
|
||||
using System.Runtime.InteropServices;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Sdk;
|
||||
using System.Net.Http;
|
||||
using System.Net.Http.Headers;
|
||||
using System.Runtime.InteropServices;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace GitHub.Runner.Listener.Configuration
|
||||
{
|
||||
@@ -87,17 +86,17 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
|
||||
RunnerSettings runnerSettings = new RunnerSettings();
|
||||
|
||||
bool isHostedServer = false;
|
||||
// Loop getting url and creds until you can connect
|
||||
ICredentialProvider credProvider = null;
|
||||
VssCredentials creds = null;
|
||||
_term.WriteSection("Authentication");
|
||||
while (true)
|
||||
{
|
||||
// Get the URL
|
||||
// When testing against a dev deployment of Actions Service, set this environment variable
|
||||
var useDevActionsServiceUrl = Environment.GetEnvironmentVariable("USE_DEV_ACTIONS_SERVICE_URL");
|
||||
var inputUrl = command.GetUrl();
|
||||
if (!inputUrl.Contains("github.com", StringComparison.OrdinalIgnoreCase) &&
|
||||
!inputUrl.Contains("github.localhost", StringComparison.OrdinalIgnoreCase))
|
||||
if (inputUrl.Contains("codedev.ms", StringComparison.OrdinalIgnoreCase)
|
||||
|| useDevActionsServiceUrl != null)
|
||||
{
|
||||
runnerSettings.ServerUrl = inputUrl;
|
||||
// Get the credentials
|
||||
@@ -108,8 +107,8 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
else
|
||||
{
|
||||
runnerSettings.GitHubUrl = inputUrl;
|
||||
var githubToken = command.GetRunnerRegisterToken();
|
||||
GitHubAuthResult authResult = await GetTenantCredential(inputUrl, githubToken);
|
||||
var registerToken = await GetRunnerTokenAsync(command, inputUrl, "registration");
|
||||
GitHubAuthResult authResult = await GetTenantCredential(inputUrl, registerToken, Constants.RunnerEvent.Register);
|
||||
runnerSettings.ServerUrl = authResult.TenantUrl;
|
||||
creds = authResult.ToVssCredentials();
|
||||
Trace.Info("cred retrieved via GitHub auth");
|
||||
@@ -118,7 +117,20 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
try
|
||||
{
|
||||
// Determine the service deployment type based on connection data. (Hosted/OnPremises)
|
||||
isHostedServer = await IsHostedServer(runnerSettings.ServerUrl, creds);
|
||||
runnerSettings.IsHostedServer = runnerSettings.GitHubUrl == null || UrlUtil.IsHostedServer(new UriBuilder(runnerSettings.GitHubUrl));
|
||||
|
||||
// Warn if the Actions server url and GHES server url has different Host
|
||||
if (!runnerSettings.IsHostedServer)
|
||||
{
|
||||
// Example actionsServerUrl is https://my-ghes/_services/pipelines/[...]
|
||||
// Example githubServerUrl is https://my-ghes
|
||||
var actionsServerUrl = new Uri(runnerSettings.ServerUrl);
|
||||
var githubServerUrl = new Uri(runnerSettings.GitHubUrl);
|
||||
if (!string.Equals(actionsServerUrl.Authority, githubServerUrl.Authority, StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
throw new InvalidOperationException($"GitHub Actions is not properly configured in GHES. GHES url: {runnerSettings.GitHubUrl}, Actions url: {runnerSettings.ServerUrl}.");
|
||||
}
|
||||
}
|
||||
|
||||
// Validate can connect.
|
||||
await _runnerServer.ConnectAsync(new Uri(runnerSettings.ServerUrl), creds);
|
||||
@@ -147,17 +159,34 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
|
||||
_term.WriteSection("Runner Registration");
|
||||
|
||||
//Get all the agent pools, and select the first private pool
|
||||
// If we have more than one runner group available, allow the user to specify which one to be added into
|
||||
string poolName = null;
|
||||
TaskAgentPool agentPool = null;
|
||||
List<TaskAgentPool> agentPools = await _runnerServer.GetAgentPoolsAsync();
|
||||
TaskAgentPool agentPool = agentPools?.Where(x => x.IsHosted == false).FirstOrDefault();
|
||||
TaskAgentPool defaultPool = agentPools?.Where(x => x.IsInternal).FirstOrDefault();
|
||||
|
||||
if (agentPool == null)
|
||||
if (agentPools?.Where(x => !x.IsHosted).Count() > 1)
|
||||
{
|
||||
throw new TaskAgentPoolNotFoundException($"Could not find any private pool. Contact support.");
|
||||
poolName = command.GetRunnerGroupName(defaultPool?.Name);
|
||||
_term.WriteLine();
|
||||
agentPool = agentPools.Where(x => string.Equals(poolName, x.Name, StringComparison.OrdinalIgnoreCase) && !x.IsHosted).FirstOrDefault();
|
||||
}
|
||||
else
|
||||
{
|
||||
Trace.Info("Found a private pool with id {1} and name {2}", agentPool.Id, agentPool.Name);
|
||||
agentPool = defaultPool;
|
||||
}
|
||||
|
||||
if (agentPool == null && poolName == null)
|
||||
{
|
||||
throw new TaskAgentPoolNotFoundException($"Could not find any self-hosted runner groups. Contact support.");
|
||||
}
|
||||
else if (agentPool == null && poolName != null)
|
||||
{
|
||||
throw new TaskAgentPoolNotFoundException($"Could not find any self-hosted runner group named \"{poolName}\".");
|
||||
}
|
||||
else
|
||||
{
|
||||
Trace.Info("Found a self-hosted runner group with id {1} and name {2}", agentPool.Id, agentPool.Name);
|
||||
runnerSettings.PoolId = agentPool.Id;
|
||||
runnerSettings.PoolName = agentPool.Name;
|
||||
}
|
||||
@@ -169,6 +198,9 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
|
||||
_term.WriteLine();
|
||||
|
||||
var userLabels = command.GetLabels();
|
||||
_term.WriteLine();
|
||||
|
||||
var agents = await _runnerServer.GetAgentsAsync(runnerSettings.PoolId, runnerSettings.AgentName);
|
||||
Trace.Verbose("Returns {0} agents", agents.Count);
|
||||
agent = agents.FirstOrDefault();
|
||||
@@ -178,7 +210,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
if (command.GetReplace())
|
||||
{
|
||||
// Update existing agent with new PublicKey, agent version.
|
||||
agent = UpdateExistingAgent(agent, publicKey);
|
||||
agent = UpdateExistingAgent(agent, publicKey, userLabels);
|
||||
|
||||
try
|
||||
{
|
||||
@@ -195,13 +227,13 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
else if (command.Unattended)
|
||||
{
|
||||
// if not replace and it is unattended config.
|
||||
throw new TaskAgentExistsException($"Pool {runnerSettings.PoolId} already contains a runner with name {runnerSettings.AgentName}.");
|
||||
throw new TaskAgentExistsException($"A runner exists with the same name {runnerSettings.AgentName}.");
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
// Create a new agent.
|
||||
agent = CreateNewAgent(runnerSettings.AgentName, publicKey);
|
||||
agent = CreateNewAgent(runnerSettings.AgentName, publicKey, userLabels);
|
||||
|
||||
try
|
||||
{
|
||||
@@ -219,44 +251,11 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
// Add Agent Id to settings
|
||||
runnerSettings.AgentId = agent.Id;
|
||||
|
||||
// respect the serverUrl resolve by server.
|
||||
// in case of agent configured using collection url instead of account url.
|
||||
string agentServerUrl;
|
||||
if (agent.Properties.TryGetValidatedValue<string>("ServerUrl", out agentServerUrl) &&
|
||||
!string.IsNullOrEmpty(agentServerUrl))
|
||||
{
|
||||
Trace.Info($"Agent server url resolve by server: '{agentServerUrl}'.");
|
||||
|
||||
// we need make sure the Schema/Host/Port component of the url remain the same.
|
||||
UriBuilder inputServerUrl = new UriBuilder(runnerSettings.ServerUrl);
|
||||
UriBuilder serverReturnedServerUrl = new UriBuilder(agentServerUrl);
|
||||
if (Uri.Compare(inputServerUrl.Uri, serverReturnedServerUrl.Uri, UriComponents.SchemeAndServer, UriFormat.Unescaped, StringComparison.OrdinalIgnoreCase) != 0)
|
||||
{
|
||||
inputServerUrl.Path = serverReturnedServerUrl.Path;
|
||||
Trace.Info($"Replace server returned url's scheme://host:port component with user input server url's scheme://host:port: '{inputServerUrl.Uri.AbsoluteUri}'.");
|
||||
runnerSettings.ServerUrl = inputServerUrl.Uri.AbsoluteUri;
|
||||
}
|
||||
else
|
||||
{
|
||||
runnerSettings.ServerUrl = agentServerUrl;
|
||||
}
|
||||
}
|
||||
|
||||
// See if the server supports our OAuth key exchange for credentials
|
||||
if (agent.Authorization != null &&
|
||||
agent.Authorization.ClientId != Guid.Empty &&
|
||||
agent.Authorization.AuthorizationUrl != null)
|
||||
{
|
||||
UriBuilder configServerUrl = new UriBuilder(runnerSettings.ServerUrl);
|
||||
UriBuilder oauthEndpointUrlBuilder = new UriBuilder(agent.Authorization.AuthorizationUrl);
|
||||
if (!isHostedServer && Uri.Compare(configServerUrl.Uri, oauthEndpointUrlBuilder.Uri, UriComponents.SchemeAndServer, UriFormat.Unescaped, StringComparison.OrdinalIgnoreCase) != 0)
|
||||
{
|
||||
oauthEndpointUrlBuilder.Scheme = configServerUrl.Scheme;
|
||||
oauthEndpointUrlBuilder.Host = configServerUrl.Host;
|
||||
oauthEndpointUrlBuilder.Port = configServerUrl.Port;
|
||||
Trace.Info($"Set oauth endpoint url's scheme://host:port component to match runner configure url's scheme://host:port: '{oauthEndpointUrlBuilder.Uri.AbsoluteUri}'.");
|
||||
}
|
||||
|
||||
var credentialData = new CredentialData
|
||||
{
|
||||
Scheme = Constants.Configuration.OAuth,
|
||||
@@ -264,7 +263,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
{
|
||||
{ "clientId", agent.Authorization.ClientId.ToString("D") },
|
||||
{ "authorizationUrl", agent.Authorization.AuthorizationUrl.AbsoluteUri },
|
||||
{ "oauthEndpointUrl", oauthEndpointUrlBuilder.Uri.AbsoluteUri },
|
||||
{ "requireFipsCryptography", agent.Properties.GetValue("RequireFipsCryptography", false).ToString() }
|
||||
},
|
||||
};
|
||||
|
||||
@@ -375,14 +374,13 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
}
|
||||
else
|
||||
{
|
||||
var githubToken = command.GetRunnerDeletionToken();
|
||||
GitHubAuthResult authResult = await GetTenantCredential(settings.GitHubUrl, githubToken);
|
||||
var deletionToken = await GetRunnerTokenAsync(command, settings.GitHubUrl, "remove");
|
||||
GitHubAuthResult authResult = await GetTenantCredential(settings.GitHubUrl, deletionToken, Constants.RunnerEvent.Remove);
|
||||
creds = authResult.ToVssCredentials();
|
||||
Trace.Info("cred retrieved via GitHub auth");
|
||||
}
|
||||
|
||||
// Determine the service deployment type based on connection data. (Hosted/OnPremises)
|
||||
bool isHostedServer = await IsHostedServer(settings.ServerUrl, creds);
|
||||
await _runnerServer.ConnectAsync(new Uri(settings.ServerUrl), creds);
|
||||
|
||||
var agents = await _runnerServer.GetAgentsAsync(settings.PoolId, settings.AgentName);
|
||||
@@ -460,7 +458,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
}
|
||||
|
||||
|
||||
private TaskAgent UpdateExistingAgent(TaskAgent agent, RSAParameters publicKey)
|
||||
private TaskAgent UpdateExistingAgent(TaskAgent agent, RSAParameters publicKey, ISet<string> userLabels)
|
||||
{
|
||||
ArgUtil.NotNull(agent, nameof(agent));
|
||||
agent.Authorization = new TaskAgentAuthorization
|
||||
@@ -468,18 +466,25 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
PublicKey = new TaskAgentPublicKey(publicKey.Exponent, publicKey.Modulus),
|
||||
};
|
||||
|
||||
// update - update instead of delete so we don't lose labels etc...
|
||||
// update should replace the existing labels
|
||||
agent.Version = BuildConstants.RunnerPackage.Version;
|
||||
agent.OSDescription = RuntimeInformation.OSDescription;
|
||||
|
||||
agent.Labels.Add("self-hosted");
|
||||
agent.Labels.Add(VarUtil.OS);
|
||||
agent.Labels.Add(VarUtil.OSArchitecture);
|
||||
agent.Labels.Clear();
|
||||
|
||||
agent.Labels.Add(new AgentLabel("self-hosted", LabelType.System));
|
||||
agent.Labels.Add(new AgentLabel(VarUtil.OS, LabelType.System));
|
||||
agent.Labels.Add(new AgentLabel(VarUtil.OSArchitecture, LabelType.System));
|
||||
|
||||
foreach (var userLabel in userLabels)
|
||||
{
|
||||
agent.Labels.Add(new AgentLabel(userLabel, LabelType.User));
|
||||
}
|
||||
|
||||
return agent;
|
||||
}
|
||||
|
||||
private TaskAgent CreateNewAgent(string agentName, RSAParameters publicKey)
|
||||
private TaskAgent CreateNewAgent(string agentName, RSAParameters publicKey, ISet<string> userLabels)
|
||||
{
|
||||
TaskAgent agent = new TaskAgent(agentName)
|
||||
{
|
||||
@@ -492,45 +497,140 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
OSDescription = RuntimeInformation.OSDescription,
|
||||
};
|
||||
|
||||
agent.Labels.Add("self-hosted");
|
||||
agent.Labels.Add(VarUtil.OS);
|
||||
agent.Labels.Add(VarUtil.OSArchitecture);
|
||||
agent.Labels.Add(new AgentLabel("self-hosted", LabelType.System));
|
||||
agent.Labels.Add(new AgentLabel(VarUtil.OS, LabelType.System));
|
||||
agent.Labels.Add(new AgentLabel(VarUtil.OSArchitecture, LabelType.System));
|
||||
|
||||
foreach (var userLabel in userLabels)
|
||||
{
|
||||
agent.Labels.Add(new AgentLabel(userLabel, LabelType.User));
|
||||
}
|
||||
|
||||
return agent;
|
||||
}
|
||||
|
||||
private async Task<bool> IsHostedServer(string serverUrl, VssCredentials credentials)
|
||||
private async Task<string> GetRunnerTokenAsync(CommandSettings command, string githubUrl, string tokenType)
|
||||
{
|
||||
// Determine the service deployment type based on connection data. (Hosted/OnPremises)
|
||||
var locationServer = HostContext.GetService<ILocationServer>();
|
||||
VssConnection connection = VssUtil.CreateConnection(new Uri(serverUrl), credentials);
|
||||
await locationServer.ConnectAsync(connection);
|
||||
try
|
||||
var githubPAT = command.GetGitHubPersonalAccessToken();
|
||||
var runnerToken = string.Empty;
|
||||
if (!string.IsNullOrEmpty(githubPAT))
|
||||
{
|
||||
var connectionData = await locationServer.GetConnectionDataAsync();
|
||||
Trace.Info($"Server deployment type: {connectionData.DeploymentType}");
|
||||
return connectionData.DeploymentType.HasFlag(DeploymentFlags.Hosted);
|
||||
Trace.Info($"Retriving runner {tokenType} token using GitHub PAT.");
|
||||
var jitToken = await GetJITRunnerTokenAsync(githubUrl, githubPAT, tokenType);
|
||||
Trace.Info($"Retrived runner {tokenType} token is good to {jitToken.ExpiresAt}.");
|
||||
HostContext.SecretMasker.AddValue(jitToken.Token);
|
||||
runnerToken = jitToken.Token;
|
||||
}
|
||||
catch (Exception ex)
|
||||
|
||||
if (string.IsNullOrEmpty(runnerToken))
|
||||
{
|
||||
// Since the DeploymentType is Enum, deserialization exception means there is a new Enum member been added.
|
||||
// It's more likely to be Hosted since OnPremises is always behind and customer can update their agent if are on-prem
|
||||
Trace.Error(ex);
|
||||
return true;
|
||||
if (string.Equals("registration", tokenType, StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
runnerToken = command.GetRunnerRegisterToken();
|
||||
}
|
||||
else
|
||||
{
|
||||
runnerToken = command.GetRunnerDeletionToken();
|
||||
}
|
||||
}
|
||||
|
||||
private async Task<GitHubAuthResult> GetTenantCredential(string githubUrl, string githubToken)
|
||||
return runnerToken;
|
||||
}
|
||||
|
||||
private async Task<GitHubRunnerRegisterToken> GetJITRunnerTokenAsync(string githubUrl, string githubToken, string tokenType)
|
||||
{
|
||||
var gitHubUrl = new UriBuilder(githubUrl);
|
||||
var githubApiUrl = $"https://api.{gitHubUrl.Host}/repos/{gitHubUrl.Path.Trim('/')}/actions-runners/registration";
|
||||
var githubApiUrl = "";
|
||||
var gitHubUrlBuilder = new UriBuilder(githubUrl);
|
||||
var path = gitHubUrlBuilder.Path.Split('/', '\\', StringSplitOptions.RemoveEmptyEntries);
|
||||
if (path.Length == 1)
|
||||
{
|
||||
// org runner
|
||||
if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
|
||||
{
|
||||
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/orgs/{path[0]}/actions/runners/{tokenType}-token";
|
||||
}
|
||||
else
|
||||
{
|
||||
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/orgs/{path[0]}/actions/runners/{tokenType}-token";
|
||||
}
|
||||
}
|
||||
else if (path.Length == 2)
|
||||
{
|
||||
// repo or enterprise runner.
|
||||
var repoScope = "repos/";
|
||||
if (string.Equals(path[0], "enterprises", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
repoScope = "";
|
||||
}
|
||||
|
||||
if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
|
||||
{
|
||||
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/{repoScope}{path[0]}/{path[1]}/actions/runners/{tokenType}-token";
|
||||
}
|
||||
else
|
||||
{
|
||||
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/{repoScope}{path[0]}/{path[1]}/actions/runners/{tokenType}-token";
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new ArgumentException($"'{githubUrl}' should point to an org or repository.");
|
||||
}
|
||||
|
||||
using (var httpClientHandler = HostContext.CreateHttpClientHandler())
|
||||
using (var httpClient = new HttpClient(httpClientHandler))
|
||||
{
|
||||
var base64EncodingToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"github:{githubToken}"));
|
||||
HostContext.SecretMasker.AddValue(base64EncodingToken);
|
||||
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("basic", base64EncodingToken);
|
||||
httpClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents);
|
||||
httpClient.DefaultRequestHeaders.Accept.ParseAdd("application/vnd.github.v3+json");
|
||||
|
||||
var response = await httpClient.PostAsync(githubApiUrl, new StringContent(string.Empty));
|
||||
|
||||
if (response.IsSuccessStatusCode)
|
||||
{
|
||||
Trace.Info($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}'");
|
||||
var jsonResponse = await response.Content.ReadAsStringAsync();
|
||||
return StringUtil.ConvertFromJson<GitHubRunnerRegisterToken>(jsonResponse);
|
||||
}
|
||||
else
|
||||
{
|
||||
_term.WriteError($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}'");
|
||||
var errorResponse = await response.Content.ReadAsStringAsync();
|
||||
_term.WriteError(errorResponse);
|
||||
response.EnsureSuccessStatusCode();
|
||||
return null;
|
||||
}
|
||||
}
|
||||
}
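The branches above resolve to endpoints of the following shape, where tokenType is "registration" or "remove" (hosts are illustrative):
// org on github.com:        https://api.github.com/orgs/my-org/actions/runners/registration-token
// repo on github.com:       https://api.github.com/repos/my-org/my-repo/actions/runners/registration-token
// enterprise on github.com: https://api.github.com/enterprises/my-enterprise/actions/runners/registration-token
// org on GHES:              https://ghes.example.com/api/v3/orgs/my-org/actions/runners/registration-token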
|
||||
|
||||
private async Task<GitHubAuthResult> GetTenantCredential(string githubUrl, string githubToken, string runnerEvent)
|
||||
{
|
||||
var githubApiUrl = "";
|
||||
var gitHubUrlBuilder = new UriBuilder(githubUrl);
|
||||
if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
|
||||
{
|
||||
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/actions/runner-registration";
|
||||
}
|
||||
else
|
||||
{
|
||||
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/actions/runner-registration";
|
||||
}
|
||||
|
||||
using (var httpClientHandler = HostContext.CreateHttpClientHandler())
|
||||
using (var httpClient = new HttpClient(httpClientHandler))
|
||||
{
|
||||
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("RemoteAuth", githubToken);
|
||||
httpClient.DefaultRequestHeaders.UserAgent.Add(HostContext.UserAgent);
|
||||
httpClient.DefaultRequestHeaders.Accept.Add(new MediaTypeWithQualityHeaderValue("application/vnd.github.shuri-preview+json"));
|
||||
var response = await httpClient.PostAsync(githubApiUrl, new StringContent("", null, "application/json"));
|
||||
httpClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents);
|
||||
|
||||
var bodyObject = new Dictionary<string, string>()
|
||||
{
|
||||
{"url", githubUrl},
|
||||
{"runner_event", runnerEvent}
|
||||
};
|
||||
|
||||
var response = await httpClient.PostAsync(githubApiUrl, new StringContent(StringUtil.ConvertToJson(bodyObject), null, "application/json"));
|
||||
|
||||
if (response.IsSuccessStatusCode)
|
||||
{
|
||||
|
||||
@@ -50,6 +50,18 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
}
|
||||
|
||||
CredentialData credData = store.GetCredentials();
|
||||
var migratedCred = store.GetMigratedCredentials();
|
||||
if (migratedCred != null)
|
||||
{
|
||||
credData = migratedCred;
|
||||
|
||||
// Re-write .credentials with Token URL
|
||||
store.SaveCredential(credData);
|
||||
|
||||
// Delete .credentials_migrated
|
||||
store.DeleteMigratedCredential();
|
||||
}
|
||||
|
||||
ICredentialProvider credProv = GetCredentialProvider(credData.Scheme);
|
||||
credProv.CredentialData = credData;
|
||||
|
||||
@@ -59,6 +71,16 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
}
|
||||
}
|
||||
|
||||
[DataContract]
|
||||
public sealed class GitHubRunnerRegisterToken
|
||||
{
|
||||
[DataMember(Name = "token")]
|
||||
public string Token { get; set; }
|
||||
|
||||
[DataMember(Name = "expires_at")]
|
||||
public string ExpiresAt { get; set; }
|
||||
}
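A hedged example of the payload this contract is deserialized from; the field names match the DataMember attributes above, and the values are made up.
// { "token": "AABF3JGZ...", "expires_at": "2020-01-22T12:13:35.123-08:00" }
// StringUtil.ConvertFromJson<GitHubRunnerRegisterToken>(json).Token and .ExpiresAt map 1:1 to those fields.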
|
||||
|
||||
[DataContract]
|
||||
public sealed class GitHubAuthResult
|
||||
{
|
||||
|
||||
@@ -20,7 +20,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
/// key is returned to the caller.
|
||||
/// </summary>
|
||||
/// <returns>An <c>RSACryptoServiceProvider</c> instance representing the key for the runner</returns>
|
||||
RSACryptoServiceProvider CreateKey();
|
||||
RSA CreateKey();
|
||||
|
||||
/// <summary>
|
||||
/// Deletes the RSA key managed by the key manager.
|
||||
@@ -32,7 +32,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
/// </summary>
|
||||
/// <returns>An <c>RSACryptoServiceProvider</c> instance representing the key for the runner</returns>
|
||||
/// <exception cref="CryptographicException">No key exists in the store</exception>
|
||||
RSACryptoServiceProvider GetKey();
|
||||
RSA GetKey();
|
||||
}
|
||||
|
||||
// Newtonsoft 10 is not working properly with dotnet RSAParameters class
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
using System;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Common.Util;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Services.Common;
|
||||
using GitHub.Services.OAuth;
|
||||
@@ -29,7 +28,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
var authorizationUrl = this.CredentialData.Data.GetValueOrDefault("authorizationUrl", null);
|
||||
|
||||
// For back compat with .credential files that don't have an 'oauthEndpointUrl' section
|
||||
var oathEndpointUrl = this.CredentialData.Data.GetValueOrDefault("oauthEndpointUrl", authorizationUrl);
|
||||
var oauthEndpointUrl = this.CredentialData.Data.GetValueOrDefault("oauthEndpointUrl", authorizationUrl);
|
||||
|
||||
ArgUtil.NotNullOrEmpty(clientId, nameof(clientId));
|
||||
ArgUtil.NotNullOrEmpty(authorizationUrl, nameof(authorizationUrl));
|
||||
@@ -37,9 +36,9 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
// We expect the key to be in the machine store at this point. Configuration should have set all of
|
||||
// this up correctly so we can use the key to generate access tokens.
|
||||
var keyManager = context.GetService<IRSAKeyManager>();
|
||||
var signingCredentials = VssSigningCredentials.Create(() => keyManager.GetKey());
|
||||
var signingCredentials = VssSigningCredentials.Create(() => keyManager.GetKey(), StringUtil.ConvertToBoolean(CredentialData.Data.GetValueOrDefault("requireFipsCryptography"), false));
|
||||
var clientCredential = new VssOAuthJwtBearerClientCredential(clientId, authorizationUrl, signingCredentials);
|
||||
var agentCredential = new VssOAuthCredential(new Uri(oathEndpointUrl, UriKind.Absolute), VssOAuthGrant.ClientCredentials, clientCredential);
|
||||
var agentCredential = new VssOAuthCredential(new Uri(oauthEndpointUrl, UriKind.Absolute), VssOAuthGrant.ClientCredentials, clientCredential);
|
||||
|
||||
// Construct a credentials cache with a single OAuth credential for communication. The windows credential
|
||||
// is explicitly set to null to ensure we never do that negotiation.
|
||||
|
||||
@@ -20,7 +20,8 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
bool secret,
|
||||
string defaultValue,
|
||||
Func<String, bool> validator,
|
||||
bool unattended);
|
||||
bool unattended,
|
||||
bool isOptional = false);
|
||||
}
|
||||
|
||||
public sealed class PromptManager : RunnerService, IPromptManager
|
||||
@@ -56,7 +57,8 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
bool secret,
|
||||
string defaultValue,
|
||||
Func<string, bool> validator,
|
||||
bool unattended)
|
||||
bool unattended,
|
||||
bool isOptional = false)
|
||||
{
|
||||
Trace.Info(nameof(ReadValue));
|
||||
ArgUtil.NotNull(validator, nameof(validator));
|
||||
@@ -70,6 +72,10 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
{
|
||||
return defaultValue;
|
||||
}
|
||||
else if (isOptional)
|
||||
{
|
||||
return string.Empty;
|
||||
}
|
||||
|
||||
// Otherwise throw.
|
||||
throw new Exception($"Invalid configuration provided for {argName}. Terminating unattended configuration.");
|
||||
@@ -85,17 +91,27 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
{
|
||||
_terminal.Write($"[press Enter for {defaultValue}] ");
|
||||
}
|
||||
else if (isOptional){
|
||||
_terminal.Write($"[press Enter to skip] ");
|
||||
}
|
||||
|
||||
// Read and trim the value.
|
||||
value = secret ? _terminal.ReadSecret() : _terminal.ReadLine();
|
||||
value = value?.Trim() ?? string.Empty;
|
||||
|
||||
// Return the default if not specified.
|
||||
if (string.IsNullOrEmpty(value) && !string.IsNullOrEmpty(defaultValue))
|
||||
if (string.IsNullOrEmpty(value))
|
||||
{
|
||||
if (!string.IsNullOrEmpty(defaultValue))
|
||||
{
|
||||
Trace.Info($"Falling back to the default: '{defaultValue}'");
|
||||
return defaultValue;
|
||||
}
|
||||
else if (isOptional)
|
||||
{
|
||||
return string.Empty;
|
||||
}
|
||||
}
|
||||
|
||||
// Return the value if it is not empty and it is valid.
|
||||
// Otherwise try the loop again.
|
||||
|
||||
@@ -13,14 +13,14 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
private string _keyFile;
|
||||
private IHostContext _context;
|
||||
|
||||
public RSACryptoServiceProvider CreateKey()
|
||||
public RSA CreateKey()
|
||||
{
|
||||
RSACryptoServiceProvider rsa = null;
|
||||
RSA rsa = null;
|
||||
if (!File.Exists(_keyFile))
|
||||
{
|
||||
Trace.Info("Creating new RSA key using 2048-bit key length");
|
||||
|
||||
rsa = new RSACryptoServiceProvider(2048);
|
||||
rsa = RSA.Create(2048);
|
||||
|
||||
// Now write the parameters to disk
|
||||
SaveParameters(rsa.ExportParameters(true));
|
||||
@@ -30,7 +30,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
{
|
||||
Trace.Info("Found existing RSA key parameters file {0}", _keyFile);
|
||||
|
||||
rsa = new RSACryptoServiceProvider();
|
||||
rsa = RSA.Create();
|
||||
rsa.ImportParameters(LoadParameters());
|
||||
}
|
||||
|
||||
@@ -46,7 +46,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
}
|
||||
}
|
||||
|
||||
public RSACryptoServiceProvider GetKey()
|
||||
public RSA GetKey()
|
||||
{
|
||||
if (!File.Exists(_keyFile))
|
||||
{
|
||||
@@ -55,7 +55,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
|
||||
Trace.Info("Loading RSA key parameters from file {0}", _keyFile);
|
||||
|
||||
var rsa = new RSACryptoServiceProvider();
|
||||
var rsa = RSA.Create();
|
||||
rsa.ImportParameters(LoadParameters());
|
||||
return rsa;
|
||||
}
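As a sanity check on the swap above, a minimal round trip with the cross-platform RSA factory (standard .NET APIs, not runner code):
using System.Security.Cryptography;

var created = RSA.Create(2048);                              // replaces new RSACryptoServiceProvider(2048)
RSAParameters parameters = created.ExportParameters(true);   // what SaveParameters/IOUtil.SaveObject persist
var restored = RSA.Create();                                 // replaces new RSACryptoServiceProvider()
restored.ImportParameters(parameters);                       // same key material on any platform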
|
||||
|
||||
@@ -14,14 +14,14 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
private string _keyFile;
|
||||
private IHostContext _context;
|
||||
|
||||
public RSACryptoServiceProvider CreateKey()
|
||||
public RSA CreateKey()
|
||||
{
|
||||
RSACryptoServiceProvider rsa = null;
|
||||
RSA rsa = null;
|
||||
if (!File.Exists(_keyFile))
|
||||
{
|
||||
Trace.Info("Creating new RSA key using 2048-bit key length");
|
||||
|
||||
rsa = new RSACryptoServiceProvider(2048);
|
||||
rsa = RSA.Create(2048);
|
||||
|
||||
// Now write the parameters to disk
|
||||
IOUtil.SaveObject(new RSAParametersSerializable(rsa.ExportParameters(true)), _keyFile);
|
||||
@@ -54,7 +54,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
{
|
||||
Trace.Info("Found existing RSA key parameters file {0}", _keyFile);
|
||||
|
||||
rsa = new RSACryptoServiceProvider();
|
||||
rsa = RSA.Create();
|
||||
rsa.ImportParameters(IOUtil.LoadObject<RSAParametersSerializable>(_keyFile).RSAParameters);
|
||||
}
|
||||
|
||||
@@ -70,7 +70,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
}
|
||||
}
|
||||
|
||||
public RSACryptoServiceProvider GetKey()
|
||||
public RSA GetKey()
|
||||
{
|
||||
if (!File.Exists(_keyFile))
|
||||
{
|
||||
@@ -80,7 +80,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
Trace.Info("Loading RSA key parameters from file {0}", _keyFile);
|
||||
|
||||
var parameters = IOUtil.LoadObject<RSAParametersSerializable>(_keyFile).RSAParameters;
|
||||
var rsa = new RSACryptoServiceProvider();
|
||||
var rsa = RSA.Create();
|
||||
rsa.ImportParameters(parameters);
|
||||
return rsa;
|
||||
}
|
||||
|
||||
@@ -1,6 +1,7 @@
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using System;
using System.Linq;
using System.IO;
using System.Security.Principal;

@@ -46,6 +47,21 @@ namespace GitHub.Runner.Listener.Configuration
string.Equals(value, "N", StringComparison.CurrentCultureIgnoreCase);
}

public static bool LabelsValidator(string labels)
{
if (!string.IsNullOrEmpty(labels))
{
var labelSet = labels.Split(',').Where(x => !string.IsNullOrEmpty(x)).ToHashSet<string>(StringComparer.OrdinalIgnoreCase);

if (labelSet.Any(x => x.Length > 256))
{
return false;
}
}

return true;
}

public static bool NonEmptyValidator(string value)
{
return !string.IsNullOrEmpty(value);

@@ -12,12 +12,14 @@ using System.Linq;
using GitHub.Services.Common;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using GitHub.Services.WebApi.Jwt;

namespace GitHub.Runner.Listener
{
[ServiceLocator(Default = typeof(JobDispatcher))]
public interface IJobDispatcher : IRunnerService
{
bool Busy { get; }
TaskCompletionSource<bool> RunOnceJobCompleted { get; }
void Run(Pipelines.AgentJobRequestMessage message, bool runOnce = false);
bool Cancel(JobCancelMessage message);
@@ -69,6 +71,8 @@ namespace GitHub.Runner.Listener

public TaskCompletionSource<bool> RunOnceJobCompleted => _runOnceJobCompleted;

public bool Busy { get; private set; }

public void Run(Pipelines.AgentJobRequestMessage jobRequestMessage, bool runOnce = false)
{
Trace.Info($"Job request {jobRequestMessage.RequestId} for plan {jobRequestMessage.Plan.PlanId} job {jobRequestMessage.JobId} received.");
@@ -83,15 +87,30 @@ namespace GitHub.Runner.Listener
}
}

var orchestrationId = string.Empty;
var systemConnection = jobRequestMessage.Resources.Endpoints.SingleOrDefault(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
if (systemConnection?.Authorization != null &&
systemConnection.Authorization.Parameters.TryGetValue("AccessToken", out var accessToken) &&
!string.IsNullOrEmpty(accessToken))
{
var jwt = JsonWebToken.Create(accessToken);
var claims = jwt.ExtractClaims();
orchestrationId = claims.FirstOrDefault(x => string.Equals(x.Type, "orchid", StringComparison.OrdinalIgnoreCase))?.Value;
if (!string.IsNullOrEmpty(orchestrationId))
{
Trace.Info($"Pull OrchestrationId {orchestrationId} from JWT claims");
}
}

WorkerDispatcher newDispatch = new WorkerDispatcher(jobRequestMessage.JobId, jobRequestMessage.RequestId);
if (runOnce)
{
Trace.Info("Start dispatcher for one time used runner.");
newDispatch.WorkerDispatch = RunOnceAsync(jobRequestMessage, currentDispatch, newDispatch.WorkerCancellationTokenSource.Token, newDispatch.WorkerCancelTimeoutKillTokenSource.Token);
newDispatch.WorkerDispatch = RunOnceAsync(jobRequestMessage, orchestrationId, currentDispatch, newDispatch.WorkerCancellationTokenSource.Token, newDispatch.WorkerCancelTimeoutKillTokenSource.Token);
}
else
{
newDispatch.WorkerDispatch = RunAsync(jobRequestMessage, currentDispatch, newDispatch.WorkerCancellationTokenSource.Token, newDispatch.WorkerCancelTimeoutKillTokenSource.Token);
newDispatch.WorkerDispatch = RunAsync(jobRequestMessage, orchestrationId, currentDispatch, newDispatch.WorkerCancellationTokenSource.Token, newDispatch.WorkerCancelTimeoutKillTokenSource.Token);
}

_jobInfos.TryAdd(newDispatch.JobId, newDispatch);
@@ -247,7 +266,7 @@ namespace GitHub.Runner.Listener
Task completedTask = await Task.WhenAny(jobDispatch.WorkerDispatch, Task.Delay(TimeSpan.FromSeconds(45)));
if (completedTask != jobDispatch.WorkerDispatch)
{
// at this point, the job exectuion might encounter some dead lock and even not able to be canclled.
// at this point, the job execution might encounter some dead lock and even not able to be cancelled.
// no need to localize the exception string should never happen.
throw new InvalidOperationException($"Job dispatch process for {jobDispatch.JobId} has encountered unexpected error, the dispatch task is not able to be canceled within 45 seconds.");
}
@@ -281,11 +300,11 @@ namespace GitHub.Runner.Listener
}
}

private async Task RunOnceAsync(Pipelines.AgentJobRequestMessage message, WorkerDispatcher previousJobDispatch, CancellationToken jobRequestCancellationToken, CancellationToken workerCancelTimeoutKillToken)
private async Task RunOnceAsync(Pipelines.AgentJobRequestMessage message, string orchestrationId, WorkerDispatcher previousJobDispatch, CancellationToken jobRequestCancellationToken, CancellationToken workerCancelTimeoutKillToken)
{
try
{
await RunAsync(message, previousJobDispatch, jobRequestCancellationToken, workerCancelTimeoutKillToken);
await RunAsync(message, orchestrationId, previousJobDispatch, jobRequestCancellationToken, workerCancelTimeoutKillToken);
}
finally
{
@@ -294,7 +313,10 @@ namespace GitHub.Runner.Listener
}
}

private async Task RunAsync(Pipelines.AgentJobRequestMessage message, WorkerDispatcher previousJobDispatch, CancellationToken jobRequestCancellationToken, CancellationToken workerCancelTimeoutKillToken)
private async Task RunAsync(Pipelines.AgentJobRequestMessage message, string orchestrationId, WorkerDispatcher previousJobDispatch, CancellationToken jobRequestCancellationToken, CancellationToken workerCancelTimeoutKillToken)
{
Busy = true;
try
{
if (previousJobDispatch != null)
{
@@ -322,7 +344,7 @@ namespace GitHub.Runner.Listener

// start renew job request
Trace.Info($"Start renew job request {requestId} for job {message.JobId}.");
Task renewJobRequest = RenewJobRequestAsync(_poolId, requestId, lockToken, firstJobRequestRenewed, lockRenewalTokenSource.Token);
Task renewJobRequest = RenewJobRequestAsync(_poolId, requestId, lockToken, orchestrationId, firstJobRequestRenewed, lockRenewalTokenSource.Token);

// wait till first renew succeed or job request is canceled
// not even start worker if the first renew fail
@@ -595,8 +617,13 @@ namespace GitHub.Runner.Listener
}
}
}
finally
{
Busy = false;
}
}

public async Task RenewJobRequestAsync(int poolId, long requestId, Guid lockToken, TaskCompletionSource<int> firstJobRequestRenewed, CancellationToken token)
public async Task RenewJobRequestAsync(int poolId, long requestId, Guid lockToken, string orchestrationId, TaskCompletionSource<int> firstJobRequestRenewed, CancellationToken token)
{
var runnerServer = HostContext.GetService<IRunnerServer>();
TaskAgentJobRequest request = null;
@@ -609,7 +636,7 @@ namespace GitHub.Runner.Listener
{
try
{
request = await runnerServer.RenewAgentRequestAsync(poolId, requestId, lockToken, token);
request = await runnerServer.RenewAgentRequestAsync(poolId, requestId, lockToken, orchestrationId, token);

Trace.Info($"Successfully renew job request {requestId}, job is valid till {request.LockedUntil.Value}");

@@ -831,7 +858,6 @@ namespace GitHub.Runner.Listener
}
}

// TODO: We need send detailInfo back to DT in order to add an issue for the job
private async Task CompleteJobRequestAsync(int poolId, Pipelines.AgentJobRequestMessage message, Guid lockToken, TaskResult result, string detailInfo = null)
{
Trace.Entering();
@@ -925,8 +951,10 @@ namespace GitHub.Runner.Listener
ArgUtil.NotNull(timeline, nameof(timeline));
TimelineRecord jobRecord = timeline.Records.FirstOrDefault(x => x.Id == message.JobId && x.RecordType == "Job");
ArgUtil.NotNull(jobRecord, nameof(jobRecord));
var unhandledExceptionIssue = new Issue() { Type = IssueType.Error, Message = errorMessage };
unhandledExceptionIssue.Data[Constants.Runner.InternalTelemetryIssueDataKey] = Constants.Runner.WorkerCrash;
jobRecord.ErrorCount++;
jobRecord.Issues.Add(new Issue() { Type = IssueType.Error, Message = errorMessage });
jobRecord.Issues.Add(unhandledExceptionIssue);
await jobServer.UpdateTimelineRecordsAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, new TimelineRecord[] { jobRecord }, CancellationToken.None);
}
catch (Exception ex)

@@ -118,6 +118,20 @@ namespace GitHub.Runner.Listener
Trace.Error("Catch exception during create session.");
Trace.Error(ex);

if (ex is VssOAuthTokenRequestException && creds.Federated is VssOAuthCredential vssOAuthCred)
{
// Check whether we get 401 because the runner registration already removed by the service.
// If the runner registration get deleted, we can't exchange oauth token.
Trace.Error("Test oauth app registration.");
var oauthTokenProvider = new VssOAuthTokenProvider(vssOAuthCred, new Uri(serverUrl));
var authError = await oauthTokenProvider.ValidateCredentialAsync(token);
if (string.Equals(authError, "invalid_client", StringComparison.OrdinalIgnoreCase))
{
_term.WriteError("Failed to create a session. The runner registration has been deleted from the server, please re-configure.");
return false;
}
}

if (!IsSessionCreationExceptionRetriable(ex))
{
_term.WriteError($"Failed to create session. {ex.Message}");
@@ -305,7 +319,8 @@ namespace GitHub.Runner.Listener
var keyManager = HostContext.GetService<IRSAKeyManager>();
using (var rsa = keyManager.GetKey())
{
return aes.CreateDecryptor(rsa.Decrypt(_session.EncryptionKey.Value, RSAEncryptionPadding.OaepSHA1), message.IV);
var padding = _session.UseFipsEncryption ? RSAEncryptionPadding.OaepSHA256 : RSAEncryptionPadding.OaepSHA1;
return aes.CreateDecryptor(rsa.Decrypt(_session.EncryptionKey.Value, padding), message.IV);
}
}
else

@@ -102,7 +102,9 @@ namespace GitHub.Runner.Listener
IRunner runner = context.GetService<IRunner>();
try
{
return await runner.ExecuteCommand(command);
var returnCode = await runner.ExecuteCommand(command);
trace.Info($"Runner execution has finished with return code {returnCode}");
return returnCode;
}
catch (OperationCanceledException) when (context.RunnerShutdownToken.IsCancellationRequested)
{

@@ -1,6 +1,5 @@
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Listener.Configuration;
using GitHub.Runner.Common.Util;
using System;
using System.Threading;
using System.Threading.Tasks;
@@ -11,6 +10,8 @@ using System.Reflection;
using System.Runtime.CompilerServices;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using System.Linq;
using GitHub.Runner.Listener.Check;

namespace GitHub.Runner.Listener
{
@@ -37,7 +38,7 @@ namespace GitHub.Runner.Listener
{
try
{
VssUtil.InitializeVssClientSettings(HostContext.UserAgent, HostContext.WebProxy);
VssUtil.InitializeVssClientSettings(HostContext.UserAgents, HostContext.WebProxy);

_inConfigStage = true;
_completedCommand.Reset();
@@ -72,6 +73,46 @@ namespace GitHub.Runner.Listener
return Constants.Runner.ReturnCode.Success;
}

if (command.Check)
{
var url = command.GetUrl();
var pat = command.GetGitHubPersonalAccessToken(required: true);
var checkExtensions = HostContext.GetService<IExtensionManager>().GetExtensions<ICheckExtension>();
var sortedChecks = checkExtensions.OrderBy(x => x.Order);
foreach (var check in sortedChecks)
{
_term.WriteLine($"**********************************************************************************************************************");
_term.WriteLine($"** Check: {check.CheckName}");
_term.WriteLine($"** Description: {check.CheckDescription}");
_term.WriteLine($"**********************************************************************************************************************");
var result = await check.RunCheck(url, pat);
if (!result)
{
_term.WriteLine($"** **");
_term.WriteLine($"** F A I L **");
_term.WriteLine($"** **");
_term.WriteLine($"**********************************************************************************************************************");
_term.WriteLine($"** Log: {check.CheckLog}");
_term.WriteLine($"** Help Doc: {check.HelpLink}");
_term.WriteLine($"**********************************************************************************************************************");
}
else
{
_term.WriteLine($"** **");
_term.WriteLine($"** P A S S **");
_term.WriteLine($"** **");
_term.WriteLine($"**********************************************************************************************************************");
_term.WriteLine($"** Log: {check.CheckLog}");
_term.WriteLine($"**********************************************************************************************************************");
}

_term.WriteLine();
_term.WriteLine();
}

return Constants.Runner.ReturnCode.Success;
}

// Configure runner prompt for args if not supplied
// Unattended configure mode will not prompt for args if not supplied and error on any missing or invalid value.
if (command.Configure)
@@ -466,6 +507,8 @@ Config Options:
--url string Repository to add the runner to. Required if unattended
--token string Registration token. Required if unattended
--name string Name of the runner to configure (default {Environment.MachineName ?? "myrunner"})
--runnergroup string Name of the runner group to add this runner to (defaults to the default runner group)
--labels string Extra labels in addition to the default: 'self-hosted,{Constants.Runner.Platform},{Constants.Runner.PlatformArchitecture}'
--work string Relative runner work directory (default {Constants.Path.WorkDirectory})
--replace Replace any existing runner with the same name (default false)");
#if OS_WINDOWS
@@ -478,7 +521,9 @@ Examples:
Configure a runner non-interactively:
.{separator}config.{ext} --unattended --url <url> --token <token>
Configure a runner non-interactively, replacing any existing runner with the same name:
.{separator}config.{ext} --unattended --url <url> --token <token> --replace [--name <name>]");
.{separator}config.{ext} --unattended --url <url> --token <token> --replace [--name <name>]
Configure a runner non-interactively with three extra labels:
.{separator}config.{ext} --unattended --url <url> --token <token> --labels L1,L2,L3");
#if OS_WINDOWS
_term.WriteLine($@" Configure a runner to run as a service:");
_term.WriteLine($@" .{separator}config.{ext} --url <url> --token <token> --runasservice");

@@ -17,6 +17,7 @@ namespace GitHub.Runner.Listener
[ServiceLocator(Default = typeof(SelfUpdater))]
public interface ISelfUpdater : IRunnerService
{
bool Busy { get; }
Task<bool> SelfUpdate(AgentRefreshMessage updateMessage, IJobDispatcher jobDispatcher, bool restartInteractiveRunner, CancellationToken token);
}

@@ -31,6 +32,8 @@ namespace GitHub.Runner.Listener
private int _poolId;
private int _agentId;

public bool Busy { get; private set; }

public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);
@@ -44,6 +47,9 @@ namespace GitHub.Runner.Listener
}

public async Task<bool> SelfUpdate(AgentRefreshMessage updateMessage, IJobDispatcher jobDispatcher, bool restartInteractiveRunner, CancellationToken token)
{
Busy = true;
try
{
if (!await UpdateNeeded(updateMessage.TargetVersion, token))
{
@@ -92,6 +98,11 @@ namespace GitHub.Runner.Listener

return true;
}
finally
{
Busy = false;
}
}

private async Task<bool> UpdateNeeded(string targetVersion, CancellationToken token)
{

@@ -80,7 +80,12 @@ namespace GitHub.Runner.Plugins.Repository.v1_0
// Validate args.
ArgUtil.NotNull(executionContext, nameof(executionContext));
executionContext.Output($"Syncing repository: {repoFullName}");
Uri repositoryUrl = new Uri($"https://github.com/{repoFullName}");

// Repository URL
var githubUrl = executionContext.GetGitHubContext("server_url");
var githubUri = new Uri(!string.IsNullOrEmpty(githubUrl) ? githubUrl : "https://github.com");
var portInfo = githubUri.IsDefaultPort ? string.Empty : $":{githubUri.Port}";
Uri repositoryUrl = new Uri($"{githubUri.Scheme}://{githubUri.Host}{portInfo}/{repoFullName}");
if (!repositoryUrl.IsAbsoluteUri)
{
throw new InvalidOperationException("Repository url need to be an absolute uri.");

@@ -271,6 +271,14 @@ namespace GitHub.Runner.Sdk
// Indicate GitHub Actions process.
_proc.StartInfo.Environment["GITHUB_ACTIONS"] = "true";

// Set CI=true when no one else already set it.
// CI=true is common set in most CI provider in GitHub
if (!_proc.StartInfo.Environment.ContainsKey("CI") &&
Environment.GetEnvironmentVariable("CI") == null)
{
_proc.StartInfo.Environment["CI"] = "true";
}

// Hook up the events.
_proc.EnableRaisingEvents = true;
_proc.Exited += ProcessExitedHandler;
@@ -310,7 +318,12 @@ namespace GitHub.Runner.Sdk
}
}

using (var registration = cancellationToken.Register(async () => await CancelAndKillProcessTree(killProcessOnCancel)))
var cancellationFinished = new TaskCompletionSource<bool>();
using (var registration = cancellationToken.Register(async () =>
{
await CancelAndKillProcessTree(killProcessOnCancel);
cancellationFinished.TrySetResult(true);
}))
{
Trace.Info($"Process started with process id {_proc.Id}, waiting for process exit.");
while (true)
@@ -333,6 +346,13 @@ namespace GitHub.Runner.Sdk
// data buffers one last time before returning
ProcessOutput();

if (cancellationToken.IsCancellationRequested)
{
// Ensure cancellation also finish on the cancellationToken.Register thread.
await cancellationFinished.Task;
Trace.Info($"Process Cancellation finished.");
}

Trace.Info($"Finished process {_proc.Id} with exit code {_proc.ExitCode}, and elapsed time {_stopWatch.Elapsed}.");
}

@@ -1,4 +1,4 @@
using System;
using System;
using System.Collections.Generic;
using System.Net;
using System.Text.RegularExpressions;
@@ -71,7 +71,7 @@ namespace GitHub.Runner.Sdk

if (!string.IsNullOrEmpty(httpProxyAddress) && Uri.TryCreate(httpProxyAddress, UriKind.Absolute, out var proxyHttpUri))
{
_httpProxyAddress = proxyHttpUri.AbsoluteUri;
_httpProxyAddress = proxyHttpUri.OriginalString;

// Set both environment variables since there are tools support both casing (curl, wget) and tools support only one casing (docker)
Environment.SetEnvironmentVariable("HTTP_PROXY", _httpProxyAddress);
@@ -101,7 +101,7 @@ namespace GitHub.Runner.Sdk

if (!string.IsNullOrEmpty(httpsProxyAddress) && Uri.TryCreate(httpsProxyAddress, UriKind.Absolute, out var proxyHttpsUri))
{
_httpsProxyAddress = proxyHttpsUri.AbsoluteUri;
_httpsProxyAddress = proxyHttpsUri.OriginalString;

// Set both environment variables since there are tools support both casing (curl, wget) and tools support only one casing (docker)
Environment.SetEnvironmentVariable("HTTPS_PROXY", _httpsProxyAddress);

@@ -30,7 +30,7 @@ namespace GitHub.Runner.Sdk
//
// For example, on an en-US box, this is required for loading the encoding for the
// default console output code page '437'. Without loading the correct encoding for
// code page IBM437, some characters cannot be translated correctly, e.g. write 'ç'
// code page IBM437, some characters cannot be translated correctly, e.g. write 'ç'
// from powershell.exe.
Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);
#endif

@@ -4,6 +4,13 @@ namespace GitHub.Runner.Sdk
{
public static class UrlUtil
{
public static bool IsHostedServer(UriBuilder gitHubUrl)
{
return string.Equals(gitHubUrl.Host, "github.com", StringComparison.OrdinalIgnoreCase) ||
string.Equals(gitHubUrl.Host, "www.github.com", StringComparison.OrdinalIgnoreCase) ||
string.Equals(gitHubUrl.Host, "github.localhost", StringComparison.OrdinalIgnoreCase);
}

public static Uri GetCredentialEmbeddedUrl(Uri baseUrl, string username, string password)
{
ArgUtil.NotNull(baseUrl, nameof(baseUrl));

@@ -14,10 +14,10 @@ namespace GitHub.Runner.Sdk
{
public static class VssUtil
{
public static void InitializeVssClientSettings(ProductInfoHeaderValue additionalUserAgent, IWebProxy proxy)
public static void InitializeVssClientSettings(List<ProductInfoHeaderValue> additionalUserAgents, IWebProxy proxy)
{
var headerValues = new List<ProductInfoHeaderValue>();
headerValues.Add(additionalUserAgent);
headerValues.AddRange(additionalUserAgents);
headerValues.Add(new ProductInfoHeaderValue($"({RuntimeInformation.OSDescription.Trim()})"));

if (VssClientHttpRequestSettings.Default.UserAgent != null && VssClientHttpRequestSettings.Default.UserAgent.Count > 0)

@@ -11,6 +11,11 @@ namespace GitHub.Runner.Sdk
{
ArgUtil.NotNullOrEmpty(command, nameof(command));
trace?.Info($"Which: '{command}'");
if (Path.IsPathFullyQualified(command) && File.Exists(command))
{
trace?.Info($"Fully qualified path: '{command}'");
return command;
}
string path = Environment.GetEnvironmentVariable(PathUtil.PathVariable);
if (string.IsNullOrEmpty(path))
{

@@ -1,6 +1,8 @@
|
||||
using GitHub.DistributedTask.Pipelines;
|
||||
using GitHub.DistributedTask.Pipelines.ContextData;
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Common.Util;
|
||||
using GitHub.Runner.Worker.Container;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
@@ -15,14 +17,14 @@ namespace GitHub.Runner.Worker
|
||||
{
|
||||
void EnablePluginInternalCommand();
|
||||
void DisablePluginInternalCommand();
|
||||
bool TryProcessCommand(IExecutionContext context, string input);
|
||||
bool TryProcessCommand(IExecutionContext context, string input, ContainerInfo container);
|
||||
}
|
||||
|
||||
public sealed class ActionCommandManager : RunnerService, IActionCommandManager
|
||||
{
|
||||
private const string _stopCommand = "stop-commands";
|
||||
private readonly Dictionary<string, IActionCommandExtension> _commandExtensions = new Dictionary<string, IActionCommandExtension>(StringComparer.OrdinalIgnoreCase);
|
||||
private HashSet<string> _registeredCommands = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
|
||||
private readonly HashSet<string> _registeredCommands = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
|
||||
private readonly object _commandSerializeLock = new object();
|
||||
private bool _stopProcessCommand = false;
|
||||
private string _stopToken = null;
|
||||
@@ -58,7 +60,7 @@ namespace GitHub.Runner.Worker
|
||||
_registeredCommands.Remove("internal-set-repo-path");
|
||||
}
|
||||
|
||||
public bool TryProcessCommand(IExecutionContext context, string input)
|
||||
public bool TryProcessCommand(IExecutionContext context, string input, ContainerInfo container)
|
||||
{
|
||||
if (string.IsNullOrEmpty(input))
|
||||
{
|
||||
@@ -114,7 +116,7 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
try
|
||||
{
|
||||
extension.ProcessCommand(context, input, actionCommand);
|
||||
extension.ProcessCommand(context, input, actionCommand, container);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
@@ -140,7 +142,7 @@ namespace GitHub.Runner.Worker
|
||||
string Command { get; }
|
||||
bool OmitEcho { get; }
|
||||
|
||||
void ProcessCommand(IExecutionContext context, string line, ActionCommand command);
|
||||
void ProcessCommand(IExecutionContext context, string line, ActionCommand command, ContainerInfo container);
|
||||
}
|
||||
|
||||
public sealed class InternalPluginSetRepoPathCommandExtension : RunnerService, IActionCommandExtension
|
||||
@@ -150,7 +152,7 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
public Type ExtensionType => typeof(IActionCommandExtension);
|
||||
|
||||
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command)
|
||||
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command, ContainerInfo container)
|
||||
{
|
||||
if (!command.Properties.TryGetValue(SetRepoPathCommandProperties.repoFullName, out string repoFullName) || string.IsNullOrEmpty(repoFullName))
|
||||
{
|
||||
@@ -180,14 +182,51 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
public Type ExtensionType => typeof(IActionCommandExtension);
|
||||
|
||||
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command)
|
||||
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command, ContainerInfo container)
|
||||
{
|
||||
var allowUnsecureCommands = false;
|
||||
bool.TryParse(Environment.GetEnvironmentVariable(Constants.Variables.Actions.AllowUnsupportedCommands), out allowUnsecureCommands);
|
||||
|
||||
// Apply environment from env context, env context contains job level env and action's env block
|
||||
#if OS_WINDOWS
|
||||
var envContext = context.ExpressionValues["env"] as DictionaryContextData;
|
||||
#else
|
||||
var envContext = context.ExpressionValues["env"] as CaseSensitiveDictionaryContextData;
|
||||
#endif
|
||||
if (!allowUnsecureCommands && envContext.ContainsKey(Constants.Variables.Actions.AllowUnsupportedCommands))
|
||||
{
|
||||
bool.TryParse(envContext[Constants.Variables.Actions.AllowUnsupportedCommands].ToString(), out allowUnsecureCommands);
|
||||
}
|
||||
|
||||
if (!allowUnsecureCommands)
|
||||
{
|
||||
throw new Exception(String.Format(Constants.Runner.UnsupportedCommandMessageDisabled, this.Command));
|
||||
}
|
||||
|
||||
if (!command.Properties.TryGetValue(SetEnvCommandProperties.Name, out string envName) || string.IsNullOrEmpty(envName))
|
||||
{
|
||||
throw new Exception("Required field 'name' is missing in ##[set-env] command.");
|
||||
}
|
||||
|
||||
context.EnvironmentVariables[envName] = command.Data;
|
||||
|
||||
foreach (var blocked in _setEnvBlockList)
|
||||
{
|
||||
if (string.Equals(blocked, envName, StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
// Log Telemetry and let user know they shouldn't do this
|
||||
var issue = new Issue()
|
||||
{
|
||||
Type = IssueType.Error,
|
||||
Message = $"Can't update {blocked} environment variable using ::set-env:: command."
|
||||
};
|
||||
issue.Data[Constants.Runner.InternalTelemetryIssueDataKey] = $"{Constants.Runner.UnsupportedCommand}_{envName}";
|
||||
context.AddIssue(issue);
|
||||
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
context.Global.EnvironmentVariables[envName] = command.Data;
|
||||
context.SetEnvContext(envName, command.Data);
|
||||
context.Debug($"{envName}='{command.Data}'");
|
||||
}
|
||||
@@ -196,6 +235,11 @@ namespace GitHub.Runner.Worker
|
||||
{
|
||||
public const String Name = "name";
|
||||
}
|
||||
|
||||
private string[] _setEnvBlockList =
|
||||
{
|
||||
"NODE_OPTIONS"
|
||||
};
|
||||
}
|
||||
|
||||
public sealed class SetOutputCommandExtension : RunnerService, IActionCommandExtension
|
||||
@@ -205,7 +249,7 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
public Type ExtensionType => typeof(IActionCommandExtension);
|
||||
|
||||
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command)
|
||||
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command, ContainerInfo container)
|
||||
{
|
||||
if (!command.Properties.TryGetValue(SetOutputCommandProperties.Name, out string outputName) || string.IsNullOrEmpty(outputName))
|
||||
{
|
||||
@@ -229,7 +273,7 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
public Type ExtensionType => typeof(IActionCommandExtension);
|
||||
|
||||
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command)
|
||||
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command, ContainerInfo container)
|
||||
{
|
||||
if (!command.Properties.TryGetValue(SaveStateCommandProperties.Name, out string stateName) || string.IsNullOrEmpty(stateName))
|
||||
{
|
||||
@@ -253,7 +297,7 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
public Type ExtensionType => typeof(IActionCommandExtension);
|
||||
|
||||
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command)
|
||||
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command, ContainerInfo container)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(command.Data))
|
||||
{
|
||||
@@ -279,11 +323,30 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
public Type ExtensionType => typeof(IActionCommandExtension);
|
||||
|
||||
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command)
|
||||
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command, ContainerInfo container)
|
||||
{
|
||||
var allowUnsecureCommands = false;
|
||||
bool.TryParse(Environment.GetEnvironmentVariable(Constants.Variables.Actions.AllowUnsupportedCommands), out allowUnsecureCommands);
|
||||
|
||||
// Apply environment from env context, env context contains job level env and action's env block
|
||||
#if OS_WINDOWS
|
||||
var envContext = context.ExpressionValues["env"] as DictionaryContextData;
|
||||
#else
|
||||
var envContext = context.ExpressionValues["env"] as CaseSensitiveDictionaryContextData;
|
||||
#endif
|
||||
if (!allowUnsecureCommands && envContext.ContainsKey(Constants.Variables.Actions.AllowUnsupportedCommands))
|
||||
{
|
||||
bool.TryParse(envContext[Constants.Variables.Actions.AllowUnsupportedCommands].ToString(), out allowUnsecureCommands);
|
||||
}
|
||||
|
||||
if (!allowUnsecureCommands)
|
||||
{
|
||||
throw new Exception(String.Format(Constants.Runner.UnsupportedCommandMessageDisabled, this.Command));
|
||||
}
|
||||
|
||||
ArgUtil.NotNullOrEmpty(command.Data, "path");
|
||||
context.PrependPath.RemoveAll(x => string.Equals(x, command.Data, StringComparison.CurrentCulture));
|
||||
context.PrependPath.Add(command.Data);
|
||||
context.Global.PrependPath.RemoveAll(x => string.Equals(x, command.Data, StringComparison.CurrentCulture));
|
||||
context.Global.PrependPath.Add(command.Data);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -294,7 +357,7 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
public Type ExtensionType => typeof(IActionCommandExtension);
|
||||
|
||||
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command)
|
||||
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command, ContainerInfo container)
|
||||
{
|
||||
var file = command.Data;
|
||||
|
||||
@@ -306,9 +369,9 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
|
||||
// Translate file path back from container path
|
||||
if (context.Container != null)
|
||||
if (container != null)
|
||||
{
|
||||
file = context.Container.TranslateToHostPath(file);
|
||||
file = container.TranslateToHostPath(file);
|
||||
}
|
||||
|
||||
// Root the path
|
||||
@@ -341,7 +404,7 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
public Type ExtensionType => typeof(IActionCommandExtension);
|
||||
|
||||
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command)
|
||||
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command, ContainerInfo container)
|
||||
{
|
||||
command.Properties.TryGetValue(RemoveMatcherCommandProperties.Owner, out string owner);
|
||||
var file = command.Data;
|
||||
@@ -369,9 +432,9 @@ namespace GitHub.Runner.Worker
|
||||
else
|
||||
{
|
||||
// Translate file path back from container path
|
||||
if (context.Container != null)
|
||||
if (container != null)
|
||||
{
|
||||
file = context.Container.TranslateToHostPath(file);
|
||||
file = container.TranslateToHostPath(file);
|
||||
}
|
||||
|
||||
// Root the path
|
||||
@@ -409,7 +472,7 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
public Type ExtensionType => typeof(IActionCommandExtension);
|
||||
|
||||
public void ProcessCommand(IExecutionContext context, string inputLine, ActionCommand command)
|
||||
public void ProcessCommand(IExecutionContext context, string inputLine, ActionCommand command, ContainerInfo container)
|
||||
{
|
||||
context.Debug(command.Data);
|
||||
}
|
||||
@@ -437,7 +500,7 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
public Type ExtensionType => typeof(IActionCommandExtension);
|
||||
|
||||
public void ProcessCommand(IExecutionContext context, string inputLine, ActionCommand command)
|
||||
public void ProcessCommand(IExecutionContext context, string inputLine, ActionCommand command, ContainerInfo container)
|
||||
{
|
||||
command.Properties.TryGetValue(IssueCommandProperties.File, out string file);
|
||||
command.Properties.TryGetValue(IssueCommandProperties.Line, out string line);
|
||||
@@ -454,10 +517,10 @@ namespace GitHub.Runner.Worker
|
||||
{
|
||||
issue.Category = "Code";
|
||||
|
||||
if (context.Container != null)
|
||||
if (container != null)
|
||||
{
|
||||
// Translate file path back from container path
|
||||
file = context.Container.TranslateToHostPath(file);
|
||||
file = container.TranslateToHostPath(file);
|
||||
command.Properties[IssueCommandProperties.File] = file;
|
||||
}
|
||||
|
||||
@@ -484,9 +547,12 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
|
||||
foreach (var property in command.Properties)
|
||||
{
|
||||
if (!string.Equals(property.Key, Constants.Runner.InternalTelemetryIssueDataKey, StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
issue.Data[property.Key] = property.Value;
|
||||
}
|
||||
}
|
||||
|
||||
context.AddIssue(issue);
|
||||
}
|
||||
@@ -517,7 +583,7 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
public Type ExtensionType => typeof(IActionCommandExtension);
|
||||
|
||||
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command)
|
||||
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command, ContainerInfo container)
|
||||
{
|
||||
var data = this is GroupCommandExtension ? command.Data : string.Empty;
|
||||
context.Output($"##[{Command}]{data}");
|
||||
@@ -531,7 +597,7 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
public Type ExtensionType => typeof(IActionCommandExtension);
|
||||
|
||||
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command)
|
||||
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command, ContainerInfo container)
|
||||
{
|
||||
ArgUtil.NotNullOrEmpty(command.Data, "value");
|
||||
|
||||
|
||||
@@ -1,31 +1,43 @@
|
||||
using System;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.IO.Compression;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using System.Net;
|
||||
using System.Net.Http;
|
||||
using System.Net.Http.Headers;
|
||||
using System.Text;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.DistributedTask.ObjectTemplating.Tokens;
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Common.Util;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Runner.Worker.Container;
|
||||
using GitHub.Services.Common;
|
||||
using Newtonsoft.Json;
|
||||
using WebApi = GitHub.DistributedTask.WebApi;
|
||||
using Pipelines = GitHub.DistributedTask.Pipelines;
|
||||
using PipelineTemplateConstants = GitHub.DistributedTask.Pipelines.ObjectTemplating.PipelineTemplateConstants;
|
||||
|
||||
namespace GitHub.Runner.Worker
|
||||
{
|
||||
public class PrepareResult
|
||||
{
|
||||
public PrepareResult(List<JobExtensionRunner> containerSetupSteps, Dictionary<Guid, IActionRunner> preStepTracker)
|
||||
{
|
||||
this.ContainerSetupSteps = containerSetupSteps;
|
||||
this.PreStepTracker = preStepTracker;
|
||||
}
|
||||
|
||||
public List<JobExtensionRunner> ContainerSetupSteps { get; set; }
|
||||
|
||||
public Dictionary<Guid, IActionRunner> PreStepTracker { get; set; }
|
||||
}
|
||||
|
||||
[ServiceLocator(Default = typeof(ActionManager))]
|
||||
public interface IActionManager : IRunnerService
|
||||
{
|
||||
Dictionary<Guid, ContainerInfo> CachedActionContainers { get; }
|
||||
Task<List<JobExtensionRunner>> PrepareActionsAsync(IExecutionContext executionContext, IEnumerable<Pipelines.JobStep> steps);
|
||||
Task<PrepareResult> PrepareActionsAsync(IExecutionContext executionContext, IEnumerable<Pipelines.JobStep> steps);
|
||||
Definition LoadAction(IExecutionContext executionContext, Pipelines.ActionStep action);
|
||||
}
|
||||
|
||||
@@ -35,11 +47,11 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
//81920 is the default used by System.IO.Stream.CopyTo and is under the large object heap threshold (85k).
|
||||
private const int _defaultCopyBufferSize = 81920;
|
||||
|
||||
private const string _dotcomApiUrl = "https://api.github.com";
|
||||
private readonly Dictionary<Guid, ContainerInfo> _cachedActionContainers = new Dictionary<Guid, ContainerInfo>();
|
||||
|
||||
public Dictionary<Guid, ContainerInfo> CachedActionContainers => _cachedActionContainers;
|
||||
public async Task<List<JobExtensionRunner>> PrepareActionsAsync(IExecutionContext executionContext, IEnumerable<Pipelines.JobStep> steps)
|
||||
public async Task<PrepareResult> PrepareActionsAsync(IExecutionContext executionContext, IEnumerable<Pipelines.JobStep> steps)
|
||||
{
|
||||
ArgUtil.NotNull(executionContext, nameof(executionContext));
|
||||
ArgUtil.NotNull(steps, nameof(steps));
|
||||
@@ -49,18 +61,24 @@ namespace GitHub.Runner.Worker
|
||||
Dictionary<string, List<Guid>> imagesToBuild = new Dictionary<string, List<Guid>>(StringComparer.OrdinalIgnoreCase);
|
||||
Dictionary<string, ActionContainer> imagesToBuildInfo = new Dictionary<string, ActionContainer>(StringComparer.OrdinalIgnoreCase);
|
||||
List<JobExtensionRunner> containerSetupSteps = new List<JobExtensionRunner>();
|
||||
Dictionary<Guid, IActionRunner> preStepTracker = new Dictionary<Guid, IActionRunner>();
|
||||
IEnumerable<Pipelines.ActionStep> actions = steps.OfType<Pipelines.ActionStep>();
|
||||
|
||||
// TODO: Depreciate the PREVIEW_ACTION_TOKEN
|
||||
// TODO: Deprecate the PREVIEW_ACTION_TOKEN
|
||||
// Log even if we aren't using it to ensure users know.
|
||||
if (!string.IsNullOrEmpty(executionContext.Variables.Get("PREVIEW_ACTION_TOKEN")))
|
||||
if (!string.IsNullOrEmpty(executionContext.Global.Variables.Get("PREVIEW_ACTION_TOKEN")))
|
||||
{
|
||||
executionContext.Warning("The 'PREVIEW_ACTION_TOKEN' secret is depreciated. Please remove it from the repository's secrets");
|
||||
executionContext.Warning("The 'PREVIEW_ACTION_TOKEN' secret is deprecated. Please remove it from the repository's secrets");
|
||||
}
|
||||
|
||||
// Clear the cache (local runner)
|
||||
// Clear the cache (for self-hosted runners)
|
||||
IOUtil.DeleteDirectory(HostContext.GetDirectory(WellKnownDirectory.Actions), executionContext.CancellationToken);
|
||||
|
||||
// todo: Remove when feature flag DistributedTask.NewActionMetadata is removed
|
||||
var newActionMetadata = executionContext.Global.Variables.GetBoolean("DistributedTask.NewActionMetadata") ?? false;
|
||||
|
||||
var repositoryActions = new List<Pipelines.ActionStep>();
|
||||
|
||||
foreach (var action in actions)
|
||||
{
|
||||
if (action.Reference.Type == Pipelines.ActionSourceType.ContainerRegistry)
|
||||
@@ -78,7 +96,8 @@ namespace GitHub.Runner.Worker
|
||||
Trace.Info($"Action {action.Name} ({action.Id}) needs to pull image '{containerReference.Image}'");
|
||||
imagesToPull[containerReference.Image].Add(action.Id);
|
||||
}
|
||||
else if (action.Reference.Type == Pipelines.ActionSourceType.Repository)
|
||||
// todo: Remove when feature flag DistributedTask.NewActionMetadata is removed
|
||||
else if (action.Reference.Type == Pipelines.ActionSourceType.Repository && !newActionMetadata)
|
||||
{
|
||||
// only download the repository archive
|
||||
await DownloadRepositoryActionAsync(executionContext, action);
|
||||
@@ -111,6 +130,97 @@ namespace GitHub.Runner.Worker
|
||||
imagesToBuildInfo[setupInfo.ActionRepository] = setupInfo;
|
||||
}
|
||||
}
|
||||
|
||||
var repoAction = action.Reference as Pipelines.RepositoryPathReference;
|
||||
if (repoAction.RepositoryType != Pipelines.PipelineConstants.SelfAlias)
|
||||
{
|
||||
var definition = LoadAction(executionContext, action);
|
||||
if (definition.Data.Execution.HasPre)
|
||||
{
|
||||
var actionRunner = HostContext.CreateService<IActionRunner>();
|
||||
actionRunner.Action = action;
|
||||
actionRunner.Stage = ActionRunStage.Pre;
|
||||
actionRunner.Condition = definition.Data.Execution.InitCondition;
|
||||
|
||||
Trace.Info($"Add 'pre' execution for {action.Id}");
|
||||
preStepTracker[action.Id] = actionRunner;
|
||||
}
|
||||
}
|
||||
}
|
||||
else if (action.Reference.Type == Pipelines.ActionSourceType.Repository && newActionMetadata)
|
||||
{
|
||||
repositoryActions.Add(action);
|
||||
}
|
||||
}
|
||||
|
||||
if (repositoryActions.Count > 0)
|
||||
{
|
||||
// Get the download info
|
||||
var downloadInfos = await GetDownloadInfoAsync(executionContext, repositoryActions);
|
||||
|
||||
// Download each action
|
||||
foreach (var action in repositoryActions)
|
||||
{
|
||||
var lookupKey = GetDownloadInfoLookupKey(action);
|
||||
if (string.IsNullOrEmpty(lookupKey))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!downloadInfos.TryGetValue(lookupKey, out var downloadInfo))
|
||||
{
|
||||
throw new Exception($"Missing download info for {lookupKey}");
|
||||
}
|
||||
|
||||
await DownloadRepositoryActionAsync(executionContext, downloadInfo);
|
||||
}
|
||||
|
||||
// More preparation based on content in the repository (action.yml)
|
||||
foreach (var action in repositoryActions)
|
||||
{
|
||||
var setupInfo = PrepareRepositoryActionAsync(executionContext, action);
|
||||
if (setupInfo != null)
|
||||
{
|
||||
if (!string.IsNullOrEmpty(setupInfo.Image))
|
||||
{
|
||||
if (!imagesToPull.ContainsKey(setupInfo.Image))
|
||||
{
|
||||
imagesToPull[setupInfo.Image] = new List<Guid>();
|
||||
}
|
||||
|
||||
Trace.Info($"Action {action.Name} ({action.Id}) from repository '{setupInfo.ActionRepository}' needs to pull image '{setupInfo.Image}'");
|
||||
imagesToPull[setupInfo.Image].Add(action.Id);
|
||||
}
|
||||
else
|
||||
{
|
||||
ArgUtil.NotNullOrEmpty(setupInfo.ActionRepository, nameof(setupInfo.ActionRepository));
|
||||
|
||||
if (!imagesToBuild.ContainsKey(setupInfo.ActionRepository))
|
||||
{
|
||||
imagesToBuild[setupInfo.ActionRepository] = new List<Guid>();
|
||||
}
|
||||
|
||||
Trace.Info($"Action {action.Name} ({action.Id}) from repository '{setupInfo.ActionRepository}' needs to build image '{setupInfo.Dockerfile}'");
|
||||
imagesToBuild[setupInfo.ActionRepository].Add(action.Id);
|
||||
imagesToBuildInfo[setupInfo.ActionRepository] = setupInfo;
|
||||
}
|
||||
}
|
||||
|
||||
var repoAction = action.Reference as Pipelines.RepositoryPathReference;
|
||||
if (repoAction.RepositoryType != Pipelines.PipelineConstants.SelfAlias)
|
||||
{
|
||||
var definition = LoadAction(executionContext, action);
|
||||
if (definition.Data.Execution.HasPre)
|
||||
{
|
||||
var actionRunner = HostContext.CreateService<IActionRunner>();
|
||||
actionRunner.Action = action;
|
||||
actionRunner.Stage = ActionRunStage.Pre;
|
||||
actionRunner.Condition = definition.Data.Execution.InitCondition;
|
||||
|
||||
Trace.Info($"Add 'pre' execution for {action.Id}");
|
||||
preStepTracker[action.Id] = actionRunner;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -147,7 +257,7 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
#endif
|
||||
|
||||
return containerSetupSteps;
|
||||
return new PrepareResult(containerSetupSteps, preStepTracker);
|
||||
}
|
||||
|
||||
public Definition LoadAction(IExecutionContext executionContext, Pipelines.ActionStep action)
|
||||
@@ -239,14 +349,19 @@ namespace GitHub.Runner.Worker
|
||||
Trace.Info($"Action container env: {StringUtil.ConvertToJson(containerAction.Environment)}.");
|
||||
}
|
||||
|
||||
if (!string.IsNullOrEmpty(containerAction.Pre))
|
||||
{
|
||||
Trace.Info($"Action container pre entrypoint: {containerAction.Pre}.");
|
||||
}
|
||||
|
||||
if (!string.IsNullOrEmpty(containerAction.EntryPoint))
|
||||
{
|
||||
Trace.Info($"Action container entrypoint: {containerAction.EntryPoint}.");
|
||||
}
|
||||
|
||||
if (!string.IsNullOrEmpty(containerAction.Cleanup))
|
||||
if (!string.IsNullOrEmpty(containerAction.Post))
|
||||
{
|
||||
Trace.Info($"Action container cleanup entrypoint: {containerAction.Cleanup}.");
|
||||
Trace.Info($"Action container post entrypoint: {containerAction.Post}.");
|
||||
}
|
||||
|
||||
if (CachedActionContainers.TryGetValue(action.Id, out var container))
|
||||
@@ -258,8 +373,9 @@ namespace GitHub.Runner.Worker
|
||||
else if (definition.Data.Execution.ExecutionType == ActionExecutionType.NodeJS)
|
||||
{
|
||||
var nodeAction = definition.Data.Execution as NodeJSActionExecutionData;
|
||||
Trace.Info($"Action pre node.js file: {nodeAction.Pre ?? "N/A"}.");
|
||||
Trace.Info($"Action node.js file: {nodeAction.Script}.");
|
||||
Trace.Info($"Action cleanup node.js file: {nodeAction.Cleanup ?? "N/A"}.");
|
||||
Trace.Info($"Action post node.js file: {nodeAction.Post ?? "N/A"}.");
|
||||
}
|
||||
else if (definition.Data.Execution.ExecutionType == ActionExecutionType.Plugin)
|
||||
{
|
||||
@@ -275,10 +391,18 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
if (!string.IsNullOrEmpty(plugin.PostPluginTypeName))
|
||||
{
|
||||
pluginAction.Cleanup = plugin.PostPluginTypeName;
|
||||
pluginAction.Post = plugin.PostPluginTypeName;
|
||||
Trace.Info($"Action cleanup plugin: {plugin.PluginTypeName}.");
|
||||
}
|
||||
}
|
||||
else if (definition.Data.Execution.ExecutionType == ActionExecutionType.Composite)
|
||||
{
|
||||
var compositeAction = definition.Data.Execution as CompositeActionExecutionData;
|
||||
Trace.Info($"Load {compositeAction.Steps?.Count ?? 0} action steps.");
|
||||
Trace.Verbose($"Details: {StringUtil.ConvertToJson(compositeAction?.Steps)}");
|
||||
Trace.Info($"Load: {compositeAction.Outputs?.Count ?? 0} number of outputs");
|
||||
Trace.Info($"Details: {StringUtil.ConvertToJson(compositeAction?.Outputs)}");
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new NotSupportedException(definition.Data.Execution.ExecutionType.ToString());
|
||||
@@ -344,7 +468,7 @@ namespace GitHub.Runner.Worker
|
||||
ArgUtil.NotNull(setupInfo, nameof(setupInfo));
|
||||
ArgUtil.NotNullOrEmpty(setupInfo.Container.Image, nameof(setupInfo.Container.Image));
|
||||
|
||||
executionContext.Output($"Pull down action image '{setupInfo.Container.Image}'");
|
||||
executionContext.Output($"##[group]Pull down action image '{setupInfo.Container.Image}'");
|
||||
|
||||
// Pull down docker image with retry up to 3 times
|
||||
var dockerManger = HostContext.GetService<IDockerCommandManager>();
|
||||
@@ -368,6 +492,7 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
}
|
||||
}
|
||||
executionContext.Output("##[endgroup]");
|
||||
|
||||
if (retryCount == 3 && pullExitCode != 0)
|
||||
{
|
||||
@@ -387,7 +512,7 @@ namespace GitHub.Runner.Worker
|
||||
ArgUtil.NotNull(setupInfo, nameof(setupInfo));
|
||||
ArgUtil.NotNullOrEmpty(setupInfo.Container.Dockerfile, nameof(setupInfo.Container.Dockerfile));
|
||||
|
||||
executionContext.Output($"Build container for action use: '{setupInfo.Container.Dockerfile}'.");
|
||||
executionContext.Output($"##[group]Build container for action use: '{setupInfo.Container.Dockerfile}'.");
|
||||
|
||||
// Build docker image with retry up to 3 times
|
||||
var dockerManger = HostContext.GetService<IDockerCommandManager>();
|
||||
@@ -396,7 +521,12 @@ namespace GitHub.Runner.Worker
|
||||
var imageName = $"{dockerManger.DockerInstanceLabel}:{Guid.NewGuid().ToString("N")}";
|
||||
while (retryCount < 3)
|
||||
{
|
||||
buildExitCode = await dockerManger.DockerBuild(executionContext, setupInfo.Container.WorkingDirectory, Directory.GetParent(setupInfo.Container.Dockerfile).FullName, imageName);
|
||||
buildExitCode = await dockerManger.DockerBuild(
|
||||
executionContext,
|
||||
setupInfo.Container.WorkingDirectory,
|
||||
setupInfo.Container.Dockerfile,
|
||||
Directory.GetParent(setupInfo.Container.Dockerfile).FullName,
|
||||
imageName);
|
||||
if (buildExitCode == 0)
|
||||
{
|
||||
break;
|
||||
@@ -412,6 +542,7 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
}
|
||||
}
|
||||
executionContext.Output("##[endgroup]");
|
||||
|
||||
if (retryCount == 3 && buildExitCode != 0)
|
||||
{
|
||||
@@ -425,6 +556,98 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
}
|
||||
|
||||
// This implementation is temporary and will be replaced with a REST API call to the service to resolve
|
||||
private async Task<IDictionary<string, WebApi.ActionDownloadInfo>> GetDownloadInfoAsync(IExecutionContext executionContext, List<Pipelines.ActionStep> actions)
|
||||
{
|
||||
executionContext.Output("Getting action download info");
|
||||
|
||||
// Convert to action reference
|
||||
var actionReferences = actions
|
||||
.GroupBy(x => GetDownloadInfoLookupKey(x))
|
||||
.Where(x => !string.IsNullOrEmpty(x.Key))
|
||||
.Select(x =>
|
||||
{
|
||||
var action = x.First();
|
||||
var repositoryReference = action.Reference as Pipelines.RepositoryPathReference;
|
||||
ArgUtil.NotNull(repositoryReference, nameof(repositoryReference));
|
||||
return new WebApi.ActionReference
|
||||
{
|
||||
NameWithOwner = repositoryReference.Name,
|
||||
Ref = repositoryReference.Ref,
|
||||
};
|
||||
})
|
||||
.ToList();
|
||||
|
||||
// Nothing to resolve?
|
||||
if (actionReferences.Count == 0)
|
||||
{
|
||||
return new Dictionary<string, WebApi.ActionDownloadInfo>();
|
||||
}
|
||||
|
||||
// Resolve download info
|
||||
var jobServer = HostContext.GetService<IJobServer>();
|
||||
var actionDownloadInfos = default(WebApi.ActionDownloadInfoCollection);
|
||||
for (var attempt = 1; attempt <= 3; attempt++)
|
||||
{
|
||||
try
|
||||
{
|
||||
actionDownloadInfos = await jobServer.ResolveActionDownloadInfoAsync(executionContext.Global.Plan.ScopeIdentifier, executionContext.Global.Plan.PlanType, executionContext.Global.Plan.PlanId, new WebApi.ActionReferenceList { Actions = actionReferences }, executionContext.CancellationToken);
|
||||
break;
|
||||
}
|
||||
catch (Exception ex) when (!executionContext.CancellationToken.IsCancellationRequested) // Do not retry if the run is canceled.
|
||||
{
|
||||
if (attempt < 3)
|
||||
{
|
||||
executionContext.Output($"Failed to resolve action download info. Error: {ex.Message}");
|
||||
executionContext.Debug(ex.ToString());
|
||||
if (String.IsNullOrEmpty(Environment.GetEnvironmentVariable("_GITHUB_ACTION_DOWNLOAD_NO_BACKOFF")))
|
||||
{
|
||||
var backoff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(10), TimeSpan.FromSeconds(30));
|
||||
executionContext.Output($"Retrying in {backoff.TotalSeconds} seconds");
|
||||
await Task.Delay(backoff);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
// Some possible cases are:
|
||||
// * Repo is rate limited
|
||||
// * Repo or tag doesn't exist, or isn't public
|
||||
if (ex is WebApi.UnresolvableActionDownloadInfoException)
|
||||
{
|
||||
throw;
|
||||
}
|
||||
else
|
||||
{
|
||||
// This exception will be traced as an infrastructure failure
|
||||
throw new WebApi.FailedToResolveActionDownloadInfoException("Failed to resolve action download info.", ex);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
ArgUtil.NotNull(actionDownloadInfos, nameof(actionDownloadInfos));
|
||||
ArgUtil.NotNull(actionDownloadInfos.Actions, nameof(actionDownloadInfos.Actions));
|
||||
var apiUrl = GetApiUrl(executionContext);
|
||||
var defaultAccessToken = executionContext.GetGitHubContext("token");
|
||||
var configurationStore = HostContext.GetService<IConfigurationStore>();
|
||||
var runnerSettings = configurationStore.GetSettings();
|
||||
|
||||
foreach (var actionDownloadInfo in actionDownloadInfos.Actions.Values)
|
||||
{
|
||||
// Add secret
|
||||
HostContext.SecretMasker.AddValue(actionDownloadInfo.Authentication?.Token);
|
||||
|
||||
// Default auth token
|
||||
if (string.IsNullOrEmpty(actionDownloadInfo.Authentication?.Token))
|
||||
{
|
||||
actionDownloadInfo.Authentication = new WebApi.ActionDownloadAuthentication { Token = defaultAccessToken };
|
||||
}
|
||||
}
|
||||
|
||||
return actionDownloadInfos.Actions;
|
||||
}
|
||||
|
||||
// todo: Remove when feature flag DistributedTask.NewActionMetadata is removed
|
||||
private async Task DownloadRepositoryActionAsync(IExecutionContext executionContext, Pipelines.ActionStep repositoryAction)
|
||||
{
|
||||
Trace.Entering();
|
||||
@@ -448,7 +671,8 @@ namespace GitHub.Runner.Worker
|
||||
ArgUtil.NotNullOrEmpty(repositoryReference.Ref, nameof(repositoryReference.Ref));
|
||||
|
||||
string destDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Actions), repositoryReference.Name.Replace(Path.AltDirectorySeparatorChar, Path.DirectorySeparatorChar), repositoryReference.Ref);
|
||||
if (File.Exists(destDirectory + ".completed"))
|
||||
string watermarkFile = GetWatermarkFilePath(destDirectory);
|
||||
if (File.Exists(watermarkFile))
|
||||
{
|
||||
executionContext.Debug($"Action '{repositoryReference.Name}@{repositoryReference.Ref}' already downloaded at '{destDirectory}'.");
|
||||
return;
|
||||
@@ -461,27 +685,116 @@ namespace GitHub.Runner.Worker
|
||||
executionContext.Output($"Download action repository '{repositoryReference.Name}@{repositoryReference.Ref}'");
|
||||
}
|
||||
|
||||
#if OS_WINDOWS
|
||||
string archiveLink = $"https://api.github.com/repos/{repositoryReference.Name}/zipball/{repositoryReference.Ref}";
|
||||
#else
|
||||
string archiveLink = $"https://api.github.com/repos/{repositoryReference.Name}/tarball/{repositoryReference.Ref}";
|
||||
#endif
|
||||
Trace.Info($"Download archive '{archiveLink}' to '{destDirectory}'.");
|
||||
var configurationStore = HostContext.GetService<IConfigurationStore>();
|
||||
var isHostedServer = configurationStore.GetSettings().IsHostedServer;
|
||||
if (isHostedServer)
|
||||
{
|
||||
string apiUrl = GetApiUrl(executionContext);
|
||||
string archiveLink = BuildLinkToActionArchive(apiUrl, repositoryReference.Name, repositoryReference.Ref);
|
||||
var downloadDetails = new ActionDownloadDetails(archiveLink, ConfigureAuthorizationFromContext);
|
||||
await DownloadRepositoryActionAsync(executionContext, downloadDetails, null, destDirectory);
|
||||
return;
|
||||
}
|
||||
else
|
||||
{
|
||||
string apiUrl = GetApiUrl(executionContext);
|
||||
|
||||
// URLs to try:
|
||||
var downloadAttempts = new List<ActionDownloadDetails> {
|
||||
// A built-in action or an action the user has created, on their GHES instance
|
||||
// Example: https://my-ghes/api/v3/repos/my-org/my-action/tarball/v1
|
||||
new ActionDownloadDetails(
|
||||
BuildLinkToActionArchive(apiUrl, repositoryReference.Name, repositoryReference.Ref),
|
||||
ConfigureAuthorizationFromContext),
|
||||
|
||||
// The same action, on GitHub.com
|
||||
// Example: https://api.github.com/repos/my-org/my-action/tarball/v1
|
||||
new ActionDownloadDetails(
|
||||
BuildLinkToActionArchive(_dotcomApiUrl, repositoryReference.Name, repositoryReference.Ref),
|
||||
configureAuthorization: (e,h) => { /* no authorization for dotcom */ })
|
||||
};
|
||||
|
||||
foreach (var downloadAttempt in downloadAttempts)
|
||||
{
|
||||
try
|
||||
{
|
||||
await DownloadRepositoryActionAsync(executionContext, downloadAttempt, null, destDirectory);
|
||||
return;
|
||||
}
|
||||
catch (ActionNotFoundException)
|
||||
{
|
||||
Trace.Info($"Failed to find the action '{repositoryReference.Name}' at ref '{repositoryReference.Ref}' at {downloadAttempt.ArchiveLink}");
|
||||
continue;
|
||||
}
|
||||
}
|
||||
throw new ActionNotFoundException($"Failed to find the action '{repositoryReference.Name}' at ref '{repositoryReference.Ref}'. Paths attempted: {string.Join(", ", downloadAttempts.Select(d => d.ArchiveLink))}");
|
||||
}
|
||||
}
|
||||
|
||||
private async Task DownloadRepositoryActionAsync(IExecutionContext executionContext, WebApi.ActionDownloadInfo downloadInfo)
|
||||
{
|
||||
Trace.Entering();
|
||||
ArgUtil.NotNull(executionContext, nameof(executionContext));
|
||||
ArgUtil.NotNull(downloadInfo, nameof(downloadInfo));
|
||||
ArgUtil.NotNullOrEmpty(downloadInfo.NameWithOwner, nameof(downloadInfo.NameWithOwner));
|
||||
ArgUtil.NotNullOrEmpty(downloadInfo.Ref, nameof(downloadInfo.Ref));
|
||||
|
||||
string destDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Actions), downloadInfo.NameWithOwner.Replace(Path.AltDirectorySeparatorChar, Path.DirectorySeparatorChar), downloadInfo.Ref);
|
||||
string watermarkFile = GetWatermarkFilePath(destDirectory);
|
||||
if (File.Exists(watermarkFile))
|
||||
{
|
||||
executionContext.Debug($"Action '{downloadInfo.NameWithOwner}@{downloadInfo.Ref}' already downloaded at '{destDirectory}'.");
|
||||
return;
|
||||
}
|
||||
else
|
||||
{
|
||||
// make sure we get a clean folder ready to use.
|
||||
IOUtil.DeleteDirectory(destDirectory, executionContext.CancellationToken);
|
||||
Directory.CreateDirectory(destDirectory);
|
||||
executionContext.Output($"Download action repository '{downloadInfo.NameWithOwner}@{downloadInfo.Ref}'");
|
||||
}
|
||||
|
||||
await DownloadRepositoryActionAsync(executionContext, null, downloadInfo, destDirectory);
|
||||
}
|
||||
|
||||
private string GetApiUrl(IExecutionContext executionContext)
|
||||
{
|
||||
string apiUrl = executionContext.GetGitHubContext("api_url");
|
||||
if (!string.IsNullOrEmpty(apiUrl))
|
||||
{
|
||||
return apiUrl;
|
||||
}
|
||||
// Once the api_url is set for hosted, we can remove this fallback (it doesn't make sense for GHES)
|
||||
return _dotcomApiUrl;
|
||||
}
|
||||
|
||||
private static string BuildLinkToActionArchive(string apiUrl, string repository, string @ref)
|
||||
{
|
||||
#if OS_WINDOWS
|
||||
return $"{apiUrl}/repos/{repository}/zipball/{@ref}";
|
||||
#else
|
||||
return $"{apiUrl}/repos/{repository}/tarball/{@ref}";
|
||||
#endif
|
||||
}
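For orientation, here is a minimal, self-contained sketch (not the runner's code) of the URL shapes this helper produces; the API roots, repository name, and ref below are hypothetical examples.

using System;

static class ArchiveLinkSketch
{
    // Mirrors the shape of BuildLinkToActionArchive above: zipball on Windows, tarball elsewhere.
    static string Build(string apiUrl, string repository, string @ref, bool windows) =>
        windows
            ? $"{apiUrl}/repos/{repository}/zipball/{@ref}"
            : $"{apiUrl}/repos/{repository}/tarball/{@ref}";

    static void Main()
    {
        Console.WriteLine(Build("https://api.github.com", "my-org/my-action", "v1", windows: false));
        // => https://api.github.com/repos/my-org/my-action/tarball/v1
        Console.WriteLine(Build("https://my-ghes/api/v3", "my-org/my-action", "v1", windows: true));
        // => https://my-ghes/api/v3/repos/my-org/my-action/zipball/v1
    }
}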
|
||||
|
||||
// todo: Remove the parameter "actionDownloadDetails" when feature flag DistributedTask.NewActionMetadata is removed
|
||||
private async Task DownloadRepositoryActionAsync(IExecutionContext executionContext, ActionDownloadDetails actionDownloadDetails, WebApi.ActionDownloadInfo downloadInfo, string destDirectory)
|
||||
{
|
||||
//download and extract action in a temp folder and rename it on success
|
||||
string tempDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Actions), "_temp_" + Guid.NewGuid());
|
||||
Directory.CreateDirectory(tempDirectory);
|
||||
|
||||
|
||||
#if OS_WINDOWS
|
||||
string archiveFile = Path.Combine(tempDirectory, $"{Guid.NewGuid()}.zip");
|
||||
string link = downloadInfo?.ZipballUrl ?? actionDownloadDetails.ArchiveLink;
|
||||
#else
|
||||
string archiveFile = Path.Combine(tempDirectory, $"{Guid.NewGuid()}.tar.gz");
|
||||
string link = downloadInfo?.TarballUrl ?? actionDownloadDetails.ArchiveLink;
|
||||
#endif
|
||||
Trace.Info($"Save archive '{archiveLink}' into {archiveFile}.");
|
||||
|
||||
Trace.Info($"Save archive '{link}' into {archiveFile}.");
|
||||
try
|
||||
{
|
||||
|
||||
int retryCount = 0;
|
||||
|
||||
// Allow up to 20 * 60s for any action to be downloaded from github graph.
|
||||
@@ -498,28 +811,23 @@ namespace GitHub.Runner.Worker
|
||||
using (var httpClientHandler = HostContext.CreateHttpClientHandler())
|
||||
using (var httpClient = new HttpClient(httpClientHandler))
|
||||
{
|
||||
var authToken = Environment.GetEnvironmentVariable("_GITHUB_ACTION_TOKEN");
|
||||
if (string.IsNullOrEmpty(authToken))
|
||||
// Legacy
|
||||
if (downloadInfo == null)
|
||||
{
|
||||
// TODO: Depreciate the PREVIEW_ACTION_TOKEN
|
||||
authToken = executionContext.Variables.Get("PREVIEW_ACTION_TOKEN");
|
||||
}
|
||||
|
||||
if (!string.IsNullOrEmpty(authToken))
|
||||
{
|
||||
HostContext.SecretMasker.AddValue(authToken);
|
||||
var base64EncodingToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"PAT:{authToken}"));
|
||||
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Basic", base64EncodingToken);
|
||||
actionDownloadDetails.ConfigureAuthorization(executionContext, httpClient);
|
||||
}
|
||||
// FF DistributedTask.NewActionMetadata
|
||||
else
|
||||
{
|
||||
var accessToken = executionContext.GetGitHubContext("token");
|
||||
var base64EncodingToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"x-access-token:{accessToken}"));
|
||||
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Basic", base64EncodingToken);
|
||||
httpClient.DefaultRequestHeaders.Authorization = CreateAuthHeader(downloadInfo.Authentication?.Token);
|
||||
}
|
||||
|
||||
httpClient.DefaultRequestHeaders.UserAgent.Add(HostContext.UserAgent);
|
||||
using (var result = await httpClient.GetStreamAsync(archiveLink))
|
||||
httpClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents);
|
||||
using (var response = await httpClient.GetAsync(link))
|
||||
{
|
||||
if (response.IsSuccessStatusCode)
|
||||
{
|
||||
using (var result = await response.Content.ReadAsStreamAsync())
|
||||
{
|
||||
await result.CopyToAsync(fs, _defaultCopyBufferSize, actionDownloadCancellation.Token);
|
||||
await fs.FlushAsync(actionDownloadCancellation.Token);
|
||||
@@ -528,25 +836,42 @@ namespace GitHub.Runner.Worker
|
||||
break;
|
||||
}
|
||||
}
|
||||
else if (response.StatusCode == HttpStatusCode.NotFound)
|
||||
{
|
||||
// It doesn't make sense to retry in this case, so just stop
|
||||
throw new ActionNotFoundException(new Uri(link));
|
||||
}
|
||||
else
|
||||
{
|
||||
// Something else bad happened, let's go to our retry logic
|
||||
response.EnsureSuccessStatusCode();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (OperationCanceledException) when (executionContext.CancellationToken.IsCancellationRequested)
|
||||
{
|
||||
Trace.Info($"Action download has been cancelled.");
|
||||
Trace.Info("Action download has been cancelled.");
|
||||
throw;
|
||||
}
|
||||
catch (ActionNotFoundException)
|
||||
{
|
||||
Trace.Info($"The action at '{link}' does not exist");
|
||||
throw;
|
||||
}
|
||||
catch (Exception ex) when (retryCount < 2)
|
||||
{
|
||||
retryCount++;
|
||||
Trace.Error($"Fail to download archive '{archiveLink}' -- Attempt: {retryCount}");
|
||||
Trace.Error($"Fail to download archive '{link}' -- Attempt: {retryCount}");
|
||||
Trace.Error(ex);
|
||||
if (actionDownloadTimeout.Token.IsCancellationRequested)
|
||||
{
|
||||
// action download didn't finish within timeout
|
||||
executionContext.Warning($"Action '{archiveLink}' didn't finish download within {timeoutSeconds} seconds.");
|
||||
executionContext.Warning($"Action '{link}' didn't finish download within {timeoutSeconds} seconds.");
|
||||
}
|
||||
else
|
||||
{
|
||||
executionContext.Warning($"Failed to download action '{archiveLink}'. Error {ex.Message}");
|
||||
executionContext.Warning($"Failed to download action '{link}'. Error: {ex.Message}");
|
||||
}
|
||||
}
|
||||
}
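The download loop above pairs a bounded retry count with a per-download cancellation token. A stripped-down, self-contained sketch of that pattern follows (made-up attempt count and timeout, modern .NET HttpClient assumed; this is not the runner's actual implementation).

using System;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;

static class RetryDownloadSketch
{
    static async Task<byte[]> DownloadWithRetryAsync(HttpClient client, string url, CancellationToken jobToken)
    {
        const int maxAttempts = 3; // hypothetical limit, not the runner's value
        for (var attempt = 1; ; attempt++)
        {
            // Link a per-attempt timeout to the job cancellation token.
            using var timeout = CancellationTokenSource.CreateLinkedTokenSource(jobToken);
            timeout.CancelAfter(TimeSpan.FromMinutes(1)); // hypothetical per-attempt timeout

            try
            {
                return await client.GetByteArrayAsync(url, timeout.Token);
            }
            catch (OperationCanceledException) when (jobToken.IsCancellationRequested)
            {
                throw; // the job itself was cancelled, so do not retry
            }
            catch (Exception) when (attempt < maxAttempts)
            {
                // Timeout or transient failure: back off briefly and try again.
                await Task.Delay(TimeSpan.FromSeconds(5 * attempt), jobToken);
            }
        }
    }
}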
|
||||
@@ -560,7 +885,7 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
|
||||
ArgUtil.NotNullOrEmpty(archiveFile, nameof(archiveFile));
|
||||
executionContext.Debug($"Download '{archiveLink}' to '{archiveFile}'");
|
||||
executionContext.Debug($"Download '{link}' to '{archiveFile}'");
|
||||
|
||||
var stagingDirectory = Path.Combine(tempDirectory, "_staging");
|
||||
Directory.CreateDirectory(stagingDirectory);
|
||||
@@ -610,7 +935,8 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
|
||||
Trace.Verbose("Create watermark file indicate action download succeed.");
|
||||
File.WriteAllText(destDirectory + ".completed", DateTime.UtcNow.ToString());
|
||||
string watermarkFile = GetWatermarkFilePath(destDirectory);
|
||||
File.WriteAllText(watermarkFile, DateTime.UtcNow.ToString());
|
||||
|
||||
executionContext.Debug($"Archive '{archiveFile}' has been unzipped into '{destDirectory}'.");
|
||||
Trace.Info("Finished getting action repository.");
|
||||
@@ -634,6 +960,32 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
}
|
||||
|
||||
// todo: Remove when feature flag DistributedTask.NewActionMetadata is removed
|
||||
private void ConfigureAuthorizationFromContext(IExecutionContext executionContext, HttpClient httpClient)
|
||||
{
|
||||
var authToken = Environment.GetEnvironmentVariable("_GITHUB_ACTION_TOKEN");
|
||||
if (string.IsNullOrEmpty(authToken))
|
||||
{
|
||||
// TODO: Deprecate the PREVIEW_ACTION_TOKEN
|
||||
authToken = executionContext.Global.Variables.Get("PREVIEW_ACTION_TOKEN");
|
||||
}
|
||||
|
||||
if (!string.IsNullOrEmpty(authToken))
|
||||
{
|
||||
HostContext.SecretMasker.AddValue(authToken);
|
||||
var base64EncodingToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"PAT:{authToken}"));
|
||||
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Basic", base64EncodingToken);
|
||||
}
|
||||
else
|
||||
{
|
||||
var accessToken = executionContext.GetGitHubContext("token");
|
||||
var base64EncodingToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"x-access-token:{accessToken}"));
|
||||
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Basic", base64EncodingToken);
|
||||
}
|
||||
}
|
||||
|
||||
private string GetWatermarkFilePath(string directory) => directory + ".completed";
|
||||
|
||||
private ActionContainer PrepareRepositoryActionAsync(IExecutionContext executionContext, Pipelines.ActionStep repositoryAction)
|
||||
{
|
||||
var repositoryReference = repositoryAction.Reference as Pipelines.RepositoryPathReference;
|
||||
@@ -714,6 +1066,11 @@ namespace GitHub.Runner.Worker
|
||||
Trace.Info($"Action plugin: {(actionDefinitionData.Execution as PluginActionExecutionData).Plugin}, no more preparation.");
|
||||
return null;
|
||||
}
|
||||
else if (actionDefinitionData.Execution.ExecutionType == ActionExecutionType.Composite)
|
||||
{
|
||||
Trace.Info($"Action composite: {(actionDefinitionData.Execution as CompositeActionExecutionData).Steps}, no more preparation.");
|
||||
return null;
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new NotSupportedException(actionDefinitionData.Execution.ExecutionType.ToString());
|
||||
@@ -739,6 +1096,64 @@ namespace GitHub.Runner.Worker
|
||||
throw new InvalidOperationException($"Can't find 'action.yml', 'action.yaml' or 'Dockerfile' under '{fullPath}'. Did you forget to run actions/checkout before running your local action?");
|
||||
}
|
||||
}
|
||||
|
||||
private static string GetDownloadInfoLookupKey(Pipelines.ActionStep action)
|
||||
{
|
||||
if (action.Reference.Type != Pipelines.ActionSourceType.Repository)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
var repositoryReference = action.Reference as Pipelines.RepositoryPathReference;
|
||||
ArgUtil.NotNull(repositoryReference, nameof(repositoryReference));
|
||||
|
||||
if (string.Equals(repositoryReference.RepositoryType, Pipelines.PipelineConstants.SelfAlias, StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
if (!string.Equals(repositoryReference.RepositoryType, Pipelines.RepositoryTypes.GitHub, StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
throw new NotSupportedException(repositoryReference.RepositoryType);
|
||||
}
|
||||
|
||||
ArgUtil.NotNullOrEmpty(repositoryReference.Name, nameof(repositoryReference.Name));
|
||||
ArgUtil.NotNullOrEmpty(repositoryReference.Ref, nameof(repositoryReference.Ref));
|
||||
return $"{repositoryReference.Name}@{repositoryReference.Ref}";
|
||||
}
|
||||
|
||||
private static string GetDownloadInfoLookupKey(WebApi.ActionDownloadInfo info)
|
||||
{
|
||||
ArgUtil.NotNullOrEmpty(info.NameWithOwner, nameof(info.NameWithOwner));
|
||||
ArgUtil.NotNullOrEmpty(info.Ref, nameof(info.Ref));
|
||||
return $"{info.NameWithOwner}@{info.Ref}";
|
||||
}
|
||||
|
||||
private AuthenticationHeaderValue CreateAuthHeader(string token)
|
||||
{
|
||||
if (string.IsNullOrEmpty(token))
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
var base64EncodingToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"x-access-token:{token}"));
|
||||
HostContext.SecretMasker.AddValue(base64EncodingToken);
|
||||
return new AuthenticationHeaderValue("Basic", base64EncodingToken);
|
||||
}
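As a quick illustration of the header CreateAuthHeader builds, here is a minimal sketch; the token value is invented for the example, and in the runner the encoded value is also registered with the secret masker as shown above.

using System;
using System.Net.Http.Headers;
using System.Text;

static class AuthHeaderSketch
{
    static AuthenticationHeaderValue Create(string token)
    {
        if (string.IsNullOrEmpty(token))
        {
            return null;
        }

        // Same scheme as above: "x-access-token:<token>", Base64-encoded into a Basic header.
        var encoded = Convert.ToBase64String(Encoding.UTF8.GetBytes($"x-access-token:{token}"));
        return new AuthenticationHeaderValue("Basic", encoded);
    }

    static void Main()
    {
        // Prints "Basic <base64 of x-access-token:ghs_example123>" (hypothetical token).
        Console.WriteLine(Create("ghs_example123"));
    }
}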
|
||||
|
||||
// todo: Remove when feature flag DistributedTask.NewActionMetadata is removed
|
||||
private class ActionDownloadDetails
|
||||
{
|
||||
public string ArchiveLink { get; }
|
||||
|
||||
public Action<IExecutionContext, HttpClient> ConfigureAuthorization { get; }
|
||||
|
||||
public ActionDownloadDetails(string archiveLink, Action<IExecutionContext, HttpClient> configureAuthorization)
|
||||
{
|
||||
ArchiveLink = archiveLink;
|
||||
ConfigureAuthorization = configureAuthorization;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public sealed class Definition
|
||||
@@ -766,13 +1181,15 @@ namespace GitHub.Runner.Worker
|
||||
NodeJS,
|
||||
Plugin,
|
||||
Script,
|
||||
Composite,
|
||||
}
|
||||
|
||||
public sealed class ContainerActionExecutionData : ActionExecutionData
|
||||
{
|
||||
public override ActionExecutionType ExecutionType => ActionExecutionType.Container;
|
||||
|
||||
public override bool HasCleanup => !string.IsNullOrEmpty(Cleanup);
|
||||
public override bool HasPre => !string.IsNullOrEmpty(Pre);
|
||||
public override bool HasPost => !string.IsNullOrEmpty(Post);
|
||||
|
||||
public string Image { get; set; }
|
||||
|
||||
@@ -782,51 +1199,75 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
public MappingToken Environment { get; set; }
|
||||
|
||||
public string Cleanup { get; set; }
|
||||
public string Pre { get; set; }
|
||||
|
||||
public string Post { get; set; }
|
||||
}
|
||||
|
||||
public sealed class NodeJSActionExecutionData : ActionExecutionData
|
||||
{
|
||||
public override ActionExecutionType ExecutionType => ActionExecutionType.NodeJS;
|
||||
|
||||
public override bool HasCleanup => !string.IsNullOrEmpty(Cleanup);
|
||||
public override bool HasPre => !string.IsNullOrEmpty(Pre);
|
||||
public override bool HasPost => !string.IsNullOrEmpty(Post);
|
||||
|
||||
public string Script { get; set; }
|
||||
|
||||
public string Cleanup { get; set; }
|
||||
public string Pre { get; set; }
|
||||
|
||||
public string Post { get; set; }
|
||||
}
|
||||
|
||||
public sealed class PluginActionExecutionData : ActionExecutionData
|
||||
{
|
||||
public override ActionExecutionType ExecutionType => ActionExecutionType.Plugin;
|
||||
|
||||
public override bool HasCleanup => !string.IsNullOrEmpty(Cleanup);
|
||||
public override bool HasPre => false;
|
||||
|
||||
public override bool HasPost => !string.IsNullOrEmpty(Post);
|
||||
|
||||
public string Plugin { get; set; }
|
||||
|
||||
public string Cleanup { get; set; }
|
||||
public string Post { get; set; }
|
||||
}
|
||||
|
||||
public sealed class ScriptActionExecutionData : ActionExecutionData
|
||||
{
|
||||
public override ActionExecutionType ExecutionType => ActionExecutionType.Script;
|
||||
public override bool HasPre => false;
|
||||
public override bool HasPost => false;
|
||||
public override bool HasCleanup => false;
}

public sealed class CompositeActionExecutionData : ActionExecutionData
|
||||
{
|
||||
public override ActionExecutionType ExecutionType => ActionExecutionType.Composite;
|
||||
public override bool HasPre => false;
|
||||
public override bool HasPost => false;
|
||||
public List<Pipelines.ActionStep> Steps { get; set; }
|
||||
public MappingToken Outputs { get; set; }
|
||||
}
|
||||
|
||||
public abstract class ActionExecutionData
|
||||
{
|
||||
private string _initCondition = $"{Constants.Expressions.Always}()";
|
||||
private string _cleanupCondition = $"{Constants.Expressions.Always}()";
|
||||
|
||||
public abstract ActionExecutionType ExecutionType { get; }
|
||||
|
||||
public abstract bool HasCleanup { get; }
|
||||
public abstract bool HasPre { get; }
|
||||
public abstract bool HasPost { get; }
|
||||
|
||||
public string CleanupCondition
|
||||
{
|
||||
get { return _cleanupCondition; }
|
||||
set { _cleanupCondition = value; }
|
||||
}
|
||||
|
||||
public string InitCondition
|
||||
{
|
||||
get { return _initCondition; }
|
||||
set { _initCondition = value; }
|
||||
}
|
||||
}
|
||||
|
||||
public class ContainerSetupInfo
|
||||
@@ -863,4 +1304,3 @@ namespace GitHub.Runner.Worker
|
||||
public string ActionRepository { get; set; }
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -14,6 +14,7 @@ using YamlDotNet.Core;
|
||||
using YamlDotNet.Core.Events;
|
||||
using System.Globalization;
|
||||
using System.Linq;
|
||||
using Pipelines = GitHub.DistributedTask.Pipelines;
|
||||
|
||||
namespace GitHub.Runner.Worker
|
||||
{
|
||||
@@ -22,17 +23,18 @@ namespace GitHub.Runner.Worker
|
||||
{
|
||||
ActionDefinitionData Load(IExecutionContext executionContext, string manifestFile);
|
||||
|
||||
List<string> EvaluateContainerArguments(IExecutionContext executionContext, SequenceToken token, IDictionary<string, PipelineContextData> contextData);
|
||||
DictionaryContextData EvaluateCompositeOutputs(IExecutionContext executionContext, TemplateToken token, IDictionary<string, PipelineContextData> extraExpressionValues);
|
||||
|
||||
Dictionary<string, string> EvaluateContainerEnvironment(IExecutionContext executionContext, MappingToken token, IDictionary<string, PipelineContextData> contextData);
|
||||
List<string> EvaluateContainerArguments(IExecutionContext executionContext, SequenceToken token, IDictionary<string, PipelineContextData> extraExpressionValues);
|
||||
|
||||
string EvaluateDefaultInput(IExecutionContext executionContext, string inputName, TemplateToken token, IDictionary<string, PipelineContextData> contextData);
|
||||
Dictionary<string, string> EvaluateContainerEnvironment(IExecutionContext executionContext, MappingToken token, IDictionary<string, PipelineContextData> extraExpressionValues);
|
||||
|
||||
string EvaluateDefaultInput(IExecutionContext executionContext, string inputName, TemplateToken token);
|
||||
}
|
||||
|
||||
public sealed class ActionManifestManager : RunnerService, IActionManifestManager
|
||||
{
|
||||
private TemplateSchema _actionManifestSchema;
|
||||
|
||||
public override void Initialize(IHostContext hostContext)
|
||||
{
|
||||
base.Initialize(hostContext);
|
||||
@@ -53,22 +55,45 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
public ActionDefinitionData Load(IExecutionContext executionContext, string manifestFile)
|
||||
{
|
||||
var context = CreateContext(executionContext, null);
|
||||
var templateContext = CreateTemplateContext(executionContext);
|
||||
ActionDefinitionData actionDefinition = new ActionDefinitionData();
|
||||
|
||||
// Clean up file name real quick
|
||||
// Instead of using Regex which can be computationally expensive,
|
||||
// we can just remove the # of characters from the fileName according to the length of the basePath
|
||||
string basePath = HostContext.GetDirectory(WellKnownDirectory.Actions);
|
||||
string fileRelativePath = manifestFile;
|
||||
if (manifestFile.Contains(basePath))
|
||||
{
|
||||
fileRelativePath = manifestFile.Remove(0, basePath.Length + 1);
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
var token = default(TemplateToken);
|
||||
|
||||
// Get the file ID
|
||||
var fileId = context.GetFileId(manifestFile);
|
||||
var fileId = templateContext.GetFileId(fileRelativePath);
|
||||
|
||||
// Add this file to the FileTable in executionContext if it hasn't been added already
|
||||
// we use > since fileID is 1 indexed
|
||||
if (fileId > executionContext.Global.FileTable.Count)
|
||||
{
|
||||
executionContext.Global.FileTable.Add(fileRelativePath);
|
||||
}
|
||||
|
||||
// Read the file
|
||||
var fileContent = File.ReadAllText(manifestFile);
|
||||
using (var stringReader = new StringReader(fileContent))
|
||||
{
|
||||
var yamlObjectReader = new YamlObjectReader(null, stringReader);
|
||||
token = TemplateReader.Read(context, "action-root", yamlObjectReader, fileId, out _);
|
||||
var yamlObjectReader = new YamlObjectReader(fileId, stringReader);
|
||||
token = TemplateReader.Read(templateContext, "action-root", yamlObjectReader, fileId, out _);
|
||||
}
|
||||
|
||||
var actionMapping = token.AssertMapping("action manifest root");
|
||||
var actionOutputs = default(MappingToken);
|
||||
var actionRunValueToken = default(TemplateToken);
|
||||
|
||||
foreach (var actionPair in actionMapping)
|
||||
{
|
||||
var propertyName = actionPair.Key.AssertString($"action.yml property key");
|
||||
@@ -79,44 +104,56 @@ namespace GitHub.Runner.Worker
|
||||
actionDefinition.Name = actionPair.Value.AssertString("name").Value;
|
||||
break;
|
||||
|
||||
case "outputs":
|
||||
actionOutputs = actionPair.Value.AssertMapping("outputs");
|
||||
break;
|
||||
|
||||
case "description":
|
||||
actionDefinition.Description = actionPair.Value.AssertString("description").Value;
|
||||
break;
|
||||
|
||||
case "inputs":
|
||||
ConvertInputs(context, actionPair.Value, actionDefinition);
|
||||
ConvertInputs(actionPair.Value, actionDefinition);
|
||||
break;
|
||||
|
||||
case "runs":
|
||||
actionDefinition.Execution = ConvertRuns(context, actionPair.Value);
|
||||
// Defer runs token evaluation to after for loop to ensure that order of outputs doesn't matter.
|
||||
actionRunValueToken = actionPair.Value;
|
||||
break;
|
||||
|
||||
default:
|
||||
Trace.Info($"Ignore action property {propertyName}.");
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// Evaluate Runs Last
|
||||
if (actionRunValueToken != null)
|
||||
{
|
||||
actionDefinition.Execution = ConvertRuns(executionContext, templateContext, actionRunValueToken, fileRelativePath, actionOutputs);
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Error(ex);
|
||||
context.Errors.Add(ex);
|
||||
templateContext.Errors.Add(ex);
|
||||
}
|
||||
|
||||
if (context.Errors.Count > 0)
|
||||
if (templateContext.Errors.Count > 0)
|
||||
{
|
||||
foreach (var error in context.Errors)
|
||||
foreach (var error in templateContext.Errors)
|
||||
{
|
||||
Trace.Error($"Action.yml load error: {error.Message}");
|
||||
executionContext.Error(error.Message);
|
||||
}
|
||||
|
||||
throw new ArgumentException($"Fail to load {manifestFile}");
|
||||
throw new ArgumentException($"Fail to load {fileRelativePath}");
|
||||
}
|
||||
|
||||
if (actionDefinition.Execution == null)
|
||||
{
|
||||
executionContext.Debug($"Loaded action.yml file: {StringUtil.ConvertToJson(actionDefinition)}");
|
||||
throw new ArgumentException($"Top level 'runs:' section is required for {manifestFile}");
|
||||
throw new ArgumentException($"Top level 'runs:' section is required for {fileRelativePath}");
|
||||
}
|
||||
else
|
||||
{
|
||||
@@ -126,20 +163,47 @@ namespace GitHub.Runner.Worker
|
||||
return actionDefinition;
|
||||
}
|
||||
|
||||
public DictionaryContextData EvaluateCompositeOutputs(
|
||||
IExecutionContext executionContext,
|
||||
TemplateToken token,
|
||||
IDictionary<string, PipelineContextData> extraExpressionValues)
|
||||
{
|
||||
var result = default(DictionaryContextData);
|
||||
|
||||
if (token != null)
|
||||
{
|
||||
var templateContext = CreateTemplateContext(executionContext, extraExpressionValues);
|
||||
try
|
||||
{
|
||||
token = TemplateEvaluator.Evaluate(templateContext, "outputs", token, 0, null, omitHeader: true);
|
||||
templateContext.Errors.Check();
|
||||
result = token.ToContextData().AssertDictionary("composite outputs");
|
||||
}
|
||||
catch (Exception ex) when (!(ex is TemplateValidationException))
|
||||
{
|
||||
templateContext.Errors.Add(ex);
|
||||
}
|
||||
|
||||
templateContext.Errors.Check();
|
||||
}
|
||||
|
||||
return result ?? new DictionaryContextData();
|
||||
}
|
||||
|
||||
public List<string> EvaluateContainerArguments(
|
||||
IExecutionContext executionContext,
|
||||
SequenceToken token,
|
||||
IDictionary<string, PipelineContextData> contextData)
|
||||
IDictionary<string, PipelineContextData> extraExpressionValues)
|
||||
{
|
||||
var result = new List<string>();
|
||||
|
||||
if (token != null)
|
||||
{
|
||||
var context = CreateContext(executionContext, contextData);
|
||||
var templateContext = CreateTemplateContext(executionContext, extraExpressionValues);
|
||||
try
|
||||
{
|
||||
var evaluateResult = TemplateEvaluator.Evaluate(context, "container-runs-args", token, 0, null, omitHeader: true);
|
||||
context.Errors.Check();
|
||||
var evaluateResult = TemplateEvaluator.Evaluate(templateContext, "container-runs-args", token, 0, null, omitHeader: true);
|
||||
templateContext.Errors.Check();
|
||||
|
||||
Trace.Info($"Arguments evaluate result: {StringUtil.ConvertToJson(evaluateResult)}");
|
||||
|
||||
@@ -156,10 +220,10 @@ namespace GitHub.Runner.Worker
|
||||
catch (Exception ex) when (!(ex is TemplateValidationException))
|
||||
{
|
||||
Trace.Error(ex);
|
||||
context.Errors.Add(ex);
|
||||
templateContext.Errors.Add(ex);
|
||||
}
|
||||
|
||||
context.Errors.Check();
|
||||
templateContext.Errors.Check();
|
||||
}
|
||||
|
||||
return result;
|
||||
@@ -168,17 +232,17 @@ namespace GitHub.Runner.Worker
|
||||
public Dictionary<string, string> EvaluateContainerEnvironment(
|
||||
IExecutionContext executionContext,
|
||||
MappingToken token,
|
||||
IDictionary<string, PipelineContextData> contextData)
|
||||
IDictionary<string, PipelineContextData> extraExpressionValues)
|
||||
{
|
||||
var result = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
|
||||
|
||||
if (token != null)
|
||||
{
|
||||
var context = CreateContext(executionContext, contextData);
|
||||
var templateContext = CreateTemplateContext(executionContext, extraExpressionValues);
|
||||
try
|
||||
{
|
||||
var evaluateResult = TemplateEvaluator.Evaluate(context, "container-runs-env", token, 0, null, omitHeader: true);
|
||||
context.Errors.Check();
|
||||
var evaluateResult = TemplateEvaluator.Evaluate(templateContext, "container-runs-env", token, 0, null, omitHeader: true);
|
||||
templateContext.Errors.Check();
|
||||
|
||||
Trace.Info($"Environments evaluate result: {StringUtil.ConvertToJson(evaluateResult)}");
|
||||
|
||||
@@ -200,10 +264,10 @@ namespace GitHub.Runner.Worker
|
||||
catch (Exception ex) when (!(ex is TemplateValidationException))
|
||||
{
|
||||
Trace.Error(ex);
|
||||
context.Errors.Add(ex);
|
||||
templateContext.Errors.Add(ex);
|
||||
}
|
||||
|
||||
context.Errors.Check();
|
||||
templateContext.Errors.Check();
|
||||
}
|
||||
|
||||
return result;
|
||||
@@ -212,17 +276,16 @@ namespace GitHub.Runner.Worker
|
||||
public string EvaluateDefaultInput(
|
||||
IExecutionContext executionContext,
|
||||
string inputName,
|
||||
TemplateToken token,
|
||||
IDictionary<string, PipelineContextData> contextData)
|
||||
TemplateToken token)
|
||||
{
|
||||
string result = "";
|
||||
if (token != null)
|
||||
{
|
||||
var context = CreateContext(executionContext, contextData);
|
||||
var templateContext = CreateTemplateContext(executionContext);
|
||||
try
|
||||
{
|
||||
var evaluateResult = TemplateEvaluator.Evaluate(context, "input-default-context", token, 0, null, omitHeader: true);
|
||||
context.Errors.Check();
|
||||
var evaluateResult = TemplateEvaluator.Evaluate(templateContext, "input-default-context", token, 0, null, omitHeader: true);
|
||||
templateContext.Errors.Check();
|
||||
|
||||
Trace.Info($"Input '{inputName}': default value evaluate result: {StringUtil.ConvertToJson(evaluateResult)}");
|
||||
|
||||
@@ -232,18 +295,18 @@ namespace GitHub.Runner.Worker
|
||||
catch (Exception ex) when (!(ex is TemplateValidationException))
|
||||
{
|
||||
Trace.Error(ex);
|
||||
context.Errors.Add(ex);
|
||||
templateContext.Errors.Add(ex);
|
||||
}
|
||||
|
||||
context.Errors.Check();
|
||||
templateContext.Errors.Check();
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
private TemplateContext CreateContext(
|
||||
private TemplateContext CreateTemplateContext(
|
||||
IExecutionContext executionContext,
|
||||
IDictionary<string, PipelineContextData> contextData)
|
||||
IDictionary<string, PipelineContextData> extraExpressionValues = null)
|
||||
{
|
||||
var result = new TemplateContext
|
||||
{
|
||||
@@ -257,20 +320,42 @@ namespace GitHub.Runner.Worker
|
||||
TraceWriter = executionContext.ToTemplateTraceWriter(),
|
||||
};
|
||||
|
||||
if (contextData?.Count > 0)
|
||||
{
|
||||
foreach (var pair in contextData)
|
||||
// Expression values from execution context
|
||||
foreach (var pair in executionContext.ExpressionValues)
|
||||
{
|
||||
result.ExpressionValues[pair.Key] = pair.Value;
|
||||
}
|
||||
|
||||
// Extra expression values
|
||||
if (extraExpressionValues?.Count > 0)
|
||||
{
|
||||
foreach (var pair in extraExpressionValues)
|
||||
{
|
||||
result.ExpressionValues[pair.Key] = pair.Value;
|
||||
}
|
||||
}
|
||||
|
||||
// Expression functions from execution context
|
||||
foreach (var item in executionContext.ExpressionFunctions)
|
||||
{
|
||||
result.ExpressionFunctions.Add(item);
|
||||
}
|
||||
|
||||
// Add the file table from the Execution Context
|
||||
for (var i = 0; i < executionContext.Global.FileTable.Count; i++)
|
||||
{
|
||||
result.GetFileId(executionContext.Global.FileTable[i]);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
private ActionExecutionData ConvertRuns(
|
||||
TemplateContext context,
|
||||
TemplateToken inputsToken)
|
||||
IExecutionContext executionContext,
|
||||
TemplateContext templateContext,
|
||||
TemplateToken inputsToken,
|
||||
String fileRelativePath,
|
||||
MappingToken outputs = null)
|
||||
{
|
||||
var runsMapping = inputsToken.AssertMapping("runs");
|
||||
var usingToken = default(StringToken);
|
||||
@@ -280,9 +365,14 @@ namespace GitHub.Runner.Worker
|
||||
var envToken = default(MappingToken);
|
||||
var mainToken = default(StringToken);
|
||||
var pluginToken = default(StringToken);
|
||||
var preToken = default(StringToken);
|
||||
var preEntrypointToken = default(StringToken);
|
||||
var preIfToken = default(StringToken);
|
||||
var postToken = default(StringToken);
|
||||
var postEntrypointToken = default(StringToken);
|
||||
var postIfToken = default(StringToken);
|
||||
var steps = default(List<Pipelines.Step>);
|
||||
|
||||
foreach (var run in runsMapping)
|
||||
{
|
||||
var runsKey = run.Key.AssertString("runs key").Value;
|
||||
@@ -318,6 +408,20 @@ namespace GitHub.Runner.Worker
|
||||
case "post-if":
|
||||
postIfToken = run.Value.AssertString("post-if");
|
||||
break;
|
||||
case "pre":
|
||||
preToken = run.Value.AssertString("pre");
|
||||
break;
|
||||
case "pre-entrypoint":
|
||||
preEntrypointToken = run.Value.AssertString("pre-entrypoint");
|
||||
break;
|
||||
case "pre-if":
|
||||
preIfToken = run.Value.AssertString("pre-if");
|
||||
break;
|
||||
case "steps":
|
||||
var stepsToken = run.Value.AssertSequence("steps");
|
||||
steps = PipelineTemplateConverter.ConvertToSteps(templateContext, stepsToken);
|
||||
templateContext.Errors.Check();
|
||||
break;
|
||||
default:
|
||||
Trace.Info($"Ignore run property {runsKey}.");
|
||||
break;
|
||||
@@ -330,7 +434,7 @@ namespace GitHub.Runner.Worker
|
||||
{
|
||||
if (string.IsNullOrEmpty(imageToken?.Value))
|
||||
{
|
||||
throw new ArgumentNullException($"Image is not provided.");
|
||||
throw new ArgumentNullException($"You are using a Container Action but an image is not provided in {fileRelativePath}.");
|
||||
}
|
||||
else
|
||||
{
|
||||
@@ -340,7 +444,9 @@ namespace GitHub.Runner.Worker
|
||||
Arguments = argsToken,
|
||||
EntryPoint = entrypointToken?.Value,
|
||||
Environment = envToken,
|
||||
Cleanup = postEntrypointToken?.Value,
|
||||
Pre = preEntrypointToken?.Value,
|
||||
InitCondition = preIfToken?.Value ?? "always()",
|
||||
Post = postEntrypointToken?.Value,
|
||||
CleanupCondition = postIfToken?.Value ?? "always()"
|
||||
};
|
||||
}
|
||||
@@ -349,18 +455,35 @@ namespace GitHub.Runner.Worker
|
||||
{
|
||||
if (string.IsNullOrEmpty(mainToken?.Value))
|
||||
{
|
||||
throw new ArgumentNullException($"Entry javascript fils is not provided.");
|
||||
throw new ArgumentNullException($"You are using a JavaScript Action but there is not an entry JavaScript file provided in {fileRelativePath}.");
|
||||
}
|
||||
else
|
||||
{
|
||||
return new NodeJSActionExecutionData()
|
||||
{
|
||||
Script = mainToken.Value,
|
||||
Cleanup = postToken?.Value,
|
||||
Pre = preToken?.Value,
|
||||
InitCondition = preIfToken?.Value ?? "always()",
|
||||
Post = postToken?.Value,
|
||||
CleanupCondition = postIfToken?.Value ?? "always()"
|
||||
};
|
||||
}
|
||||
}
|
||||
else if (string.Equals(usingToken.Value, "composite", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
if (steps == null)
|
||||
{
|
||||
throw new ArgumentNullException($"You are using a composite action but there are no steps provided in {fileRelativePath}.");
|
||||
}
|
||||
else
|
||||
{
|
||||
return new CompositeActionExecutionData()
|
||||
{
|
||||
Steps = steps.Cast<Pipelines.ActionStep>().ToList(),
|
||||
Outputs = outputs
|
||||
};
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new ArgumentOutOfRangeException($"'using: {usingToken.Value}' is not supported, use 'docker' or 'node12' instead.");
|
||||
@@ -378,7 +501,6 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
|
||||
private void ConvertInputs(
|
||||
TemplateContext context,
|
||||
TemplateToken inputsToken,
|
||||
ActionDefinitionData actionDefinition)
|
||||
{
|
||||
@@ -415,566 +537,5 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Converts a YAML file into a TemplateToken
|
||||
/// </summary>
|
||||
internal sealed class YamlObjectReader : IObjectReader
|
||||
{
|
||||
internal YamlObjectReader(
|
||||
Int32? fileId,
|
||||
TextReader input)
|
||||
{
|
||||
m_fileId = fileId;
|
||||
m_parser = new Parser(input);
|
||||
}
|
||||
|
||||
public Boolean AllowLiteral(out LiteralToken value)
|
||||
{
|
||||
if (EvaluateCurrent() is Scalar scalar)
|
||||
{
|
||||
// Tag specified
|
||||
if (!string.IsNullOrEmpty(scalar.Tag))
|
||||
{
|
||||
// String tag
|
||||
if (string.Equals(scalar.Tag, c_stringTag, StringComparison.Ordinal))
|
||||
{
|
||||
value = new StringToken(m_fileId, scalar.Start.Line, scalar.Start.Column, scalar.Value);
|
||||
MoveNext();
|
||||
return true;
|
||||
}
|
||||
|
||||
// Not plain style
|
||||
if (scalar.Style != ScalarStyle.Plain)
|
||||
{
|
||||
throw new NotSupportedException($"The scalar style '{scalar.Style}' on line {scalar.Start.Line} and column {scalar.Start.Column} is not valid with the tag '{scalar.Tag}'");
|
||||
}
|
||||
|
||||
// Boolean, Float, Integer, or Null
|
||||
switch (scalar.Tag)
|
||||
{
|
||||
case c_booleanTag:
|
||||
value = ParseBoolean(scalar);
|
||||
break;
|
||||
case c_floatTag:
|
||||
value = ParseFloat(scalar);
|
||||
break;
|
||||
case c_integerTag:
|
||||
value = ParseInteger(scalar);
|
||||
break;
|
||||
case c_nullTag:
|
||||
value = ParseNull(scalar);
|
||||
break;
|
||||
default:
|
||||
throw new NotSupportedException($"Unexpected tag '{scalar.Tag}'");
|
||||
}
|
||||
|
||||
MoveNext();
|
||||
return true;
|
||||
}
|
||||
|
||||
// Plain style, determine type using YAML 1.2 "core" schema https://yaml.org/spec/1.2/spec.html#id2804923
|
||||
if (scalar.Style == ScalarStyle.Plain)
|
||||
{
|
||||
if (MatchNull(scalar, out var nullToken))
|
||||
{
|
||||
value = nullToken;
|
||||
}
|
||||
else if (MatchBoolean(scalar, out var booleanToken))
|
||||
{
|
||||
value = booleanToken;
|
||||
}
|
||||
else if (MatchInteger(scalar, out var numberToken) ||
|
||||
MatchFloat(scalar, out numberToken))
|
||||
{
|
||||
value = numberToken;
|
||||
}
|
||||
else
|
||||
{
|
||||
value = new StringToken(m_fileId, scalar.Start.Line, scalar.Start.Column, scalar.Value);
|
||||
}
|
||||
|
||||
MoveNext();
|
||||
return true;
|
||||
}
|
||||
|
||||
// Otherwise assume string
|
||||
value = new StringToken(m_fileId, scalar.Start.Line, scalar.Start.Column, scalar.Value);
|
||||
MoveNext();
|
||||
return true;
|
||||
}
|
||||
|
||||
value = default;
|
||||
return false;
|
||||
}
|
||||
|
||||
public Boolean AllowSequenceStart(out SequenceToken value)
|
||||
{
|
||||
if (EvaluateCurrent() is SequenceStart sequenceStart)
|
||||
{
|
||||
value = new SequenceToken(m_fileId, sequenceStart.Start.Line, sequenceStart.Start.Column);
|
||||
MoveNext();
|
||||
return true;
|
||||
}
|
||||
|
||||
value = default;
|
||||
return false;
|
||||
}
|
||||
|
||||
public Boolean AllowSequenceEnd()
|
||||
{
|
||||
if (EvaluateCurrent() is SequenceEnd)
|
||||
{
|
||||
MoveNext();
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
public Boolean AllowMappingStart(out MappingToken value)
|
||||
{
|
||||
if (EvaluateCurrent() is MappingStart mappingStart)
|
||||
{
|
||||
value = new MappingToken(m_fileId, mappingStart.Start.Line, mappingStart.Start.Column);
|
||||
MoveNext();
|
||||
return true;
|
||||
}
|
||||
|
||||
value = default;
|
||||
return false;
|
||||
}
|
||||
|
||||
public Boolean AllowMappingEnd()
|
||||
{
|
||||
if (EvaluateCurrent() is MappingEnd)
|
||||
{
|
||||
MoveNext();
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Consumes the last parsing events, which are expected to be DocumentEnd and StreamEnd.
|
||||
/// </summary>
|
||||
public void ValidateEnd()
|
||||
{
|
||||
if (EvaluateCurrent() is DocumentEnd)
|
||||
{
|
||||
MoveNext();
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new InvalidOperationException("Expected document end parse event");
|
||||
}
|
||||
|
||||
if (EvaluateCurrent() is StreamEnd)
|
||||
{
|
||||
MoveNext();
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new InvalidOperationException("Expected stream end parse event");
|
||||
}
|
||||
|
||||
if (MoveNext())
|
||||
{
|
||||
throw new InvalidOperationException("Expected end of parse events");
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Consumes the first parsing events, which are expected to be StreamStart and DocumentStart.
|
||||
/// </summary>
|
||||
public void ValidateStart()
|
||||
{
|
||||
if (EvaluateCurrent() != null)
|
||||
{
|
||||
throw new InvalidOperationException("Unexpected parser state");
|
||||
}
|
||||
|
||||
if (!MoveNext())
|
||||
{
|
||||
throw new InvalidOperationException("Expected a parse event");
|
||||
}
|
||||
|
||||
if (EvaluateCurrent() is StreamStart)
|
||||
{
|
||||
MoveNext();
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new InvalidOperationException("Expected stream start parse event");
|
||||
}
|
||||
|
||||
if (EvaluateCurrent() is DocumentStart)
|
||||
{
|
||||
MoveNext();
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new InvalidOperationException("Expected document start parse event");
|
||||
}
|
||||
}
|
||||
|
||||
private ParsingEvent EvaluateCurrent()
|
||||
{
|
||||
if (m_current == null)
|
||||
{
|
||||
m_current = m_parser.Current;
|
||||
if (m_current != null)
|
||||
{
|
||||
if (m_current is Scalar scalar)
|
||||
{
|
||||
// Verify not using anchors
|
||||
if (scalar.Anchor != null)
|
||||
{
|
||||
throw new InvalidOperationException($"Anchors are not currently supported. Remove the anchor '{scalar.Anchor}'");
|
||||
}
|
||||
}
|
||||
else if (m_current is MappingStart mappingStart)
|
||||
{
|
||||
// Verify not using anchors
|
||||
if (mappingStart.Anchor != null)
|
||||
{
|
||||
throw new InvalidOperationException($"Anchors are not currently supported. Remove the anchor '{mappingStart.Anchor}'");
|
||||
}
|
||||
}
|
||||
else if (m_current is SequenceStart sequenceStart)
|
||||
{
|
||||
// Verify not using anchors
|
||||
if (sequenceStart.Anchor != null)
|
||||
{
|
||||
throw new InvalidOperationException($"Anchors are not currently supported. Remove the anchor '{sequenceStart.Anchor}'");
|
||||
}
|
||||
}
|
||||
else if (!(m_current is MappingEnd) &&
|
||||
!(m_current is SequenceEnd) &&
|
||||
!(m_current is DocumentStart) &&
|
||||
!(m_current is DocumentEnd) &&
|
||||
!(m_current is StreamStart) &&
|
||||
!(m_current is StreamEnd))
|
||||
{
|
||||
throw new InvalidOperationException($"Unexpected parsing event type: {m_current.GetType().Name}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return m_current;
|
||||
}
|
||||
|
||||
private Boolean MoveNext()
|
||||
{
|
||||
m_current = null;
|
||||
return m_parser.MoveNext();
|
||||
}
|
||||
|
||||
private BooleanToken ParseBoolean(Scalar scalar)
|
||||
{
|
||||
if (MatchBoolean(scalar, out var token))
|
||||
{
|
||||
return token;
|
||||
}
|
||||
|
||||
ThrowInvalidValue(scalar, c_booleanTag); // throws
|
||||
return default;
|
||||
}
|
||||
|
||||
private NumberToken ParseFloat(Scalar scalar)
|
||||
{
|
||||
if (MatchFloat(scalar, out var token))
|
||||
{
|
||||
return token;
|
||||
}
|
||||
|
||||
ThrowInvalidValue(scalar, c_floatTag); // throws
|
||||
return default;
|
||||
}
|
||||
|
||||
private NumberToken ParseInteger(Scalar scalar)
|
||||
{
|
||||
if (MatchInteger(scalar, out var token))
|
||||
{
|
||||
return token;
|
||||
}
|
||||
|
||||
ThrowInvalidValue(scalar, c_integerTag); // throws
|
||||
return default;
|
||||
}
|
||||
|
||||
private NullToken ParseNull(Scalar scalar)
|
||||
{
|
||||
if (MatchNull(scalar, out var token))
|
||||
{
|
||||
return token;
|
||||
}
|
||||
|
||||
ThrowInvalidValue(scalar, c_nullTag); // throws
|
||||
return default;
|
||||
}
|
||||
|
||||
private Boolean MatchBoolean(
|
||||
Scalar scalar,
|
||||
out BooleanToken value)
|
||||
{
|
||||
// YAML 1.2 "core" schema https://yaml.org/spec/1.2/spec.html#id2804923
|
||||
switch (scalar.Value ?? string.Empty)
|
||||
{
|
||||
case "true":
|
||||
case "True":
|
||||
case "TRUE":
|
||||
value = new BooleanToken(m_fileId, scalar.Start.Line, scalar.Start.Column, true);
|
||||
return true;
|
||||
case "false":
|
||||
case "False":
|
||||
case "FALSE":
|
||||
value = new BooleanToken(m_fileId, scalar.Start.Line, scalar.Start.Column, false);
|
||||
return true;
|
||||
}
|
||||
|
||||
value = default;
|
||||
return false;
|
||||
}
|
||||
|
||||
private Boolean MatchFloat(
|
||||
Scalar scalar,
|
||||
out NumberToken value)
|
||||
{
|
||||
// YAML 1.2 "core" schema https://yaml.org/spec/1.2/spec.html#id2804923
|
||||
var str = scalar.Value;
|
||||
if (!string.IsNullOrEmpty(str))
|
||||
{
|
||||
// Check for [-+]?(\.inf|\.Inf|\.INF)|\.nan|\.NaN|\.NAN
|
||||
switch (str)
|
||||
{
|
||||
case ".inf":
|
||||
case ".Inf":
|
||||
case ".INF":
|
||||
case "+.inf":
|
||||
case "+.Inf":
|
||||
case "+.INF":
|
||||
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, Double.PositiveInfinity);
|
||||
return true;
|
||||
case "-.inf":
|
||||
case "-.Inf":
|
||||
case "-.INF":
|
||||
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, Double.NegativeInfinity);
|
||||
return true;
|
||||
case ".nan":
|
||||
case ".NaN":
|
||||
case ".NAN":
|
||||
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, Double.NaN);
|
||||
return true;
|
||||
}
|
||||
|
||||
// Otherwise check [-+]?(\.[0-9]+|[0-9]+(\.[0-9]*)?)([eE][-+]?[0-9]+)?
|
||||
|
||||
// Skip leading sign
|
||||
var index = str[0] == '-' || str[0] == '+' ? 1 : 0;
|
||||
|
||||
// Check for integer portion
|
||||
var length = str.Length;
|
||||
var hasInteger = false;
|
||||
while (index < length && str[index] >= '0' && str[index] <= '9')
|
||||
{
|
||||
hasInteger = true;
|
||||
index++;
|
||||
}
|
||||
|
||||
// Check for decimal point
|
||||
var hasDot = false;
|
||||
if (index < length && str[index] == '.')
|
||||
{
|
||||
hasDot = true;
|
||||
index++;
|
||||
}
|
||||
|
||||
// Check for decimal portion
|
||||
var hasDecimal = false;
|
||||
while (index < length && str[index] >= '0' && str[index] <= '9')
|
||||
{
|
||||
hasDecimal = true;
|
||||
index++;
|
||||
}
|
||||
|
||||
// Check [-+]?(\.[0-9]+|[0-9]+(\.[0-9]*)?)
|
||||
if ((hasDot && hasDecimal) || hasInteger)
|
||||
{
|
||||
// Check for end
|
||||
if (index == length)
|
||||
{
|
||||
// Try parse
|
||||
if (Double.TryParse(str, NumberStyles.AllowLeadingSign | NumberStyles.AllowDecimalPoint, CultureInfo.InvariantCulture, out var doubleValue))
|
||||
{
|
||||
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, doubleValue);
|
||||
return true;
|
||||
}
|
||||
// Otherwise exceeds range
|
||||
else
|
||||
{
|
||||
ThrowInvalidValue(scalar, c_floatTag); // throws
|
||||
}
|
||||
}
|
||||
// Check [eE][-+]?[0-9]
|
||||
else if (index < length && (str[index] == 'e' || str[index] == 'E'))
|
||||
{
|
||||
index++;
|
||||
|
||||
// Skip sign
|
||||
if (index < length && (str[index] == '-' || str[index] == '+'))
|
||||
{
|
||||
index++;
|
||||
}
|
||||
|
||||
// Check for exponent
|
||||
var hasExponent = false;
|
||||
while (index < length && str[index] >= '0' && str[index] <= '9')
|
||||
{
|
||||
hasExponent = true;
|
||||
index++;
|
||||
}
|
||||
|
||||
// Check for end
|
||||
if (hasExponent && index == length)
|
||||
{
|
||||
// Try parse
|
||||
if (Double.TryParse(str, NumberStyles.AllowLeadingSign | NumberStyles.AllowDecimalPoint | NumberStyles.AllowExponent, CultureInfo.InvariantCulture, out var doubleValue))
|
||||
{
|
||||
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, (Double)doubleValue);
|
||||
return true;
|
||||
}
|
||||
// Otherwise exceeds range
|
||||
else
|
||||
{
|
||||
ThrowInvalidValue(scalar, c_floatTag); // throws
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
value = default;
|
||||
return false;
|
||||
}
|
||||
|
||||
private Boolean MatchInteger(
|
||||
Scalar scalar,
|
||||
out NumberToken value)
|
||||
{
|
||||
// YAML 1.2 "core" schema https://yaml.org/spec/1.2/spec.html#id2804923
|
||||
var str = scalar.Value;
|
||||
if (!string.IsNullOrEmpty(str))
|
||||
{
|
||||
// Check for [0-9]+
|
||||
var firstChar = str[0];
|
||||
if (firstChar >= '0' && firstChar <= '9' &&
|
||||
str.Skip(1).All(x => x >= '0' && x <= '9'))
|
||||
{
|
||||
// Try parse
|
||||
if (Double.TryParse(str, NumberStyles.None, CultureInfo.InvariantCulture, out var doubleValue))
|
||||
{
|
||||
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, doubleValue);
|
||||
return true;
|
||||
}
|
||||
|
||||
// Otherwise exceeds range
|
||||
ThrowInvalidValue(scalar, c_integerTag); // throws
|
||||
}
|
||||
// Check for (-|+)[0-9]+
|
||||
else if ((firstChar == '-' || firstChar == '+') &&
|
||||
str.Length > 1 &&
|
||||
str.Skip(1).All(x => x >= '0' && x <= '9'))
|
||||
{
|
||||
// Try parse
|
||||
if (Double.TryParse(str, NumberStyles.AllowLeadingSign, CultureInfo.InvariantCulture, out var doubleValue))
|
||||
{
|
||||
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, doubleValue);
|
||||
return true;
|
||||
}
|
||||
|
||||
// Otherwise exceeds range
|
||||
ThrowInvalidValue(scalar, c_integerTag); // throws
|
||||
}
|
||||
// Check for 0x[0-9a-fA-F]+
|
||||
else if (firstChar == '0' &&
|
||||
str.Length > 2 &&
|
||||
str[1] == 'x' &&
|
||||
str.Skip(2).All(x => (x >= '0' && x <= '9') || (x >= 'a' && x <= 'f') || (x >= 'A' && x <= 'F')))
|
||||
{
|
||||
// Try parse
|
||||
if (Int32.TryParse(str.Substring(2), NumberStyles.AllowHexSpecifier, CultureInfo.InvariantCulture, out var integerValue))
|
||||
{
|
||||
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, integerValue);
|
||||
return true;
|
||||
}
|
||||
|
||||
// Otherwise exceeds range
|
||||
ThrowInvalidValue(scalar, c_integerTag); // throws
|
||||
}
|
||||
// Check for 0o[0-9]+
|
||||
else if (firstChar == '0' &&
|
||||
str.Length > 2 &&
|
||||
str[1] == 'o' &&
|
||||
str.Skip(2).All(x => x >= '0' && x <= '7'))
|
||||
{
|
||||
// Try parse
|
||||
var integerValue = default(Int32);
|
||||
try
|
||||
{
|
||||
integerValue = Convert.ToInt32(str.Substring(2), 8);
|
||||
}
|
||||
// Otherwise exceeds range
|
||||
catch (Exception)
|
||||
{
|
||||
ThrowInvalidValue(scalar, c_integerTag); // throws
|
||||
}
|
||||
|
||||
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, integerValue);
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
value = default;
|
||||
return false;
|
||||
}
|
||||
|
||||
private Boolean MatchNull(
|
||||
Scalar scalar,
|
||||
out NullToken value)
|
||||
{
|
||||
// YAML 1.2 "core" schema https://yaml.org/spec/1.2/spec.html#id2804923
|
||||
switch (scalar.Value ?? string.Empty)
|
||||
{
|
||||
case "":
|
||||
case "null":
|
||||
case "Null":
|
||||
case "NULL":
|
||||
case "~":
|
||||
value = new NullToken(m_fileId, scalar.Start.Line, scalar.Start.Column);
|
||||
return true;
|
||||
}
|
||||
|
||||
value = default;
|
||||
return false;
|
||||
}
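To make the plain-scalar rules concrete, here is a small standalone sketch (not the runner's code) that classifies example strings in the same order as the matchers above: null, then boolean, then integer, then float, with everything else falling back to string. Hex and octal integer forms and the .inf/.nan floats are omitted for brevity.

using System;
using System.Globalization;
using System.Linq;

static class CoreSchemaSketch
{
    // Rough classification of a plain YAML scalar per the YAML 1.2 "core" schema.
    static string Classify(string s)
    {
        if (s.Length == 0 || s == "~" || s == "null" || s == "Null" || s == "NULL") return "null";
        if (new[] { "true", "True", "TRUE", "false", "False", "FALSE" }.Contains(s)) return "boolean";
        if (s.All(char.IsDigit) ||
            ((s[0] == '-' || s[0] == '+') && s.Length > 1 && s.Skip(1).All(char.IsDigit))) return "integer";
        if (double.TryParse(s, NumberStyles.Float, CultureInfo.InvariantCulture, out _)) return "float";
        return "string";
    }

    static void Main()
    {
        foreach (var s in new[] { "", "~", "True", "12", "-3.5e2", "on", "v1.2.3" })
        {
            Console.WriteLine($"'{s}' => {Classify(s)}");
        }
    }
}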
|
||||
|
||||
private void ThrowInvalidValue(
|
||||
Scalar scalar,
|
||||
String tag)
|
||||
{
|
||||
throw new NotSupportedException($"The value '{scalar.Value}' on line {scalar.Start.Line} and column {scalar.Start.Column} is invalid for the type '{scalar.Tag}'");
|
||||
}
|
||||
|
||||
private const String c_booleanTag = "tag:yaml.org,2002:bool";
|
||||
private const String c_floatTag = "tag:yaml.org,2002:float";
|
||||
private const String c_integerTag = "tag:yaml.org,2002:int";
|
||||
private const String c_nullTag = "tag:yaml.org,2002:null";
|
||||
private const String c_stringTag = "tag:yaml.org,2002:string";
|
||||
private readonly Int32? m_fileId;
|
||||
private readonly Parser m_parser;
|
||||
private ParsingEvent m_current;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
src/Runner.Worker/ActionNotFoundException.cs (new file, 33 lines)
@@ -0,0 +1,33 @@
using System;
using System.Runtime.Serialization;

namespace GitHub.Runner.Worker
{
public class ActionNotFoundException : Exception
{
public ActionNotFoundException(Uri actionUri)
: base(FormatMessage(actionUri))
{
}

public ActionNotFoundException(string message)
: base(message)
{
}

public ActionNotFoundException(string message, System.Exception inner)
: base(message, inner)
{
}

protected ActionNotFoundException(SerializationInfo info, StreamingContext context)
: base(info, context)
{
}

private static string FormatMessage(Uri actionUri)
{
return $"An action could not be found at the URI '{actionUri}'";
}
}
}
|
||||
@@ -18,6 +18,7 @@ namespace GitHub.Runner.Worker
|
||||
{
|
||||
public enum ActionRunStage
|
||||
{
|
||||
Pre,
|
||||
Main,
|
||||
Post,
|
||||
}
|
||||
@@ -26,7 +27,7 @@ namespace GitHub.Runner.Worker
|
||||
public interface IActionRunner : IStep, IRunnerService
|
||||
{
|
||||
ActionRunStage Stage { get; set; }
|
||||
Boolean TryEvaluateDisplayName(DictionaryContextData contextData, IExecutionContext context);
|
||||
bool TryEvaluateDisplayName(DictionaryContextData contextData, IExecutionContext context);
|
||||
Pipelines.ActionStep Action { get; set; }
|
||||
}
|
||||
|
||||
@@ -81,20 +82,25 @@ namespace GitHub.Runner.Worker
|
||||
ActionExecutionData handlerData = definition.Data?.Execution;
|
||||
ArgUtil.NotNull(handlerData, nameof(handlerData));
|
||||
|
||||
// The action has post cleanup defined.
|
||||
// we need to create timeline record for them and add them to the step list that StepRunner is using
|
||||
if (handlerData.HasCleanup && Stage == ActionRunStage.Main)
|
||||
if (handlerData.HasPre &&
|
||||
Action.Reference is Pipelines.RepositoryPathReference repoAction &&
|
||||
string.Equals(repoAction.RepositoryType, Pipelines.PipelineConstants.SelfAlias, StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
string postDisplayName = null;
|
||||
if (this.DisplayName.StartsWith(PipelineTemplateConstants.RunDisplayPrefix))
|
||||
{
|
||||
postDisplayName = $"Post {this.DisplayName.Substring(PipelineTemplateConstants.RunDisplayPrefix.Length)}";
|
||||
}
|
||||
else
|
||||
{
|
||||
postDisplayName = $"Post {this.DisplayName}";
|
||||
ExecutionContext.Warning($"`pre` execution is not supported for local action from '{repoAction.Path}'");
|
||||
}
|
||||
|
||||
// The action has post cleanup defined.
|
||||
// we need to create timeline record for them and add them to the step list that StepRunner is using
|
||||
if (handlerData.HasPost && (Stage == ActionRunStage.Pre || Stage == ActionRunStage.Main))
|
||||
{
|
||||
string postDisplayName = $"Post {this.DisplayName}";
|
||||
if (Stage == ActionRunStage.Pre &&
|
||||
this.DisplayName.StartsWith("Pre ", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
// Trim the leading `Pre ` from the display name.
|
||||
// Otherwise, we will get `Post Pre xxx` as DisplayName for the Post step.
|
||||
postDisplayName = $"Post {this.DisplayName.Substring("Pre ".Length)}";
|
||||
}
|
||||
var repositoryReference = Action.Reference as RepositoryPathReference;
|
||||
var pathString = string.IsNullOrEmpty(repositoryReference.Path) ? string.Empty : $"/{repositoryReference.Path}";
|
||||
var repoString = string.IsNullOrEmpty(repositoryReference.Ref) ? $"{repositoryReference.Name}{pathString}" :
|
||||
@@ -108,7 +114,7 @@ namespace GitHub.Runner.Worker
|
||||
actionRunner.Condition = handlerData.CleanupCondition;
|
||||
actionRunner.DisplayName = postDisplayName;
|
||||
|
||||
ExecutionContext.RegisterPostJobStep($"{actionRunner.Action.Name}_post", actionRunner);
|
||||
ExecutionContext.RegisterPostJobStep(actionRunner);
|
||||
}
|
||||
|
||||
IStepHost stepHost = HostContext.CreateService<IDefaultStepHost>();
|
||||
@@ -129,25 +135,42 @@ namespace GitHub.Runner.Worker
ExecutionContext.SetGitHubContext("event_path", workflowFile);
}

// Set GITHUB_ACTION_REPOSITORY if this Action is from a repository
if (Action.Reference is Pipelines.RepositoryPathReference repoPathReferenceAction &&
!string.Equals(repoPathReferenceAction.RepositoryType, Pipelines.PipelineConstants.SelfAlias, StringComparison.OrdinalIgnoreCase))
{
ExecutionContext.SetGitHubContext("action_repository", repoPathReferenceAction.Name);
ExecutionContext.SetGitHubContext("action_ref", repoPathReferenceAction.Ref);
}
else
{
ExecutionContext.SetGitHubContext("action_repository", null);
ExecutionContext.SetGitHubContext("action_ref", null);
}

// Setup container stephost for running inside the container.
if (ExecutionContext.Container != null)
if (ExecutionContext.Global.Container != null)
{
// Make sure required container is already created.
ArgUtil.NotNullOrEmpty(ExecutionContext.Container.ContainerId, nameof(ExecutionContext.Container.ContainerId));
ArgUtil.NotNullOrEmpty(ExecutionContext.Global.Container.ContainerId, nameof(ExecutionContext.Global.Container.ContainerId));
var containerStepHost = HostContext.CreateService<IContainerStepHost>();
containerStepHost.Container = ExecutionContext.Container;
containerStepHost.Container = ExecutionContext.Global.Container;
stepHost = containerStepHost;
}

// Setup File Command Manager
var fileCommandManager = HostContext.CreateService<IFileCommandManager>();
fileCommandManager.InitializeFiles(ExecutionContext, null);

// Load the inputs.
ExecutionContext.Debug("Loading inputs");
var templateTrace = ExecutionContext.ToTemplateTraceWriter();
var schema = new PipelineTemplateSchemaFactory().CreateSchema();
var templateEvaluator = new PipelineTemplateEvaluator(templateTrace, schema);
var inputs = templateEvaluator.EvaluateStepInputs(Action.Inputs, ExecutionContext.ExpressionValues);
var templateEvaluator = ExecutionContext.ToPipelineTemplateEvaluator();
var inputs = templateEvaluator.EvaluateStepInputs(Action.Inputs, ExecutionContext.ExpressionValues, ExecutionContext.ExpressionFunctions);

var userInputs = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
foreach (KeyValuePair<string, string> input in inputs)
{
userInputs.Add(input.Key);
string message = "";
if (definition.Data?.Deprecated?.TryGetValue(input.Key, out message) == true)
{
@@ -155,24 +178,45 @@ namespace GitHub.Runner.Worker
}
}

var validInputs = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
if (handlerData.ExecutionType == ActionExecutionType.Container)
{
// container action always accept 'entryPoint' and 'args' as inputs
// https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions#jobsjob_idstepswithargs
validInputs.Add("entryPoint");
validInputs.Add("args");
}
// Merge the default inputs from the definition
if (definition.Data?.Inputs != null)
{
var manifestManager = HostContext.GetService<IActionManifestManager>();
foreach (var input in (definition.Data?.Inputs))
foreach (var input in definition.Data.Inputs)
{
string key = input.Key.AssertString("action input name").Value;
validInputs.Add(key);
if (!inputs.ContainsKey(key))
{
var evaluateContext = new Dictionary<string, PipelineContextData>(StringComparer.OrdinalIgnoreCase);
foreach (var data in ExecutionContext.ExpressionValues)
{
evaluateContext[data.Key] = data.Value;
inputs[key] = manifestManager.EvaluateDefaultInput(ExecutionContext, key, input.Value);
}
}
}

inputs[key] = manifestManager.EvaluateDefaultInput(ExecutionContext, key, input.Value, evaluateContext);
// Validate inputs only for actions with action.yml
if (Action.Reference.Type == Pipelines.ActionSourceType.Repository)
{
var unexpectedInputs = new List<string>();
foreach (var input in userInputs)
{
if (!validInputs.Contains(input))
{
unexpectedInputs.Add(input);
}
}

if (unexpectedInputs.Count > 0)
{
ExecutionContext.Warning($"Unexpected input(s) '{string.Join("', '", unexpectedInputs)}', valid inputs are ['{string.Join("', '", validInputs)}']");
}
}

// Load the action environment.
@@ -204,15 +248,23 @@ namespace GitHub.Runner.Worker
handlerData,
inputs,
environment,
ExecutionContext.Variables,
ExecutionContext.Global.Variables,
actionDirectory: definition.Directory);

// Print out action details
handler.PrintActionDetails(Stage);

// Run the task.
try
{
await handler.RunAsync(Stage);
}
finally
{
fileCommandManager.ProcessFiles(ExecutionContext, ExecutionContext.Global.Container);
}

}

public bool TryEvaluateDisplayName(DictionaryContextData contextData, IExecutionContext context)
{
@@ -295,11 +347,14 @@ namespace GitHub.Runner.Worker
return displayName;
}
// Try evaluating fully
var schema = new PipelineTemplateSchemaFactory().CreateSchema();
var templateEvaluator = new PipelineTemplateEvaluator(context.ToTemplateTraceWriter(), schema);
try
{
didFullyEvaluate = templateEvaluator.TryEvaluateStepDisplayName(tokenToParse, contextData, out displayName);
if (tokenToParse.CheckHasRequiredContext(contextData, context.ExpressionFunctions))
{
var templateEvaluator = context.ToPipelineTemplateEvaluator();
displayName = templateEvaluator.EvaluateStepDisplayName(tokenToParse, contextData, context.ExpressionFunctions);
didFullyEvaluate = true;
}
}
catch (TemplateValidationException e)
{
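As a reading aid for the input handling in the hunks above (a standalone sketch, not part of the diff): defaults from the action manifest are merged for any input the workflow did not supply, and inputs the workflow supplies that the manifest does not declare only produce a warning. The dictionaries below are hypothetical stand-ins for the runner's own types.

using System;
using System.Collections.Generic;
using System.Linq;

class InputMergeSketch
{
    static void Main()
    {
        // Hypothetical data: what the workflow passed vs. what action.yml declares.
        var userInputs = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
        {
            ["path"] = "src",
            ["colour"] = "red",            // not declared by the action
        };
        var declaredDefaults = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
        {
            ["path"] = ".",
            ["fetch-depth"] = "1",
        };

        // Merge defaults for anything the user did not set.
        var inputs = new Dictionary<string, string>(userInputs, StringComparer.OrdinalIgnoreCase);
        foreach (var kvp in declaredDefaults)
        {
            if (!inputs.ContainsKey(kvp.Key))
            {
                inputs[kvp.Key] = kvp.Value;
            }
        }

        // Unknown user inputs are reported as a warning, not treated as fatal.
        var unexpected = userInputs.Keys.Where(k => !declaredDefaults.ContainsKey(k)).ToList();
        if (unexpected.Count > 0)
        {
            Console.WriteLine($"Unexpected input(s) '{string.Join("', '", unexpected)}'");
        }
    }
}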
@@ -21,6 +21,11 @@ namespace GitHub.Runner.Worker.Container
{
}

public ContainerInfo(IHostContext hostContext)
{
UpdateWebProxyEnv(hostContext.WebProxy);
}

public ContainerInfo(IHostContext hostContext, Pipelines.JobContainer container, bool isJobContainer = true, string networkAlias = null)
{
this.ContainerName = container.Alias;
@@ -34,6 +39,9 @@ namespace GitHub.Runner.Worker.Container
_environmentVariables = container.Environment;
this.IsJobContainer = isJobContainer;
this.ContainerNetworkAlias = networkAlias;
this.RegistryAuthUsername = container.Credentials?.Username;
this.RegistryAuthPassword = container.Credentials?.Password;
this.RegistryServer = DockerUtil.ParseRegistryHostnameFromImageName(this.ContainerImage);

#if OS_WINDOWS
_pathMappings.Add(new PathMapping(hostContext.GetDirectory(WellKnownDirectory.Work), "C:\\__w"));
@@ -61,6 +69,7 @@ namespace GitHub.Runner.Worker.Container
foreach (var volume in container.Volumes)
{
UserMountVolumes[volume] = volume;
MountVolumes.Add(new MountVolume(volume));
}
}

@@ -78,6 +87,9 @@ namespace GitHub.Runner.Worker.Container
public string ContainerWorkDirectory { get; set; }
public string ContainerCreateOptions { get; private set; }
public string ContainerRuntimePath { get; set; }
public string RegistryServer { get; set; }
public string RegistryAuthUsername { get; set; }
public string RegistryAuthPassword { get; set; }
public bool IsJobContainer { get; set; }

public IDictionary<string, string> ContainerEnvironmentVariables
@@ -4,6 +4,7 @@ using System.IO;
using System.Linq;
using System.Text.RegularExpressions;
using System.Threading;
using System.Threading.Channels;
using System.Threading.Tasks;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
@@ -17,7 +18,8 @@ namespace GitHub.Runner.Worker.Container
string DockerInstanceLabel { get; }
Task<DockerVersion> DockerVersion(IExecutionContext context);
Task<int> DockerPull(IExecutionContext context, string image);
Task<int> DockerBuild(IExecutionContext context, string workingDirectory, string dockerFile, string tag);
Task<int> DockerPull(IExecutionContext context, string image, string configFileDirectory);
Task<int> DockerBuild(IExecutionContext context, string workingDirectory, string dockerFile, string dockerContext, string tag);
Task<string> DockerCreate(IExecutionContext context, ContainerInfo container);
Task<int> DockerRun(IExecutionContext context, ContainerInfo container, EventHandler<ProcessDataReceivedEventArgs> stdoutDataReceived, EventHandler<ProcessDataReceivedEventArgs> stderrDataReceived);
Task<int> DockerStart(IExecutionContext context, string containerId);
@@ -31,6 +33,7 @@ namespace GitHub.Runner.Worker.Container
Task<int> DockerExec(IExecutionContext context, string containerId, string options, string command, List<string> outputs);
Task<List<string>> DockerInspect(IExecutionContext context, string dockerObject, string options);
Task<List<PortMapping>> DockerPort(IExecutionContext context, string containerId);
Task<int> DockerLogin(IExecutionContext context, string configFileDirectory, string registry, string username, string password);
}

public class DockerCommandManager : RunnerService, IDockerCommandManager
@@ -82,14 +85,23 @@ namespace GitHub.Runner.Worker.Container
return new DockerVersion(serverVersion, clientVersion);
}

public async Task<int> DockerPull(IExecutionContext context, string image)
public Task<int> DockerPull(IExecutionContext context, string image)
{
return await ExecuteDockerCommandAsync(context, "pull", image, context.CancellationToken);
return DockerPull(context, image, null);
}

public async Task<int> DockerBuild(IExecutionContext context, string workingDirectory, string dockerFile, string tag)
public async Task<int> DockerPull(IExecutionContext context, string image, string configFileDirectory)
{
return await ExecuteDockerCommandAsync(context, "build", $"-t {tag} \"{dockerFile}\"", workingDirectory, context.CancellationToken);
if (string.IsNullOrEmpty(configFileDirectory))
{
return await ExecuteDockerCommandAsync(context, $"pull", image, context.CancellationToken);
}
return await ExecuteDockerCommandAsync(context, $"--config {configFileDirectory} pull", image, context.CancellationToken);
}

public async Task<int> DockerBuild(IExecutionContext context, string workingDirectory, string dockerFile, string dockerContext, string tag)
{
return await ExecuteDockerCommandAsync(context, "build", $"-t {tag} -f \"{dockerFile}\" \"{dockerContext}\"", workingDirectory, context.CancellationToken);
}
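For orientation (illustration only, not part of the diff), the two overloads above boil down to the following docker CLI invocations. The sketch below just formats the argument strings the way the methods do, with placeholder values.

using System;

class DockerArgsSketch
{
    static void Main()
    {
        // Placeholder values for illustration.
        string image = "ghcr.io/owner/image:latest";
        string configFileDirectory = "/tmp/.docker_example";   // hypothetical temp config dir
        string tag = "example:job-1";
        string dockerFile = "/job/repo/Dockerfile";
        string dockerContext = "/job/repo";

        // DockerPull without a config directory: `docker pull <image>`
        Console.WriteLine($"docker pull {image}");

        // DockerPull with per-job credentials: `docker --config <dir> pull <image>`
        Console.WriteLine($"docker --config {configFileDirectory} pull {image}");

        // DockerBuild now passes the build context explicitly:
        // `docker build -t <tag> -f "<dockerFile>" "<dockerContext>"`
        Console.WriteLine($"docker build -t {tag} -f \"{dockerFile}\" \"{dockerContext}\"");
    }
}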
public async Task<string> DockerCreate(IExecutionContext context, ContainerInfo container)
@@ -130,6 +142,13 @@ namespace GitHub.Runner.Worker.Container
// Watermark for GitHub Action environment
dockerOptions.Add("-e GITHUB_ACTIONS=true");

// Set CI=true when no one else already set it.
// CI=true is common set in most CI provider in GitHub
if (!container.ContainerEnvironmentVariables.ContainsKey("CI"))
{
dockerOptions.Add("-e CI=true");
}

foreach (var volume in container.MountVolumes)
{
// replace `"` with `\"` and add `"{0}"` to all path.
@@ -189,6 +208,13 @@ namespace GitHub.Runner.Worker.Container
// Watermark for GitHub Action environment
dockerOptions.Add("-e GITHUB_ACTIONS=true");

// Set CI=true when no one else already set it.
// CI=true is common set in most CI provider in GitHub
if (!container.ContainerEnvironmentVariables.ContainsKey("CI"))
{
dockerOptions.Add("-e CI=true");
}

if (!string.IsNullOrEmpty(container.ContainerEntryPoint))
{
dockerOptions.Add($"--entrypoint \"{container.ContainerEntryPoint}\"");
@@ -332,6 +358,28 @@ namespace GitHub.Runner.Worker.Container
return DockerUtil.ParseDockerPort(portMappingLines);
}

public Task<int> DockerLogin(IExecutionContext context, string configFileDirectory, string registry, string username, string password)
{
string args = $"--config {configFileDirectory} login {registry} -u {username} --password-stdin";
context.Command($"{DockerPath} {args}");

var input = Channel.CreateBounded<string>(new BoundedChannelOptions(1) { SingleReader = true, SingleWriter = true });
input.Writer.TryWrite(password);

var processInvoker = HostContext.CreateService<IProcessInvoker>();

return processInvoker.ExecuteAsync(
workingDirectory: context.GetGitHubContext("workspace"),
fileName: DockerPath,
arguments: args,
environment: null,
requireExitCodeZero: false,
outputEncoding: null,
killProcessOnCancel: false,
redirectStandardIn: input,
cancellationToken: context.CancellationToken);
}
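A side note on the DockerLogin implementation above (sketch, not part of the diff): the password is never placed on the command line; it is written into a single-slot channel that the process invoker redirects to the child process's standard input, which is what `--password-stdin` expects. The channel part in isolation looks roughly like this, with the final drain standing in for the redirected stdin.

using System;
using System.Threading.Channels;
using System.Threading.Tasks;

class PasswordStdinSketch
{
    static async Task Main()
    {
        // One-slot channel: the single secret line that docker would read from stdin.
        var input = Channel.CreateBounded<string>(new BoundedChannelOptions(1)
        {
            SingleReader = true,
            SingleWriter = true,
        });
        input.Writer.TryWrite("hypothetical-registry-password");
        input.Writer.Complete();

        // In the runner this reader is handed to IProcessInvoker via `redirectStandardIn`;
        // here we simply drain it to show what the child process would receive.
        while (await input.Reader.WaitToReadAsync())
        {
            while (input.Reader.TryRead(out var line))
            {
                Console.WriteLine($"(would be written to docker's stdin) {line}");
            }
        }
    }
}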
private Task<int> ExecuteDockerCommandAsync(IExecutionContext context, string command, string options, CancellationToken cancellationToken = default(CancellationToken))
{
return ExecuteDockerCommandAsync(context, command, options, null, cancellationToken);
@@ -45,5 +45,21 @@ namespace GitHub.Runner.Worker.Container
}
return "";
}

public static string ParseRegistryHostnameFromImageName(string name)
{
var nameSplit = name.Split('/');
// Single slash is implictly from Dockerhub, unless first part has .tld or :port
if (nameSplit.Length == 2 && (nameSplit[0].Contains(":") || nameSplit[0].Contains(".")))
{
return nameSplit[0];
}
// All other non Dockerhub registries
else if (nameSplit.Length > 2)
{
return nameSplit[0];
}
return "";
}
}
}
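To make the branching in ParseRegistryHostnameFromImageName concrete (illustration only, not part of the diff): a bare name or an `owner/image` style name is treated as Docker Hub and yields an empty string, while a first segment containing a dot or a colon, or any name with more than one slash, is taken as the registry host. The sketch below re-implements the same rule and runs it against a few sample image names.

using System;

class RegistryHostnameSketch
{
    // Re-implementation of the rule shown in the diff, for illustration only.
    static string ParseRegistryHostname(string name)
    {
        var nameSplit = name.Split('/');
        if (nameSplit.Length == 2 && (nameSplit[0].Contains(":") || nameSplit[0].Contains(".")))
        {
            return nameSplit[0];
        }
        else if (nameSplit.Length > 2)
        {
            return nameSplit[0];
        }
        return "";
    }

    static void Main()
    {
        Console.WriteLine(ParseRegistryHostname("ubuntu:18.04"));                      // "" (Docker Hub)
        Console.WriteLine(ParseRegistryHostname("owner/image"));                       // "" (Docker Hub)
        Console.WriteLine(ParseRegistryHostname("localhost:5000/image"));              // "localhost:5000"
        Console.WriteLine(ParseRegistryHostname("registry.example.com/image"));        // "registry.example.com"
        Console.WriteLine(ParseRegistryHostname("docker.pkg.github.com/o/r/image"));   // "docker.pkg.github.com"
    }
}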
@@ -49,7 +49,7 @@ namespace GitHub.Runner.Worker
data: data);

executionContext.Debug($"Register post job cleanup for stopping/deleting containers.");
executionContext.RegisterPostJobStep(nameof(StopContainersAsync), postJobStep);
executionContext.RegisterPostJobStep(postJobStep);

// Check whether we are inside a container.
// Our container feature requires to map working directory from host to the container.
@@ -91,7 +91,10 @@ namespace GitHub.Runner.Worker
#endif

// Check docker client/server version
executionContext.Output("##[group]Checking docker version");
DockerVersion dockerVersion = await _dockerManger.DockerVersion(executionContext);
executionContext.Output("##[endgroup]");

ArgUtil.NotNull(dockerVersion.ServerVersion, nameof(dockerVersion.ServerVersion));
ArgUtil.NotNull(dockerVersion.ClientVersion, nameof(dockerVersion.ClientVersion));

@@ -111,7 +114,7 @@ namespace GitHub.Runner.Worker
}

// Clean up containers left by previous runs
executionContext.Debug($"Delete stale containers from previous jobs");
executionContext.Output("##[group]Clean up resources from previous jobs");
var staleContainers = await _dockerManger.DockerPS(executionContext, $"--all --quiet --no-trunc --filter \"label={_dockerManger.DockerInstanceLabel}\"");
foreach (var staleContainer in staleContainers)
{
@@ -122,18 +125,20 @@ namespace GitHub.Runner.Worker
}
}

executionContext.Debug($"Delete stale container networks from previous jobs");
int networkPruneExitCode = await _dockerManger.DockerNetworkPrune(executionContext);
if (networkPruneExitCode != 0)
{
executionContext.Warning($"Delete stale container networks failed, docker network prune fail with exit code {networkPruneExitCode}");
}
executionContext.Output("##[endgroup]");

// Create local docker network for this job to avoid port conflict when multiple runners run on same machine.
// All containers within a job join the same network
executionContext.Output("##[group]Create local container network");
var containerNetwork = $"github_network_{Guid.NewGuid().ToString("N")}";
await CreateContainerNetworkAsync(executionContext, containerNetwork);
executionContext.JobContext.Container["network"] = new StringContextData(containerNetwork);
executionContext.Output("##[endgroup]");

foreach (var container in containers)
{
@@ -141,10 +146,12 @@ namespace GitHub.Runner.Worker
await StartContainerAsync(executionContext, container);
}

executionContext.Output("##[group]Waiting for all services to be ready");
foreach (var container in containers.Where(c => !c.IsJobContainer))
{
await ContainerHealthcheck(executionContext, container);
}
executionContext.Output("##[endgroup]");
}

public async Task StopContainersAsync(IExecutionContext executionContext, object data)
@@ -173,6 +180,10 @@ namespace GitHub.Runner.Worker
Trace.Info($"Container name: {container.ContainerName}");
Trace.Info($"Container image: {container.ContainerImage}");
Trace.Info($"Container options: {container.ContainerCreateOptions}");

var groupName = container.IsJobContainer ? "Starting job container" : $"Starting {container.ContainerNetworkAlias} service container";
executionContext.Output($"##[group]{groupName}");

foreach (var port in container.UserPortMappings)
{
Trace.Info($"User provided port: {port.Value}");
@@ -180,14 +191,25 @@ namespace GitHub.Runner.Worker
foreach (var volume in container.UserMountVolumes)
{
Trace.Info($"User provided volume: {volume.Value}");
var mount = new MountVolume(volume.Value);
if (string.Equals(mount.SourceVolumePath, "/", StringComparison.OrdinalIgnoreCase))
{
executionContext.Warning($"Volume mount {volume.Value} is going to mount '/' into the container which may cause file ownership change in the entire file system and cause Actions Runner to lose permission to access the disk.");
}
}

// TODO: Add at a later date. This currently no local package registry to test with
// UpdateRegistryAuthForGitHubToken(executionContext, container);

// Before pulling, generate client authentication if required
var configLocation = await ContainerRegistryLogin(executionContext, container);

// Pull down docker image with retry up to 3 times
int retryCount = 0;
int pullExitCode = 0;
while (retryCount < 3)
{
pullExitCode = await _dockerManger.DockerPull(executionContext, container.ContainerImage);
pullExitCode = await _dockerManger.DockerPull(executionContext, container.ContainerImage, configLocation);
if (pullExitCode == 0)
{
break;
@@ -204,6 +226,9 @@ namespace GitHub.Runner.Worker
}
}

// Remove credentials after pulling
ContainerRegistryLogout(configLocation);

if (retryCount == 3 && pullExitCode != 0)
{
throw new InvalidOperationException($"Docker pull failed with exit code {pullExitCode}");
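The change above threads a per-job credential directory through the image pull: log in first (when the workflow supplied registry credentials), pull with up to three attempts, then delete the credential directory regardless of the outcome. A compressed sketch of that control flow follows; `LoginAsync`, `PullAsync`, and `Logout` are hypothetical placeholders, not the runner's API, and the retry bookkeeping is condensed from the elided hunk.

using System;
using System.Threading.Tasks;

class PullWithAuthSketch
{
    // Placeholders for the runner's ContainerRegistryLogin / DockerPull / ContainerRegistryLogout.
    static Task<string> LoginAsync() => Task.FromResult("/tmp/.docker_example");
    static Task<int> PullAsync(string image, string configLocation) => Task.FromResult(0);
    static void Logout(string configLocation) { /* delete the temporary config directory */ }

    static async Task Main()
    {
        string image = "registry.example.com/team/app:latest";   // hypothetical image
        string configLocation = await LoginAsync();               // empty string means "no credentials"

        int retryCount = 0;
        int pullExitCode = 0;
        while (retryCount < 3)
        {
            pullExitCode = await PullAsync(image, configLocation);
            if (pullExitCode == 0)
            {
                break;
            }
            retryCount++;
        }

        // Credentials are removed whether or not the pull succeeded.
        Logout(configLocation);

        if (retryCount == 3 && pullExitCode != 0)
        {
            throw new InvalidOperationException($"Docker pull failed with exit code {pullExitCode}");
        }
        Console.WriteLine("pull ok");
    }
}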
@@ -299,6 +324,7 @@ namespace GitHub.Runner.Worker
container.ContainerRuntimePath = DockerUtil.ParsePathFromConfigEnv(containerEnv);
executionContext.JobContext.Container["id"] = new StringContextData(container.ContainerId);
}
executionContext.Output("##[endgroup]");
}

private async Task StopContainerAsync(IExecutionContext executionContext, ContainerInfo container)
@@ -420,5 +446,83 @@ namespace GitHub.Runner.Worker
throw new InvalidOperationException($"Failed to initialize, {container.ContainerNetworkAlias} service is {serviceHealth}.");
}
}

private async Task<string> ContainerRegistryLogin(IExecutionContext executionContext, ContainerInfo container)
{
if (string.IsNullOrEmpty(container.RegistryAuthUsername) || string.IsNullOrEmpty(container.RegistryAuthPassword))
{
// No valid client config can be generated
return "";
}
var configLocation = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Temp), $".docker_{Guid.NewGuid()}");
try
{
var dirInfo = Directory.CreateDirectory(configLocation);
}
catch (Exception e)
{
throw new InvalidOperationException($"Failed to create directory to store registry client credentials: {e.Message}");
}
var loginExitCode = await _dockerManger.DockerLogin(
executionContext,
configLocation,
container.RegistryServer,
container.RegistryAuthUsername,
container.RegistryAuthPassword);

if (loginExitCode != 0)
{
throw new InvalidOperationException($"Docker login for '{container.RegistryServer}' failed with exit code {loginExitCode}");
}
return configLocation;
}

private void ContainerRegistryLogout(string configLocation)
{
try
{
if (!string.IsNullOrEmpty(configLocation) && Directory.Exists(configLocation))
{
Directory.Delete(configLocation, recursive: true);
}
}
catch (Exception e)
{
throw new InvalidOperationException($"Failed to remove directory containing Docker client credentials: {e.Message}");
}
}

private void UpdateRegistryAuthForGitHubToken(IExecutionContext executionContext, ContainerInfo container)
{
var registryIsTokenCompatible = container.RegistryServer.Equals("docker.pkg.github.com", StringComparison.OrdinalIgnoreCase);
if (!registryIsTokenCompatible)
{
return;
}

var registryMatchesWorkflow = false;

// REGISTRY/OWNER/REPO/IMAGE[:TAG]
var imageParts = container.ContainerImage.Split('/');
if (imageParts.Length != 4)
{
executionContext.Warning($"Could not identify owner and repo for container image {container.ContainerImage}. Skipping automatic token auth");
return;
}
var owner = imageParts[1];
var repo = imageParts[2];
var nwo = $"{owner}/{repo}";
if (nwo.Equals(executionContext.GetGitHubContext("repository"), StringComparison.OrdinalIgnoreCase))
{
registryMatchesWorkflow = true;
}

var registryCredentialsNotSupplied = string.IsNullOrEmpty(container.RegistryAuthUsername) && string.IsNullOrEmpty(container.RegistryAuthPassword);
if (registryCredentialsNotSupplied && registryMatchesWorkflow)
{
container.RegistryAuthUsername = executionContext.GetGitHubContext("actor");
container.RegistryAuthPassword = executionContext.GetGitHubContext("token");
}
}
}
}
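One more aside on UpdateRegistryAuthForGitHubToken above, which is currently commented out at its call site (sketch only, not part of the diff): it only auto-fills credentials when the image lives under docker.pkg.github.com, follows the REGISTRY/OWNER/REPO/IMAGE layout, and the OWNER/REPO pair matches the workflow's own repository. A minimal sketch of that match, with hypothetical values standing in for ContainerImage and the `repository` context:

using System;

class TokenAuthMatchSketch
{
    static void Main()
    {
        // Hypothetical values for illustration.
        string containerImage = "docker.pkg.github.com/octocat/hello-world/app:latest";
        string workflowRepository = "octocat/hello-world";

        var imageParts = containerImage.Split('/');
        if (imageParts.Length != 4)
        {
            Console.WriteLine("Could not identify owner and repo; skipping automatic token auth");
            return;
        }

        var nwo = $"{imageParts[1]}/{imageParts[2]}";   // OWNER/REPO
        bool registryMatchesWorkflow = nwo.Equals(workflowRepository, StringComparison.OrdinalIgnoreCase);
        Console.WriteLine(registryMatchesWorkflow
            ? "Would fall back to the workflow's actor/token for registry auth"
            : "Image belongs to a different repository; no automatic token auth");
    }
}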
@@ -86,9 +86,9 @@ namespace GitHub.Runner.Worker

executionContext.Debug("Zipping diagnostic files.");

string buildNumber = executionContext.Variables.Build_Number ?? "UnknownBuildNumber";
string buildNumber = executionContext.Global.Variables.Build_Number ?? "UnknownBuildNumber";
string buildName = $"Build {buildNumber}";
string phaseName = executionContext.Variables.System_PhaseDisplayName ?? "UnknownPhaseName";
string phaseName = executionContext.Global.Variables.System_PhaseDisplayName ?? "UnknownPhaseName";

// zip the files
string diagnosticsZipFileName = $"{buildName}-{phaseName}.zip";
Some files were not shown because too many files have changed in this diff.