Mirror of https://github.com/actions/runner.git (synced 2025-12-10 12:36:23 +00:00)

Compare commits: v2.285.3 ... users/tihu (120 commits)
Commits included (SHA1):

c19d9869df, ad819dcda7, 50ff391290, 91755dee3f, 5aa2eff74f, 349e53a4a6, 6e04a2f31c, 69d4341e6c, 6d8da4382c, 0d344bfcbe,
7804e1f478, 94fbbcb902, 85e7732b48, d72e7284c3, f15ea3f9f5, 01be856eff, 1ef8ea7a83, 173497268d, b7acd0c99a, ff55930ef3,
be598f1e9b, 3e33b4c5f2, 81810c635a, d20a5409d6, 29eae8d616, 4ec52d2693, 7867655321, 26a794b84e, 1c9d2bb8c7, f811eb606d,
1028b93325, 987e6ea26b, e1acc1b30a, 994483abd2, 56d74472db, 85c2303669, fffde09794, 27563fb634, 05f579c6b9, 9337d7c91f,
a57f90280f, 2b83621809, 55a774ccae, 5bc3e957a0, 3aa819753f, 2e7fa13380, 830575dafe, 9a41ec3d6e, c7b6b9eeca, 1c47d81cba,
ec6e73c496, aac4c94521, 0b33ec52e3, 0ba7a48fcd, 0c8da9557e, 41d2a8698f, 8ba748b104, 0d5cfff227, 09b12df42f, 7509298739,
3a4b489eca, b2e1f9cc71, 95eab27aaa, 43f0259aa9, 277e6ee29f, a770ab8785, 4bb5cabbda, 9d12ac680b, ca44d06b70, dc55b3c781,
e609a02c6e, 58ef855d9c, 5d2341445e, ca66b37602, 4072908989, 32ea6b29ac, 36cc41502e, 73271539cf, 6607453761, 8893af2439,
9d7b633a5b, d74c400c38, 0fcd63d171, 593673ba9e, 2b0a2aeba2, fa921f9a61, 64a14ad875, 6c0f4fa33b, 0204c270db, 6a0bd2beeb,
af9b3ea42d, 8486979e0f, 10f831a550, bc0c26d4c0, df8d8de173, f1b2b232a2, aec37f7d8b, d074936898, 1179c88a50, ea228852d9,
102239df26, eb05bb06da, 9876fcf7bd, d3fe59a297, a50dd51ca0, 93531ffcaf, c7a500ae6f, 6a6da20d5a, 194da4faf8, ce0512c079,
676fc8a055, 8c9510f9c3, 6b03daa25f, 833ad6752e, 5462304057, 78a67f8e73, 747f95e677, 2204fd6d92, 5bb20cd94c, a711bd9494
.github/workflows/build.yml (3 changes, vendored)

@@ -1,9 +1,10 @@
name: Runner CI

on:
  workflow_dispatch:
  push:
    branches:
    - master
    - main
    - releases/*
    paths-ignore:
    - '**.md'
.github/workflows/codeql.yml (1 change, vendored)

@@ -2,6 +2,7 @@ name: "Code Scanning - Action"

on:
  push:
  pull_request:
  schedule:
    - cron: '0 0 * * 0'
.github/workflows/e2etest.yml (new file, 335 lines, vendored)

name: Runner E2E Test

on:
  workflow_dispatch:
  push:
    branches:
    - main
    - releases/*

jobs:
  init:
    name: Initialize workflow ☕
    runs-on: ubuntu-latest
    outputs:
      unique_runner_label: ${{steps.generator.outputs.runner_label}}
    steps:
    - name: Delete all runners
      uses: actions/github-script@v3
      with:
        debug: true
        script: |
          var runnersResp = await github.actions.listSelfHostedRunnersForRepo({
            owner: 'actions',
            repo: 'runner',
            per_page: '100'
          });
          for(var i=0; i<runnersResp.data.total_count; i++){
            core.debug(JSON.stringify(runnersResp.data.runners[i]))
            await github.actions.deleteSelfHostedRunnerFromRepo({
              owner: 'actions',
              repo: 'runner',
              runner_id: runnersResp.data.runners[i].id
            });
          }
        github-token: ${{secrets.PAT}}
    - name: Generate Unique Runner label
      id: generator
      run: |
        label=$(openssl rand -hex 16)
        echo ::set-output name=runner_label::$label

  build:
    name: Build runner packages 🏗 📦
    strategy:
      matrix:
        runtime: [ linux-x64, linux-arm64, linux-arm, win-x64, osx-x64 ]
        include:
        - runtime: linux-x64
          os: ubuntu-latest
          devScript: ./dev.sh

        - runtime: linux-arm64
          os: ubuntu-latest
          devScript: ./dev.sh

        - runtime: linux-arm
          os: ubuntu-latest
          devScript: ./dev.sh

        - runtime: osx-x64
          os: macOS-latest
          devScript: ./dev.sh

        - runtime: win-x64
          os: windows-latest
          devScript: ./dev

    runs-on: ${{ matrix.os }}
    steps:
    - uses: actions/checkout@v1

    # Build runner layout
    - name: Build & Layout Release
      run: |
        ${{ matrix.devScript }} layout Release ${{ matrix.runtime }}
      working-directory: src

    # Create runner package tar.gz/zip
    - name: Package Release
      run: |
        ${{ matrix.devScript }} package Release ${{ matrix.runtime }}
      working-directory: src

    # Upload runner package tar.gz/zip as artifact
    - name: Publish Artifact
      uses: actions/upload-artifact@v1
      with:
        name: runner-package-${{ matrix.runtime }}
        path: _package

  dispatch_workflow:
    name: Dispatch workflow to runners 🚨
    needs: [init, build]
    runs-on: ubuntu-latest
    steps:
    - name: Dispatch workflow
      timeout-minutes: 10
      uses: actions/github-script@v3
      with:
        debug: true
        script: |
          function sleep(ms) { return new Promise(resolve => setTimeout(resolve, ms)); }
          async function dispatchWorkflow(runner) {
            await github.actions.createWorkflowDispatch({
              owner: 'actions',
              repo: 'runner',
              workflow_id: 'runner-basic-e2e-test-case.yml',
              ref: 'main',
              inputs: {target_runner: runner}
            });
          }
          var runWin64 = false, runLinux64 = false, runOsx64 = false, runLinuxARM64 = false;
          while (true) {
            core.info(`------------- Waiting for runners to be configured --------------`)
            await sleep(10000);
            var runnersResp = await github.actions.listSelfHostedRunnersForRepo({owner: 'actions', repo: 'runner', per_page: '100'});
            for (var i = 0; i < runnersResp.data.total_count; i++) {
              core.debug(JSON.stringify(runnersResp.data.runners[i]))
              var labels = runnersResp.data.runners[i].labels;
              for (var j = 0; j < labels.length; j++) {
                core.debug(`Comparing: ${labels[j].name} to win-x64/linux-x64/osx-x64/linux-arm64-${{ needs.init.outputs.unique_runner_label }}`)
                if (labels[j].name == 'win-x64-${{needs.init.outputs.unique_runner_label}}' && runWin64 == false) {
                  core.info(`------------------- Windows runner is configured, queue Windows Run -------------------------`)
                  runWin64 = true;
                  await dispatchWorkflow('win-x64-${{needs.init.outputs.unique_runner_label}}');
                  break;
                } else if (labels[j].name == 'linux-x64-${{needs.init.outputs.unique_runner_label}}' && runLinux64 == false) {
                  core.info(`------------------- Linux runner is configured, queue Linux Run -------------------------`)
                  runLinux64 = true;
                  await dispatchWorkflow('linux-x64-${{needs.init.outputs.unique_runner_label}}');
                  break;
                } else if (labels[j].name == 'osx-x64-${{needs.init.outputs.unique_runner_label}}' && runOsx64 == false) {
                  core.info(`------------------- macOS runner is configured, queue macOS Run -------------------------`)
                  runOsx64 = true;
                  await dispatchWorkflow('osx-x64-${{needs.init.outputs.unique_runner_label}}');
                  break;
                } else if (labels[j].name == 'linux-arm64-${{needs.init.outputs.unique_runner_label}}' && runLinuxARM64 == false) {
                  core.info(`------------------- Linux ARM64 runner is configured, queue Linux ARM64 Run-------------------------`)
                  runLinuxARM64 = true;
                  await dispatchWorkflow('linux-arm64-${{needs.init.outputs.unique_runner_label}}');
                  break;
                }
              }
            }
            if (runWin64 && runLinux64 && runOsx64 && runLinuxARM64) {
              core.info(`--------------------- ALL runner are running jobs --------------------------`)
              break;
            } else {
              core.info(`---------- Windows running: ${runWin64} -- Linux running: ${runLinux64} -- macOS running: ${runOsx64} -- Linux ARM64 running: ${runLinuxARM64} -----------`)
            }
          }
        github-token: ${{secrets.PAT}}

  LinuxE2E:
    needs: [build, init]
    runs-on: ubuntu-latest
    steps:
    - name: Download Runner
      uses: actions/download-artifact@v2
      with:
        name: runner-package-linux-x64
    - name: Unzip Runner Package
      run: |
        tar -xzf *.tar.gz
    - name: Configure Runner
      env:
        unique_runner_name: linux-x64-${{needs.init.outputs.unique_runner_label}}
      run: |
        ./config.sh --url ${{github.event.repository.html_url}} --unattended --name $unique_runner_name --pat ${{secrets.PAT}} --labels $unique_runner_name --replace
    - name: Start Runner and Wait for Job
      timeout-minutes: 5
      run: |
        ./run.sh --once
    - name: Remove Runner
      if: always()
      continue-on-error: true
      run: |
        ./config.sh remove --pat ${{secrets.PAT}}
    - name: Upload Runner Logs
      if: always()
      uses: actions/upload-artifact@v2
      with:
        name: linux_x64_logs
        path: _diag

  macOSE2E:
    needs: [build, init]
    runs-on: macos-latest
    steps:
    - name: Download Runner
      uses: actions/download-artifact@v2
      with:
        name: runner-package-osx-x64
    - name: Unzip Runner Package
      run: |
        tar -xzf *.tar.gz
    - name: Configure Runner
      env:
        unique_runner_name: osx-x64-${{needs.init.outputs.unique_runner_label}}
      run: |
        ./config.sh --url ${{github.event.repository.html_url}} --unattended --name $unique_runner_name --pat ${{secrets.PAT}} --labels $unique_runner_name --replace
    - name: Start Runner and Wait for Job
      timeout-minutes: 5
      run: |
        ./run.sh --once
    - name: Remove Runner
      if: always()
      continue-on-error: true
      run: |
        ./config.sh remove --pat ${{secrets.PAT}}
    - name: Upload Runner Logs
      if: always()
      uses: actions/upload-artifact@v2
      with:
        name: osx_x64_logs
        path: _diag

  ARM64E2E:
    needs: [build, init]
    runs-on: ubuntu-latest
    steps:
    - name: Download Runner
      uses: actions/download-artifact@v2
      with:
        name: runner-package-linux-arm64
    - name: Unzip Runner Package
      run: |
        tar -xzf *.tar.gz
    - name: Prepare QEMU
      run: |
        docker run --rm --privileged multiarch/qemu-user-static:register --reset
    - name: Configure Runner
      uses: docker://multiarch/ubuntu-core:arm64-bionic
      with:
        args: 'bash -c "apt-get update && apt-get install -y curl && ./bin/installdependencies.sh && ./config.sh --unattended --name $unique_runner_name --url ${{github.event.repository.html_url}} --pat ${{secrets.PAT}} --labels $unique_runner_name --replace"'
      env:
        RUNNER_ALLOW_RUNASROOT: 1
        unique_runner_name: linux-arm64-${{needs.init.outputs.unique_runner_label}}

    - name: Start Runner and Wait for Job
      timeout-minutes: 5
      uses: docker://multiarch/ubuntu-core:arm64-bionic
      with:
        args: 'bash -c "apt-get update && apt-get install -y curl git && ./bin/installdependencies.sh && ./run.sh --once"'
      env:
        RUNNER_ALLOW_RUNASROOT: 1

    - name: Remove Runner
      if: always()
      continue-on-error: true
      uses: docker://multiarch/ubuntu-core:arm64-bionic
      with:
        args: 'bash -c "apt-get update && apt-get install -y curl && ./bin/installdependencies.sh && ./config.sh remove --pat ${{secrets.PAT}}"'
      env:
        RUNNER_ALLOW_RUNASROOT: 1

    - name: Upload Runner Logs
      if: always()
      uses: actions/upload-artifact@v2
      with:
        name: linux_arm64_logs
        path: _diag

  WindowsE2E:
    needs: [build, init]
    runs-on: windows-latest
    steps:
    - name: Download Runner
      uses: actions/download-artifact@v2
      with:
        name: runner-package-win-x64
    - name: Unzip Runner Package
      run: |
        Get-ChildItem *.zip | Expand-Archive -DestinationPath $PWD
    - name: Configure Runner
      shell: cmd
      run: |
        config.cmd --unattended --url ${{github.event.repository.html_url}} --name %unique_runner_name% --pat ${{secrets.PAT}} --labels %unique_runner_name% --replace
      env:
        unique_runner_name: win-x64-${{needs.init.outputs.unique_runner_label}}

    - name: Start Runner and Wait for Job
      shell: cmd
      timeout-minutes: 5
      run: |
        run.cmd --once
    - name: Remove Runner
      shell: cmd
      if: always()
      continue-on-error: true
      run: |
        config.cmd remove --pat ${{secrets.PAT}}
    - name: Upload Runner Logs
      if: always()
      uses: actions/upload-artifact@v2
      with:
        name: win_x64_logs
        path: _diag

  check:
    name: Check runner logs 🕵️♂️
    needs: [WindowsE2E, LinuxE2E, macOSE2E, ARM64E2E]
    runs-on: ubuntu-latest
    steps:
    - name: Download Linux Runner Logs
      uses: actions/download-artifact@v2
      with:
        name: linux_x64_logs
        path: linux_x64_logs
    - name: Download macOS Runner Logs
      uses: actions/download-artifact@v2
      with:
        name: osx_x64_logs
        path: osx_x64_logs
    - name: Download Linux ARM64 Runner Logs
      uses: actions/download-artifact@v2
      with:
        name: linux_arm64_logs
        path: linux_arm64_logs
    - name: Download Windows Runner Logs
      uses: actions/download-artifact@v2
      with:
        name: win_x64_logs
        path: win_x64_logs
    - name: Check Runner Logs
      run: |
        function failed()
        {
          local error=${1:-Undefined error}
          echo "Failed: $error" >&2
          exit 1
        }
        grep -R "completed with result: Succeeded" ./win_x64_logs || failed "Windows Runner fail to run the job, please check logs"
        grep -R "completed with result: Succeeded" ./linux_x64_logs || failed "Linux Runner fail to run the job, please check logs"
        grep -R "completed with result: Succeeded" ./osx_x64_logs || failed "macOS Runner fail to run the job, please check logs"
        grep -R "completed with result: Succeeded" ./linux_arm64_logs || failed "Linux ARM64 Runner fail to run the job, please check logs"
.github/workflows/release.yml (35 changes, vendored)

@@ -1,13 +1,14 @@
name: Runner CD

on:
  workflow_dispatch:
  push:
    paths:
    - releaseVersion

jobs:
  check:
    if: startsWith(github.ref, 'refs/heads/releases/') || github.ref == 'refs/heads/master'
    if: startsWith(github.ref, 'refs/heads/releases/') || github.ref == 'refs/heads/main'
    runs-on: ubuntu-latest
    steps:
    - uses: actions/checkout@v2

@@ -44,6 +45,12 @@ jobs:

  build:
    needs: check
    outputs:
      linux-x64-sha: ${{ steps.sha.outputs.linux-x64-sha256 }}
      linux-arm64-sha: ${{ steps.sha.outputs.linux-arm64-sha256 }}
      linux-arm-sha: ${{ steps.sha.outputs.linux-arm-sha256 }}
      win-x64-sha: ${{ steps.sha.outputs.win-x64-sha256 }}
      osx-x64-sha: ${{ steps.sha.outputs.osx-x64-sha256 }}
    strategy:
      matrix:
        runtime: [ linux-x64, linux-arm64, linux-arm, win-x64, osx-x64 ]

@@ -100,7 +107,19 @@ jobs:
      with:
        name: runner-packages
        path: _package

    # compute shas and set as job outputs to use in release notes
    - run: brew install coreutils #needed for shasum util
      if: ${{ matrix.os == 'macOS-latest' }}
      name: Install Dependencies for SHA Calculation (osx)
    - run: |
        file=$(ls)
        sha=$(sha256sum $file | awk '{ print $1 }')
        echo "Computed sha256: $sha for $file"
        echo "::set-output name=${{matrix.runtime}}-sha256::$sha"
      shell: bash
      id: sha
      name: Compute SHA256
      working-directory: _package
  release:
    needs: build
    runs-on: ubuntu-latest

@@ -125,11 +144,15 @@ jobs:
          const core = require('@actions/core')
          const fs = require('fs');
          const runnerVersion = fs.readFileSync('${{ github.workspace }}/src/runnerversion', 'utf8').replace(/\n$/g, '')
          const releaseNote = fs.readFileSync('${{ github.workspace }}/releaseNote.md', 'utf8').replace(/<RUNNER_VERSION>/g, runnerVersion)
          var releaseNote = fs.readFileSync('${{ github.workspace }}/releaseNote.md', 'utf8').replace(/<RUNNER_VERSION>/g, runnerVersion)
          releaseNote = releaseNote.replace(/<WIN_X64_SHA>/g, '${{needs.build.outputs.win-x64-sha}}')
          releaseNote = releaseNote.replace(/<OSX_X64_SHA>/g, '${{needs.build.outputs.osx-x64-sha}}')
          releaseNote = releaseNote.replace(/<LINUX_X64_SHA>/g, '${{needs.build.outputs.linux-x64-sha}}')
          releaseNote = releaseNote.replace(/<LINUX_ARM_SHA>/g, '${{needs.build.outputs.linux-arm-sha}}')
          releaseNote = releaseNote.replace(/<LINUX_ARM64_SHA>/g, '${{needs.build.outputs.linux-arm64-sha}}')
          console.log(releaseNote)
          core.setOutput('version', runnerVersion);
          core.setOutput('note', releaseNote);

          core.setOutput('note', releaseNote);
    # Create GitHub release
    - uses: actions/create-release@master
      id: createRelease

@@ -192,4 +215,4 @@ jobs:
        upload_url: ${{ steps.createRelease.outputs.upload_url }}
        asset_path: ${{ github.workspace }}/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
        asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
        asset_content_type: application/octet-stream
        asset_content_type: application/octet-stream
.github/workflows/runner-basic-e2e-test-case.yml (new file, 31 lines, vendored)

name: Runner Basics Test Case

on:
  workflow_dispatch:
    inputs:
      target_runner:
        description: 'Self-hosted runner will run the job'
        required: true

jobs:
  test:
    runs-on:
    - self-hosted
    - ${{github.event.inputs.target_runner}}

    name: Runner Basic Test 🛠
    steps:
    - uses: actions/checkout@v2
    - name: Run a one-line script
      run: echo Hello, world!
    - name: Run a multi-line script
      shell: bash
      run: |
        printenv|sort
        cat $GITHUB_EVENT_PATH
    - name: Validate GitHub Context
      shell: bash
      run: |
        declare -a context_vars=("GITHUB_ACTION" "GITHUB_ACTIONS" "GITHUB_REPOSITORY" "GITHUB_WORKSPACE" "GITHUB_SHA" "GITHUB_RUN_ID" "GITHUB_RUN_NUMBER")
        for var in ${context_vars[@]};
        do [ -z "${!var}" ] && echo "##[error]$var not found" && exit 1 || echo "$var: ${!var}"; done
.gitignore (5 changes, vendored)

@@ -22,7 +22,4 @@ _dotnetsdk
TestResults
TestLogs
.DS_Store
**/*.DotSettings.user

#generated
src/Runner.Sdk/BuildConstants.cs
**/*.DotSettings.user
CODEOWNERS (new file, 1 line)

* @actions/actions-runtime
@@ -5,8 +5,9 @@

# GitHub Actions Runner

[](https://github.com/actions/runner/actions)
[](https://github.com/actions/runner/actions)

The runner is the application that runs a job from a GitHub Actions workflow. The runner can run on the [hosted machine pools](https://github.com/actions/virtual-environments) or run on [self-hosted environments](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/about-self-hosted-runners).
The runner is the application that runs a job from a GitHub Actions workflow. It is used by GitHub Actions in the [hosted virtual environments](https://github.com/actions/virtual-environments), or you can [self-host the runner](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/about-self-hosted-runners) in your own environment.

## Get Started
@@ -22,7 +22,7 @@ These are described in detail below:
  - http://proxy.com
  - http://127.0.0.1:8080
  - http://user:password@proxy.com
- `no_proxy` a comma seperated list of hosts that should not use the proxy. An optional port may be specified
- `no_proxy` a comma separated list of hosts that should not use the proxy. An optional port may be specified
  - `google.com`
  - `yahoo.com:443`
  - `google.com,bing.com`

@@ -31,9 +31,9 @@ We won't use `http_proxy` for https traffic when `https_proxy` is not set, this
Otherwise action authors and workflow users need to adjust to differences between the runner proxy convention, and tools used by their actions and scripts.

Example:
Customer set `http_proxy=http://127.0.0.1:8888` and configure the runner against `https://github.com/owner/repo`, with the `https_proxy` -> `http_proxy` fallback, the runner will connect to server without any problem. However, if user runs `git push` to `https://github.com/owner/repo`, `git` won't use the proxy since it require `https_proxy` to be set for any https traffic.
Customer set `http_proxy=http://127.0.0.1:8888` and configure the runner against `https://github.com/owner/repo`, with the `https_proxy` -> `http_proxy` fallback, the runner will connect to the server without any problem. However, if a user runs `git push` to `https://github.com/owner/repo`, `git` won't use the proxy since it requires `https_proxy` to be set for any https traffic.

> `golang`, `node.js` and other dev tools from the linux community use `http_proxy` for both http and https traffic base on my research.
> `golang`, `node.js` and other dev tools from the linux community use `http_proxy` for both http and https traffic based on my research.

A majority of our users are using Linux where these variables are commonly required to be set by various programs. By reading these values, we simplify the process for self hosted runners to set up proxy, and expose it in a way users are already familiar with.

@@ -43,7 +43,7 @@ We will support the lowercase and uppercase variants, with lowercase taking prio

### No Proxy Format

While exact implementations are different per application on handle `no_proxy` env, most applications accept a comma separated list of hosts. Some accept wildcard characters (*). We are going to do exact case-insentive matches, and not support wildcards at this time.
While exact implementations are different per application on handle `no_proxy` env, most applications accept a comma separated list of hosts. Some accept wildcard characters (*). We are going to do exact case-insensitive matches, and not support wildcards at this time.
For example:
- example.com will match example.com, foo.example.com, foo.bar.example.com
- foo.example.com will match bar.foo.example.com and foo.example.com

@@ -57,5 +57,5 @@ We will not support IP addresses for `no_proxy`, only hostnames.
3. The runner will read from the environmental variables during config and runtime and use the provided proxy if it exists
4. Users may need to pass these environmental variables into other applications if they do not natively take these variables
5. Action authors may need to update their workflows to react to the these environment variables
6. We will document the way of setting environmental variables for runners using the environmental variables and how the runner uses them
7. Like all other secrets, users will be able to relatively easily figure out proxy password if they can modify a workflow file running on a self hosted machine
6. We will document the way of setting environmental variables for runners using the environment variables and how the runner uses them
7. Like all other secrets, users will be able to relatively easily figure out proxy password if they can modify a workflow file running on a self hosted machine
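For a self-hosted runner operator, the convention described in this ADR boils down to setting the variables before configuring and starting the runner. A minimal sketch, assuming the standard `config.sh`/`run.sh` entry points and placeholder proxy hosts:

```bash
# Hypothetical proxy settings; replace hosts/ports with your own.
export https_proxy=http://proxy.example.com:8080
export http_proxy=http://proxy.example.com:8080
export no_proxy=contoso.com,registry.local:5000

# The runner reads these variables both at configuration time and at run time.
./config.sh --url https://github.com/owner/repo --token <registration-token>
./run.sh
```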
@@ -34,7 +34,7 @@ A way out for rare cases where scoping is a problem.

`##[remove-matcher]owner`

For the this to be usable, the `owner` needs to be discoverable. Therefore, debug print the owner on registration.
For this to be usable, the `owner` needs to be discoverable. Therefore, debug print the owner on registration.

### Single line matcher

@@ -184,7 +184,7 @@ Solving this problem means:
- Use the `github.workspace` (where the repo is cloned on disk)
- Match against a repository to determine the relative path within the repo

This is a place where we diverge from VSCode. VSCode task configuration are specific to the local workspace (workspace root is known or can be specified). We're solving a more generic problem, so we need more information - specifically the `fromPath` property - in order to accurately root the path.
This is a place where we diverge from VSCode. VSCode task configurations are specific to the local workspace (workspace root is known or can be specified). We're solving a more generic problem, so we need more information - specifically the `fromPath` property - in order to accurately root the path.

In order to avoid creating inaccurate hyperlinks on the error issues, the agent will verify the file exists and is in the main repository. Otherwise omit the file property from the error issue and debug trace what happened.

@@ -203,7 +203,7 @@ Problem matchers are unable to interpret severity strings other than `warning` a

However some tools indicate error/warning in different ways. For example `flake8` uses codes like `E100`, `W200`, and `F300` (error, warning, fatal, respectively).

Therefore, allow a property `severity`, sibling to `owner`, which identifies the default severity for the problem matcher. This allows two problem matchers are registered - one for warnings and one for errors.
Therefore, allow a property `severity`, sibling to `owner`, which identifies the default severity for the problem matcher. This allows two problem matchers to be registered - one for warnings and one for errors.

For example, given the following `flake8` output:
@@ -84,7 +84,7 @@ powershell/pwsh
- Users can always opt out by not using the builtins, and providing a shell option like: `pwsh -File {0}`, or `powershell -Command "& '{0}'"`, depending on need

cmd
- There doesnt seem to be a way to fully opt in to fail-fast behavior other than writing your script to check each error code and respond accordingly, so we cant actually provide that behavior by default, it will be completely up to the user to write this behavior into their script
- There doesn't seem to be a way to fully opt in to fail-fast behavior other than writing your script to check each error code and respond accordingly, so we can't actually provide that behavior by default, it will be completely up to the user to write this behavior into their script
- cmd.exe will exit (return the error code to the runner) with the errorlevel of the last program it executed. This is internally consistent with the previous default behavior (sh, pwsh) and is the cmd.exe default, so we keep that behavior

## Consequences
@@ -15,7 +15,7 @@ This gives us good coverage across the board for secrets and secrets with a pref

However, we don't have great coverage for cases where the secret has a string appended to it before it is base64 encoded (i.e.: `base64($pass\n))`).

Most notably we've seen this as a result of user error where a user accidentially appends a newline or space character before encoding their secret in base64.
Most notably we've seen this as a result of user error where a user accidentally appends a newline or space character before encoding their secret in base64.

## Decision

@@ -45,4 +45,4 @@ This will result in us only revealing length or bit information when a prefix or

- In the case where a secret has a prefix or suffix added before base64 encoding, we may now reveal up to 20 bits of information and the length of the original string modulo 3, rather then the original 16 bits and no length information
- Secrets with a suffix appended before encoding will now be masked across the board. Previously it was only masked if it was a multiple of 3 characters
- Performance will suffer in a neglible way
- Performance will suffer in a negligible way
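A quick illustration (not taken from the ADR itself) of why the extra variants matter: an accidental trailing newline produces a completely different base64 string for the same secret, so masking only the exact encoding of the raw value would miss it.

```bash
secret='hunter2'                       # placeholder secret
printf '%s' "$secret"   | base64       # aHVudGVyMg==
printf '%s\n' "$secret" | base64       # aHVudGVyMgo=  (same secret, different encoding)
```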
@@ -1,10 +1,8 @@
# ADR 054x: Composite Run Steps
# ADR 0549: Composite Run Steps

**Date**: 2020-06-17

**Status**: Proposed

**Relevant PR**: https://github.com/actions/runner/pull/549
**Status**: Accepted

## Context

@@ -12,18 +10,39 @@ Customers want to be able to compose actions from actions (ex: https://github.co

An important step towards meeting this goal is to build in functionality for actions where users can simply execute any number of steps.

## Guiding Principles
### Guiding Principles

We don't want the workflow author to need to know how the internal workings of the action work. Users shouldn't know the internal workings of the composite action (for example, `default.shell` and `default.workingDir` should not be inherited from the workflow file to the action file). When deciding how to design certain parts of composite run steps, we want to think one logical step from the consumer.

A composite action is treated as **one** individual job step (aka encapsulation).

A composite action is treated as **one** individual job step (this is known as encapsulation).

## Decision

**In this ADR, we only support running multiple run steps in an Action.** In doing so, we build in support for mapping and flowing the inputs, outputs, and env variables (ex: All nested steps should have access to its parents' input variables and nested steps can overwrite the input variables).

## Steps
### Composite Run Steps Features

This feature supports at the top action level:
- name
- description
- inputs
- runs
- outputs

This feature supports at the run step level:
- name
- id
- run
- env
- shell
- working-directory

This feature **does not support** at the run step level:
- timeout-minutes
- secrets
- conditionals (needs, if, etc.)
- continue-on-error

### Steps

Example `workflow.yml`

@@ -51,7 +70,9 @@ runs:
  using: "composite"
  steps:
    - run: pip install -r requirements.txt
      shell: bash
    - run: npm install
      shell: bash
```

Example Output

@@ -65,7 +86,70 @@ echo hello world 4

We add a token called "composite" which allows our Runner code to process composite actions. By invoking "using: composite", our Runner code then processes the "steps" attribute, converts this template code to a list of steps, and finally runs each run step sequentially. If any step fails and there are no `if` conditions defined, the whole composite action job fails.

## Inputs
### Defaults

We will not support "defaults" in a composite action.

### Shell and Working-directory

For each run step in a composite action, the action author can set the `shell` and `working-directory` attributes for that step. The shell attribute is **required** for each run step because the action author does not know what the workflow author is using for the operating system so we need to explicitly prevent unknown behavior by making sure that each run step has an explicit shell **set by the action author.** On the other hand, `working-directory` is optional. Moreover, the composite action author can map in values from the `inputs` for it's `shell` and `working-directory` attributes at the step level for an action.

For example,

`action.yml`

```yaml
inputs:
  shell_1:
    description: 'Your name'
    default: 'pwsh'
steps:
  - run: echo 1
    shell: ${{ inputs.shell_1 }}
```

Note, the workflow file and action file are treated as separate entities. **So, the workflow `defaults` will never change the `shell` and `working-directory` value in the run steps in a composite action.** Note, `defaults` in a workflow only apply to run steps not "uses" steps (steps that use an action).

### Running Local Scripts

Example 'workflow.yml':
```yaml
jobs:
  build:
    runs-on: self-hosted
    steps:
      - uses: user/composite@v1
```

Example `user/composite/action.yml`:

```yaml
runs:
  using: "composite"
  steps:
    - run: chmod +x ${{ github.action_path }}/test/script2.sh
      shell: bash
    - run: chmod +x $GITHUB_ACTION_PATH/script.sh
      shell: bash
    - run: ${{ github.action_path }}/test/script2.sh
      shell: bash
    - run: $GITHUB_ACTION_PATH/script.sh
      shell: bash
```
Where `user/composite` has the file structure:
```
.
+-- action.yml
+-- script.sh
+-- test
|   +-- script2.sh
```

Users will be able to run scripts located in their action folder by first prepending the relative path and script name with `$GITHUB_ACTION_PATH` or `github.action_path` which contains the path in which the composite action is downloaded to and where those "files" live. Note, you'll have to use `chmod` before running each script if you do not git check in your script files into your github repo with the executable bit turned on.

### Inputs

Example `workflow.yml`:

@@ -88,6 +172,7 @@ runs:
  using: "composite"
  steps:
    - run: echo hello ${{ inputs.your_name }}
      shell: bash
```

Example Output:

@@ -98,7 +183,7 @@ hello Octocat

Each input variable in the composite action is only viewable in its own scope.

## Outputs
### Outputs

Example `workflow.yml`:

@@ -108,6 +193,7 @@ steps:
  - id: foo
    uses: user/composite@v1
  - run: echo random-number ${{ steps.foo.outputs.random-number }}
    shell: bash
```

Example `user/composite/action.yml`:

@@ -121,7 +207,8 @@ runs:
  using: "composite"
  steps:
    - id: random-number-generator
      run: echo "::set-output name=random-number::$(echo $RANDOM)"
      run: echo "::set-output name=random-id::$(echo $RANDOM)"
      shell: bash
```

Example Output:

@@ -135,22 +222,26 @@ Each of the output variables from the composite action is viewable from the work

Moreover, the output ids are only accessible within the scope where it was defined. Note that in the example above, in our `workflow.yml` file, it should not have access to output id (i.e. `random-id`). The reason why we are doing this is because we don't want to require the workflow author to know the internal workings of the composite action.

## Context
### Context

Similar to the workflow file, the composite action has access to the [same context objects](https://help.github.com/en/actions/reference/context-and-expression-syntax-for-github-actions#contexts) (ex: `github`, `env`, `strategy`).

## Environment
### Environment

In the Composite Action, you'll only be able to use `::set-env::` to set environment variables just like you could with other actions.

## Secrets
### Secrets

**Note** : This feature will be focused on in a future ADR.
**We will not support "Secrets" in a composite action for now. This functionality will be focused on in a future ADR.**

We'll pass the secrets from the composite action's parents (ex: the workflow file) to the composite action. Secrets can be created in the composite action with the secrets context. In the actions yaml, we'll automatically mask the secret.


## If Condition
### If Condition

** If and needs conditions will not be supported in the composite run steps feature. It will be supported later on in a new feature. **

Old reasoning:

Example `workflow.yml`:

@@ -168,24 +259,30 @@ runs:
  using: "composite"
  steps:
    - run: echo "just succeeding"
      shell: bash
    - run: echo "I will run, as my current scope is succeeding"
      shell: bash
      if: success()
    - run: exit 1
      shell: bash
    - run: echo "I will not run, as my current scope is now failing"
      shell: bash
```

**We will not support "if Condition" in a composite action for now. This functionality will be focused on in a future ADR.**

See the paragraph below for a rudimentary approach (thank you to @cybojenix for the idea, example, and explanation for this approach):

The `if` statement in the parent (in the example above, this is the `workflow.yml`) shows whether or not we should run the composite action. So, our composite action will run since the `if` condition for running the composite action is `always()`.

**Note that the if condition on the parent does not propogate to the rest of its children though.**
**Note that the if condition on the parent does not propagate to the rest of its children though.**

In the child action (in this example, this is the `action.yml`), it starts with a clean slate (in other words, no imposing if conditions). Similar to the logic in the paragraph above, `echo "I will run, as my current scope is succeeding"` will run since the `if` condition checks if the previous steps **within this composite action** has not failed. `run: echo "I will not run, as my current scope is now failing"` will not run since the previous step resulted in an error and by default, the if expression is set to `success()` if the if condition is not set for a step.


What if a step has `cancelled()`? We do the opposite of our approach above if `cancelled()` is used for any of our composite run steps. We will cancel any step that has this condition if the workflow is cancelled at all.

## Timeout-minutes
### Timeout-minutes

Example `workflow.yml`:

@@ -205,13 +302,18 @@ runs:
  - id: foo1
    run: echo test 1
    timeout-minutes: 10
    shell: bash
  - id: foo2
    run: echo test 2
    shell: bash
  - id: foo3
    run: echo test 3
    timeout-minutes: 10
    shell: bash
```

**We will not support "timeout-minutes" in a composite action for now. This functionality will be focused on in a future ADR.**

A composite action in its entirety is a job. You can set both timeout-minutes for the whole composite action or its steps as long as the the sum of the `timeout-minutes` for each composite action step that has the attribute `timeout-minutes` is less than or equals to `timeout-minutes` for the composite action. There is no default timeout-minutes for each composite action step.

If the time taken for any of the steps in combination or individually exceed the whole composite action `timeout-minutes` attribute, the whole job will fail (1). If an individual step exceeds its own `timeout-minutes` attribute but the total time that has been used including this step is below the overall composite action `timeout-minutes`, the individual step will fail but the rest of the steps will run based on their own `timeout-minutes` attribute (they will still abide by condition (1) though).

@@ -223,7 +325,7 @@ The rationale behind this is that users can configure their steps with the `if`
[Usage limits still apply](https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions?query=if%28%29#usage-limits)


## Continue-on-error
### Continue-on-error

Example `workflow.yml`:

@@ -245,18 +347,18 @@ runs:
  steps:
    - run: exit 1
      continue-on-error: true
      shell: bash
    - run: echo "Hello World 2" <----- This step will run
      shell: bash
```

**We will not support "continue-on-error" in a composite action for now. This functionality will be focused on in a future ADR.**

If any of the steps fail in the composite action and the `continue-on-error` is set to `false` for the whole composite action step in the workflow file, then the steps below it will run. On the flip side, if `continue-on-error` is set to `true` for the whole composite action step in the workflow file, the next job step will run.

For the composite action steps, it follows the same logic as above. In this example, `"Hello World 2"` will be outputted because the previous step has `continue-on-error` set to `true` although that previous step errored.

## Defaults

The composite action author will be required to set the `shell` and `workingDir` of the composite action. Moreover, the composite action author will be able to explicitly set the shell for each composite run step. The workflow author will not have the ability to change these attributes.

## Visualizing Composite Action in the GitHub Actions UI
### Visualizing Composite Action in the GitHub Actions UI

We want all the composite action's steps to be condensed into the original composite action node.

Here is a visual represenation of the [first example](#Steps)

@@ -271,5 +373,6 @@ Here is a visual represenation of the [first example](#Steps)
```

## Conclusion
## Consequences

This ADR lays the framework for eventually supporting nested Composite Actions within Composite Actions. This ADR allows for users to run multiple run steps within a GitHub Composite Action with the support of inputs, outputs, environment, and context for use in any steps as well as the if, timeout-minutes, and the continue-on-error attributes for each Composite Action step.
docs/checks/actions.md (new file, 45 lines)

# Actions Connection Check

## What is this check for?

Make sure the runner has access to the Actions service for GitHub.com or GitHub Enterprise Server.

- For GitHub.com
  - The runner needs to access https://api.github.com for downloading actions.
  - The runner needs to access https://vstoken.actions.githubusercontent.com/_apis/.../ for requesting an access token.
  - The runner needs to access https://pipelines.actions.githubusercontent.com/_apis/.../ for receiving workflow jobs.
- For GitHub Enterprise Server
  - The runner needs to access https://myGHES.com/api/v3 for downloading actions.
  - The runner needs to access https://myGHES.com/_services/vstoken/_apis/.../ for requesting an access token.
  - The runner needs to access https://myGHES.com/_services/pipelines/_apis/.../ for receiving workflow jobs.

## What is checked?

- DNS lookup for api.github.com or myGHES.com using dotnet
- Ping api.github.com or myGHES.com using dotnet
- Make an HTTP GET to https://api.github.com or https://myGHES.com/api/v3 using dotnet, and check that the response headers contain `X-GitHub-Request-Id`
---
- DNS lookup for vstoken.actions.githubusercontent.com using dotnet
- Ping vstoken.actions.githubusercontent.com using dotnet
- Make an HTTP GET to https://vstoken.actions.githubusercontent.com/_apis/health or https://myGHES.com/_services/vstoken/_apis/health using dotnet, and check that the response headers contain `x-vss-e2eid`
---
- DNS lookup for pipelines.actions.githubusercontent.com using dotnet
- Ping pipelines.actions.githubusercontent.com using dotnet
- Make an HTTP GET to https://pipelines.actions.githubusercontent.com/_apis/health or https://myGHES.com/_services/pipelines/_apis/health using dotnet, and check that the response headers contain `x-vss-e2eid`
- Make an HTTP POST to https://pipelines.actions.githubusercontent.com/_apis/health or https://myGHES.com/_services/pipelines/_apis/health using dotnet, and check that the response headers contain `x-vss-e2eid`
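A rough manual approximation of the header checks above using `curl` (GitHub.com endpoints shown; substitute your GHES host where applicable):

```bash
# Expect an X-GitHub-Request-Id header from the API endpoint
curl -s -D - -o /dev/null https://api.github.com | grep -i 'x-github-request-id'

# Expect an x-vss-e2eid header from the token and pipelines services
curl -s -D - -o /dev/null https://vstoken.actions.githubusercontent.com/_apis/health | grep -i 'x-vss-e2eid'
curl -s -D - -o /dev/null https://pipelines.actions.githubusercontent.com/_apis/health | grep -i 'x-vss-e2eid'
```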
## How to fix the issue?

### 1. Check the common network issue

> Please check the [network doc](./network.md)

### 2. SSL certificate related issue

If you are seeing `System.Net.Http.HttpRequestException: The SSL connection could not be established, see inner exception.` in the log, it means the runner can't connect to the Actions service due to an SSL handshake failure.
> Please check the [SSL cert doc](./sslcert.md)

## Still not working?

Contact GitHub customer service or log an issue at https://github.com/actions/runner if you think it's a runner issue.
docs/checks/git.md (new file, 34 lines)

# Git Connection Check

## What is this check for?

Make sure `git` can access GitHub.com or your GitHub Enterprise Server.

## What is checked?

The test is done by executing
```bash
# For GitHub.com
git ls-remote --exit-code https://github.com/actions/checkout HEAD

# For GitHub Enterprise Server
git ls-remote --exit-code https://ghes.me/actions/checkout HEAD
```

The test also sets the environment variables `GIT_TRACE=1` and `GIT_CURL_VERBOSE=1` before running `git ls-remote`; this makes `git` produce debug logs that help diagnose any potential issues.
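To reproduce the check by hand with the same debug output, something along these lines works (use whichever URL your runner is configured against):

```bash
# Enable git's own trace plus libcurl verbose output for a single invocation
GIT_TRACE=1 GIT_CURL_VERBOSE=1 git ls-remote --exit-code https://github.com/actions/checkout HEAD
```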
## How to fix the issue?

### 1. Check the common network issue

> Please check the [network doc](./network.md)

### 2. SSL certificate related issue

If you are seeing `SSL Certificate problem:` in the log, it means `git` can't connect to the GitHub server due to an SSL handshake failure.
> Please check the [SSL cert doc](./sslcert.md)

## Still not working?

Contact GitHub customer service or log an issue at https://github.com/actions/runner if you think it's a runner issue.
docs/checks/internet.md (new file, 26 lines)

# Internet Connection Check

## What is this check for?

Make sure the runner has access to https://api.github.com

The runner needs to access https://api.github.com to download any actions from the marketplace.

Even if the runner is configured against GitHub Enterprise Server, it can still download actions from GitHub.com with [GitHub Connect](https://docs.github.com/en/enterprise-server@2.22/admin/github-actions/enabling-automatic-access-to-githubcom-actions-using-github-connect)

## What is checked?

- DNS lookup for api.github.com using dotnet
- Ping api.github.com using dotnet
- Make an HTTP GET to https://api.github.com using dotnet, and check that the response headers contain `X-GitHub-Request-Id`

## How to fix the issue?

### 1. Check the common network issue

> Please check the [network doc](./network.md)

## Still not working?

Contact GitHub customer service or log an issue at https://github.com/actions/runner if you think it's a runner issue.
docs/checks/network.md (new file, 32 lines)

## Common Network Related Issues

### Common things that can cause the runner to not work properly

- A bug in the runner or the dotnet framework that prevents the runner from making HTTP requests in a certain network environment.

- A proxy/firewall blocks certain HTTP methods, for example all POST and PUT calls, which the runner uses to upload logs.

- A proxy/firewall only allows requests with certain user-agents to pass through, and the actions runner user-agent is not in the allow list.

- A proxy tries to decrypt and examine HTTPS traffic for security purposes, but causes the runner to fail the SSL handshake because the proxy's CA is not trusted.

- A proxy tries to modify the HTTPS request (for example, adding or changing some HTTP headers) and makes the request incompatible with the Actions Service (ASP.NET Core), e.g. [Nginx](https://github.com/dotnet/aspnetcore/issues/17081)

- Firewall rules block the runner from accessing certain hosts, e.g. `*.github.com`, `*.actions.githubusercontent.com`, etc.


### Identify and solve these problems

The key is to figure out where the problem is: the network environment, or the actions runner?

Using a 3rd party tool to make the same requests the runner makes is a good starting point; see the sketch after this list.

- Use `nslookup` to check DNS
- Use `ping` to check Ping
- Use `traceroute`, `tracepath`, or `tracert` to check the network route between the runner and the Actions service
- Use `curl -v` to check the network stack, good for verifying default certificate/proxy settings.
- Use `Invoke-WebRequest` from `pwsh` (`PowerShell Core`) to check the dotnet network stack, good for verifying bugs in the dotnet framework.
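A minimal sketch of those manual checks against the GitHub.com endpoints (substitute your GHES host as appropriate):

```bash
nslookup api.github.com                                     # DNS
ping -c 4 api.github.com                                    # ICMP reachability
traceroute api.github.com                                   # network route
curl -v https://api.github.com -o /dev/null                 # TLS/proxy/cert defaults
pwsh -Command "Invoke-WebRequest -Uri https://api.github.com"   # dotnet network stack
```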
If the 3rd party tool is also experiencing the same error as the runner does, then you might want to contact your network administrator for help.

Otherwise, contact GitHub customer support or log an issue at https://github.com/actions/runner
docs/checks/nodejs.md (new file, 30 lines)

# Node.js Connection Check

## What is this check for?

Make sure the built-in node.js has access to GitHub.com or GitHub Enterprise Server.

The runner carries its own copy of the node.js executable under `<runner_root>/externals/node12/`.

All JavaScript-based Actions will get executed by the built-in `node` at `<runner_root>/externals/node12/`.

> Not the `node` from `$PATH`

## What is checked?

- Make an HTTPS GET to https://api.github.com or https://myGHES.com/api/v3 using node.js, and make sure it gets a 200 response code.
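A rough equivalent you could run by hand with the runner's bundled node (the binary path and the User-Agent value are assumptions for illustration, not part of the official check):

```bash
# Run from <runner_root>; exits 0 on a 200 response, 1 otherwise
./externals/node12/bin/node -e "
  require('https').get(
    { host: 'api.github.com', path: '/', headers: { 'User-Agent': 'actions-runner-check' } },
    res => { console.log(res.statusCode); process.exit(res.statusCode === 200 ? 0 : 1); }
  ).on('error', err => { console.error(err); process.exit(1); });
"
```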
## How to fix the issue?

### 1. Check the common network issue

> Please check the [network doc](./network.md)

### 2. SSL certificate related issue

If you are seeing `Https request failed due to SSL cert issue` in the log, it means `node.js` can't connect to the GitHub server due to an SSL handshake failure.
> Please check the [SSL cert doc](./sslcert.md)

## Still not working?

Contact GitHub customer service or log an issue at https://github.com/actions/runner if you think it's a runner issue.
docs/checks/sslcert.md (new file, 89 lines)

## SSL Certificate Related Issues

You might run into an SSL certificate error when your GitHub Enterprise Server is using a self-signed SSL server certificate or a web proxy within your network is decrypting HTTPS traffic for a security audit.

As long as your certificate is generated properly, most of the issues should be fixed after you trust the certificate properly on the runner machine.

> Different OSes might have extra requirements on the SSL certificate,
> Ex: macOS requires `ExtendedKeyUsage` https://support.apple.com/en-us/HT210176

### Don't skip SSL cert validation

> !!! DO NOT SKIP SSL CERT VALIDATION !!!
> !!! IT IS A BAD SECURITY PRACTICE !!!

### Download SSL certificate chain

Depending on how your SSL server certificate is configured, you might need to download the whole certificate chain from a machine that has trusted the SSL certificate's CA.

- Approach 1: Download the certificate chain using a browser (Chrome, Firefox, IE); you can search for more examples, [here is what I found](https://medium.com/@menakajain/export-download-ssl-certificate-from-server-site-url-bcfc41ea46a2)

- Approach 2: Download the certificate chain using OpenSSL; you can search for more examples, [here is what I found](https://superuser.com/a/176721)

- Approach 3: Ask your network administrator or the owner of the CA certificate to send you a copy of it
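For approach 2, a typical OpenSSL invocation looks roughly like this (the hostname is a placeholder):

```bash
# Print the full certificate chain presented by the server
openssl s_client -connect myghes.example.com:443 -servername myghes.example.com -showcerts </dev/null
```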
### Trust CA certificate for the Runner
|
||||
|
||||
The actions runner is a dotnet core application which will follow how dotnet load SSL CA certificates on each OS.
|
||||
|
||||
You can get full details documentation at [here](https://docs.microsoft.com/en-us/dotnet/standard/security/cross-platform-cryptography#x509store)
|
||||
|
||||
In short:
|
||||
- Windows: Load from Windows certificate store.
|
||||
- Linux: Load from OpenSSL CA cert bundle.
|
||||
- macOS: Load from macOS KeyChain.
|
||||
|
||||
To let the runner trusts your CA certificate, you will need to:
|
||||
1. Save your SSL certificate chain which includes the root CA and all intermediate CAs into a `.pem` file.
|
||||
2. Use `OpenSSL` to convert `.pem` file to a proper format for different OS, here is some [doc with sample commands](https://www.sslshopper.com/ssl-converter.html)
|
||||
3. Trust CA on different OS:
|
||||
- Windows: https://docs.microsoft.com/en-us/skype-sdk/sdn/articles/installing-the-trusted-root-certificate
|
||||
- macOS: 
|
||||
- Linux: Refer to the distribution documentation
|
||||
1. RedHat: https://www.redhat.com/sysadmin/ca-certificates-cli
|
||||
2. Ubuntu: http://manpages.ubuntu.com/manpages/focal/man8/update-ca-certificates.8.html
|
||||
3. Google search: "trust ca certificate on [linux distribution]"
|
||||
4. If all approaches failed, set environment variable `SSL_CERT_FILE` to the CA bundle `.pem` file we get.
|
||||
> To verity cert gets installed properly on Linux, you can try use `curl -v https://sitewithsslissue.com` and `pwsh -Command \"Invoke-WebRequest -Uri https://sitewithsslissue.com\"`
|
||||
|
||||
### Trust CA certificate for Git CLI

Git uses different CA bundle files depending on your operating system:
- On Windows, Git packages a CA bundle file within the Git installation
- On Linux and macOS, Git uses the OpenSSL CA bundle file

You can check which CA file Git uses by running:
```bash
export GIT_CURL_VERBOSE=1
git ls-remote https://github.com/actions/runner HEAD
```

You should see something like:
```
* Couldn't find host github.com in the .netrc file; using defaults
*   Trying 140.82.114.4...
* TCP_NODELAY set
* Connected to github.com (140.82.114.4) port 443 (#0)
* ALPN, offering h2
* ALPN, offering http/1.1
* successfully set certificate verify locations:
*   CAfile: /etc/ssl/cert.pem
    CApath: none
* SSL connection using TLSv1.2 / ECDHE-RSA-AES128-GCM-SHA256
```
This output shows that `/etc/ssl/cert.pem` is where Git reads trusted CA certificates from.

To let Git trust your CA certificate, you will need to:
1. Save your SSL certificate chain, including the root CA and all intermediate CAs, into a `.pem` file.
2. Set the `http.sslCAInfo` Git config or the `GIT_SSL_CAINFO` environment variable to the full path of the `.pem` file ([Git Doc](https://git-scm.com/docs/git-config#Documentation/git-config.txt-httpsslCAInfo))
> I would recommend `http.sslCAInfo` since it can be scoped to the specific hosts that need the extra trusted CA.
> Ex: `git config --global http.https://myghes.com/.sslCAInfo /extra/ca/cert.pem`
> This makes Git use `/extra/ca/cert.pem` only when communicating with `https://myghes.com` and keep using the default CA bundle for all other hosts. A verification sketch follows below.
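
For example, to confirm the scoped setting is picked up for your GHES instance (the repository path is a placeholder):

```bash
git config --global http.https://myghes.com/.sslCAInfo /extra/ca/cert.pem
# ls-remote forces a real TLS handshake against the instance; with the CA
# trusted it should list refs instead of failing with "SSL certificate problem".
GIT_CURL_VERBOSE=1 git ls-remote https://myghes.com/my-org/my-repo.git HEAD
```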
### Trust CA certificate for Node.js

Node.js ships with a compiled-in snapshot of the Mozilla CA store that is fixed at the release time of each Node.js version.

To let Node.js trust your CA certificate, you will need to:
1. Save your SSL certificate chain, including the root CA and all intermediate CAs, into a `.pem` file.
2. Set the environment variable `NODE_EXTRA_CA_CERTS` to point to that file, e.g. `export NODE_EXTRA_CA_CERTS=/full/path/to/cacert.pem` or `set NODE_EXTRA_CA_CERTS=C:\full\path\to\cacert.pem`
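
To check that Node.js picks up the extra CA, a minimal sketch (the hostname is a placeholder for your GHES instance):

```bash
export NODE_EXTRA_CA_CERTS=/full/path/to/cacert.pem
# Makes a real HTTPS request; if the CA is still not trusted, Node.js prints
# a certificate error such as "self signed certificate in certificate chain".
node -e "require('https').get('https://myghes.example.com', res => console.log(res.statusCode)).on('error', err => { console.error(err.message); process.exit(1); })"
```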
@@ -14,7 +14,7 @@ Issues in this repository should be for the runner application. Note that the V
|
||||
|
||||
We ask that before significant effort is put into code changes, that we have agreement on taking the change before time is invested in code changes.
|
||||
|
||||
1. Create a feature request. Once agreed we will take the enhancment
|
||||
1. Create a feature request. Once agreed we will take the enhancement
|
||||
2. Create an ADR to agree on the details of the change.
|
||||
|
||||
An ADR is an Architectural Decision Record. This allows consensus on the direction forward and also serves as a record of the change and motivation. [Read more here](adrs/README.md)
|
||||
|
||||
BIN
docs/res/macOStrustCA.gif
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 14 MiB |
@@ -1,18 +1,11 @@
|
||||
## Features
|
||||
- Resolve action download info from server (#508, #515, #550)
|
||||
- Print runner and machine name to log. (#539)
|
||||
|
||||
|
||||
## Bugs
|
||||
- Reduce input validation warnings (#506)
|
||||
- Fix null ref exception in SecretMasker caused by `hashfiles` timeout. (#516)
|
||||
- Add libicu66 to `./installDependencies.sh` for Ubuntu 20.04 (#535)
|
||||
- Fix DataContract with Token service (#532)
|
||||
- Skip search $PATH on command with fully qualified path (#526)
|
||||
- Restore SELinux context on service file when SELinux is enabled (#525)
|
||||
- Fixed an issue where docker containers failed to initialize (#977)
|
||||
|
||||
## Misc
|
||||
- Remove SPS/Token migration code. Remove GHES url manipulate code. (#513)
|
||||
- Add sub-step for developer flow for clarity (#523)
|
||||
- Update Links and Language to Git + VSCode (#522)
|
||||
- Update runner configuration exception message (#540)
|
||||
|
||||
|
||||
## Windows x64
|
||||
We recommend configuring the runner in a root folder of the Windows drive (e.g. "C:\actions-runner"). This will help avoid issues related to service identity folder permissions and long file path restrictions on Windows.
|
||||
@@ -74,3 +67,13 @@ tar xzf ./actions-runner-linux-arm-<RUNNER_VERSION>.tar.gz
|
||||
|
||||
## Using your self hosted runner
|
||||
For additional details about configuring, running, or shutting down the runner please check out our [product docs.](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/adding-self-hosted-runners)
|
||||
|
||||
## SHA-256 Checksums
|
||||
|
||||
The SHA-256 checksums for the packages included in this build are shown below:
|
||||
|
||||
- actions-runner-win-x64-<RUNNER_VERSION>.zip <!-- BEGIN SHA win-x64 --><WIN_X64_SHA><!-- END SHA win-x64 -->
|
||||
- actions-runner-osx-x64-<RUNNER_VERSION>.tar.gz <!-- BEGIN SHA osx-x64 --><OSX_X64_SHA><!-- END SHA osx-x64 -->
|
||||
- actions-runner-linux-x64-<RUNNER_VERSION>.tar.gz <!-- BEGIN SHA linux-x64 --><LINUX_X64_SHA><!-- END SHA linux-x64 -->
|
||||
- actions-runner-linux-arm64-<RUNNER_VERSION>.tar.gz <!-- BEGIN SHA linux-arm64 --><LINUX_ARM64_SHA><!-- END SHA linux-arm64 -->
|
||||
- actions-runner-linux-arm-<RUNNER_VERSION>.tar.gz <!-- BEGIN SHA linux-arm --><LINUX_ARM_SHA><!-- END SHA linux-arm -->
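
For example, to validate the Linux x64 package against its published checksum (the placeholders above are filled in for each release), a minimal sketch:

```bash
echo "<LINUX_X64_SHA>  actions-runner-linux-x64-<RUNNER_VERSION>.tar.gz" | shasum -a 256 -c
```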
|
||||
|
||||
@@ -12,12 +12,13 @@ set -e
|
||||
#
|
||||
# Usage:
|
||||
# export RUNNER_CFG_PAT=<yourPAT>
|
||||
# ./create-latest-svc scope [ghe_domain] [name] [user]
|
||||
# ./create-latest-svc scope [ghe_domain] [name] [user] [labels]
|
||||
#
|
||||
# scope required repo (:owner/:repo) or org (:organization)
|
||||
# ghe_domain optional the fully qualified domain name of your GitHub Enterprise Server deployment
|
||||
# name optional defaults to hostname
|
||||
# user optional user svc will run as. defaults to current
|
||||
# labels optional list of labels (split by comma) applied on the runner
|
||||
#
|
||||
# Notes:
|
||||
# PATS over envvars are more secure
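
For reference, a minimal invocation of the script described above might look like this (the org name, GHES hostname, and labels are placeholders; empty arguments fall back to the documented defaults):

```bash
export RUNNER_CFG_PAT=<yourPAT>
# Configure the latest runner for an org on a GHES instance, running as the
# current user with the default (hostname) runner name and two labels.
./create-latest-svc my-org ghes.example.com "" "" gpu,x64
```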
|
||||
@@ -30,6 +31,7 @@ runner_scope=${1}
|
||||
ghe_hostname=${2}
|
||||
runner_name=${3:-$(hostname)}
|
||||
svc_user=${4:-$USER}
|
||||
labels=${5}
|
||||
|
||||
echo "Configuring runner @ ${runner_scope}"
|
||||
sudo echo
|
||||
@@ -130,8 +132,8 @@ fi
|
||||
|
||||
echo
|
||||
echo "Configuring ${runner_name} @ $runner_url"
|
||||
echo "./config.sh --unattended --url $runner_url --token *** --name $runner_name"
|
||||
sudo -E -u ${svc_user} ./config.sh --unattended --url $runner_url --token $RUNNER_TOKEN --name $runner_name
|
||||
echo "./config.sh --unattended --url $runner_url --token *** --name $runner_name --labels $labels"
|
||||
sudo -E -u ${svc_user} ./config.sh --unattended --url $runner_url --token $RUNNER_TOKEN --name $runner_name --labels $labels
|
||||
|
||||
#---------------------------------------
|
||||
# Configuring as a service
|
||||
|
||||
10
src/.editorconfig
Normal file
@@ -0,0 +1,10 @@
|
||||
[*.cs]
|
||||
charset = utf-8
|
||||
insert_final_newline = true
|
||||
|
||||
csharp_new_line_before_else = true
|
||||
csharp_new_line_before_catch = true
|
||||
csharp_new_line_before_finally = true
|
||||
csharp_new_line_before_open_brace = all
|
||||
|
||||
csharp_space_after_keywords_in_control_flow_statements = true
|
||||
@@ -1,4 +1,4 @@
|
||||
|
||||
|
||||
Microsoft Visual Studio Solution File, Format Version 12.00
|
||||
# Visual Studio Version 16
|
||||
VisualStudioVersion = 16.0.29411.138
|
||||
@@ -21,6 +21,11 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Sdk", "Sdk\Sdk.csproj", "{D
|
||||
EndProject
|
||||
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Test", "Test\Test.csproj", "{C932061F-F6A1-4F1E-B854-A6C6B30DC3EF}"
|
||||
EndProject
|
||||
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution Items", "{EFB254FC-7927-445E-BA64-6676ADB309E9}"
|
||||
ProjectSection(SolutionItems) = preProject
|
||||
.editorconfig = .editorconfig
|
||||
EndProjectSection
|
||||
EndProject
|
||||
Global
|
||||
GlobalSection(SolutionConfigurationPlatforms) = preSolution
|
||||
Debug|Any CPU = Debug|Any CPU
|
||||
|
||||
677
src/Misc/dotnet-install.ps1
vendored
@@ -23,8 +23,6 @@
|
||||
Default: latest
|
||||
Represents a build version on specific channel. Possible values:
|
||||
- latest - most latest build on specific channel
|
||||
- coherent - most latest coherent build on specific channel
|
||||
coherent applies only to SDK downloads
|
||||
- 3-part version in a format A.B.C - represents specific version of build
|
||||
examples: 2.0.0-preview2-006120, 1.1.0
|
||||
.PARAMETER InstallDir
|
||||
@@ -69,6 +67,8 @@
|
||||
.PARAMETER ProxyUseDefaultCredentials
|
||||
Default: false
|
||||
Use default credentials, when using proxy address.
|
||||
.PARAMETER ProxyBypassList
|
||||
If set with ProxyAddress, will provide the list of comma separated urls that will bypass the proxy
|
||||
.PARAMETER SkipNonVersionedFiles
|
||||
Default: false
|
||||
Skips installing non-versioned files if they already exist, such as dotnet.exe.
|
||||
@@ -96,6 +96,7 @@ param(
|
||||
[string]$FeedCredential,
|
||||
[string]$ProxyAddress,
|
||||
[switch]$ProxyUseDefaultCredentials,
|
||||
[string[]]$ProxyBypassList=@(),
|
||||
[switch]$SkipNonVersionedFiles,
|
||||
[switch]$NoCdn
|
||||
)
|
||||
@@ -119,11 +120,45 @@ $VersionRegEx="/\d+\.\d+[^/]+/"
|
||||
$OverrideNonVersionedFiles = !$SkipNonVersionedFiles
|
||||
|
||||
function Say($str) {
|
||||
Write-Host "dotnet-install: $str"
|
||||
try {
|
||||
Write-Host "dotnet-install: $str"
|
||||
}
|
||||
catch {
|
||||
# Some platforms cannot utilize Write-Host (Azure Functions, for instance). Fall back to Write-Output
|
||||
Write-Output "dotnet-install: $str"
|
||||
}
|
||||
}
|
||||
|
||||
function Say-Warning($str) {
|
||||
try {
|
||||
Write-Warning "dotnet-install: $str"
|
||||
}
|
||||
catch {
|
||||
# Some platforms cannot utilize Write-Warning (Azure Functions, for instance). Fall back to Write-Output
|
||||
Write-Output "dotnet-install: Warning: $str"
|
||||
}
|
||||
}
|
||||
|
||||
# Writes a line with error style settings.
|
||||
# Use this function to show a human-readable comment along with an exception.
|
||||
function Say-Error($str) {
|
||||
try {
|
||||
# Write-Error is quite oververbose for the purpose of the function, let's write one line with error style settings.
|
||||
$Host.UI.WriteErrorLine("dotnet-install: $str")
|
||||
}
|
||||
catch {
|
||||
Write-Output "dotnet-install: Error: $str"
|
||||
}
|
||||
}
|
||||
|
||||
function Say-Verbose($str) {
|
||||
Write-Verbose "dotnet-install: $str"
|
||||
try {
|
||||
Write-Verbose "dotnet-install: $str"
|
||||
}
|
||||
catch {
|
||||
# Some platforms cannot utilize Write-Verbose (Azure Functions, for instance). Fall back to Write-Output
|
||||
Write-Output "dotnet-install: $str"
|
||||
}
|
||||
}
|
||||
|
||||
function Say-Invocation($Invocation) {
|
||||
@@ -137,7 +172,7 @@ function Invoke-With-Retry([ScriptBlock]$ScriptBlock, [int]$MaxAttempts = 3, [in
|
||||
|
||||
while ($true) {
|
||||
try {
|
||||
return $ScriptBlock.Invoke()
|
||||
return & $ScriptBlock
|
||||
}
|
||||
catch {
|
||||
$Attempts++
|
||||
@@ -176,7 +211,7 @@ function Get-CLIArchitecture-From-Architecture([string]$Architecture) {
|
||||
{ $_ -eq "x86" } { return "x86" }
|
||||
{ $_ -eq "arm" } { return "arm" }
|
||||
{ $_ -eq "arm64" } { return "arm64" }
|
||||
default { throw "Architecture not supported. If you think this is a bug, report it at https://github.com/dotnet/sdk/issues" }
|
||||
default { throw "Architecture '$Architecture' not supported. If you think this is a bug, report it at https://github.com/dotnet/install-scripts/issues" }
|
||||
}
|
||||
}
|
||||
|
||||
@@ -237,7 +272,11 @@ function GetHTTPResponse([Uri] $Uri)
|
||||
|
||||
if($ProxyAddress) {
|
||||
$HttpClientHandler = New-Object System.Net.Http.HttpClientHandler
|
||||
$HttpClientHandler.Proxy = New-Object System.Net.WebProxy -Property @{Address=$ProxyAddress;UseDefaultCredentials=$ProxyUseDefaultCredentials}
|
||||
$HttpClientHandler.Proxy = New-Object System.Net.WebProxy -Property @{
|
||||
Address=$ProxyAddress;
|
||||
UseDefaultCredentials=$ProxyUseDefaultCredentials;
|
||||
BypassList = $ProxyBypassList;
|
||||
}
|
||||
$HttpClient = New-Object System.Net.Http.HttpClient -ArgumentList $HttpClientHandler
|
||||
}
|
||||
else {
|
||||
@@ -247,18 +286,41 @@ function GetHTTPResponse([Uri] $Uri)
|
||||
# Default timeout for HttpClient is 100s. For a 50 MB download this assumes 500 KB/s average, any less will time out
|
||||
# 20 minutes allows it to work over much slower connections.
|
||||
$HttpClient.Timeout = New-TimeSpan -Minutes 20
|
||||
$Response = $HttpClient.GetAsync("${Uri}${FeedCredential}").Result
|
||||
if (($Response -eq $null) -or (-not ($Response.IsSuccessStatusCode))) {
|
||||
# The feed credential is potentially sensitive info. Do not log FeedCredential to console output.
|
||||
$ErrorMsg = "Failed to download $Uri."
|
||||
if ($Response -ne $null) {
|
||||
$ErrorMsg += " $Response"
|
||||
$Task = $HttpClient.GetAsync("${Uri}${FeedCredential}").ConfigureAwait("false");
|
||||
$Response = $Task.GetAwaiter().GetResult();
|
||||
|
||||
if (($null -eq $Response) -or (-not ($Response.IsSuccessStatusCode))) {
|
||||
# The feed credential is potentially sensitive info. Do not log FeedCredential to console output.
|
||||
$DownloadException = [System.Exception] "Unable to download $Uri."
|
||||
|
||||
if ($null -ne $Response) {
|
||||
$DownloadException.Data["StatusCode"] = [int] $Response.StatusCode
|
||||
$DownloadException.Data["ErrorMessage"] = "Unable to download $Uri. Returned HTTP status code: " + $DownloadException.Data["StatusCode"]
|
||||
}
|
||||
|
||||
throw $ErrorMsg
|
||||
throw $DownloadException
|
||||
}
|
||||
|
||||
return $Response
|
||||
return $Response
|
||||
}
|
||||
catch [System.Net.Http.HttpRequestException] {
|
||||
$DownloadException = [System.Exception] "Unable to download $Uri."
|
||||
|
||||
# Pick up the exception message and inner exceptions' messages if they exist
|
||||
$CurrentException = $PSItem.Exception
|
||||
$ErrorMsg = $CurrentException.Message + "`r`n"
|
||||
while ($CurrentException.InnerException) {
|
||||
$CurrentException = $CurrentException.InnerException
|
||||
$ErrorMsg += $CurrentException.Message + "`r`n"
|
||||
}
|
||||
|
||||
# Check if there is an issue concerning TLS.
|
||||
if ($ErrorMsg -like "*SSL/TLS*") {
|
||||
$ErrorMsg += "Ensure that TLS 1.2 or higher is enabled to use this script.`r`n"
|
||||
}
|
||||
|
||||
$DownloadException.Data["ErrorMessage"] = $ErrorMsg
|
||||
throw $DownloadException
|
||||
}
|
||||
finally {
|
||||
if ($HttpClient -ne $null) {
|
||||
@@ -268,7 +330,7 @@ function GetHTTPResponse([Uri] $Uri)
|
||||
})
|
||||
}
|
||||
|
||||
function Get-Latest-Version-Info([string]$AzureFeed, [string]$Channel, [bool]$Coherent) {
|
||||
function Get-Latest-Version-Info([string]$AzureFeed, [string]$Channel) {
|
||||
Say-Invocation $MyInvocation
|
||||
|
||||
$VersionFileUrl = $null
|
||||
@@ -278,17 +340,11 @@ function Get-Latest-Version-Info([string]$AzureFeed, [string]$Channel, [bool]$Co
|
||||
elseif ($Runtime -eq "aspnetcore") {
|
||||
$VersionFileUrl = "$UncachedFeed/aspnetcore/Runtime/$Channel/latest.version"
|
||||
}
|
||||
# Currently, the WindowsDesktop runtime is manufactured with the .Net core runtime
|
||||
elseif ($Runtime -eq "windowsdesktop") {
|
||||
$VersionFileUrl = "$UncachedFeed/Runtime/$Channel/latest.version"
|
||||
$VersionFileUrl = "$UncachedFeed/WindowsDesktop/$Channel/latest.version"
|
||||
}
|
||||
elseif (-not $Runtime) {
|
||||
if ($Coherent) {
|
||||
$VersionFileUrl = "$UncachedFeed/Sdk/$Channel/latest.coherent.version"
|
||||
}
|
||||
else {
|
||||
$VersionFileUrl = "$UncachedFeed/Sdk/$Channel/latest.version"
|
||||
}
|
||||
$VersionFileUrl = "$UncachedFeed/Sdk/$Channel/latest.version"
|
||||
}
|
||||
else {
|
||||
throw "Invalid value for `$Runtime"
|
||||
@@ -297,7 +353,8 @@ function Get-Latest-Version-Info([string]$AzureFeed, [string]$Channel, [bool]$Co
|
||||
$Response = GetHTTPResponse -Uri $VersionFileUrl
|
||||
}
|
||||
catch {
|
||||
throw "Could not resolve version information."
|
||||
Say-Error "Could not resolve version information."
|
||||
throw
|
||||
}
|
||||
$StringContent = $Response.Content.ReadAsStringAsync().Result
|
||||
|
||||
@@ -323,7 +380,8 @@ function Parse-Jsonfile-For-Version([string]$JSonFile) {
|
||||
$JSonContent = Get-Content($JSonFile) -Raw | ConvertFrom-Json | Select-Object -expand "sdk" -ErrorAction SilentlyContinue
|
||||
}
|
||||
catch {
|
||||
throw "Json file unreadable: '$JSonFile'"
|
||||
Say-Error "Json file unreadable: '$JSonFile'"
|
||||
throw
|
||||
}
|
||||
if ($JSonContent) {
|
||||
try {
|
||||
@@ -336,7 +394,8 @@ function Parse-Jsonfile-For-Version([string]$JSonFile) {
|
||||
}
|
||||
}
|
||||
catch {
|
||||
throw "Unable to parse the SDK node in '$JSonFile'"
|
||||
Say-Error "Unable to parse the SDK node in '$JSonFile'"
|
||||
throw
|
||||
}
|
||||
}
|
||||
else {
|
||||
@@ -352,16 +411,12 @@ function Get-Specific-Version-From-Version([string]$AzureFeed, [string]$Channel,
|
||||
Say-Invocation $MyInvocation
|
||||
|
||||
if (-not $JSonFile) {
|
||||
switch ($Version.ToLower()) {
|
||||
{ $_ -eq "latest" } {
|
||||
$LatestVersionInfo = Get-Latest-Version-Info -AzureFeed $AzureFeed -Channel $Channel -Coherent $False
|
||||
return $LatestVersionInfo.Version
|
||||
}
|
||||
{ $_ -eq "coherent" } {
|
||||
$LatestVersionInfo = Get-Latest-Version-Info -AzureFeed $AzureFeed -Channel $Channel -Coherent $True
|
||||
return $LatestVersionInfo.Version
|
||||
}
|
||||
default { return $Version }
|
||||
if ($Version.ToLower() -eq "latest") {
|
||||
$LatestVersionInfo = Get-Latest-Version-Info -AzureFeed $AzureFeed -Channel $Channel
|
||||
return $LatestVersionInfo.Version
|
||||
}
|
||||
else {
|
||||
return $Version
|
||||
}
|
||||
}
|
||||
else {
|
||||
@@ -372,17 +427,29 @@ function Get-Specific-Version-From-Version([string]$AzureFeed, [string]$Channel,
|
||||
function Get-Download-Link([string]$AzureFeed, [string]$SpecificVersion, [string]$CLIArchitecture) {
|
||||
Say-Invocation $MyInvocation
|
||||
|
||||
# If anything fails in this lookup it will default to $SpecificVersion
|
||||
$SpecificProductVersion = Get-Product-Version -AzureFeed $AzureFeed -SpecificVersion $SpecificVersion
|
||||
|
||||
if ($Runtime -eq "dotnet") {
|
||||
$PayloadURL = "$AzureFeed/Runtime/$SpecificVersion/dotnet-runtime-$SpecificVersion-win-$CLIArchitecture.zip"
|
||||
$PayloadURL = "$AzureFeed/Runtime/$SpecificVersion/dotnet-runtime-$SpecificProductVersion-win-$CLIArchitecture.zip"
|
||||
}
|
||||
elseif ($Runtime -eq "aspnetcore") {
|
||||
$PayloadURL = "$AzureFeed/aspnetcore/Runtime/$SpecificVersion/aspnetcore-runtime-$SpecificVersion-win-$CLIArchitecture.zip"
|
||||
$PayloadURL = "$AzureFeed/aspnetcore/Runtime/$SpecificVersion/aspnetcore-runtime-$SpecificProductVersion-win-$CLIArchitecture.zip"
|
||||
}
|
||||
elseif ($Runtime -eq "windowsdesktop") {
|
||||
$PayloadURL = "$AzureFeed/Runtime/$SpecificVersion/windowsdesktop-runtime-$SpecificVersion-win-$CLIArchitecture.zip"
|
||||
# The windows desktop runtime is part of the core runtime layout prior to 5.0
|
||||
$PayloadURL = "$AzureFeed/Runtime/$SpecificVersion/windowsdesktop-runtime-$SpecificProductVersion-win-$CLIArchitecture.zip"
|
||||
if ($SpecificVersion -match '^(\d+)\.(.*)$')
|
||||
{
|
||||
$majorVersion = [int]$Matches[1]
|
||||
if ($majorVersion -ge 5)
|
||||
{
|
||||
$PayloadURL = "$AzureFeed/WindowsDesktop/$SpecificVersion/windowsdesktop-runtime-$SpecificProductVersion-win-$CLIArchitecture.zip"
|
||||
}
|
||||
}
|
||||
}
|
||||
elseif (-not $Runtime) {
|
||||
$PayloadURL = "$AzureFeed/Sdk/$SpecificVersion/dotnet-sdk-$SpecificVersion-win-$CLIArchitecture.zip"
|
||||
$PayloadURL = "$AzureFeed/Sdk/$SpecificVersion/dotnet-sdk-$SpecificProductVersion-win-$CLIArchitecture.zip"
|
||||
}
|
||||
else {
|
||||
throw "Invalid value for `$Runtime"
|
||||
@@ -390,7 +457,7 @@ function Get-Download-Link([string]$AzureFeed, [string]$SpecificVersion, [string
|
||||
|
||||
Say-Verbose "Constructed primary named payload URL: $PayloadURL"
|
||||
|
||||
return $PayloadURL
|
||||
return $PayloadURL, $SpecificProductVersion
|
||||
}
|
||||
|
||||
function Get-LegacyDownload-Link([string]$AzureFeed, [string]$SpecificVersion, [string]$CLIArchitecture) {
|
||||
@@ -411,6 +478,60 @@ function Get-LegacyDownload-Link([string]$AzureFeed, [string]$SpecificVersion, [
|
||||
return $PayloadURL
|
||||
}
|
||||
|
||||
function Get-Product-Version([string]$AzureFeed, [string]$SpecificVersion) {
|
||||
Say-Invocation $MyInvocation
|
||||
|
||||
if ($Runtime -eq "dotnet") {
|
||||
$ProductVersionTxtURL = "$AzureFeed/Runtime/$SpecificVersion/productVersion.txt"
|
||||
}
|
||||
elseif ($Runtime -eq "aspnetcore") {
|
||||
$ProductVersionTxtURL = "$AzureFeed/aspnetcore/Runtime/$SpecificVersion/productVersion.txt"
|
||||
}
|
||||
elseif ($Runtime -eq "windowsdesktop") {
|
||||
# The windows desktop runtime is part of the core runtime layout prior to 5.0
|
||||
$ProductVersionTxtURL = "$AzureFeed/Runtime/$SpecificVersion/productVersion.txt"
|
||||
if ($SpecificVersion -match '^(\d+)\.(.*)')
|
||||
{
|
||||
$majorVersion = [int]$Matches[1]
|
||||
if ($majorVersion -ge 5)
|
||||
{
|
||||
$ProductVersionTxtURL = "$AzureFeed/WindowsDesktop/$SpecificVersion/productVersion.txt"
|
||||
}
|
||||
}
|
||||
}
|
||||
elseif (-not $Runtime) {
|
||||
$ProductVersionTxtURL = "$AzureFeed/Sdk/$SpecificVersion/productVersion.txt"
|
||||
}
|
||||
else {
|
||||
throw "Invalid value '$Runtime' specified for `$Runtime"
|
||||
}
|
||||
|
||||
Say-Verbose "Checking for existence of $ProductVersionTxtURL"
|
||||
|
||||
try {
|
||||
$productVersionResponse = GetHTTPResponse($productVersionTxtUrl)
|
||||
|
||||
if ($productVersionResponse.StatusCode -eq 200) {
|
||||
$productVersion = $productVersionResponse.Content.ReadAsStringAsync().Result.Trim()
|
||||
if ($productVersion -ne $SpecificVersion)
|
||||
{
|
||||
Say "Using alternate version $productVersion found in $ProductVersionTxtURL"
|
||||
}
|
||||
|
||||
return $productVersion
|
||||
}
|
||||
else {
|
||||
Say-Verbose "Got StatusCode $($productVersionResponse.StatusCode) trying to get productVersion.txt at $productVersionTxtUrl, so using default value of $SpecificVersion"
|
||||
$productVersion = $SpecificVersion
|
||||
}
|
||||
} catch {
|
||||
Say-Verbose "Could not read productVersion.txt at $productVersionTxtUrl, so using default value of $SpecificVersion (Exception: '$($_.Exception.Message)' )"
|
||||
$productVersion = $SpecificVersion
|
||||
}
|
||||
|
||||
return $productVersion
|
||||
}
|
||||
|
||||
function Get-User-Share-Path() {
|
||||
Say-Invocation $MyInvocation
|
||||
|
||||
@@ -548,6 +669,23 @@ function DownloadFile($Source, [string]$OutPath) {
|
||||
}
|
||||
}
|
||||
|
||||
function SafeRemoveFile($Path) {
|
||||
try {
|
||||
if (Test-Path $Path) {
|
||||
Remove-Item $Path
|
||||
Say-Verbose "The temporary file `"$Path`" was removed."
|
||||
}
|
||||
else
|
||||
{
|
||||
Say-Verbose "The temporary file `"$Path`" does not exist, therefore is not removed."
|
||||
}
|
||||
}
|
||||
catch
|
||||
{
|
||||
Say-Warning "Failed to remove the temporary file: `"$Path`", remove it manually."
|
||||
}
|
||||
}
|
||||
|
||||
function Prepend-Sdk-InstallRoot-To-Path([string]$InstallRoot, [string]$BinFolderRelativePath) {
|
||||
$BinPath = Get-Absolute-Path $(Join-Path -Path $InstallRoot -ChildPath $BinFolderRelativePath)
|
||||
if (-Not $NoPath) {
|
||||
@@ -564,9 +702,14 @@ function Prepend-Sdk-InstallRoot-To-Path([string]$InstallRoot, [string]$BinFolde
|
||||
}
|
||||
}
|
||||
|
||||
Say "Note that the intended use of this script is for Continuous Integration (CI) scenarios, where:"
|
||||
Say "- The SDK needs to be installed without user interaction and without admin rights."
|
||||
Say "- The SDK installation doesn't need to persist across multiple CI runs."
|
||||
Say "To set up a development environment or to run apps, use installers rather than this script. Visit https://dotnet.microsoft.com/download to get the installer.`r`n"
|
||||
|
||||
$CLIArchitecture = Get-CLIArchitecture-From-Architecture $Architecture
|
||||
$SpecificVersion = Get-Specific-Version-From-Version -AzureFeed $AzureFeed -Channel $Channel -Version $Version -JSonFile $JSonFile
|
||||
$DownloadLink = Get-Download-Link -AzureFeed $AzureFeed -SpecificVersion $SpecificVersion -CLIArchitecture $CLIArchitecture
|
||||
$DownloadLink, $EffectiveVersion = Get-Download-Link -AzureFeed $AzureFeed -SpecificVersion $SpecificVersion -CLIArchitecture $CLIArchitecture
|
||||
$LegacyDownloadLink = Get-LegacyDownload-Link -AzureFeed $AzureFeed -SpecificVersion $SpecificVersion -CLIArchitecture $CLIArchitecture
|
||||
|
||||
$InstallRoot = Resolve-Installation-Path $InstallDir
|
||||
@@ -592,7 +735,12 @@ if ($DryRun) {
|
||||
}
|
||||
}
|
||||
Say "Repeatable invocation: $RepeatableCommand"
|
||||
exit 0
|
||||
if ($SpecificVersion -ne $EffectiveVersion)
|
||||
{
|
||||
Say "NOTE: Due to finding a version manifest with this runtime, it would actually install with version '$EffectiveVersion'"
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
if ($Runtime -eq "dotnet") {
|
||||
@@ -615,12 +763,18 @@ else {
|
||||
throw "Invalid value for `$Runtime"
|
||||
}
|
||||
|
||||
if ($SpecificVersion -ne $EffectiveVersion)
|
||||
{
|
||||
Say "Performing installation checks for effective version: $EffectiveVersion"
|
||||
$SpecificVersion = $EffectiveVersion
|
||||
}
|
||||
|
||||
# Check if the SDK version is already installed.
|
||||
$isAssetInstalled = Is-Dotnet-Package-Installed -InstallRoot $InstallRoot -RelativePathToPackage $dotnetPackageRelativePath -SpecificVersion $SpecificVersion
|
||||
if ($isAssetInstalled) {
|
||||
Say "$assetName version $SpecificVersion is already installed."
|
||||
Prepend-Sdk-InstallRoot-To-Path -InstallRoot $InstallRoot -BinFolderRelativePath $BinFolderRelativePath
|
||||
exit 0
|
||||
return
|
||||
}
|
||||
|
||||
New-Item -ItemType Directory -Force -Path $InstallRoot | Out-Null
|
||||
@@ -628,30 +782,69 @@ New-Item -ItemType Directory -Force -Path $InstallRoot | Out-Null
|
||||
$installDrive = $((Get-Item $InstallRoot).PSDrive.Name);
|
||||
$diskInfo = Get-PSDrive -Name $installDrive
|
||||
if ($diskInfo.Free / 1MB -le 100) {
|
||||
Say "There is not enough disk space on drive ${installDrive}:"
|
||||
exit 0
|
||||
throw "There is not enough disk space on drive ${installDrive}:"
|
||||
}
|
||||
|
||||
$ZipPath = [System.IO.Path]::combine([System.IO.Path]::GetTempPath(), [System.IO.Path]::GetRandomFileName())
|
||||
Say-Verbose "Zip path: $ZipPath"
|
||||
|
||||
$DownloadFailed = $false
|
||||
Say "Downloading link: $DownloadLink"
|
||||
|
||||
$PrimaryDownloadStatusCode = 0
|
||||
$LegacyDownloadStatusCode = 0
|
||||
|
||||
$PrimaryDownloadFailedMsg = ""
|
||||
$LegacyDownloadFailedMsg = ""
|
||||
|
||||
Say "Downloading primary link $DownloadLink"
|
||||
try {
|
||||
DownloadFile -Source $DownloadLink -OutPath $ZipPath
|
||||
}
|
||||
catch {
|
||||
Say "Cannot download: $DownloadLink"
|
||||
if ($PSItem.Exception.Data.Contains("StatusCode")) {
|
||||
$PrimaryDownloadStatusCode = $PSItem.Exception.Data["StatusCode"]
|
||||
}
|
||||
|
||||
if ($PSItem.Exception.Data.Contains("ErrorMessage")) {
|
||||
$PrimaryDownloadFailedMsg = $PSItem.Exception.Data["ErrorMessage"]
|
||||
} else {
|
||||
$PrimaryDownloadFailedMsg = $PSItem.Exception.Message
|
||||
}
|
||||
|
||||
if ($PrimaryDownloadStatusCode -eq 404) {
|
||||
Say "The resource at $DownloadLink is not available."
|
||||
} else {
|
||||
Say $PSItem.Exception.Message
|
||||
}
|
||||
|
||||
SafeRemoveFile -Path $ZipPath
|
||||
|
||||
if ($LegacyDownloadLink) {
|
||||
$DownloadLink = $LegacyDownloadLink
|
||||
$ZipPath = [System.IO.Path]::combine([System.IO.Path]::GetTempPath(), [System.IO.Path]::GetRandomFileName())
|
||||
Say-Verbose "Legacy zip path: $ZipPath"
|
||||
Say "Downloading legacy link: $DownloadLink"
|
||||
Say "Downloading legacy link $DownloadLink"
|
||||
try {
|
||||
DownloadFile -Source $DownloadLink -OutPath $ZipPath
|
||||
}
|
||||
catch {
|
||||
Say "Cannot download: $DownloadLink"
|
||||
if ($PSItem.Exception.Data.Contains("StatusCode")) {
|
||||
$LegacyDownloadStatusCode = $PSItem.Exception.Data["StatusCode"]
|
||||
}
|
||||
|
||||
if ($PSItem.Exception.Data.Contains("ErrorMessage")) {
|
||||
$LegacyDownloadFailedMsg = $PSItem.Exception.Data["ErrorMessage"]
|
||||
} else {
|
||||
$LegacyDownloadFailedMsg = $PSItem.Exception.Message
|
||||
}
|
||||
|
||||
if ($LegacyDownloadStatusCode -eq 404) {
|
||||
Say "The resource at $DownloadLink is not available."
|
||||
} else {
|
||||
Say $PSItem.Exception.Message
|
||||
}
|
||||
|
||||
SafeRemoveFile -Path $ZipPath
|
||||
$DownloadFailed = $true
|
||||
}
|
||||
}
|
||||
@@ -661,7 +854,19 @@ catch {
|
||||
}
|
||||
|
||||
if ($DownloadFailed) {
|
||||
throw "Could not find/download: `"$assetName`" with version = $SpecificVersion`nRefer to: https://aka.ms/dotnet-os-lifecycle for information on .NET Core support"
|
||||
if (($PrimaryDownloadStatusCode -eq 404) -and ((-not $LegacyDownloadLink) -or ($LegacyDownloadStatusCode -eq 404))) {
|
||||
throw "Could not find `"$assetName`" with version = $SpecificVersion`nRefer to: https://aka.ms/dotnet-os-lifecycle for information on .NET Core support"
|
||||
} else {
|
||||
# 404-NotFound is an expected response if it goes from only one of the links, do not show that error.
|
||||
# If primary path is available (not 404-NotFound) then show the primary error else show the legacy error.
|
||||
if ($PrimaryDownloadStatusCode -ne 404) {
|
||||
throw "Could not download `"$assetName`" with version = $SpecificVersion`r`n$PrimaryDownloadFailedMsg"
|
||||
}
|
||||
if (($LegacyDownloadLink) -and ($LegacyDownloadStatusCode -ne 404)) {
|
||||
throw "Could not download `"$assetName`" with version = $SpecificVersion`r`n$LegacyDownloadFailedMsg"
|
||||
}
|
||||
throw "Could not download `"$assetName`" with version = $SpecificVersion"
|
||||
}
|
||||
}
|
||||
|
||||
Say "Extracting zip from $DownloadLink"
|
||||
@@ -683,206 +888,208 @@ if (!$isAssetInstalled) {
|
||||
$isAssetInstalled = Is-Dotnet-Package-Installed -InstallRoot $InstallRoot -RelativePathToPackage $dotnetPackageRelativePath -SpecificVersion $SpecificVersion
|
||||
}
|
||||
|
||||
# Version verification failed. More likely something is wrong either with the downloaded content or with the verification algorithm.
|
||||
if (!$isAssetInstalled) {
|
||||
Say-Error "Failed to verify the version of installed `"$assetName`".`nInstallation source: $DownloadLink.`nInstallation location: $InstallRoot.`nReport the bug at https://github.com/dotnet/install-scripts/issues."
|
||||
throw "`"$assetName`" with version = $SpecificVersion failed to install with an unknown error."
|
||||
}
|
||||
|
||||
Remove-Item $ZipPath
|
||||
SafeRemoveFile -Path $ZipPath
|
||||
|
||||
Prepend-Sdk-InstallRoot-To-Path -InstallRoot $InstallRoot -BinFolderRelativePath $BinFolderRelativePath
|
||||
|
||||
Say "Note that the script does not resolve dependencies during installation."
|
||||
Say "To check the list of dependencies, go to https://docs.microsoft.com/dotnet/core/install/windows#dependencies"
|
||||
Say "Installation finished"
|
||||
exit 0
|
||||
|
||||
# SIG # Begin signature block
|
||||
# MIIjhwYJKoZIhvcNAQcCoIIjeDCCI3QCAQExDzANBglghkgBZQMEAgEFADB5Bgor
|
||||
# MIIjjwYJKoZIhvcNAQcCoIIjgDCCI3wCAQExDzANBglghkgBZQMEAgEFADB5Bgor
|
||||
# BgEEAYI3AgEEoGswaTA0BgorBgEEAYI3AgEeMCYCAwEAAAQQH8w7YFlLCE63JNLG
|
||||
# KX7zUQIBAAIBAAIBAAIBAAIBADAxMA0GCWCGSAFlAwQCAQUABCAiKYSY4KtkeThH
|
||||
# d5M1aXqv1K0/pff07QwfUbYZ/qX5LqCCDYUwggYDMIID66ADAgECAhMzAAABiK9S
|
||||
# 1rmSbej5AAAAAAGIMA0GCSqGSIb3DQEBCwUAMH4xCzAJBgNVBAYTAlVTMRMwEQYD
|
||||
# KX7zUQIBAAIBAAIBAAIBAAIBADAxMA0GCWCGSAFlAwQCAQUABCCNsnhcJvx/hXmM
|
||||
# w8KjuvvIMDBFonhg9XJFc1QwfTyH4aCCDYEwggX/MIID56ADAgECAhMzAAABh3IX
|
||||
# chVZQMcJAAAAAAGHMA0GCSqGSIb3DQEBCwUAMH4xCzAJBgNVBAYTAlVTMRMwEQYD
|
||||
# VQQIEwpXYXNoaW5ndG9uMRAwDgYDVQQHEwdSZWRtb25kMR4wHAYDVQQKExVNaWNy
|
||||
# b3NvZnQgQ29ycG9yYXRpb24xKDAmBgNVBAMTH01pY3Jvc29mdCBDb2RlIFNpZ25p
|
||||
# bmcgUENBIDIwMTEwHhcNMjAwMzA0MTgzOTQ4WhcNMjEwMzAzMTgzOTQ4WjB0MQsw
|
||||
# bmcgUENBIDIwMTEwHhcNMjAwMzA0MTgzOTQ3WhcNMjEwMzAzMTgzOTQ3WjB0MQsw
|
||||
# CQYDVQQGEwJVUzETMBEGA1UECBMKV2FzaGluZ3RvbjEQMA4GA1UEBxMHUmVkbW9u
|
||||
# ZDEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMR4wHAYDVQQDExVNaWNy
|
||||
# b3NvZnQgQ29ycG9yYXRpb24wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB
|
||||
# AQCSCNryE+Cewy2m4t/a74wZ7C9YTwv1PyC4BvM/kSWPNs8n0RTe+FvYfU+E9uf0
|
||||
# t7nYlAzHjK+plif2BhD+NgdhIUQ8sVwWO39tjvQRHjP2//vSvIfmmkRoML1Ihnjs
|
||||
# 9kQiZQzYRDYYRp9xSQYmRwQjk5hl8/U7RgOiQDitVHaU7BT1MI92lfZRuIIDDYBd
|
||||
# vXtbclYJMVOwqZtv0O9zQCret6R+fRSGaDNfEEpcILL+D7RV3M4uaJE4Ta6KAOdv
|
||||
# V+MVaJp1YXFTZPKtpjHO6d9pHQPZiG7NdC6QbnRGmsa48uNQrb6AfmLKDI1Lp31W
|
||||
# MogTaX5tZf+CZT9PSuvjOCLNAgMBAAGjggGCMIIBfjAfBgNVHSUEGDAWBgorBgEE
|
||||
# AYI3TAgBBggrBgEFBQcDAzAdBgNVHQ4EFgQUj9RJL9zNrPcL10RZdMQIXZN7MG8w
|
||||
# VAYDVR0RBE0wS6RJMEcxLTArBgNVBAsTJE1pY3Jvc29mdCBJcmVsYW5kIE9wZXJh
|
||||
# dGlvbnMgTGltaXRlZDEWMBQGA1UEBRMNMjMwMDEyKzQ1ODM4NjAfBgNVHSMEGDAW
|
||||
# gBRIbmTlUAXTgqoXNzcitW2oynUClTBUBgNVHR8ETTBLMEmgR6BFhkNodHRwOi8v
|
||||
# d3d3Lm1pY3Jvc29mdC5jb20vcGtpb3BzL2NybC9NaWNDb2RTaWdQQ0EyMDExXzIw
|
||||
# MTEtMDctMDguY3JsMGEGCCsGAQUFBwEBBFUwUzBRBggrBgEFBQcwAoZFaHR0cDov
|
||||
# L3d3dy5taWNyb3NvZnQuY29tL3BraW9wcy9jZXJ0cy9NaWNDb2RTaWdQQ0EyMDEx
|
||||
# XzIwMTEtMDctMDguY3J0MAwGA1UdEwEB/wQCMAAwDQYJKoZIhvcNAQELBQADggIB
|
||||
# ACnXo8hjp7FeT+H6iQlV3CcGnkSbFvIpKYafgzYCFo3UHY1VHYJVb5jHEO8oG26Q
|
||||
# qBELmak6MTI+ra3WKMTGhE1sEIlowTcp4IAs8a5wpCh6Vf4Z/bAtIppP3p3gXk2X
|
||||
# 8UXTc+WxjQYsDkFiSzo/OBa5hkdW1g4EpO43l9mjToBdqEPtIXsZ7Hi1/6y4gK0P
|
||||
# mMiwG8LMpSn0n/oSHGjrUNBgHJPxgs63Slf58QGBznuXiRaXmfTUDdrvhRocdxIM
|
||||
# i8nXQwWACMiQzJSRzBP5S2wUq7nMAqjaTbeXhJqD2SFVHdUYlKruvtPSwbnqSRWT
|
||||
# GI8s4FEXt+TL3w5JnwVZmZkUFoioQDMMjFyaKurdJ6pnzbr1h6QW0R97fWc8xEIz
|
||||
# LIOiU2rjwWAtlQqFO8KNiykjYGyEf5LyAJKAO+rJd9fsYR+VBauIEQoYmjnUbTXM
|
||||
# SY2Lf5KMluWlDOGVh8q6XjmBccpaT+8tCfxpaVYPi1ncnwTwaPQvVq8RjWDRB7Pa
|
||||
# 8ruHgj2HJFi69+hcq7mWx5nTUtzzFa7RSZfE5a1a5AuBmGNRr7f8cNfa01+tiWjV
|
||||
# Kk1a+gJUBSP0sIxecFbVSXTZ7bqeal45XSDIisZBkWb+83TbXdTGMDSUFKTAdtC+
|
||||
# r35GfsN8QVy59Hb5ZYzAXczhgRmk7NyE6jD0Ym5TKiW5MIIHejCCBWKgAwIBAgIK
|
||||
# YQ6Q0gAAAAAAAzANBgkqhkiG9w0BAQsFADCBiDELMAkGA1UEBhMCVVMxEzARBgNV
|
||||
# BAgTCldhc2hpbmd0b24xEDAOBgNVBAcTB1JlZG1vbmQxHjAcBgNVBAoTFU1pY3Jv
|
||||
# c29mdCBDb3Jwb3JhdGlvbjEyMDAGA1UEAxMpTWljcm9zb2Z0IFJvb3QgQ2VydGlm
|
||||
# aWNhdGUgQXV0aG9yaXR5IDIwMTEwHhcNMTEwNzA4MjA1OTA5WhcNMjYwNzA4MjEw
|
||||
# OTA5WjB+MQswCQYDVQQGEwJVUzETMBEGA1UECBMKV2FzaGluZ3RvbjEQMA4GA1UE
|
||||
# BxMHUmVkbW9uZDEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMSgwJgYD
|
||||
# VQQDEx9NaWNyb3NvZnQgQ29kZSBTaWduaW5nIFBDQSAyMDExMIICIjANBgkqhkiG
|
||||
# 9w0BAQEFAAOCAg8AMIICCgKCAgEAq/D6chAcLq3YbqqCEE00uvK2WCGfQhsqa+la
|
||||
# UKq4BjgaBEm6f8MMHt03a8YS2AvwOMKZBrDIOdUBFDFC04kNeWSHfpRgJGyvnkmc
|
||||
# 6Whe0t+bU7IKLMOv2akrrnoJr9eWWcpgGgXpZnboMlImEi/nqwhQz7NEt13YxC4D
|
||||
# dato88tt8zpcoRb0RrrgOGSsbmQ1eKagYw8t00CT+OPeBw3VXHmlSSnnDb6gE3e+
|
||||
# lD3v++MrWhAfTVYoonpy4BI6t0le2O3tQ5GD2Xuye4Yb2T6xjF3oiU+EGvKhL1nk
|
||||
# kDstrjNYxbc+/jLTswM9sbKvkjh+0p2ALPVOVpEhNSXDOW5kf1O6nA+tGSOEy/S6
|
||||
# A4aN91/w0FK/jJSHvMAhdCVfGCi2zCcoOCWYOUo2z3yxkq4cI6epZuxhH2rhKEmd
|
||||
# X4jiJV3TIUs+UsS1Vz8kA/DRelsv1SPjcF0PUUZ3s/gA4bysAoJf28AVs70b1FVL
|
||||
# 5zmhD+kjSbwYuER8ReTBw3J64HLnJN+/RpnF78IcV9uDjexNSTCnq47f7Fufr/zd
|
||||
# sGbiwZeBe+3W7UvnSSmnEyimp31ngOaKYnhfsi+E11ecXL93KCjx7W3DKI8sj0A3
|
||||
# T8HhhUSJxAlMxdSlQy90lfdu+HggWCwTXWCVmj5PM4TasIgX3p5O9JawvEagbJjS
|
||||
# 4NaIjAsCAwEAAaOCAe0wggHpMBAGCSsGAQQBgjcVAQQDAgEAMB0GA1UdDgQWBBRI
|
||||
# bmTlUAXTgqoXNzcitW2oynUClTAZBgkrBgEEAYI3FAIEDB4KAFMAdQBiAEMAQTAL
|
||||
# BgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBRyLToCMZBD
|
||||
# uRQFTuHqp8cx0SOJNDBaBgNVHR8EUzBRME+gTaBLhklodHRwOi8vY3JsLm1pY3Jv
|
||||
# c29mdC5jb20vcGtpL2NybC9wcm9kdWN0cy9NaWNSb29DZXJBdXQyMDExXzIwMTFf
|
||||
# MDNfMjIuY3JsMF4GCCsGAQUFBwEBBFIwUDBOBggrBgEFBQcwAoZCaHR0cDovL3d3
|
||||
# dy5taWNyb3NvZnQuY29tL3BraS9jZXJ0cy9NaWNSb29DZXJBdXQyMDExXzIwMTFf
|
||||
# MDNfMjIuY3J0MIGfBgNVHSAEgZcwgZQwgZEGCSsGAQQBgjcuAzCBgzA/BggrBgEF
|
||||
# BQcCARYzaHR0cDovL3d3dy5taWNyb3NvZnQuY29tL3BraW9wcy9kb2NzL3ByaW1h
|
||||
# cnljcHMuaHRtMEAGCCsGAQUFBwICMDQeMiAdAEwAZQBnAGEAbABfAHAAbwBsAGkA
|
||||
# YwB5AF8AcwB0AGEAdABlAG0AZQBuAHQALiAdMA0GCSqGSIb3DQEBCwUAA4ICAQBn
|
||||
# 8oalmOBUeRou09h0ZyKbC5YR4WOSmUKWfdJ5DJDBZV8uLD74w3LRbYP+vj/oCso7
|
||||
# v0epo/Np22O/IjWll11lhJB9i0ZQVdgMknzSGksc8zxCi1LQsP1r4z4HLimb5j0b
|
||||
# pdS1HXeUOeLpZMlEPXh6I/MTfaaQdION9MsmAkYqwooQu6SpBQyb7Wj6aC6VoCo/
|
||||
# KmtYSWMfCWluWpiW5IP0wI/zRive/DvQvTXvbiWu5a8n7dDd8w6vmSiXmE0OPQvy
|
||||
# CInWH8MyGOLwxS3OW560STkKxgrCxq2u5bLZ2xWIUUVYODJxJxp/sfQn+N4sOiBp
|
||||
# mLJZiWhub6e3dMNABQamASooPoI/E01mC8CzTfXhj38cbxV9Rad25UAqZaPDXVJi
|
||||
# hsMdYzaXht/a8/jyFqGaJ+HNpZfQ7l1jQeNbB5yHPgZ3BtEGsXUfFL5hYbXw3MYb
|
||||
# BL7fQccOKO7eZS/sl/ahXJbYANahRr1Z85elCUtIEJmAH9AAKcWxm6U/RXceNcbS
|
||||
# oqKfenoi+kiVH6v7RyOA9Z74v2u3S5fi63V4GuzqN5l5GEv/1rMjaHXmr/r8i+sL
|
||||
# gOppO6/8MO0ETI7f33VtY5E90Z1WTk+/gFcioXgRMiF670EKsT/7qMykXcGhiJtX
|
||||
# cVZOSEXAQsmbdlsKgEhr/Xmfwb1tbWrJUnMTDXpQzTGCFVgwghVUAgEBMIGVMH4x
|
||||
# AQDOt8kLc7P3T7MKIhouYHewMFmnq8Ayu7FOhZCQabVwBp2VS4WyB2Qe4TQBT8aB
|
||||
# znANDEPjHKNdPT8Xz5cNali6XHefS8i/WXtF0vSsP8NEv6mBHuA2p1fw2wB/F0dH
|
||||
# sJ3GfZ5c0sPJjklsiYqPw59xJ54kM91IOgiO2OUzjNAljPibjCWfH7UzQ1TPHc4d
|
||||
# weils8GEIrbBRb7IWwiObL12jWT4Yh71NQgvJ9Fn6+UhD9x2uk3dLj84vwt1NuFQ
|
||||
# itKJxIV0fVsRNR3abQVOLqpDugbr0SzNL6o8xzOHL5OXiGGwg6ekiXA1/2XXY7yV
|
||||
# Fc39tledDtZjSjNbex1zzwSXAgMBAAGjggF+MIIBejAfBgNVHSUEGDAWBgorBgEE
|
||||
# AYI3TAgBBggrBgEFBQcDAzAdBgNVHQ4EFgQUhov4ZyO96axkJdMjpzu2zVXOJcsw
|
||||
# UAYDVR0RBEkwR6RFMEMxKTAnBgNVBAsTIE1pY3Jvc29mdCBPcGVyYXRpb25zIFB1
|
||||
# ZXJ0byBSaWNvMRYwFAYDVQQFEw0yMzAwMTIrNDU4Mzg1MB8GA1UdIwQYMBaAFEhu
|
||||
# ZOVQBdOCqhc3NyK1bajKdQKVMFQGA1UdHwRNMEswSaBHoEWGQ2h0dHA6Ly93d3cu
|
||||
# bWljcm9zb2Z0LmNvbS9wa2lvcHMvY3JsL01pY0NvZFNpZ1BDQTIwMTFfMjAxMS0w
|
||||
# Ny0wOC5jcmwwYQYIKwYBBQUHAQEEVTBTMFEGCCsGAQUFBzAChkVodHRwOi8vd3d3
|
||||
# Lm1pY3Jvc29mdC5jb20vcGtpb3BzL2NlcnRzL01pY0NvZFNpZ1BDQTIwMTFfMjAx
|
||||
# MS0wNy0wOC5jcnQwDAYDVR0TAQH/BAIwADANBgkqhkiG9w0BAQsFAAOCAgEAixmy
|
||||
# S6E6vprWD9KFNIB9G5zyMuIjZAOuUJ1EK/Vlg6Fb3ZHXjjUwATKIcXbFuFC6Wr4K
|
||||
# NrU4DY/sBVqmab5AC/je3bpUpjtxpEyqUqtPc30wEg/rO9vmKmqKoLPT37svc2NV
|
||||
# BmGNl+85qO4fV/w7Cx7J0Bbqk19KcRNdjt6eKoTnTPHBHlVHQIHZpMxacbFOAkJr
|
||||
# qAVkYZdz7ikNXTxV+GRb36tC4ByMNxE2DF7vFdvaiZP0CVZ5ByJ2gAhXMdK9+usx
|
||||
# zVk913qKde1OAuWdv+rndqkAIm8fUlRnr4saSCg7cIbUwCCf116wUJ7EuJDg0vHe
|
||||
# yhnCeHnBbyH3RZkHEi2ofmfgnFISJZDdMAeVZGVOh20Jp50XBzqokpPzeZ6zc1/g
|
||||
# yILNyiVgE+RPkjnUQshd1f1PMgn3tns2Cz7bJiVUaqEO3n9qRFgy5JuLae6UweGf
|
||||
# AeOo3dgLZxikKzYs3hDMaEtJq8IP71cX7QXe6lnMmXU/Hdfz2p897Zd+kU+vZvKI
|
||||
# 3cwLfuVQgK2RZ2z+Kc3K3dRPz2rXycK5XCuRZmvGab/WbrZiC7wJQapgBodltMI5
|
||||
# GMdFrBg9IeF7/rP4EqVQXeKtevTlZXjpuNhhjuR+2DMt/dWufjXpiW91bo3aH6Ea
|
||||
# jOALXmoxgltCp1K7hrS6gmsvj94cLRf50QQ4U8Qwggd6MIIFYqADAgECAgphDpDS
|
||||
# AAAAAAADMA0GCSqGSIb3DQEBCwUAMIGIMQswCQYDVQQGEwJVUzETMBEGA1UECBMK
|
||||
# V2FzaGluZ3RvbjEQMA4GA1UEBxMHUmVkbW9uZDEeMBwGA1UEChMVTWljcm9zb2Z0
|
||||
# IENvcnBvcmF0aW9uMTIwMAYDVQQDEylNaWNyb3NvZnQgUm9vdCBDZXJ0aWZpY2F0
|
||||
# ZSBBdXRob3JpdHkgMjAxMTAeFw0xMTA3MDgyMDU5MDlaFw0yNjA3MDgyMTA5MDla
|
||||
# MH4xCzAJBgNVBAYTAlVTMRMwEQYDVQQIEwpXYXNoaW5ndG9uMRAwDgYDVQQHEwdS
|
||||
# ZWRtb25kMR4wHAYDVQQKExVNaWNyb3NvZnQgQ29ycG9yYXRpb24xKDAmBgNVBAMT
|
||||
# H01pY3Jvc29mdCBDb2RlIFNpZ25pbmcgUENBIDIwMTEwggIiMA0GCSqGSIb3DQEB
|
||||
# AQUAA4ICDwAwggIKAoICAQCr8PpyEBwurdhuqoIQTTS68rZYIZ9CGypr6VpQqrgG
|
||||
# OBoESbp/wwwe3TdrxhLYC/A4wpkGsMg51QEUMULTiQ15ZId+lGAkbK+eSZzpaF7S
|
||||
# 35tTsgosw6/ZqSuuegmv15ZZymAaBelmdugyUiYSL+erCFDPs0S3XdjELgN1q2jz
|
||||
# y23zOlyhFvRGuuA4ZKxuZDV4pqBjDy3TQJP4494HDdVceaVJKecNvqATd76UPe/7
|
||||
# 4ytaEB9NViiienLgEjq3SV7Y7e1DkYPZe7J7hhvZPrGMXeiJT4Qa8qEvWeSQOy2u
|
||||
# M1jFtz7+MtOzAz2xsq+SOH7SnYAs9U5WkSE1JcM5bmR/U7qcD60ZI4TL9LoDho33
|
||||
# X/DQUr+MlIe8wCF0JV8YKLbMJyg4JZg5SjbPfLGSrhwjp6lm7GEfauEoSZ1fiOIl
|
||||
# XdMhSz5SxLVXPyQD8NF6Wy/VI+NwXQ9RRnez+ADhvKwCgl/bwBWzvRvUVUvnOaEP
|
||||
# 6SNJvBi4RHxF5MHDcnrgcuck379GmcXvwhxX24ON7E1JMKerjt/sW5+v/N2wZuLB
|
||||
# l4F77dbtS+dJKacTKKanfWeA5opieF+yL4TXV5xcv3coKPHtbcMojyyPQDdPweGF
|
||||
# RInECUzF1KVDL3SV9274eCBYLBNdYJWaPk8zhNqwiBfenk70lrC8RqBsmNLg1oiM
|
||||
# CwIDAQABo4IB7TCCAekwEAYJKwYBBAGCNxUBBAMCAQAwHQYDVR0OBBYEFEhuZOVQ
|
||||
# BdOCqhc3NyK1bajKdQKVMBkGCSsGAQQBgjcUAgQMHgoAUwB1AGIAQwBBMAsGA1Ud
|
||||
# DwQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFHItOgIxkEO5FAVO
|
||||
# 4eqnxzHRI4k0MFoGA1UdHwRTMFEwT6BNoEuGSWh0dHA6Ly9jcmwubWljcm9zb2Z0
|
||||
# LmNvbS9wa2kvY3JsL3Byb2R1Y3RzL01pY1Jvb0NlckF1dDIwMTFfMjAxMV8wM18y
|
||||
# Mi5jcmwwXgYIKwYBBQUHAQEEUjBQME4GCCsGAQUFBzAChkJodHRwOi8vd3d3Lm1p
|
||||
# Y3Jvc29mdC5jb20vcGtpL2NlcnRzL01pY1Jvb0NlckF1dDIwMTFfMjAxMV8wM18y
|
||||
# Mi5jcnQwgZ8GA1UdIASBlzCBlDCBkQYJKwYBBAGCNy4DMIGDMD8GCCsGAQUFBwIB
|
||||
# FjNodHRwOi8vd3d3Lm1pY3Jvc29mdC5jb20vcGtpb3BzL2RvY3MvcHJpbWFyeWNw
|
||||
# cy5odG0wQAYIKwYBBQUHAgIwNB4yIB0ATABlAGcAYQBsAF8AcABvAGwAaQBjAHkA
|
||||
# XwBzAHQAYQB0AGUAbQBlAG4AdAAuIB0wDQYJKoZIhvcNAQELBQADggIBAGfyhqWY
|
||||
# 4FR5Gi7T2HRnIpsLlhHhY5KZQpZ90nkMkMFlXy4sPvjDctFtg/6+P+gKyju/R6mj
|
||||
# 82nbY78iNaWXXWWEkH2LRlBV2AySfNIaSxzzPEKLUtCw/WvjPgcuKZvmPRul1LUd
|
||||
# d5Q54ulkyUQ9eHoj8xN9ppB0g430yyYCRirCihC7pKkFDJvtaPpoLpWgKj8qa1hJ
|
||||
# Yx8JaW5amJbkg/TAj/NGK978O9C9Ne9uJa7lryft0N3zDq+ZKJeYTQ49C/IIidYf
|
||||
# wzIY4vDFLc5bnrRJOQrGCsLGra7lstnbFYhRRVg4MnEnGn+x9Cf43iw6IGmYslmJ
|
||||
# aG5vp7d0w0AFBqYBKig+gj8TTWYLwLNN9eGPfxxvFX1Fp3blQCplo8NdUmKGwx1j
|
||||
# NpeG39rz+PIWoZon4c2ll9DuXWNB41sHnIc+BncG0QaxdR8UvmFhtfDcxhsEvt9B
|
||||
# xw4o7t5lL+yX9qFcltgA1qFGvVnzl6UJS0gQmYAf0AApxbGbpT9Fdx41xtKiop96
|
||||
# eiL6SJUfq/tHI4D1nvi/a7dLl+LrdXga7Oo3mXkYS//WsyNodeav+vyL6wuA6mk7
|
||||
# r/ww7QRMjt/fdW1jkT3RnVZOT7+AVyKheBEyIXrvQQqxP/uozKRdwaGIm1dxVk5I
|
||||
# RcBCyZt2WwqASGv9eZ/BvW1taslScxMNelDNMYIVZDCCFWACAQEwgZUwfjELMAkG
|
||||
# A1UEBhMCVVMxEzARBgNVBAgTCldhc2hpbmd0b24xEDAOBgNVBAcTB1JlZG1vbmQx
|
||||
# HjAcBgNVBAoTFU1pY3Jvc29mdCBDb3Jwb3JhdGlvbjEoMCYGA1UEAxMfTWljcm9z
|
||||
# b2Z0IENvZGUgU2lnbmluZyBQQ0EgMjAxMQITMwAAAYdyF3IVWUDHCQAAAAABhzAN
|
||||
# BglghkgBZQMEAgEFAKCBrjAZBgkqhkiG9w0BCQMxDAYKKwYBBAGCNwIBBDAcBgor
|
||||
# BgEEAYI3AgELMQ4wDAYKKwYBBAGCNwIBFTAvBgkqhkiG9w0BCQQxIgQgpT/bxWwe
|
||||
# aW0EinKMWCAzDXUjwXkIHldYzR6lw4/1Pc0wQgYKKwYBBAGCNwIBDDE0MDKgFIAS
|
||||
# AE0AaQBjAHIAbwBzAG8AZgB0oRqAGGh0dHA6Ly93d3cubWljcm9zb2Z0LmNvbTAN
|
||||
# BgkqhkiG9w0BAQEFAASCAQCHd7sSQVq0YDg8QDx6/kLWn3s6jtvvIDCCgsO9spHM
|
||||
# quPd4FPbG67DCsKDClekQs52qrtRO3Zo+JMnCw4j3bS+gZHzeJr2shbftOrpsFoD
|
||||
# l7OPcUmtrqul9dkQCOp8t0MP3ls0n96/YyNy6lz4BAlTdkdDx957uAxalKaCIBzb
|
||||
# R9QyppOKIfNFvwD4EI5KI6tpmSy/uH8SrRg7ZExAYZl6J6R18WkL7KHn649lPoAQ
|
||||
# ujwrIXH10xOJops45ILGzKWQcHmCzLJGYapL4VHUuK+73nT+9ZROGHdk/PyvIcdw
|
||||
# iERa+C06v305t3DA+CuHFy1tvyw7IFF6RVbLZPwxrJjToYIS7jCCEuoGCisGAQQB
|
||||
# gjcDAwExghLaMIIS1gYJKoZIhvcNAQcCoIISxzCCEsMCAQMxDzANBglghkgBZQME
|
||||
# AgEFADCCAVUGCyqGSIb3DQEJEAEEoIIBRASCAUAwggE8AgEBBgorBgEEAYRZCgMB
|
||||
# MDEwDQYJYIZIAWUDBAIBBQAEIOCaTmvM1AP0WaEVqzKaaCu/R+bTlR4kCrM/ZXsb
|
||||
# /eNOAgZgGeLsMwsYEzIwMjEwMjAzMjExNzQ5LjU5MVowBIACAfSggdSkgdEwgc4x
|
||||
# CzAJBgNVBAYTAlVTMRMwEQYDVQQIEwpXYXNoaW5ndG9uMRAwDgYDVQQHEwdSZWRt
|
||||
# b25kMR4wHAYDVQQKExVNaWNyb3NvZnQgQ29ycG9yYXRpb24xKDAmBgNVBAMTH01p
|
||||
# Y3Jvc29mdCBDb2RlIFNpZ25pbmcgUENBIDIwMTECEzMAAAGIr1LWuZJt6PkAAAAA
|
||||
# AYgwDQYJYIZIAWUDBAIBBQCgga4wGQYJKoZIhvcNAQkDMQwGCisGAQQBgjcCAQQw
|
||||
# HAYKKwYBBAGCNwIBCzEOMAwGCisGAQQBgjcCARUwLwYJKoZIhvcNAQkEMSIEIFxZ
|
||||
# Yezh3liQqiGQuXNa+zYfoSIbLqOpdEn2ZKskBkisMEIGCisGAQQBgjcCAQwxNDAy
|
||||
# oBSAEgBNAGkAYwByAG8AcwBvAGYAdKEagBhodHRwOi8vd3d3Lm1pY3Jvc29mdC5j
|
||||
# b20wDQYJKoZIhvcNAQEBBQAEggEAjLUrwCXJCPHZulZuKAQSX+MfnIRFAhlN7ru2
|
||||
# 6H8rudvhkWgqMISkLb9gFDPR5FhR4sqdYgKW4P0ERao9ypCGi1FWDLqygC2XBbHj
|
||||
# NEQHBxHJs5SMsMAXNSIcYHqVAvhF3nXoseaNBkhOTrkQ1FS/fW7AfDGRbsiiESzv
|
||||
# lebf92shZylBFKOsKQLAL0mF/B7xrxHJIj5dgQoD1phATRNHOEQj3jgmkidFWowV
|
||||
# 4r8MzbxRhAEORbnJexlUoDQJQH3YwxuUyXkTvrYMTKSbGJLlwRaZQbrcBU0k4gCH
|
||||
# y8Sci+p9Rq+aOTzLCoNrZyh9E7OdwVDm1FJAtY30bV50T2WSFKGCEuIwghLeBgor
|
||||
# BgEEAYI3AwMBMYISzjCCEsoGCSqGSIb3DQEHAqCCErswghK3AgEDMQ8wDQYJYIZI
|
||||
# AWUDBAIBBQAwggFRBgsqhkiG9w0BCRABBKCCAUAEggE8MIIBOAIBAQYKKwYBBAGE
|
||||
# WQoDATAxMA0GCWCGSAFlAwQCAQUABCD7JNcBBSfhlKPL1tN3CEKRKJuT/dZ8RO9K
|
||||
# orYLXJeLTwIGXvN89YD7GBMyMDIwMDcwMTE0MTYyMC40MDVaMASAAgH0oIHQpIHN
|
||||
# MIHKMQswCQYDVQQGEwJVUzELMAkGA1UECBMCV0ExEDAOBgNVBAcTB1JlZG1vbmQx
|
||||
# HjAcBgNVBAoTFU1pY3Jvc29mdCBDb3Jwb3JhdGlvbjEtMCsGA1UECxMkTWljcm9z
|
||||
# b2Z0IElyZWxhbmQgT3BlcmF0aW9ucyBMaW1pdGVkMSYwJAYDVQQLEx1UaGFsZXMg
|
||||
# VFNTIEVTTjoxNzlFLTRCQjAtODI0NjElMCMGA1UEAxMcTWljcm9zb2Z0IFRpbWUt
|
||||
# U3RhbXAgU2VydmljZaCCDjkwggTxMIID2aADAgECAhMzAAABDKp4btzMQkzBAAAA
|
||||
# AAEMMA0GCSqGSIb3DQEBCwUAMHwxCzAJBgNVBAYTAlVTMRMwEQYDVQQIEwpXYXNo
|
||||
# b25kMR4wHAYDVQQKExVNaWNyb3NvZnQgQ29ycG9yYXRpb24xKTAnBgNVBAsTIE1p
|
||||
# Y3Jvc29mdCBPcGVyYXRpb25zIFB1ZXJ0byBSaWNvMSYwJAYDVQQLEx1UaGFsZXMg
|
||||
# VFNTIEVTTjo4OTdBLUUzNTYtMTcwMTElMCMGA1UEAxMcTWljcm9zb2Z0IFRpbWUt
|
||||
# U3RhbXAgU2VydmljZaCCDkEwggT1MIID3aADAgECAhMzAAABLCKvRZd1+RvuAAAA
|
||||
# AAEsMA0GCSqGSIb3DQEBCwUAMHwxCzAJBgNVBAYTAlVTMRMwEQYDVQQIEwpXYXNo
|
||||
# aW5ndG9uMRAwDgYDVQQHEwdSZWRtb25kMR4wHAYDVQQKExVNaWNyb3NvZnQgQ29y
|
||||
# cG9yYXRpb24xJjAkBgNVBAMTHU1pY3Jvc29mdCBUaW1lLVN0YW1wIFBDQSAyMDEw
|
||||
# MB4XDTE5MTAyMzIzMTkxNloXDTIxMDEyMTIzMTkxNlowgcoxCzAJBgNVBAYTAlVT
|
||||
# MQswCQYDVQQIEwJXQTEQMA4GA1UEBxMHUmVkbW9uZDEeMBwGA1UEChMVTWljcm9z
|
||||
# b2Z0IENvcnBvcmF0aW9uMS0wKwYDVQQLEyRNaWNyb3NvZnQgSXJlbGFuZCBPcGVy
|
||||
# YXRpb25zIExpbWl0ZWQxJjAkBgNVBAsTHVRoYWxlcyBUU1MgRVNOOjE3OUUtNEJC
|
||||
# MC04MjQ2MSUwIwYDVQQDExxNaWNyb3NvZnQgVGltZS1TdGFtcCBTZXJ2aWNlMIIB
|
||||
# IjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAq5011+XqVJmQKtiw39igeEMv
|
||||
# CLcZ1forbmxsDkpnCN1SrThKI+n2Pr3zqTzJVgdJFCoKm1ks1gtRJ7HaL6tDkrOw
|
||||
# 8XJmfJaxyQAluCQ+e40NI+A4w+u59Gy89AVY5lJNrmCva6gozfg1kxw6abV5WWr+
|
||||
# PjEpNCshO4hxv3UqgMcCKnT2YVSZzF1Gy7APub1fY0P1vNEuOFKrNCEEvWIKRrqs
|
||||
# eyBB73G8KD2yw6jfz0VKxNSRAdhJV/ghOyrDt5a+L6C3m1rpr8sqiof3iohv3ANI
|
||||
# gNqw6ex+4+G+B7JMbIHbGpPdebedL6ePbuBCnbgJoDn340k0aw6ij21GvvUnkQID
|
||||
# AQABo4IBGzCCARcwHQYDVR0OBBYEFAlCOq9DDIa0A0oqgKtM5vjuZeK+MB8GA1Ud
|
||||
# IwQYMBaAFNVjOlyKMZDzQ3t8RhvFM2hahW1VMFYGA1UdHwRPME0wS6BJoEeGRWh0
|
||||
# dHA6Ly9jcmwubWljcm9zb2Z0LmNvbS9wa2kvY3JsL3Byb2R1Y3RzL01pY1RpbVN0
|
||||
# YVBDQV8yMDEwLTA3LTAxLmNybDBaBggrBgEFBQcBAQROMEwwSgYIKwYBBQUHMAKG
|
||||
# Pmh0dHA6Ly93d3cubWljcm9zb2Z0LmNvbS9wa2kvY2VydHMvTWljVGltU3RhUENB
|
||||
# XzIwMTAtMDctMDEuY3J0MAwGA1UdEwEB/wQCMAAwEwYDVR0lBAwwCgYIKwYBBQUH
|
||||
# AwgwDQYJKoZIhvcNAQELBQADggEBAET3xBg/IZ9zdOfwbDGK7cK3qKYt/qUOlbRB
|
||||
# zgeNjb32K86nGeRGkBee10dVOEGWUw6KtBeWh1LQ70b64/tLtiLcsf9JzaAyDYb1
|
||||
# sRmMi5fjRZ753TquaT8V7NJ7RfEuYfvZlubfQD0MVbU4tzsdZdYuxE37V2J9pN89
|
||||
# j7GoFNtAnSnCn1MRxENAILgt9XzeQzTEDhFYW0N2DNphTkRPXGjpDmwi6WtkJ5fv
|
||||
# 0iTyB4dwEC+/ed0lGbFLcytJoMwfTNMdH6gcnHlMzsniornGFZa5PPiV78XoZ9Fe
|
||||
# upKo8ZKNGhLLLB5GTtqfHex5no3ioVSq+NthvhX0I/V+iXJsopowggZxMIIEWaAD
|
||||
# AgECAgphCYEqAAAAAAACMA0GCSqGSIb3DQEBCwUAMIGIMQswCQYDVQQGEwJVUzET
|
||||
# MBEGA1UECBMKV2FzaGluZ3RvbjEQMA4GA1UEBxMHUmVkbW9uZDEeMBwGA1UEChMV
|
||||
# TWljcm9zb2Z0IENvcnBvcmF0aW9uMTIwMAYDVQQDEylNaWNyb3NvZnQgUm9vdCBD
|
||||
# ZXJ0aWZpY2F0ZSBBdXRob3JpdHkgMjAxMDAeFw0xMDA3MDEyMTM2NTVaFw0yNTA3
|
||||
# MDEyMTQ2NTVaMHwxCzAJBgNVBAYTAlVTMRMwEQYDVQQIEwpXYXNoaW5ndG9uMRAw
|
||||
# DgYDVQQHEwdSZWRtb25kMR4wHAYDVQQKExVNaWNyb3NvZnQgQ29ycG9yYXRpb24x
|
||||
# JjAkBgNVBAMTHU1pY3Jvc29mdCBUaW1lLVN0YW1wIFBDQSAyMDEwMIIBIjANBgkq
|
||||
# hkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAqR0NvHcRijog7PwTl/X6f2mUa3RUENWl
|
||||
# CgCChfvtfGhLLF/Fw+Vhwna3PmYrW/AVUycEMR9BGxqVHc4JE458YTBZsTBED/Fg
|
||||
# iIRUQwzXTbg4CLNC3ZOs1nMwVyaCo0UN0Or1R4HNvyRgMlhgRvJYR4YyhB50YWeR
|
||||
# X4FUsc+TTJLBxKZd0WETbijGGvmGgLvfYfxGwScdJGcSchohiq9LZIlQYrFd/Xcf
|
||||
# PfBXday9ikJNQFHRD5wGPmd/9WbAA5ZEfu/QS/1u5ZrKsajyeioKMfDaTgaRtogI
|
||||
# Neh4HLDpmc085y9Euqf03GS9pAHBIAmTeM38vMDJRF1eFpwBBU8iTQIDAQABo4IB
|
||||
# 5jCCAeIwEAYJKwYBBAGCNxUBBAMCAQAwHQYDVR0OBBYEFNVjOlyKMZDzQ3t8RhvF
|
||||
# M2hahW1VMBkGCSsGAQQBgjcUAgQMHgoAUwB1AGIAQwBBMAsGA1UdDwQEAwIBhjAP
|
||||
# BgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNX2VsuP6KJcYmjRPZSQW9fOmhjE
|
||||
# MFYGA1UdHwRPME0wS6BJoEeGRWh0dHA6Ly9jcmwubWljcm9zb2Z0LmNvbS9wa2kv
|
||||
# Y3JsL3Byb2R1Y3RzL01pY1Jvb0NlckF1dF8yMDEwLTA2LTIzLmNybDBaBggrBgEF
|
||||
# BQcBAQROMEwwSgYIKwYBBQUHMAKGPmh0dHA6Ly93d3cubWljcm9zb2Z0LmNvbS9w
|
||||
# a2kvY2VydHMvTWljUm9vQ2VyQXV0XzIwMTAtMDYtMjMuY3J0MIGgBgNVHSABAf8E
|
||||
# gZUwgZIwgY8GCSsGAQQBgjcuAzCBgTA9BggrBgEFBQcCARYxaHR0cDovL3d3dy5t
|
||||
# aWNyb3NvZnQuY29tL1BLSS9kb2NzL0NQUy9kZWZhdWx0Lmh0bTBABggrBgEFBQcC
|
||||
# AjA0HjIgHQBMAGUAZwBhAGwAXwBQAG8AbABpAGMAeQBfAFMAdABhAHQAZQBtAGUA
|
||||
# bgB0AC4gHTANBgkqhkiG9w0BAQsFAAOCAgEAB+aIUQ3ixuCYP4FxAz2do6Ehb7Pr
|
||||
# psz1Mb7PBeKp/vpXbRkws8LFZslq3/Xn8Hi9x6ieJeP5vO1rVFcIK1GCRBL7uVOM
|
||||
# zPRgEop2zEBAQZvcXBf/XPleFzWYJFZLdO9CEMivv3/Gf/I3fVo/HPKZeUqRUgCv
|
||||
# OA8X9S95gWXZqbVr5MfO9sp6AG9LMEQkIjzP7QOllo9ZKby2/QThcJ8ySif9Va8v
|
||||
# /rbljjO7Yl+a21dA6fHOmWaQjP9qYn/dxUoLkSbiOewZSnFjnXshbcOco6I8+n99
|
||||
# lmqQeKZt0uGc+R38ONiU9MalCpaGpL2eGq4EQoO4tYCbIjggtSXlZOz39L9+Y1kl
|
||||
# D3ouOVd2onGqBooPiRa6YacRy5rYDkeagMXQzafQ732D8OE7cQnfXXSYIghh2rBQ
|
||||
# Hm+98eEA3+cxB6STOvdlR3jo+KhIq/fecn5ha293qYHLpwmsObvsxsvYgrRyzR30
|
||||
# uIUBHoD7G4kqVDmyW9rIDVWZeodzOwjmmC3qjeAzLhIp9cAvVCch98isTtoouLGp
|
||||
# 25ayp0Kiyc8ZQU3ghvkqmqMRZjDTu3QyS99je/WZii8bxyGvWbWu3EQ8l1Bx16HS
|
||||
# xVXjad5XwdHeMMD9zOZN+w2/XU/pnR4ZOC+8z1gFLu8NoFA12u8JJxzVs341Hgi6
|
||||
# 2jbb01+P3nSISRKhggLLMIICNAIBATCB+KGB0KSBzTCByjELMAkGA1UEBhMCVVMx
|
||||
# CzAJBgNVBAgTAldBMRAwDgYDVQQHEwdSZWRtb25kMR4wHAYDVQQKExVNaWNyb3Nv
|
||||
# ZnQgQ29ycG9yYXRpb24xLTArBgNVBAsTJE1pY3Jvc29mdCBJcmVsYW5kIE9wZXJh
|
||||
# dGlvbnMgTGltaXRlZDEmMCQGA1UECxMdVGhhbGVzIFRTUyBFU046MTc5RS00QkIw
|
||||
# LTgyNDYxJTAjBgNVBAMTHE1pY3Jvc29mdCBUaW1lLVN0YW1wIFNlcnZpY2WiIwoB
|
||||
# ATAHBgUrDgMCGgMVAMsg9FQ9pgPLXI2Ld5z7xDS0QAZ9oIGDMIGApH4wfDELMAkG
|
||||
# A1UEBhMCVVMxEzARBgNVBAgTCldhc2hpbmd0b24xEDAOBgNVBAcTB1JlZG1vbmQx
|
||||
# HjAcBgNVBAoTFU1pY3Jvc29mdCBDb3Jwb3JhdGlvbjEmMCQGA1UEAxMdTWljcm9z
|
||||
# b2Z0IFRpbWUtU3RhbXAgUENBIDIwMTAwDQYJKoZIhvcNAQEFBQACBQDipo0MMCIY
|
||||
# DzIwMjAwNzAxMTIxODIwWhgPMjAyMDA3MDIxMjE4MjBaMHQwOgYKKwYBBAGEWQoE
|
||||
# ATEsMCowCgIFAOKmjQwCAQAwBwIBAAICE70wBwIBAAICEeIwCgIFAOKn3owCAQAw
|
||||
# NgYKKwYBBAGEWQoEAjEoMCYwDAYKKwYBBAGEWQoDAqAKMAgCAQACAwehIKEKMAgC
|
||||
# AQACAwGGoDANBgkqhkiG9w0BAQUFAAOBgQCOPjlHOH8nYtgt2XnpKXenxPUR03ED
|
||||
# xPBm8XR5Z1vIq53RU9jG6yYcYNTdK+q38SGZtu0W/SgagTfKCQhjhRakuv7rGSs2
|
||||
# dlhx9LGCoc/q1vqmZpRSjkqWVcc/NzmldUWIWnLlV6rmLGoDmfCH5BcsiU6Eo6wU
|
||||
# iUVwnnXoqsCaBzGCAw0wggMJAgEBMIGTMHwxCzAJBgNVBAYTAlVTMRMwEQYDVQQI
|
||||
# EwpXYXNoaW5ndG9uMRAwDgYDVQQHEwdSZWRtb25kMR4wHAYDVQQKExVNaWNyb3Nv
|
||||
# ZnQgQ29ycG9yYXRpb24xJjAkBgNVBAMTHU1pY3Jvc29mdCBUaW1lLVN0YW1wIFBD
|
||||
# QSAyMDEwAhMzAAABDKp4btzMQkzBAAAAAAEMMA0GCWCGSAFlAwQCAQUAoIIBSjAa
|
||||
# BgkqhkiG9w0BCQMxDQYLKoZIhvcNAQkQAQQwLwYJKoZIhvcNAQkEMSIEIDpwhjyu
|
||||
# zgu3Kmxpnpz86ZlthBqEzG5vaEMOkYRyuFCaMIH6BgsqhkiG9w0BCRACLzGB6jCB
|
||||
# 5zCB5DCBvQQgg5AWKX7M1+m2//+V7qmRvt1K/ww5Muu8XzGJBqygVCkwgZgwgYCk
|
||||
# MB4XDTE5MTIxOTAxMTUwM1oXDTIxMDMxNzAxMTUwM1owgc4xCzAJBgNVBAYTAlVT
|
||||
# MRMwEQYDVQQIEwpXYXNoaW5ndG9uMRAwDgYDVQQHEwdSZWRtb25kMR4wHAYDVQQK
|
||||
# ExVNaWNyb3NvZnQgQ29ycG9yYXRpb24xKTAnBgNVBAsTIE1pY3Jvc29mdCBPcGVy
|
||||
# YXRpb25zIFB1ZXJ0byBSaWNvMSYwJAYDVQQLEx1UaGFsZXMgVFNTIEVTTjo4OTdB
|
||||
# LUUzNTYtMTcwMTElMCMGA1UEAxMcTWljcm9zb2Z0IFRpbWUtU3RhbXAgU2Vydmlj
|
||||
# ZTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAPK1zgSSq+MxAYo3qpCt
|
||||
# QDxSMPPJy6mm/wfEJNjNUnYtLFBwl1BUS5trEk/t41ldxITKehs+ABxYqo4Qxsg3
|
||||
# Gy1ugKiwHAnYiiekfC+ZhptNFgtnDZIn45zC0AlVr/6UfLtsLcHCh1XElLUHfEC0
|
||||
# nBuQcM/SpYo9e3l1qY5NdMgDGxCsmCKdiZfYXIu+U0UYIBhdzmSHnB3fxZOBVcr5
|
||||
# htFHEBBNt/rFJlm/A4yb8oBsp+Uf0p5QwmO/bCcdqB15JpylOhZmWs0sUfJKlK9E
|
||||
# rAhBwGki2eIRFKsQBdkXS9PWpF1w2gIJRvSkDEaCf+lbGTPdSzHSbfREWOF9wY3i
|
||||
# Yj8CAwEAAaOCARswggEXMB0GA1UdDgQWBBRRahZSGfrCQhCyIyGH9DkiaW7L0zAf
|
||||
# BgNVHSMEGDAWgBTVYzpcijGQ80N7fEYbxTNoWoVtVTBWBgNVHR8ETzBNMEugSaBH
|
||||
# hkVodHRwOi8vY3JsLm1pY3Jvc29mdC5jb20vcGtpL2NybC9wcm9kdWN0cy9NaWNU
|
||||
# aW1TdGFQQ0FfMjAxMC0wNy0wMS5jcmwwWgYIKwYBBQUHAQEETjBMMEoGCCsGAQUF
|
||||
# BzAChj5odHRwOi8vd3d3Lm1pY3Jvc29mdC5jb20vcGtpL2NlcnRzL01pY1RpbVN0
|
||||
# YVBDQV8yMDEwLTA3LTAxLmNydDAMBgNVHRMBAf8EAjAAMBMGA1UdJQQMMAoGCCsG
|
||||
# AQUFBwMIMA0GCSqGSIb3DQEBCwUAA4IBAQBPFxHIwi4vAH49w9Svmz6K3tM55RlW
|
||||
# 5pPeULXdut2Rqy6Ys0+VpZsbuaEoxs6Z1C3hMbkiqZFxxyltxJpuHTyGTg61zfNI
|
||||
# F5n6RsYF3s7IElDXNfZznF1/2iWc6uRPZK8rxxUJ/7emYXZCYwuUY0XjsCpP9pbR
|
||||
# RKeJi6r5arSyI+NfKxvgoM21JNt1BcdlXuAecdd/k8UjxCscffanoK2n6LFw1PcZ
|
||||
# lEO7NId7o+soM2C0QY5BYdghpn7uqopB6ixyFIIkDXFub+1E7GmAEwfU6VwEHL7y
|
||||
# 9rNE8bd+JrQs+yAtkkHy9FmXg/PsGq1daVzX1So7CJ6nyphpuHSN3VfTMIIGcTCC
|
||||
# BFmgAwIBAgIKYQmBKgAAAAAAAjANBgkqhkiG9w0BAQsFADCBiDELMAkGA1UEBhMC
|
||||
# VVMxEzARBgNVBAgTCldhc2hpbmd0b24xEDAOBgNVBAcTB1JlZG1vbmQxHjAcBgNV
|
||||
# BAoTFU1pY3Jvc29mdCBDb3Jwb3JhdGlvbjEyMDAGA1UEAxMpTWljcm9zb2Z0IFJv
|
||||
# b3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTAwHhcNMTAwNzAxMjEzNjU1WhcN
|
||||
# MjUwNzAxMjE0NjU1WjB8MQswCQYDVQQGEwJVUzETMBEGA1UECBMKV2FzaGluZ3Rv
|
||||
# bjEQMA4GA1UEBxMHUmVkbW9uZDEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0
|
# aW9uMSYwJAYDVQQDEx1NaWNyb3NvZnQgVGltZS1TdGFtcCBQQ0EgMjAxMDCCASIw
# ... (Authenticode signature data)
# SIG # End signature block
# Ex1NaWNyb3NvZnQgVGltZS1TdGFtcCBQQ0EgMjAxMDANBgkqhkiG9w0BAQUFAAIF
# ... (Authenticode signature data)
# SIG # End signature block
349
src/Misc/dotnet-install.sh
vendored
@@ -40,7 +40,7 @@ if [ -t 1 ] && command -v tput > /dev/null; then
|
||||
fi
|
||||
|
||||
say_warning() {
|
||||
printf "%b\n" "${yellow:-}dotnet_install: Warning: $1${normal:-}"
|
||||
printf "%b\n" "${yellow:-}dotnet_install: Warning: $1${normal:-}" >&3
|
||||
}
|
||||
|
||||
say_err() {
|
||||
@@ -183,6 +183,9 @@ get_current_os_name() {
|
||||
elif is_musl_based_distro; then
|
||||
echo "linux-musl"
|
||||
return 0
|
||||
elif [ "$linux_platform_name" = "linux-musl" ]; then
|
||||
echo "linux-musl"
|
||||
return 0
|
||||
else
|
||||
echo "linux"
|
||||
return 0
|
||||
@@ -241,42 +244,6 @@ check_min_reqs() {
|
||||
return 0
|
||||
}
|
||||
|
||||
check_pre_reqs() {
|
||||
eval $invocation
|
||||
|
||||
if [ "${DOTNET_INSTALL_SKIP_PREREQS:-}" = "1" ]; then
|
||||
return 0
|
||||
fi
|
||||
|
||||
if [ "$(uname)" = "Linux" ]; then
|
||||
if is_musl_based_distro; then
|
||||
if ! command -v scanelf > /dev/null; then
|
||||
say_warning "scanelf not found, please install pax-utils package."
|
||||
return 0
|
||||
fi
|
||||
LDCONFIG_COMMAND="scanelf --ldpath -BF '%f'"
|
||||
[ -z "$($LDCONFIG_COMMAND 2>/dev/null | grep libintl)" ] && say_warning "Unable to locate libintl. Probable prerequisite missing; install libintl (or gettext)."
|
||||
else
|
||||
if [ ! -x "$(command -v ldconfig)" ]; then
|
||||
say_verbose "ldconfig is not in PATH, trying /sbin/ldconfig."
|
||||
LDCONFIG_COMMAND="/sbin/ldconfig"
|
||||
else
|
||||
LDCONFIG_COMMAND="ldconfig"
|
||||
fi
|
||||
local librarypath=${LD_LIBRARY_PATH:-}
|
||||
LDCONFIG_COMMAND="$LDCONFIG_COMMAND -NXv ${librarypath//:/ }"
|
||||
fi
|
||||
|
||||
[ -z "$($LDCONFIG_COMMAND 2>/dev/null | grep zlib)" ] && say_warning "Unable to locate zlib. Probable prerequisite missing; install zlib."
|
||||
[ -z "$($LDCONFIG_COMMAND 2>/dev/null | grep ssl)" ] && say_warning "Unable to locate libssl. Probable prerequisite missing; install libssl."
|
||||
[ -z "$($LDCONFIG_COMMAND 2>/dev/null | grep libicu)" ] && say_warning "Unable to locate libicu. Probable prerequisite missing; install libicu."
|
||||
[ -z "$($LDCONFIG_COMMAND 2>/dev/null | grep lttng)" ] && say_warning "Unable to locate liblttng. Probable prerequisite missing; install libcurl."
|
||||
[ -z "$($LDCONFIG_COMMAND 2>/dev/null | grep libcurl)" ] && say_warning "Unable to locate libcurl. Probable prerequisite missing; install libcurl."
|
||||
fi
|
||||
|
||||
return 0
|
||||
}
|
||||
|
||||
# args:
|
||||
# input - $1
|
||||
to_lowercase() {
|
||||
@@ -332,11 +299,11 @@ get_machine_architecture() {
|
||||
if command -v uname > /dev/null; then
|
||||
CPUName=$(uname -m)
|
||||
case $CPUName in
|
||||
armv7l)
|
||||
armv*l)
|
||||
echo "arm"
|
||||
return 0
|
||||
;;
|
||||
aarch64)
|
||||
aarch64|arm64)
|
||||
echo "arm64"
|
||||
return 0
|
||||
;;
|
||||
@@ -373,10 +340,34 @@ get_normalized_architecture_from_architecture() {
|
||||
;;
|
||||
esac
|
||||
|
||||
say_err "Architecture \`$architecture\` not supported. If you think this is a bug, report it at https://github.com/dotnet/sdk/issues"
|
||||
say_err "Architecture \`$architecture\` not supported. If you think this is a bug, report it at https://github.com/dotnet/install-scripts/issues"
|
||||
return 1
|
||||
}
|
||||
|
||||
# args:
|
||||
# user_defined_os - $1
|
||||
get_normalized_os() {
|
||||
eval $invocation
|
||||
|
||||
local osname="$(to_lowercase "$1")"
|
||||
if [ ! -z "$osname" ]; then
|
||||
case "$osname" in
|
||||
osx | freebsd | rhel.6 | linux-musl | linux)
|
||||
echo "$osname"
|
||||
return 0
|
||||
;;
|
||||
*)
|
||||
say_err "'$user_defined_os' is not a supported value for --os option, supported values are: osx, linux, linux-musl, freebsd, rhel.6. If you think this is a bug, report it at https://github.com/dotnet/install-scripts/issues."
|
||||
return 1
|
||||
;;
|
||||
esac
|
||||
else
|
||||
osname="$(get_current_os_name)" || return 1
|
||||
fi
|
||||
echo "$osname"
|
||||
return 0
|
||||
}
|
||||
|
||||
# The version text returned from the feeds is a 1-line or 2-line string:
|
||||
# For the SDK and the dotnet runtime (2 lines):
|
||||
# Line 1: # commit_hash
|
||||
@@ -418,14 +409,12 @@ is_dotnet_package_installed() {
|
||||
# azure_feed - $1
|
||||
# channel - $2
|
||||
# normalized_architecture - $3
|
||||
# coherent - $4
|
||||
get_latest_version_info() {
|
||||
eval $invocation
|
||||
|
||||
local azure_feed="$1"
|
||||
local channel="$2"
|
||||
local normalized_architecture="$3"
|
||||
local coherent="$4"
|
||||
|
||||
local version_file_url=null
|
||||
if [[ "$runtime" == "dotnet" ]]; then
|
||||
@@ -433,11 +422,7 @@ get_latest_version_info() {
|
||||
elif [[ "$runtime" == "aspnetcore" ]]; then
|
||||
version_file_url="$uncached_feed/aspnetcore/Runtime/$channel/latest.version"
|
||||
elif [ -z "$runtime" ]; then
|
||||
if [ "$coherent" = true ]; then
|
||||
version_file_url="$uncached_feed/Sdk/$channel/latest.coherent.version"
|
||||
else
|
||||
version_file_url="$uncached_feed/Sdk/$channel/latest.version"
|
||||
fi
|
||||
version_file_url="$uncached_feed/Sdk/$channel/latest.version"
|
||||
else
|
||||
say_err "Invalid value for \$runtime"
|
||||
return 1
|
||||
@@ -468,7 +453,6 @@ parse_jsonfile_for_version() {
|
||||
sdk_list=$(echo $sdk_section | awk -F"[{}]" '{print $2}')
|
||||
sdk_list=${sdk_list//[\" ]/}
|
||||
sdk_list=${sdk_list//,/$'\n'}
|
||||
sdk_list="$(echo -e "${sdk_list}" | tr -d '[[:space:]]')"
|
||||
|
||||
local version_info=""
|
||||
while read -r line; do
|
||||
@@ -505,26 +489,16 @@ get_specific_version_from_version() {
|
||||
local json_file="$5"
|
||||
|
||||
if [ -z "$json_file" ]; then
|
||||
case "$version" in
|
||||
latest)
|
||||
local version_info
|
||||
version_info="$(get_latest_version_info "$azure_feed" "$channel" "$normalized_architecture" false)" || return 1
|
||||
say_verbose "get_specific_version_from_version: version_info=$version_info"
|
||||
echo "$version_info" | get_version_from_version_info
|
||||
return 0
|
||||
;;
|
||||
coherent)
|
||||
local version_info
|
||||
version_info="$(get_latest_version_info "$azure_feed" "$channel" "$normalized_architecture" true)" || return 1
|
||||
say_verbose "get_specific_version_from_version: version_info=$version_info"
|
||||
echo "$version_info" | get_version_from_version_info
|
||||
return 0
|
||||
;;
|
||||
*)
|
||||
echo "$version"
|
||||
return 0
|
||||
;;
|
||||
esac
|
||||
if [[ "$version" == "latest" ]]; then
|
||||
local version_info
|
||||
version_info="$(get_latest_version_info "$azure_feed" "$channel" "$normalized_architecture" false)" || return 1
|
||||
say_verbose "get_specific_version_from_version: version_info=$version_info"
|
||||
echo "$version_info" | get_version_from_version_info
|
||||
return 0
|
||||
else
|
||||
echo "$version"
|
||||
return 0
|
||||
fi
|
||||
else
|
||||
local version_info
|
||||
version_info="$(parse_jsonfile_for_version "$json_file")" || return 1
|
||||
@@ -538,6 +512,7 @@ get_specific_version_from_version() {
|
||||
# channel - $2
|
||||
# normalized_architecture - $3
|
||||
# specific_version - $4
|
||||
# normalized_os - $5
|
||||
construct_download_link() {
|
||||
eval $invocation
|
||||
|
||||
@@ -545,17 +520,16 @@ construct_download_link() {
|
||||
local channel="$2"
|
||||
local normalized_architecture="$3"
|
||||
local specific_version="${4//[$'\t\r\n']}"
|
||||
|
||||
local osname
|
||||
osname="$(get_current_os_name)" || return 1
|
||||
local specific_product_version="$(get_specific_product_version "$1" "$4")"
|
||||
local osname="$5"
|
||||
|
||||
local download_link=null
|
||||
if [[ "$runtime" == "dotnet" ]]; then
|
||||
download_link="$azure_feed/Runtime/$specific_version/dotnet-runtime-$specific_version-$osname-$normalized_architecture.tar.gz"
|
||||
download_link="$azure_feed/Runtime/$specific_version/dotnet-runtime-$specific_product_version-$osname-$normalized_architecture.tar.gz"
|
||||
elif [[ "$runtime" == "aspnetcore" ]]; then
|
||||
download_link="$azure_feed/aspnetcore/Runtime/$specific_version/aspnetcore-runtime-$specific_version-$osname-$normalized_architecture.tar.gz"
|
||||
download_link="$azure_feed/aspnetcore/Runtime/$specific_version/aspnetcore-runtime-$specific_product_version-$osname-$normalized_architecture.tar.gz"
|
||||
elif [ -z "$runtime" ]; then
|
||||
download_link="$azure_feed/Sdk/$specific_version/dotnet-sdk-$specific_version-$osname-$normalized_architecture.tar.gz"
|
||||
download_link="$azure_feed/Sdk/$specific_version/dotnet-sdk-$specific_product_version-$osname-$normalized_architecture.tar.gz"
|
||||
else
|
||||
return 1
|
||||
fi
|
||||
@@ -564,6 +538,50 @@ construct_download_link() {
|
||||
return 0
|
||||
}
|
||||
|
||||
# args:
|
||||
# azure_feed - $1
|
||||
# specific_version - $2
|
||||
get_specific_product_version() {
|
||||
# If we find a 'productVersion.txt' at the root of any folder, we'll use its contents
|
||||
# to resolve the version of what's in the folder, superseding the specified version.
|
||||
eval $invocation
|
||||
|
||||
local azure_feed="$1"
|
||||
local specific_version="${2//[$'\t\r\n']}"
|
||||
local specific_product_version=$specific_version
|
||||
|
||||
local download_link=null
|
||||
if [[ "$runtime" == "dotnet" ]]; then
|
||||
download_link="$azure_feed/Runtime/$specific_version/productVersion.txt${feed_credential}"
|
||||
elif [[ "$runtime" == "aspnetcore" ]]; then
|
||||
download_link="$azure_feed/aspnetcore/Runtime/$specific_version/productVersion.txt${feed_credential}"
|
||||
elif [ -z "$runtime" ]; then
|
||||
download_link="$azure_feed/Sdk/$specific_version/productVersion.txt${feed_credential}"
|
||||
else
|
||||
return 1
|
||||
fi
|
||||
|
||||
if machine_has "curl"
|
||||
then
|
||||
specific_product_version=$(curl -s --fail "$download_link")
|
||||
if [ $? -ne 0 ]
|
||||
then
|
||||
specific_product_version=$specific_version
|
||||
fi
|
||||
elif machine_has "wget"
|
||||
then
|
||||
specific_product_version=$(wget -qO- "$download_link")
|
||||
if [ $? -ne 0 ]
|
||||
then
|
||||
specific_product_version=$specific_version
|
||||
fi
|
||||
fi
|
||||
specific_product_version="${specific_product_version//[$'\t\r\n']}"
|
||||
|
||||
echo "$specific_product_version"
|
||||
return 0
|
||||
}
|
||||
|
||||
# args:
|
||||
# azure_feed - $1
|
||||
# channel - $2
|
||||
@@ -684,11 +702,31 @@ extract_dotnet_package() {
|
||||
find "$temp_out_path" -type f | grep -Ev "$folders_with_version_regex" | copy_files_or_dirs_from_list "$temp_out_path" "$out_path" "$override_non_versioned_files"
|
||||
|
||||
rm -rf "$temp_out_path"
|
||||
rm -f "$zip_path" && say_verbose "Temporary zip file $zip_path was removed"
|
||||
|
||||
if [ "$failed" = true ]; then
|
||||
say_err "Extraction failed"
|
||||
return 1
|
||||
fi
|
||||
return 0
|
||||
}
|
||||
|
||||
get_http_header_curl() {
|
||||
eval $invocation
|
||||
local remote_path="$1"
|
||||
remote_path_with_credential="${remote_path}${feed_credential}"
|
||||
curl_options="-I -sSL --retry 5 --retry-delay 2 --connect-timeout 15 "
|
||||
curl $curl_options "$remote_path_with_credential" || return 1
|
||||
return 0
|
||||
}
|
||||
|
||||
get_http_header_wget() {
|
||||
eval $invocation
|
||||
local remote_path="$1"
|
||||
remote_path_with_credential="${remote_path}${feed_credential}"
|
||||
wget_options="-q -S --spider --tries 5 --waitretry 2 --connect-timeout 15 "
|
||||
wget $wget_options "$remote_path_with_credential" 2>&1 || return 1
|
||||
return 0
|
||||
}
|
||||
|
||||
# args:
|
||||
@@ -706,13 +744,30 @@ download() {
|
||||
fi
|
||||
|
||||
local failed=false
|
||||
if machine_has "curl"; then
|
||||
downloadcurl "$remote_path" "$out_path" || failed=true
|
||||
elif machine_has "wget"; then
|
||||
downloadwget "$remote_path" "$out_path" || failed=true
|
||||
else
|
||||
failed=true
|
||||
fi
|
||||
local attempts=0
|
||||
while [ $attempts -lt 3 ]; do
|
||||
attempts=$((attempts+1))
|
||||
failed=false
|
||||
if machine_has "curl"; then
|
||||
downloadcurl "$remote_path" "$out_path" || failed=true
|
||||
elif machine_has "wget"; then
|
||||
downloadwget "$remote_path" "$out_path" || failed=true
|
||||
else
|
||||
say_err "Missing dependency: neither curl nor wget was found."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ "$failed" = false ] || [ $attempts -ge 3 ] || { [ ! -z $http_code ] && [ $http_code = "404" ]; }; then
|
||||
break
|
||||
fi
|
||||
|
||||
say "Download attempt #$attempts has failed: $http_code $download_error_msg"
|
||||
say "Attempt #$((attempts+1)) will start in $((attempts*10)) seconds."
|
||||
sleep $((attempts*20))
|
||||
done
|
||||
|
||||
|
||||
|
||||
if [ "$failed" = true ]; then
|
||||
say_verbose "Download failed: $remote_path"
|
||||
return 1
|
||||
@@ -720,44 +775,60 @@ download() {
|
||||
return 0
|
||||
}
|
||||
|
||||
# Updates global variables $http_code and $download_error_msg
|
||||
downloadcurl() {
|
||||
eval $invocation
|
||||
unset http_code
|
||||
unset download_error_msg
|
||||
local remote_path="$1"
|
||||
local out_path="${2:-}"
|
||||
|
||||
# Append feed_credential as late as possible before calling curl to avoid logging feed_credential
|
||||
remote_path="${remote_path}${feed_credential}"
|
||||
|
||||
local remote_path_with_credential="${remote_path}${feed_credential}"
|
||||
local curl_options="--retry 20 --retry-delay 2 --connect-timeout 15 -sSL -f --create-dirs "
|
||||
local failed=false
|
||||
if [ -z "$out_path" ]; then
|
||||
curl $curl_options "$remote_path" || failed=true
|
||||
curl $curl_options "$remote_path_with_credential" || failed=true
|
||||
else
|
||||
curl $curl_options -o "$out_path" "$remote_path" || failed=true
|
||||
curl $curl_options -o "$out_path" "$remote_path_with_credential" || failed=true
|
||||
fi
|
||||
if [ "$failed" = true ]; then
|
||||
say_verbose "Curl download failed"
|
||||
local response=$(get_http_header_curl $remote_path_with_credential)
|
||||
http_code=$( echo "$response" | awk '/^HTTP/{print $2}' | tail -1 )
|
||||
download_error_msg="Unable to download $remote_path."
|
||||
if [[ $http_code != 2* ]]; then
|
||||
download_error_msg+=" Returned HTTP status code: $http_code."
|
||||
fi
|
||||
say_verbose "$download_error_msg"
|
||||
return 1
|
||||
fi
|
||||
return 0
|
||||
}
|
||||
|
||||
|
||||
# Updates global variables $http_code and $download_error_msg
|
||||
downloadwget() {
|
||||
eval $invocation
|
||||
unset http_code
|
||||
unset download_error_msg
|
||||
local remote_path="$1"
|
||||
local out_path="${2:-}"
|
||||
|
||||
# Append feed_credential as late as possible before calling wget to avoid logging feed_credential
|
||||
remote_path="${remote_path}${feed_credential}"
|
||||
local remote_path_with_credential="${remote_path}${feed_credential}"
|
||||
local wget_options="--tries 20 --waitretry 2 --connect-timeout 15 "
|
||||
local failed=false
|
||||
if [ -z "$out_path" ]; then
|
||||
wget -q $wget_options -O - "$remote_path" || failed=true
|
||||
wget -q $wget_options -O - "$remote_path_with_credential" || failed=true
|
||||
else
|
||||
wget $wget_options -O "$out_path" "$remote_path" || failed=true
|
||||
wget $wget_options -O "$out_path" "$remote_path_with_credential" || failed=true
|
||||
fi
|
||||
if [ "$failed" = true ]; then
|
||||
say_verbose "Wget download failed"
|
||||
local response=$(get_http_header_wget $remote_path_with_credential)
|
||||
http_code=$( echo "$response" | awk '/^ HTTP/{print $2}' | tail -1 )
|
||||
download_error_msg="Unable to download $remote_path."
|
||||
if [[ $http_code != 2* ]]; then
|
||||
download_error_msg+=" Returned HTTP status code: $http_code."
|
||||
fi
|
||||
say_verbose "$download_error_msg"
|
||||
return 1
|
||||
fi
|
||||
return 0
|
||||
@@ -770,14 +841,18 @@ calculate_vars() {
|
||||
normalized_architecture="$(get_normalized_architecture_from_architecture "$architecture")"
|
||||
say_verbose "normalized_architecture=$normalized_architecture"
|
||||
|
||||
normalized_os="$(get_normalized_os "$user_defined_os")"
|
||||
say_verbose "normalized_os=$normalized_os"
|
||||
|
||||
specific_version="$(get_specific_version_from_version "$azure_feed" "$channel" "$normalized_architecture" "$version" "$json_file")"
|
||||
specific_product_version="$(get_specific_product_version "$azure_feed" "$specific_version")"
|
||||
say_verbose "specific_version=$specific_version"
|
||||
if [ -z "$specific_version" ]; then
|
||||
say_err "Could not resolve version information."
|
||||
return 1
|
||||
fi
|
||||
|
||||
download_link="$(construct_download_link "$azure_feed" "$channel" "$normalized_architecture" "$specific_version")"
|
||||
download_link="$(construct_download_link "$azure_feed" "$channel" "$normalized_architecture" "$specific_version" "$normalized_os")"
|
||||
say_verbose "Constructed primary named payload URL: $download_link"
|
||||
|
||||
legacy_download_link="$(construct_legacy_download_link "$azure_feed" "$channel" "$normalized_architecture" "$specific_version")" || valid_legacy_download_link=false
|
||||
@@ -822,38 +897,74 @@ install_dotnet() {
|
||||
zip_path="$(mktemp "$temporary_file_template")"
|
||||
say_verbose "Zip path: $zip_path"
|
||||
|
||||
say "Downloading link: $download_link"
|
||||
|
||||
# Failures are normal in the non-legacy case for ultimately legacy downloads.
|
||||
# Do not output to stderr, since output to stderr is considered an error.
|
||||
say "Downloading primary link $download_link"
|
||||
|
||||
# The download function will set variables $http_code and $download_error_msg in case of failure.
|
||||
download "$download_link" "$zip_path" 2>&1 || download_failed=true
|
||||
|
||||
# if the download fails, download the legacy_download_link
|
||||
if [ "$download_failed" = true ]; then
|
||||
say "Cannot download: $download_link"
|
||||
|
||||
primary_path_http_code="$http_code"; primary_path_download_error_msg="$download_error_msg"
|
||||
case $primary_path_http_code in
|
||||
404)
|
||||
say "The resource at $download_link is not available."
|
||||
;;
|
||||
*)
|
||||
say "$primary_path_download_error_msg"
|
||||
;;
|
||||
esac
|
||||
rm -f "$zip_path" 2>&1 && say_verbose "Temporary zip file $zip_path was removed"
|
||||
if [ "$valid_legacy_download_link" = true ]; then
|
||||
download_failed=false
|
||||
download_link="$legacy_download_link"
|
||||
zip_path="$(mktemp "$temporary_file_template")"
|
||||
say_verbose "Legacy zip path: $zip_path"
|
||||
say "Downloading legacy link: $download_link"
|
||||
|
||||
say "Downloading legacy link $download_link"
|
||||
|
||||
# The download function will set variables $http_code and $download_error_msg in case of failure.
|
||||
download "$download_link" "$zip_path" 2>&1 || download_failed=true
|
||||
|
||||
if [ "$download_failed" = true ]; then
|
||||
say "Cannot download: $download_link"
|
||||
legacy_path_http_code="$http_code"; legacy_path_download_error_msg="$download_error_msg"
|
||||
case $legacy_path_http_code in
|
||||
404)
|
||||
say "The resource at $download_link is not available."
|
||||
;;
|
||||
*)
|
||||
say "$legacy_path_download_error_msg"
|
||||
;;
|
||||
esac
|
||||
rm -f "$zip_path" 2>&1 && say_verbose "Temporary zip file $zip_path was removed"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ "$download_failed" = true ]; then
|
||||
say_err "Could not find/download: \`$asset_name\` with version = $specific_version"
|
||||
say_err "Refer to: https://aka.ms/dotnet-os-lifecycle for information on .NET Core support"
|
||||
if [[ "$primary_path_http_code" = "404" && ( "$valid_legacy_download_link" = false || "$legacy_path_http_code" = "404") ]]; then
|
||||
say_err "Could not find \`$asset_name\` with version = $specific_version"
|
||||
say_err "Refer to: https://aka.ms/dotnet-os-lifecycle for information on .NET Core support"
|
||||
else
|
||||
say_err "Could not download: \`$asset_name\` with version = $specific_version"
|
||||
# 404-NotFound is an expected response if it goes from only one of the links, do not show that error.
|
||||
# If primary path is available (not 404-NotFound) then show the primary error else show the legacy error.
|
||||
if [ "$primary_path_http_code" != "404" ]; then
|
||||
say_err "$primary_path_download_error_msg"
|
||||
return 1
|
||||
fi
|
||||
if [[ "$valid_legacy_download_link" = true && "$legacy_path_http_code" != "404" ]]; then
|
||||
say_err "$legacy_path_download_error_msg"
|
||||
return 1
|
||||
fi
|
||||
fi
|
||||
return 1
|
||||
fi
|
||||
|
||||
say "Extracting zip from $download_link"
|
||||
extract_dotnet_package "$zip_path" "$install_root"
|
||||
extract_dotnet_package "$zip_path" "$install_root" || return 1
|
||||
|
||||
# Check if the SDK version is installed; if not, fail the installation.
|
||||
# if the version contains "RTM" or "servicing"; check if a 'release-type' SDK version is installed.
|
||||
@@ -869,12 +980,14 @@ install_dotnet() {
|
||||
fi
|
||||
|
||||
# Check if the standard SDK version is installed.
|
||||
say_verbose "Checking installation: version = $specific_version"
|
||||
if is_dotnet_package_installed "$install_root" "$asset_relative_path" "$specific_version"; then
|
||||
say_verbose "Checking installation: version = $specific_product_version"
|
||||
if is_dotnet_package_installed "$install_root" "$asset_relative_path" "$specific_product_version"; then
|
||||
return 0
|
||||
fi
|
||||
|
||||
say_err "\`$asset_name\` with version = $specific_version failed to install with an unknown error."
|
||||
# Version verification failed. More likely something is wrong either with the downloaded content or with the verification algorithm.
|
||||
say_err "Failed to verify the version of installed \`$asset_name\`.\nInstallation source: $download_link.\nInstallation location: $install_root.\nReport the bug at https://github.com/dotnet/install-scripts/issues."
|
||||
say_err "\`$asset_name\` with version = $specific_product_version failed to install with an unknown error."
|
||||
return 1
|
||||
}
|
||||
|
||||
@@ -900,6 +1013,7 @@ runtime=""
|
||||
runtime_id=""
|
||||
override_non_versioned_files=true
|
||||
non_dynamic_parameters=""
|
||||
user_defined_os=""
|
||||
|
||||
while [ $# -ne 0 ]
|
||||
do
|
||||
@@ -921,6 +1035,10 @@ do
|
||||
shift
|
||||
architecture="$1"
|
||||
;;
|
||||
--os|-[Oo][SS])
|
||||
shift
|
||||
user_defined_os="$1"
|
||||
;;
|
||||
--shared-runtime|-[Ss]hared[Rr]untime)
|
||||
say_warning "The --shared-runtime flag is obsolete and may be removed in a future version of this script. The recommended usage is to specify '--runtime dotnet'."
|
||||
if [ -z "$runtime" ]; then
|
||||
@@ -972,6 +1090,7 @@ do
|
||||
shift
|
||||
runtime_id="$1"
|
||||
non_dynamic_parameters+=" $name "\""$1"\"""
|
||||
say_warning "Use of --runtime-id is obsolete and should be limited to the versions below 2.1. To override architecture, use --architecture option instead. To override OS, use --os option instead."
|
||||
;;
|
||||
--jsonfile|-[Jj][Ss]on[Ff]ile)
|
||||
shift
|
||||
@@ -1004,8 +1123,6 @@ do
|
||||
echo " -Version"
|
||||
echo " Possible values:"
|
||||
echo " - latest - most latest build on specific channel"
|
||||
echo " - coherent - most latest coherent build on specific channel"
|
||||
echo " coherent applies only to SDK downloads"
|
||||
echo " - 3-part version in a format A.B.C - represents specific version of build"
|
||||
echo " examples: 2.0.0-preview2-006120; 1.1.0"
|
||||
echo " -i,--install-dir <DIR> Install under specified location (see Install Location below)"
|
||||
@@ -1013,6 +1130,11 @@ do
|
||||
echo " --architecture <ARCHITECTURE> Architecture of dotnet binaries to be installed, Defaults to \`$architecture\`."
|
||||
echo " --arch,-Architecture,-Arch"
|
||||
echo " Possible values: x64, arm, and arm64"
|
||||
echo " --os <system> Specifies operating system to be used when selecting the installer."
|
||||
echo " Overrides the OS determination approach used by the script. Supported values: osx, linux, linux-musl, freebsd, rhel.6."
|
||||
echo " In case any other value is provided, the platform will be determined by the script based on machine configuration."
|
||||
echo " Not supported for legacy links. Use --runtime-id to specify platform for legacy links."
|
||||
echo " Refer to: https://aka.ms/dotnet-os-lifecycle for more information."
|
||||
echo " --runtime <RUNTIME> Installs a shared runtime only, without the SDK."
|
||||
echo " -Runtime"
|
||||
echo " Possible values:"
|
||||
@@ -1029,14 +1151,15 @@ do
|
||||
echo " --no-cdn,-NoCdn Disable downloading from the Azure CDN, and use the uncached feed directly."
|
||||
echo " --jsonfile <JSONFILE> Determines the SDK version from a user specified global.json file."
|
||||
echo " Note: global.json must have a value for 'SDK:Version'"
|
||||
echo " --runtime-id Installs the .NET Tools for the given platform (use linux-x64 for portable linux)."
|
||||
echo " -RuntimeId"
|
||||
echo " -?,--?,-h,--help,-Help Shows this help message"
|
||||
echo ""
|
||||
echo "Obsolete parameters:"
|
||||
echo " --shared-runtime The recommended alternative is '--runtime dotnet'."
|
||||
echo " This parameter is obsolete and may be removed in a future version of this script."
|
||||
echo " Installs just the shared runtime bits, not the entire SDK."
|
||||
echo " --runtime-id Installs the .NET Tools for the given platform (use linux-x64 for portable linux)."
|
||||
echo " -RuntimeId" The parameter is obsolete and may be removed in a future version of this script. Should be used only for versions below 2.1.
|
||||
echo " For primary links to override OS or/and architecture, use --os and --architecture option instead."
|
||||
echo ""
|
||||
echo "Install Location:"
|
||||
echo " Location is chosen in following order:"
|
||||
@@ -1058,6 +1181,11 @@ if [ "$no_cdn" = true ]; then
|
||||
azure_feed="$uncached_feed"
|
||||
fi
|
||||
|
||||
say "Note that the intended use of this script is for Continuous Integration (CI) scenarios, where:"
|
||||
say "- The SDK needs to be installed without user interaction and without admin rights."
|
||||
say "- The SDK installation doesn't need to persist across multiple CI runs."
|
||||
say "To set up a development environment or to run apps, use installers rather than this script. Visit https://dotnet.microsoft.com/download to get the installer.\n"
|
||||
|
||||
check_min_reqs
|
||||
calculate_vars
|
||||
script_name=$(basename "$0")
|
||||
@@ -1068,7 +1196,7 @@ if [ "$dry_run" = true ]; then
|
||||
if [ "$valid_legacy_download_link" = true ]; then
|
||||
say "Legacy named payload URL: $legacy_download_link"
|
||||
fi
|
||||
repeatable_command="./$script_name --version "\""$specific_version"\"" --install-dir "\""$install_root"\"" --architecture "\""$normalized_architecture"\"""
|
||||
repeatable_command="./$script_name --version "\""$specific_version"\"" --install-dir "\""$install_root"\"" --architecture "\""$normalized_architecture"\"" --os "\""$normalized_os"\"""
|
||||
if [[ "$runtime" == "dotnet" ]]; then
|
||||
repeatable_command+=" --runtime "\""dotnet"\"""
|
||||
elif [[ "$runtime" == "aspnetcore" ]]; then
|
||||
@@ -1079,7 +1207,6 @@ if [ "$dry_run" = true ]; then
|
||||
exit 0
|
||||
fi
|
||||
|
||||
check_pre_reqs
|
||||
install_dotnet
|
||||
|
||||
bin_path="$(get_absolute_path "$(combine_paths "$install_root" "$bin_folder_relative_path")")"
|
||||
@@ -1090,4 +1217,6 @@ else
|
||||
say "Binaries of dotnet can be found in $bin_path"
|
||||
fi
|
||||
|
||||
say "Note that the script does not resolve dependencies during installation."
|
||||
say "To check the list of dependencies, go to https://docs.microsoft.com/dotnet/core/install, select your operating system and check the \"Dependencies\" section."
|
||||
say "Installation finished successfully."
|
||||
|
||||
12
src/Misc/expressionFunc/hashFiles/package-lock.json
generated
@@ -5,9 +5,9 @@
|
||||
"requires": true,
|
||||
"dependencies": {
|
||||
"@actions/core": {
|
||||
"version": "1.2.0",
|
||||
"resolved": "https://registry.npmjs.org/@actions/core/-/core-1.2.0.tgz",
|
||||
"integrity": "sha512-ZKdyhlSlyz38S6YFfPnyNgCDZuAF2T0Qv5eHflNWytPS8Qjvz39bZFMry9Bb/dpSnqWcNeav5yM2CTYpJeY+Dw=="
|
||||
"version": "1.2.6",
|
||||
"resolved": "https://registry.npmjs.org/@actions/core/-/core-1.2.6.tgz",
|
||||
"integrity": "sha512-ZQYitnqiyBc3D+k7LsgSBmMDVkOVidaagDG7j3fOym77jNunWRuYx7VSHa9GNfFZh+zh61xsCjRj4JxMZlDqTA=="
|
||||
},
|
||||
"@actions/glob": {
|
||||
"version": "0.1.0",
|
||||
@@ -1683,9 +1683,9 @@
|
||||
}
|
||||
},
|
||||
"lodash": {
|
||||
"version": "4.17.15",
|
||||
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz",
|
||||
"integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==",
|
||||
"version": "4.17.19",
|
||||
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.19.tgz",
|
||||
"integrity": "sha512-JNvd8XER9GQX0v2qJgsaN/mzFCNA5BRe/j8JN9d+tWyGLSodKQHKFicdwNYzWwI3wjRnaKPsGj1XkBjx/F96DQ==",
|
||||
"dev": true
|
||||
},
|
||||
"lodash.unescape": {
|
||||
|
||||
@@ -16,11 +16,11 @@ if (supported.indexOf(process.platform) == -1) {
|
||||
var stopping = false;
|
||||
var listener = null;
|
||||
|
||||
var runService = function() {
|
||||
var runService = function () {
|
||||
var listenerExePath = path.join(__dirname, '../bin/Runner.Listener');
|
||||
var interactive = process.argv[2] === "interactive";
|
||||
|
||||
if(!stopping) {
|
||||
if (!stopping) {
|
||||
try {
|
||||
if (interactive) {
|
||||
console.log('Starting Runner listener interactively');
|
||||
@@ -30,8 +30,8 @@ var runService = function() {
|
||||
listener = childProcess.spawn(listenerExePath, ['run', '--startuptype', 'service'], { env: process.env });
|
||||
}
|
||||
|
||||
console.log('Started listener process');
|
||||
|
||||
console.log(`Started listener process, pid: ${listener.pid}`);
|
||||
|
||||
listener.stdout.on('data', (data) => {
|
||||
process.stdout.write(data.toString('utf8'));
|
||||
});
|
||||
@@ -40,6 +40,10 @@ var runService = function() {
|
||||
process.stdout.write(data.toString('utf8'));
|
||||
});
|
||||
|
||||
listener.on("error", (err) => {
|
||||
console.log(`Runner listener fail to start with error ${err.message}`);
|
||||
});
|
||||
|
||||
listener.on('close', (code) => {
|
||||
console.log(`Runner listener exited with error code ${code}`);
|
||||
|
||||
@@ -56,13 +60,13 @@ var runService = function() {
|
||||
} else {
|
||||
console.log('Runner listener exit with undefined return code, re-launch runner in 5 seconds.');
|
||||
}
|
||||
|
||||
if(!stopping) {
|
||||
|
||||
if (!stopping) {
|
||||
setTimeout(runService, 5000);
|
||||
}
|
||||
});
|
||||
|
||||
} catch(ex) {
|
||||
} catch (ex) {
|
||||
console.log(ex);
|
||||
}
|
||||
}
|
||||
@@ -71,7 +75,7 @@ var runService = function() {
|
||||
runService();
|
||||
console.log('Started running service');
|
||||
|
||||
var gracefulShutdown = function(code) {
|
||||
var gracefulShutdown = function (code) {
|
||||
console.log('Shutting down runner listener');
|
||||
stopping = true;
|
||||
if (listener) {
|
||||
|
||||
@@ -23,5 +23,7 @@
|
||||
<key>ACTIONS_RUNNER_SVC</key>
|
||||
<string>1</string>
|
||||
</dict>
|
||||
<key>ProcessType</key>
|
||||
<string>Interactive</string>
|
||||
</dict>
|
||||
</plist>
|
||||
|
||||
115
src/Misc/layoutbin/checkScripts/downloadCert.js
Normal file
@@ -0,0 +1,115 @@
|
||||
const https = require('https')
|
||||
const fs = require('fs')
|
||||
const http = require('http')
|
||||
const hostname = process.env['HOSTNAME'] || ''
|
||||
const port = process.env['PORT'] || ''
|
||||
const path = process.env['PATH'] || ''
|
||||
const pat = process.env['PAT'] || ''
|
||||
const proxyHost = process.env['PROXYHOST'] || ''
|
||||
const proxyPort = process.env['PROXYPORT'] || ''
|
||||
const proxyUsername = process.env['PROXYUSERNAME'] || ''
|
||||
const proxyPassword = process.env['PROXYPASSWORD'] || ''
|
||||
|
||||
process.env['NODE_TLS_REJECT_UNAUTHORIZED'] = '0'
|
||||
|
||||
if (proxyHost === '') {
|
||||
const options = {
|
||||
hostname: hostname,
|
||||
port: port,
|
||||
path: path,
|
||||
method: 'GET',
|
||||
headers: {
|
||||
'User-Agent': 'GitHubActionsRunnerCheck/1.0',
|
||||
'Authorization': `token ${pat}`
|
||||
},
|
||||
}
|
||||
const req = https.request(options, res => {
|
||||
console.log(`statusCode: ${res.statusCode}`)
|
||||
console.log(`headers: ${JSON.stringify(res.headers)}`)
|
||||
let cert = socket.getPeerCertificate(true)
|
||||
let certPEM = ''
|
||||
let fingerprints = {}
|
||||
while (cert != null && fingerprints[cert.fingerprint] != '1') {
|
||||
fingerprints[cert.fingerprint] = '1'
|
||||
certPEM = certPEM + '-----BEGIN CERTIFICATE-----\n'
|
||||
let certEncoded = cert.raw.toString('base64')
|
||||
for (let i = 0; i < certEncoded.length; i++) {
|
||||
certPEM = certPEM + certEncoded[i]
|
||||
if (i != certEncoded.length - 1 && (i + 1) % 64 == 0) {
|
||||
certPEM = certPEM + '\n'
|
||||
}
|
||||
}
|
||||
certPEM = certPEM + '\n-----END CERTIFICATE-----\n'
|
||||
cert = cert.issuerCertificate
|
||||
}
|
||||
console.log(certPEM)
|
||||
fs.writeFileSync('./download_ca_cert.pem', certPEM)
|
||||
res.on('data', d => {
|
||||
process.stdout.write(d)
|
||||
})
|
||||
})
|
||||
req.on('error', error => {
|
||||
console.error(error)
|
||||
})
|
||||
req.end()
|
||||
}
|
||||
else {
|
||||
const auth = 'Basic ' + Buffer.from(proxyUsername + ':' + proxyPassword).toString('base64')
|
||||
|
||||
const options = {
|
||||
host: proxyHost,
|
||||
port: proxyPort,
|
||||
method: 'CONNECT',
|
||||
path: `${hostname}:${port}`,
|
||||
}
|
||||
|
||||
if (proxyUsername != '' || proxyPassword != '') {
|
||||
options.headers = {
|
||||
'Proxy-Authorization': auth,
|
||||
}
|
||||
}
|
||||
|
||||
http.request(options).on('connect', (res, socket) => {
|
||||
if (res.statusCode != 200) {
|
||||
throw new Error(`Proxy returns code: ${res.statusCode}`)
|
||||
}
|
||||
|
||||
https.get({
|
||||
host: hostname,
|
||||
port: port,
|
||||
socket: socket,
|
||||
agent: false,
|
||||
path: '/',
|
||||
headers: {
|
||||
'User-Agent': 'GitHubActionsRunnerCheck/1.0',
|
||||
'Authorization': `token ${pat}`
|
||||
}
|
||||
}, (res) => {
|
||||
let cert = res.socket.getPeerCertificate(true)
|
||||
let certPEM = ''
|
||||
let fingerprints = {}
|
||||
while (cert != null && fingerprints[cert.fingerprint] != '1') {
|
||||
fingerprints[cert.fingerprint] = '1'
|
||||
certPEM = certPEM + '-----BEGIN CERTIFICATE-----\n'
|
||||
let certEncoded = cert.raw.toString('base64')
|
||||
for (let i = 0; i < certEncoded.length; i++) {
|
||||
certPEM = certPEM + certEncoded[i]
|
||||
if (i != certEncoded.length - 1 && (i + 1) % 64 == 0) {
|
||||
certPEM = certPEM + '\n'
|
||||
}
|
||||
}
|
||||
certPEM = certPEM + '\n-----END CERTIFICATE-----\n'
|
||||
cert = cert.issuerCertificate
|
||||
}
|
||||
console.log(certPEM)
|
||||
fs.writeFileSync('./download_ca_cert.pem', certPEM)
|
||||
console.log(`statusCode: ${res.statusCode}`)
|
||||
console.log(`headers: ${JSON.stringify(res.headers)}`)
|
||||
res.on('data', d => {
|
||||
process.stdout.write(d)
|
||||
})
|
||||
})
|
||||
}).on('error', (err) => {
|
||||
console.error('error', err)
|
||||
}).end()
|
||||
}
|
||||
75
src/Misc/layoutbin/checkScripts/makeWebRequest.js
Normal file
@@ -0,0 +1,75 @@
|
||||
const https = require('https')
|
||||
const http = require('http')
|
||||
const hostname = process.env['HOSTNAME'] || ''
|
||||
const port = process.env['PORT'] || ''
|
||||
const path = process.env['PATH'] || ''
|
||||
const pat = process.env['PAT'] || ''
|
||||
const proxyHost = process.env['PROXYHOST'] || ''
|
||||
const proxyPort = process.env['PROXYPORT'] || ''
|
||||
const proxyUsername = process.env['PROXYUSERNAME'] || ''
|
||||
const proxyPassword = process.env['PROXYPASSWORD'] || ''
|
||||
|
||||
if (proxyHost === '') {
|
||||
const options = {
|
||||
hostname: hostname,
|
||||
port: port,
|
||||
path: path,
|
||||
method: 'GET',
|
||||
headers: {
|
||||
'User-Agent': 'GitHubActionsRunnerCheck/1.0',
|
||||
'Authorization': `token ${pat}`,
|
||||
}
|
||||
}
|
||||
const req = https.request(options, res => {
|
||||
console.log(`statusCode: ${res.statusCode}`)
|
||||
console.log(`headers: ${JSON.stringify(res.headers)}`)
|
||||
|
||||
res.on('data', d => {
|
||||
process.stdout.write(d)
|
||||
})
|
||||
})
|
||||
req.on('error', error => {
|
||||
console.error(error)
|
||||
})
|
||||
req.end()
|
||||
}
|
||||
else {
|
||||
const proxyAuth = 'Basic ' + Buffer.from(proxyUsername + ':' + proxyPassword).toString('base64')
|
||||
const options = {
|
||||
hostname: proxyHost,
|
||||
port: proxyPort,
|
||||
method: 'CONNECT',
|
||||
path: `${hostname}:${port}`
|
||||
}
|
||||
|
||||
if (proxyUsername != '' || proxyPassword != '') {
|
||||
options.headers = {
|
||||
'Proxy-Authorization': proxyAuth,
|
||||
}
|
||||
}
|
||||
http.request(options).on('connect', (res, socket) => {
|
||||
if (res.statusCode != 200) {
|
||||
throw new Error(`Proxy returns code: ${res.statusCode}`)
|
||||
}
|
||||
https.get({
|
||||
host: hostname,
|
||||
port: port,
|
||||
socket: socket,
|
||||
agent: false,
|
||||
path: path,
|
||||
headers: {
|
||||
'User-Agent': 'GitHubActionsRunnerCheck/1.0',
|
||||
'Authorization': `token ${pat}`,
|
||||
}
|
||||
}, (res) => {
|
||||
console.log(`statusCode: ${res.statusCode}`)
|
||||
console.log(`headers: ${JSON.stringify(res.headers)}`)
|
||||
|
||||
res.on('data', d => {
|
||||
process.stdout.write(d)
|
||||
})
|
||||
})
|
||||
}).on('error', (err) => {
|
||||
console.error('error', err)
|
||||
}).end()
|
||||
}
|
||||
@@ -49,70 +49,68 @@ then
|
||||
cat /etc/debian_version
|
||||
echo "------------------------------"
|
||||
|
||||
# prefer apt over apt-get
|
||||
command -v apt
|
||||
# prefer apt-get over apt
|
||||
command -v apt-get
|
||||
if [ $? -eq 0 ]
|
||||
then
|
||||
apt update && apt install -y liblttng-ust0 libkrb5-3 zlib1g
|
||||
if [ $? -ne 0 ]
|
||||
then
|
||||
echo "'apt' failed with exit code '$?'"
|
||||
print_errormessage
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# libissl version prefer: libssl1.1 -> libssl1.0.2 -> libssl1.0.0
|
||||
apt install -y libssl1.1$ || apt install -y libssl1.0.2$ || apt install -y libssl1.0.0$
|
||||
if [ $? -ne 0 ]
|
||||
then
|
||||
echo "'apt' failed with exit code '$?'"
|
||||
print_errormessage
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# libicu version prefer: libicu66 -> libicu63 -> libicu60 -> libicu57 -> libicu55 -> libicu52
|
||||
apt install -y libicu66 || apt install -y libicu63 || apt install -y libicu60 || apt install -y libicu57 || apt install -y libicu55 || apt install -y libicu52
|
||||
if [ $? -ne 0 ]
|
||||
then
|
||||
echo "'apt' failed with exit code '$?'"
|
||||
print_errormessage
|
||||
exit 1
|
||||
fi
|
||||
apt_get=apt-get
|
||||
else
|
||||
command -v apt-get
|
||||
command -v apt
|
||||
if [ $? -eq 0 ]
|
||||
then
|
||||
apt-get update && apt-get install -y liblttng-ust0 libkrb5-3 zlib1g
|
||||
if [ $? -ne 0 ]
|
||||
then
|
||||
echo "'apt-get' failed with exit code '$?'"
|
||||
print_errormessage
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# libissl version prefer: libssl1.1 -> libssl1.0.2 -> libssl1.0.0
|
||||
apt-get install -y libssl1.1$ || apt-get install -y libssl1.0.2$ || apt install -y libssl1.0.0$
|
||||
if [ $? -ne 0 ]
|
||||
then
|
||||
echo "'apt-get' failed with exit code '$?'"
|
||||
print_errormessage
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# libicu version prefer: libicu66 -> libicu63 -> libicu60 -> libicu57 -> libicu55 -> libicu52
|
||||
apt-get install -y libicu66 || apt-get install -y libicu63 || apt-get install -y libicu60 || apt install -y libicu57 || apt install -y libicu55 || apt install -y libicu52
|
||||
if [ $? -ne 0 ]
|
||||
then
|
||||
echo "'apt-get' failed with exit code '$?'"
|
||||
print_errormessage
|
||||
exit 1
|
||||
fi
|
||||
apt_get=apt
|
||||
else
|
||||
echo "Can not find 'apt' or 'apt-get'"
|
||||
echo "Found neither 'apt-get' nor 'apt'"
|
||||
print_errormessage
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
$apt_get update && $apt_get install -y liblttng-ust0 libkrb5-3 zlib1g
|
||||
if [ $? -ne 0 ]
|
||||
then
|
||||
echo "'$apt_get' failed with exit code '$?'"
|
||||
print_errormessage
|
||||
exit 1
|
||||
fi
|
||||
|
||||
apt_get_with_fallbacks() {
|
||||
$apt_get install -y $1
|
||||
fail=$?
|
||||
if [ $fail -eq 0 ]
|
||||
then
|
||||
if [ "${1#"${1%?}"}" = '$' ]; then
|
||||
dpkg -l "${1%?}" > /dev/null 2> /dev/null
|
||||
fail=$?
|
||||
fi
|
||||
fi
|
||||
if [ $fail -ne 0 ]
|
||||
then
|
||||
shift
|
||||
if [ -n "$1" ]
|
||||
then
|
||||
apt_get_with_fallbacks "$@"
|
||||
fi
|
||||
fi
|
||||
}
|
||||
|
||||
# libssl version prefer: libssl1.1 -> libssl1.0.2 -> libssl1.0.0
|
||||
apt_get_with_fallbacks libssl1.1$ libssl1.0.2$ libssl1.0.0$
|
||||
if [ $? -ne 0 ]
|
||||
then
|
||||
echo "'$apt_get' failed with exit code '$?'"
|
||||
print_errormessage
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# libicu version prefer: libicu66 -> libicu63 -> libicu60 -> libicu57 -> libicu55 -> libicu52
|
||||
apt_get_with_fallbacks libicu66 libicu63 libicu60 libicu57 libicu55 libicu52
|
||||
if [ $? -ne 0 ]
|
||||
then
|
||||
echo "'$apt_get' failed with exit code '$?'"
|
||||
print_errormessage
|
||||
exit 1
|
||||
fi
|
||||
elif [ -e /etc/redhat-release ]
|
||||
then
|
||||
echo "The current OS is Fedora based"
|
||||
|
||||
4
src/Misc/layoutbin/update.sh.template
Normal file → Executable file
@@ -28,13 +28,13 @@ date "+[%F %T-%4N] Waiting for $runnerprocessname ($runnerpid) to complete" >> "
|
||||
while [ -e /proc/$runnerpid ]
|
||||
do
|
||||
date "+[%F %T-%4N] Process $runnerpid still running" >> "$logfile" 2>&1
|
||||
ping -c 2 127.0.0.1 >nul
|
||||
sleep 2
|
||||
done
|
||||
date "+[%F %T-%4N] Process $runnerpid finished running" >> "$logfile" 2>&1
|
||||
|
||||
# start re-organize folders
|
||||
date "+[%F %T-%4N] Sleep 1 more second to make sure process exited" >> "$logfile" 2>&1
|
||||
ping -c 2 127.0.0.1 >nul
|
||||
sleep 1
|
||||
|
||||
# the folder structure under runner root will be
|
||||
# ./bin -> bin.2.100.0 (junction folder)
|
||||
|
||||
@@ -18,24 +18,26 @@ then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
message="Execute sudo ./bin/installdependencies.sh to install any missing Dotnet Core 3.0 dependencies."
|
||||
|
||||
ldd ./bin/libcoreclr.so | grep 'not found'
|
||||
if [ $? -eq 0 ]; then
|
||||
echo "Dependencies is missing for Dotnet Core 3.0"
|
||||
echo "Execute ./bin/installdependencies.sh to install any missing Dotnet Core 3.0 dependencies."
|
||||
echo $message
|
||||
exit 1
|
||||
fi
|
||||
|
||||
ldd ./bin/System.Security.Cryptography.Native.OpenSsl.so | grep 'not found'
|
||||
if [ $? -eq 0 ]; then
|
||||
echo "Dependencies is missing for Dotnet Core 3.0"
|
||||
echo "Execute ./bin/installdependencies.sh to install any missing Dotnet Core 3.0 dependencies."
|
||||
echo $message
|
||||
exit 1
|
||||
fi
|
||||
|
||||
ldd ./bin/System.IO.Compression.Native.so | grep 'not found'
|
||||
if [ $? -eq 0 ]; then
|
||||
echo "Dependencies is missing for Dotnet Core 3.0"
|
||||
echo "Execute ./bin/installdependencies.sh to install any missing Dotnet Core 3.0 dependencies."
|
||||
echo $message
|
||||
exit 1
|
||||
fi
|
||||
|
||||
@@ -50,10 +52,10 @@ then
|
||||
fi
|
||||
|
||||
libpath=${LD_LIBRARY_PATH:-}
|
||||
$LDCONFIG_COMMAND -NXv ${libpath//:/} 2>&1 | grep libicu >/dev/null 2>&1
|
||||
$LDCONFIG_COMMAND -NXv ${libpath//:/ } 2>&1 | grep libicu >/dev/null 2>&1
|
||||
if [ $? -ne 0 ]; then
|
||||
echo "Libicu's dependencies is missing for Dotnet Core 3.0"
|
||||
echo "Execute ./bin/installdependencies.sh to install any missing Dotnet Core 3.0 dependencies."
|
||||
echo $message
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
@@ -67,7 +69,7 @@ while [ -h "$SOURCE" ]; do # resolve $SOURCE until the file is no longer a symli
|
||||
[[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
|
||||
done
|
||||
DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
|
||||
cd $DIR
|
||||
cd "$DIR"
|
||||
|
||||
source ./env.sh
|
||||
|
||||
|
||||
@@ -6,6 +6,7 @@ varCheckList=(
|
||||
'ANT_HOME'
|
||||
'M2_HOME'
|
||||
'ANDROID_HOME'
|
||||
'ANDROID_SDK_ROOT'
|
||||
'GRADLE_HOME'
|
||||
'NVM_BIN'
|
||||
'NVM_PATH'
|
||||
|
||||
@@ -26,25 +26,23 @@ if [[ "$1" == "localRun" ]]; then
|
||||
else
|
||||
"$DIR"/bin/Runner.Listener run $*
|
||||
|
||||
# Return code 4 means the run once runner received an update message.
|
||||
# Sleep 5 seconds to wait for the update process finish and run the runner again.
|
||||
# Return code 3 means the run once runner received an update message.
|
||||
# Sleep 5 seconds to wait for the update process finish
|
||||
returnCode=$?
|
||||
if [[ $returnCode == 4 ]]; then
|
||||
if [[ $returnCode == 3 ]]; then
|
||||
if [ ! -x "$(command -v sleep)" ]; then
|
||||
if [ ! -x "$(command -v ping)" ]; then
|
||||
COUNT="0"
|
||||
while [[ $COUNT != 5000 ]]; do
|
||||
echo "SLEEP" >nul
|
||||
echo "SLEEP" > /dev/null
|
||||
COUNT=$[$COUNT+1]
|
||||
done
|
||||
else
|
||||
ping -n 5 127.0.0.1 >nul
|
||||
ping -c 5 127.0.0.1 > /dev/null
|
||||
fi
|
||||
else
|
||||
sleep 5 >nul
|
||||
sleep 5
|
||||
fi
|
||||
|
||||
"$DIR"/bin/Runner.Listener run $*
|
||||
else
|
||||
exit $returnCode
|
||||
fi
|
||||
|
||||
@@ -90,7 +90,7 @@ namespace GitHub.Runner.Common
|
||||
public static readonly string Labels = "labels";
|
||||
public static readonly string MonitorSocketAddress = "monitorsocketaddress";
|
||||
public static readonly string Name = "name";
|
||||
public static readonly string Pool = "pool";
|
||||
public static readonly string RunnerGroup = "runnergroup";
|
||||
public static readonly string StartupType = "startuptype";
|
||||
public static readonly string Url = "url";
|
||||
public static readonly string UserName = "username";
|
||||
@@ -99,9 +99,11 @@ namespace GitHub.Runner.Common
|
||||
|
||||
// Secret args. Must be added to the "Secrets" getter as well.
|
||||
public static readonly string Token = "token";
|
||||
public static readonly string PAT = "pat";
|
||||
public static readonly string WindowsLogonPassword = "windowslogonpassword";
|
||||
public static string[] Secrets => new[]
|
||||
{
|
||||
PAT,
|
||||
Token,
|
||||
WindowsLogonPassword,
|
||||
};
|
||||
@@ -119,6 +121,7 @@ namespace GitHub.Runner.Common
|
||||
//validFlags array as well present in the CommandSettings.cs
|
||||
public static class Flags
|
||||
{
|
||||
public static readonly string Check = "check";
|
||||
public static readonly string Commit = "commit";
|
||||
public static readonly string Help = "help";
|
||||
public static readonly string Replace = "replace";
|
||||
@@ -138,8 +141,16 @@ namespace GitHub.Runner.Common
|
||||
public const int RunOnceRunnerUpdating = 4;
|
||||
}
|
||||
|
||||
public static class Features
|
||||
{
|
||||
public static readonly string DiskSpaceWarning = "runner.diskspace.warning";
|
||||
}
|
||||
|
||||
public static readonly string InternalTelemetryIssueDataKey = "_internal_telemetry";
|
||||
public static readonly string WorkerCrash = "WORKER_CRASH";
|
||||
public static readonly string LowDiskSpace = "LOW_DISK_SPACE";
|
||||
public static readonly string UnsupportedCommand = "UNSUPPORTED_COMMAND";
|
||||
public static readonly string UnsupportedCommandMessageDisabled = "The `{0}` command is disabled. Please upgrade to using Environment Files or opt into unsecure command execution by setting the `ACTIONS_ALLOW_UNSECURE_COMMANDS` environment variable to `true`. For more information see: https://github.blog/changelog/2020-10-01-github-actions-deprecating-set-env-and-add-path-commands/";
|
||||
}
|
||||
|
||||
public static class RunnerEvent
|
||||
@@ -198,6 +209,7 @@ namespace GitHub.Runner.Common
|
||||
//
|
||||
// Keep alphabetical
|
||||
//
|
||||
public static readonly string AllowUnsupportedCommands = "ACTIONS_ALLOW_UNSECURE_COMMANDS";
|
||||
public static readonly string RunnerDebug = "ACTIONS_RUNNER_DEBUG";
|
||||
public static readonly string StepDebug = "ACTIONS_STEP_DEBUG";
|
||||
}
|
||||
|
||||
@@ -56,6 +56,16 @@ namespace GitHub.Runner.Common
|
||||
Add<T>(extensions, "GitHub.Runner.Worker.EndGroupCommandExtension, Runner.Worker");
|
||||
Add<T>(extensions, "GitHub.Runner.Worker.EchoCommandExtension, Runner.Worker");
|
||||
break;
|
||||
case "GitHub.Runner.Worker.IFileCommandExtension":
|
||||
Add<T>(extensions, "GitHub.Runner.Worker.AddPathFileCommand, Runner.Worker");
|
||||
Add<T>(extensions, "GitHub.Runner.Worker.SetEnvFileCommand, Runner.Worker");
|
||||
break;
|
||||
case "GitHub.Runner.Listener.Check.ICheckExtension":
|
||||
Add<T>(extensions, "GitHub.Runner.Listener.Check.InternetCheck, Runner.Listener");
|
||||
Add<T>(extensions, "GitHub.Runner.Listener.Check.ActionsCheck, Runner.Listener");
|
||||
Add<T>(extensions, "GitHub.Runner.Listener.Check.GitCheck, Runner.Listener");
|
||||
Add<T>(extensions, "GitHub.Runner.Listener.Check.NodeJsCheck, Runner.Listener");
|
||||
break;
|
||||
default:
|
||||
// This should never happen.
|
||||
throw new NotSupportedException($"Unexpected extension type: '{typeof(T).FullName}'");
|
||||
|
||||
@@ -84,6 +84,7 @@ namespace GitHub.Runner.Common
|
||||
this.SecretMasker.AddValueEncoder(ValueEncoders.Base64StringEscape);
|
||||
this.SecretMasker.AddValueEncoder(ValueEncoders.Base64StringEscapeShift1);
|
||||
this.SecretMasker.AddValueEncoder(ValueEncoders.Base64StringEscapeShift2);
|
||||
this.SecretMasker.AddValueEncoder(ValueEncoders.CommandLineArgumentEscape);
|
||||
this.SecretMasker.AddValueEncoder(ValueEncoders.ExpressionStringEscape);
|
||||
this.SecretMasker.AddValueEncoder(ValueEncoders.JsonStringEscape);
|
||||
this.SecretMasker.AddValueEncoder(ValueEncoders.UriDataEscape);
|
||||
|
||||
@@ -16,6 +16,7 @@ namespace GitHub.Runner.Common
|
||||
// logging and console
|
||||
Task<TaskLog> AppendLogContentAsync(Guid scopeIdentifier, string hubName, Guid planId, int logId, Stream uploadStream, CancellationToken cancellationToken);
|
||||
Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, CancellationToken cancellationToken);
|
||||
Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long startLine, CancellationToken cancellationToken);
|
||||
Task<TaskAttachment> CreateAttachmentAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, String type, String name, Stream uploadStream, CancellationToken cancellationToken);
|
||||
Task<TaskLog> CreateLogAsync(Guid scopeIdentifier, string hubName, Guid planId, TaskLog log, CancellationToken cancellationToken);
|
||||
Task<Timeline> CreateTimelineAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, CancellationToken cancellationToken);
|
||||
@@ -79,6 +80,12 @@ namespace GitHub.Runner.Common
|
||||
return _taskClient.AppendTimelineRecordFeedAsync(scopeIdentifier, hubName, planId, timelineId, timelineRecordId, stepId, lines, cancellationToken: cancellationToken);
|
||||
}
|
||||
|
||||
public Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long startLine, CancellationToken cancellationToken)
|
||||
{
|
||||
CheckConnection();
|
||||
return _taskClient.AppendTimelineRecordFeedAsync(scopeIdentifier, hubName, planId, timelineId, timelineRecordId, stepId, lines, startLine, cancellationToken: cancellationToken);
|
||||
}
|
||||
|
||||
public Task<TaskAttachment> CreateAttachmentAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, string type, string name, Stream uploadStream, CancellationToken cancellationToken)
|
||||
{
|
||||
CheckConnection();
|
||||
|
||||
@@ -18,7 +18,7 @@ namespace GitHub.Runner.Common
|
||||
event EventHandler<ThrottlingEventArgs> JobServerQueueThrottling;
|
||||
Task ShutdownAsync();
|
||||
void Start(Pipelines.AgentJobRequestMessage jobRequest);
|
||||
void QueueWebConsoleLine(Guid stepRecordId, string line);
|
||||
void QueueWebConsoleLine(Guid stepRecordId, string line, long? lineNumber = null);
|
||||
void QueueFileUpload(Guid timelineId, Guid timelineRecordId, string type, string name, string path, bool deleteSource);
|
||||
void QueueTimelineRecordUpdate(Guid timelineId, TimelineRecord timelineRecord);
|
||||
}
|
||||
@@ -155,10 +155,10 @@ namespace GitHub.Runner.Common
|
||||
Trace.Info("All queue process tasks have been stopped, and all queues are drained.");
|
||||
}
|
||||
|
||||
public void QueueWebConsoleLine(Guid stepRecordId, string line)
|
||||
public void QueueWebConsoleLine(Guid stepRecordId, string line, long? lineNumber)
|
||||
{
|
||||
Trace.Verbose("Enqueue web console line queue: {0}", line);
|
||||
_webConsoleLineQueue.Enqueue(new ConsoleLineInfo(stepRecordId, line));
|
||||
_webConsoleLineQueue.Enqueue(new ConsoleLineInfo(stepRecordId, line, lineNumber));
|
||||
}
|
||||
|
||||
public void QueueFileUpload(Guid timelineId, Guid timelineRecordId, string type, string name, string path, bool deleteSource)
|
||||
@@ -214,7 +214,7 @@ namespace GitHub.Runner.Common
|
||||
}
|
||||
|
||||
// Group consolelines by timeline record of each step
|
||||
Dictionary<Guid, List<string>> stepsConsoleLines = new Dictionary<Guid, List<string>>();
|
||||
Dictionary<Guid, List<TimelineRecordLogLine>> stepsConsoleLines = new Dictionary<Guid, List<TimelineRecordLogLine>>();
|
||||
List<Guid> stepRecordIds = new List<Guid>(); // We need to keep lines in order
|
||||
int linesCounter = 0;
|
||||
ConsoleLineInfo lineInfo;
|
||||
@@ -222,7 +222,7 @@ namespace GitHub.Runner.Common
|
||||
{
|
||||
if (!stepsConsoleLines.ContainsKey(lineInfo.StepRecordId))
|
||||
{
|
||||
stepsConsoleLines[lineInfo.StepRecordId] = new List<string>();
|
||||
stepsConsoleLines[lineInfo.StepRecordId] = new List<TimelineRecordLogLine>();
|
||||
stepRecordIds.Add(lineInfo.StepRecordId);
|
||||
}
|
||||
|
||||
@@ -232,7 +232,7 @@ namespace GitHub.Runner.Common
|
||||
lineInfo.Line = $"{lineInfo.Line.Substring(0, 1024)}...";
|
||||
}
|
||||
|
||||
stepsConsoleLines[lineInfo.StepRecordId].Add(lineInfo.Line);
|
||||
stepsConsoleLines[lineInfo.StepRecordId].Add(new TimelineRecordLogLine(lineInfo.Line, lineInfo.LineNumber));
|
||||
linesCounter++;
|
||||
|
||||
// process at most about 500 lines of web console line during regular timer dequeue task.
|
||||
@@ -247,13 +247,13 @@ namespace GitHub.Runner.Common
|
||||
{
|
||||
// Split consolelines into batch, each batch will container at most 100 lines.
|
||||
int batchCounter = 0;
|
||||
List<List<string>> batchedLines = new List<List<string>>();
|
||||
List<List<TimelineRecordLogLine>> batchedLines = new List<List<TimelineRecordLogLine>>();
|
||||
foreach (var line in stepsConsoleLines[stepRecordId])
|
||||
{
|
||||
var currentBatch = batchedLines.ElementAtOrDefault(batchCounter);
|
||||
if (currentBatch == null)
|
||||
{
|
||||
batchedLines.Add(new List<string>());
|
||||
batchedLines.Add(new List<TimelineRecordLogLine>());
|
||||
currentBatch = batchedLines.ElementAt(batchCounter);
|
||||
}
|
||||
|
||||
@@ -275,7 +275,6 @@ namespace GitHub.Runner.Common
|
||||
{
|
||||
Trace.Info($"Skip {batchedLines.Count - 2} batches web console lines for last run");
|
||||
batchedLines = batchedLines.TakeLast(2).ToList();
|
||||
batchedLines[0].Insert(0, "...");
|
||||
}
|
||||
|
||||
int errorCount = 0;
|
||||
@@ -284,7 +283,15 @@ namespace GitHub.Runner.Common
|
||||
try
|
||||
{
|
||||
// We will not requeue a failed batch, since web console lines are time sensitive.
|
||||
await _jobServer.AppendTimelineRecordFeedAsync(_scopeIdentifier, _hubName, _planId, _jobTimelineId, _jobTimelineRecordId, stepRecordId, batch, default(CancellationToken));
|
||||
if (batch[0].LineNumber.HasValue)
|
||||
{
|
||||
await _jobServer.AppendTimelineRecordFeedAsync(_scopeIdentifier, _hubName, _planId, _jobTimelineId, _jobTimelineRecordId, stepRecordId, batch.Select(logLine => logLine.Line).ToList(), batch[0].LineNumber.Value, default(CancellationToken));
|
||||
}
|
||||
else
|
||||
{
|
||||
await _jobServer.AppendTimelineRecordFeedAsync(_scopeIdentifier, _hubName, _planId, _jobTimelineId, _jobTimelineRecordId, stepRecordId, batch.Select(logLine => logLine.Line).ToList(), default(CancellationToken));
|
||||
}
|
||||
|
||||
if (_firstConsoleOutputs)
|
||||
{
|
||||
HostContext.WritePerfCounter($"WorkerJobServerQueueAppendFirstConsoleOutput_{_planId.ToString()}");
|
||||
@@ -653,13 +660,15 @@ namespace GitHub.Runner.Common
|
||||
|
||||
internal class ConsoleLineInfo
|
||||
{
|
||||
public ConsoleLineInfo(Guid recordId, string line)
|
||||
public ConsoleLineInfo(Guid recordId, string line, long? lineNumber)
|
||||
{
|
||||
this.StepRecordId = recordId;
|
||||
this.Line = line;
|
||||
this.LineNumber = lineNumber;
|
||||
}
|
||||
|
||||
public Guid StepRecordId { get; set; }
|
||||
public string Line { get; set; }
|
||||
public long? LineNumber { get; set; }
|
||||
}
|
||||
}
|
||||
|
||||
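The QueueWebConsoleLine change above threads an optional line number from the worker through ConsoleLineInfo to the live-log feed. A minimal sketch of a call site, assuming an IJobServerQueue instance and step record id supplied by the job context (names and messages illustrative, not from the diff):

using System;
using GitHub.Runner.Common;

public static class WebConsoleLineExample
{
    public static void Emit(IJobServerQueue jobServerQueue, Guid stepRecordId)
    {
        // Passing an explicit line number keeps the streamed console output and the
        // uploaded step log aligned on the service side.
        long lineNumber = 1;
        jobServerQueue.QueueWebConsoleLine(stepRecordId, "Set up job", lineNumber++);
        jobServerQueue.QueueWebConsoleLine(stepRecordId, "Run actions/checkout", lineNumber++);

        // Callers that don't track line numbers can omit the argument; the parameter
        // defaults to null and the old behavior is preserved.
        jobServerQueue.QueueWebConsoleLine(stepRecordId, "Post-job cleanup");
    }
}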
@@ -101,7 +101,7 @@ namespace GitHub.Runner.Common
|
||||
EndPage();
|
||||
_byteCount = 0;
|
||||
_dataFileName = Path.Combine(_pagesFolder, $"{_timelineId}_{_timelineRecordId}_{++_pageCount}.log");
|
||||
_pageData = new FileStream(_dataFileName, FileMode.CreateNew);
|
||||
_pageData = new FileStream(_dataFileName, FileMode.CreateNew, FileAccess.ReadWrite, FileShare.ReadWrite);
|
||||
_pageWriter = new StreamWriter(_pageData, System.Text.Encoding.UTF8);
|
||||
}
|
||||
|
||||
|
||||
@@ -45,8 +45,8 @@ namespace GitHub.Runner.Common
|
||||
Task<TaskAgentJobRequest> FinishAgentRequestAsync(int poolId, long requestId, Guid lockToken, DateTime finishTime, TaskResult result, CancellationToken cancellationToken);
|
||||
|
||||
// agent package
|
||||
Task<List<PackageMetadata>> GetPackagesAsync(string packageType, string platform, int top, CancellationToken cancellationToken);
|
||||
Task<PackageMetadata> GetPackageAsync(string packageType, string platform, string version, CancellationToken cancellationToken);
|
||||
Task<List<PackageMetadata>> GetPackagesAsync(string packageType, string platform, int top, bool includeToken, CancellationToken cancellationToken);
|
||||
Task<PackageMetadata> GetPackageAsync(string packageType, string platform, string version, bool includeToken, CancellationToken cancellationToken);
|
||||
|
||||
// agent update
|
||||
Task<TaskAgent> UpdateAgentUpdateStateAsync(int agentPoolId, int agentId, string currentState);
|
||||
@@ -317,16 +317,16 @@ namespace GitHub.Runner.Common
|
||||
//-----------------------------------------------------------------
|
||||
// Agent Package
|
||||
//-----------------------------------------------------------------
|
||||
public Task<List<PackageMetadata>> GetPackagesAsync(string packageType, string platform, int top, CancellationToken cancellationToken)
|
||||
public Task<List<PackageMetadata>> GetPackagesAsync(string packageType, string platform, int top, bool includeToken, CancellationToken cancellationToken)
|
||||
{
|
||||
CheckConnection(RunnerConnectionType.Generic);
|
||||
return _genericTaskAgentClient.GetPackagesAsync(packageType, platform, top, cancellationToken: cancellationToken);
|
||||
return _genericTaskAgentClient.GetPackagesAsync(packageType, platform, top, includeToken, cancellationToken: cancellationToken);
|
||||
}
|
||||
|
||||
public Task<PackageMetadata> GetPackageAsync(string packageType, string platform, string version, CancellationToken cancellationToken)
|
||||
public Task<PackageMetadata> GetPackageAsync(string packageType, string platform, string version, bool includeToken, CancellationToken cancellationToken)
|
||||
{
|
||||
CheckConnection(RunnerConnectionType.Generic);
|
||||
return _genericTaskAgentClient.GetPackageAsync(packageType, platform, version, cancellationToken: cancellationToken);
|
||||
return _genericTaskAgentClient.GetPackageAsync(packageType, platform, version, includeToken, cancellationToken: cancellationToken);
|
||||
}
|
||||
|
||||
public Task<TaskAgent> UpdateAgentUpdateStateAsync(int agentPoolId, int agentId, string currentState)
|
||||
|
||||
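Both package lookups now accept an includeToken flag that is forwarded to the task agent client. A hedged sketch of how a self-update path might call the new overload; the interface name (IRunnerServer), the PackageMetadata namespace, and the "agent" package type are assumptions based on the surrounding runner code:

using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi;   // assumed home of PackageMetadata
using GitHub.Runner.Common;

public static class PackageLookupExample
{
    public static async Task<PackageMetadata> GetLatestRunnerPackageAsync(IRunnerServer runnerServer, string platform)
    {
        // top: 1 returns only the newest package; includeToken: true asks the service to
        // attach a download token to the returned metadata (behavior inferred from the
        // parameter name, not stated in the diff).
        var packages = await runnerServer.GetPackagesAsync("agent", platform, top: 1, includeToken: true, cancellationToken: CancellationToken.None);
        return packages.Count > 0 ? packages[0] : null;
    }
}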
51
src/Runner.Common/Util/EncodingUtil.cs
Normal file
@@ -0,0 +1,51 @@
|
||||
using System;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Runner.Common;
|
||||
|
||||
namespace GitHub.Runner.Common.Util
|
||||
{
|
||||
public static class EncodingUtil
|
||||
{
|
||||
public static async Task SetEncoding(IHostContext hostContext, Tracing trace, CancellationToken cancellationToken)
|
||||
{
|
||||
#if OS_WINDOWS
|
||||
try
|
||||
{
|
||||
if (Console.InputEncoding.CodePage != 65001)
|
||||
{
|
||||
using (var p = hostContext.CreateService<IProcessInvoker>())
|
||||
{
|
||||
// Use UTF8 code page
|
||||
int exitCode = await p.ExecuteAsync(workingDirectory: hostContext.GetDirectory(WellKnownDirectory.Work),
|
||||
fileName: WhichUtil.Which("chcp", true, trace),
|
||||
arguments: "65001",
|
||||
environment: null,
|
||||
requireExitCodeZero: false,
|
||||
outputEncoding: null,
|
||||
killProcessOnCancel: false,
|
||||
redirectStandardIn: null,
|
||||
inheritConsoleHandler: true,
|
||||
cancellationToken: cancellationToken);
|
||||
if (exitCode == 0)
|
||||
{
|
||||
trace.Info("Successfully returned to code page 65001 (UTF8)");
|
||||
}
|
||||
else
|
||||
{
|
||||
trace.Warning($"'chcp 65001' failed with exit code {exitCode}");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
trace.Warning($"'chcp 65001' failed with exception {ex.Message}");
|
||||
}
|
||||
#endif
|
||||
// Dummy variable to prevent compiler error CS1998: "This async method lacks 'await' operators and will run synchronously..."
|
||||
await Task.CompletedTask;
|
||||
}
|
||||
}
|
||||
}
|
||||
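A sketch of a call site for the new helper, assuming the host context and tracing objects the worker already has in hand:

using System.Threading;
using System.Threading.Tasks;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;

public static class EncodingSetupExample
{
    public static async Task PrepareConsoleAsync(IHostContext hostContext, Tracing trace, CancellationToken token)
    {
        // On Windows builds this shells out to 'chcp 65001' to switch the console to
        // UTF-8; failures are only logged as warnings, so callers need no extra guard.
        // On other platforms the call is a no-op.
        await EncodingUtil.SetEncoding(hostContext, trace, token);
    }
}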
93
src/Runner.Listener/Checks/ActionsCheck.cs
Normal file
@@ -0,0 +1,93 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Sdk;
|
||||
|
||||
namespace GitHub.Runner.Listener.Check
|
||||
{
|
||||
public sealed class ActionsCheck : RunnerService, ICheckExtension
|
||||
{
|
||||
private string _logFile = null;
|
||||
|
||||
public int Order => 2;
|
||||
|
||||
public string CheckName => "GitHub Actions Connection";
|
||||
|
||||
public string CheckDescription => "Check if the Actions runner has access to the GitHub Actions service.";
|
||||
|
||||
public string CheckLog => _logFile;
|
||||
|
||||
public string HelpLink => "https://github.com/actions/runner/blob/main/docs/checks/actions.md";
|
||||
|
||||
public Type ExtensionType => typeof(ICheckExtension);
|
||||
|
||||
public override void Initialize(IHostContext hostContext)
|
||||
{
|
||||
base.Initialize(hostContext);
|
||||
_logFile = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Diag), StringUtil.Format("{0}_{1:yyyyMMdd-HHmmss}-utc.log", nameof(ActionsCheck), DateTime.UtcNow));
|
||||
}
|
||||
|
||||
// runner access to actions service
|
||||
public async Task<bool> RunCheck(string url, string pat)
|
||||
{
|
||||
await File.AppendAllLinesAsync(_logFile, HostContext.WarnLog());
|
||||
await File.AppendAllLinesAsync(_logFile, HostContext.CheckProxy());
|
||||
|
||||
var checkTasks = new List<Task<CheckResult>>();
|
||||
string githubApiUrl = null;
|
||||
string actionsTokenServiceUrl = null;
|
||||
string actionsPipelinesServiceUrl = null;
|
||||
var urlBuilder = new UriBuilder(url);
|
||||
if (UrlUtil.IsHostedServer(urlBuilder))
|
||||
{
|
||||
urlBuilder.Host = $"api.{urlBuilder.Host}";
|
||||
urlBuilder.Path = "";
|
||||
githubApiUrl = urlBuilder.Uri.AbsoluteUri;
|
||||
actionsTokenServiceUrl = "https://vstoken.actions.githubusercontent.com/_apis/health";
|
||||
actionsPipelinesServiceUrl = "https://pipelines.actions.githubusercontent.com/_apis/health";
|
||||
}
|
||||
else
|
||||
{
|
||||
urlBuilder.Path = "api/v3";
|
||||
githubApiUrl = urlBuilder.Uri.AbsoluteUri;
|
||||
urlBuilder.Path = "_services/vstoken/_apis/health";
|
||||
actionsTokenServiceUrl = urlBuilder.Uri.AbsoluteUri;
|
||||
urlBuilder.Path = "_services/pipelines/_apis/health";
|
||||
actionsPipelinesServiceUrl = urlBuilder.Uri.AbsoluteUri;
|
||||
}
|
||||
|
||||
// check github api
|
||||
checkTasks.Add(CheckUtil.CheckDns(githubApiUrl));
|
||||
checkTasks.Add(CheckUtil.CheckPing(githubApiUrl));
|
||||
checkTasks.Add(HostContext.CheckHttpsGetRequests(githubApiUrl, pat, expectedHeader: "X-GitHub-Request-Id"));
|
||||
|
||||
// check actions token service
|
||||
checkTasks.Add(CheckUtil.CheckDns(actionsTokenServiceUrl));
|
||||
checkTasks.Add(CheckUtil.CheckPing(actionsTokenServiceUrl));
|
||||
checkTasks.Add(HostContext.CheckHttpsGetRequests(actionsTokenServiceUrl, pat, expectedHeader: "x-vss-e2eid"));
|
||||
|
||||
// check actions pipelines service
|
||||
checkTasks.Add(CheckUtil.CheckDns(actionsPipelinesServiceUrl));
|
||||
checkTasks.Add(CheckUtil.CheckPing(actionsPipelinesServiceUrl));
|
||||
checkTasks.Add(HostContext.CheckHttpsGetRequests(actionsPipelinesServiceUrl, pat, expectedHeader: "x-vss-e2eid"));
|
||||
|
||||
// check HTTP POST to actions pipelines service
|
||||
checkTasks.Add(HostContext.CheckHttpsPostRequests(actionsPipelinesServiceUrl, pat, expectedHeader: "x-vss-e2eid"));
|
||||
|
||||
var result = true;
|
||||
while (checkTasks.Count > 0)
|
||||
{
|
||||
var finishedCheckTask = await Task.WhenAny<CheckResult>(checkTasks);
|
||||
var finishedCheck = await finishedCheckTask;
|
||||
result = result && finishedCheck.Pass;
|
||||
await File.AppendAllLinesAsync(_logFile, finishedCheck.Logs);
|
||||
checkTasks.Remove(finishedCheckTask);
|
||||
}
|
||||
|
||||
await Task.WhenAll(checkTasks);
|
||||
return result;
|
||||
}
|
||||
}
|
||||
}
|
||||
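For a hosted URL such as https://github.com/my-org/my-repo the check probes https://api.github.com/ plus the vstoken and pipelines health endpoints; for a GHES URL it probes <server>/api/v3 and the _services/vstoken and _services/pipelines health routes. A minimal driver, assuming the check has already been initialized against a host context:

using System;
using System.Threading.Tasks;
using GitHub.Runner.Listener.Check;

public static class ActionsCheckExample
{
    public static async Task RunAsync(ActionsCheck check)
    {
        // The PAT is optional; it is only attached as an Authorization header when present.
        bool pass = await check.RunCheck("https://github.com/my-org/my-repo", pat: null);
        Console.WriteLine($"Actions connectivity check {(pass ? "passed" : "failed")}; log: {check.CheckLog}");
    }
}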
417
src/Runner.Listener/Checks/CheckUtil.cs
Normal file
@@ -0,0 +1,417 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Diagnostics.Tracing;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Net;
|
||||
using System.Net.Http;
|
||||
using System.Net.Http.Headers;
|
||||
using System.Net.NetworkInformation;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Services.Common;
|
||||
|
||||
namespace GitHub.Runner.Listener.Check
|
||||
{
|
||||
public static class CheckUtil
|
||||
{
|
||||
public static List<string> WarnLog(this IHostContext hostContext)
|
||||
{
|
||||
var logs = new List<string>();
|
||||
logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
logs.Add($"{DateTime.UtcNow.ToString("O")} **** !!! WARNING !!! ");
|
||||
logs.Add($"{DateTime.UtcNow.ToString("O")} **** DO NOT share the log in public place! The log may contains secrets in plain text. ");
|
||||
logs.Add($"{DateTime.UtcNow.ToString("O")} **** !!! WARNING !!! ");
|
||||
logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
return logs;
|
||||
}
|
||||
|
||||
public static List<string> CheckProxy(this IHostContext hostContext)
|
||||
{
|
||||
var logs = new List<string>();
|
||||
if (!string.IsNullOrEmpty(hostContext.WebProxy.HttpProxyAddress) ||
|
||||
!string.IsNullOrEmpty(hostContext.WebProxy.HttpsProxyAddress))
|
||||
{
|
||||
logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
logs.Add($"{DateTime.UtcNow.ToString("O")} **** Runner is behind web proxy {hostContext.WebProxy.HttpsProxyAddress ?? hostContext.WebProxy.HttpProxyAddress} ");
|
||||
logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
}
|
||||
|
||||
return logs;
|
||||
}
|
||||
|
||||
public static async Task<CheckResult> CheckDns(string targetUrl)
|
||||
{
|
||||
var result = new CheckResult();
|
||||
var url = new Uri(targetUrl);
|
||||
try
|
||||
{
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** Try DNS lookup for {url.Host} ");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
IPHostEntry host = await Dns.GetHostEntryAsync(url.Host);
|
||||
foreach (var address in host.AddressList)
|
||||
{
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Resolved DNS for {url.Host} to '{address}'");
|
||||
}
|
||||
|
||||
result.Pass = true;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
result.Pass = false;
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** Resolved DNS for {url.Host} failed with error: {ex}");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
public static async Task<CheckResult> CheckPing(string targetUrl)
|
||||
{
|
||||
var result = new CheckResult();
|
||||
var url = new Uri(targetUrl);
|
||||
try
|
||||
{
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** Try ping {url.Host} ");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
using (var ping = new Ping())
|
||||
{
|
||||
var reply = await ping.SendPingAsync(url.Host);
|
||||
if (reply.Status == IPStatus.Success)
|
||||
{
|
||||
result.Pass = true;
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Ping {url.Host} ({reply.Address}) succeed within to '{reply.RoundtripTime} ms'");
|
||||
}
|
||||
else
|
||||
{
|
||||
result.Pass = false;
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Ping {url.Host} ({reply.Address}) failed with '{reply.Status}'");
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
result.Pass = false;
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** Ping api.github.com failed with error: {ex}");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
public static async Task<CheckResult> CheckHttpsGetRequests(this IHostContext hostContext, string url, string pat, string expectedHeader)
|
||||
{
|
||||
var result = new CheckResult();
|
||||
try
|
||||
{
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** Send HTTPS Request (GET) to {url} ");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
using (var _ = new HttpEventSourceListener(result.Logs))
|
||||
using (var httpClientHandler = hostContext.CreateHttpClientHandler())
|
||||
using (var httpClient = new HttpClient(httpClientHandler))
|
||||
{
|
||||
httpClient.DefaultRequestHeaders.UserAgent.AddRange(hostContext.UserAgents);
|
||||
if (!string.IsNullOrEmpty(pat))
|
||||
{
|
||||
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("token", pat);
|
||||
}
|
||||
|
||||
var response = await httpClient.GetAsync(url);
|
||||
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Http status code: {response.StatusCode}");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Http response headers: {response.Headers}");
|
||||
|
||||
var responseContent = await response.Content.ReadAsStringAsync();
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Http response body: {responseContent}");
|
||||
if (response.IsSuccessStatusCode)
|
||||
{
|
||||
if (response.Headers.Contains(expectedHeader))
|
||||
{
|
||||
result.Pass = true;
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Http request 'GET' to {url} succeed");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ");
|
||||
}
|
||||
else
|
||||
{
|
||||
result.Pass = false;
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Http request 'GET' to {url} succeed but doesn't have expected HTTP response Header '{expectedHeader}'.");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ");
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
result.Pass = false;
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Http request 'GET' to {url} failed with {response.StatusCode}");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ");
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
result.Pass = false;
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** Https request 'GET' to {url} failed with error: {ex}");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
public static async Task<CheckResult> CheckHttpsPostRequests(this IHostContext hostContext, string url, string pat, string expectedHeader)
|
||||
{
|
||||
var result = new CheckResult();
|
||||
try
|
||||
{
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** Send HTTPS Request (POST) to {url} ");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
using (var _ = new HttpEventSourceListener(result.Logs))
|
||||
using (var httpClientHandler = hostContext.CreateHttpClientHandler())
|
||||
using (var httpClient = new HttpClient(httpClientHandler))
|
||||
{
|
||||
httpClient.DefaultRequestHeaders.UserAgent.AddRange(hostContext.UserAgents);
|
||||
if (!string.IsNullOrEmpty(pat))
|
||||
{
|
||||
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("token", pat);
|
||||
}
|
||||
|
||||
// Send empty JSON '{}' to service
|
||||
var response = await httpClient.PostAsJsonAsync<Dictionary<string, string>>(url, new Dictionary<string, string>());
|
||||
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Http status code: {response.StatusCode}");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Http response headers: {response.Headers}");
|
||||
|
||||
var responseContent = await response.Content.ReadAsStringAsync();
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Http response body: {responseContent}");
|
||||
if (response.Headers.Contains(expectedHeader))
|
||||
{
|
||||
result.Pass = true;
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Http request 'POST' to {url} has expected HTTP response header");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ");
|
||||
}
|
||||
else
|
||||
{
|
||||
result.Pass = false;
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Http request 'POST' to {url} doesn't have expected HTTP response Header '{expectedHeader}'.");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ");
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
result.Pass = false;
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** Https request 'POST' to {url} failed with error: {ex}");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
public static async Task<CheckResult> DownloadExtraCA(this IHostContext hostContext, string url, string pat)
|
||||
{
|
||||
var result = new CheckResult();
|
||||
try
|
||||
{
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** Download SSL Certificate from {url} ");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
|
||||
var uri = new Uri(url);
|
||||
var env = new Dictionary<string, string>()
|
||||
{
|
||||
{ "HOSTNAME", uri.Host },
|
||||
{ "PORT", uri.IsDefaultPort ? (uri.Scheme.ToLowerInvariant() == "https" ? "443" : "80") : uri.Port.ToString() },
|
||||
{ "PATH", uri.AbsolutePath },
|
||||
{ "PAT", pat }
|
||||
};
|
||||
|
||||
var proxy = hostContext.WebProxy.GetProxy(uri);
|
||||
if (proxy != null)
|
||||
{
|
||||
env["PROXYHOST"] = proxy.Host;
|
||||
env["PROXYPORT"] = proxy.IsDefaultPort ? (proxy.Scheme.ToLowerInvariant() == "https" ? "443" : "80") : proxy.Port.ToString();
|
||||
if (hostContext.WebProxy.HttpProxyUsername != null ||
|
||||
hostContext.WebProxy.HttpsProxyUsername != null)
|
||||
{
|
||||
env["PROXYUSERNAME"] = hostContext.WebProxy.HttpProxyUsername ?? hostContext.WebProxy.HttpsProxyUsername;
|
||||
env["PROXYPASSWORD"] = hostContext.WebProxy.HttpProxyPassword ?? hostContext.WebProxy.HttpsProxyPassword;
|
||||
}
|
||||
else
|
||||
{
|
||||
env["PROXYUSERNAME"] = "";
|
||||
env["PROXYPASSWORD"] = "";
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
env["PROXYHOST"] = "";
|
||||
env["PROXYPORT"] = "";
|
||||
env["PROXYUSERNAME"] = "";
|
||||
env["PROXYPASSWORD"] = "";
|
||||
}
|
||||
|
||||
using (var processInvoker = hostContext.CreateService<IProcessInvoker>())
|
||||
{
|
||||
processInvoker.OutputDataReceived += new EventHandler<ProcessDataReceivedEventArgs>((sender, args) =>
|
||||
{
|
||||
if (!string.IsNullOrEmpty(args.Data))
|
||||
{
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} [STDOUT] {args.Data}");
|
||||
}
|
||||
});
|
||||
|
||||
processInvoker.ErrorDataReceived += new EventHandler<ProcessDataReceivedEventArgs>((sender, args) =>
|
||||
{
|
||||
if (!string.IsNullOrEmpty(args.Data))
|
||||
{
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} [STDERR] {args.Data}");
|
||||
}
|
||||
});
|
||||
|
||||
var downloadCertScript = Path.Combine(hostContext.GetDirectory(WellKnownDirectory.Bin), "checkScripts", "downloadCert");
|
||||
var node12 = Path.Combine(hostContext.GetDirectory(WellKnownDirectory.Externals), "node12", "bin", $"node{IOUtil.ExeExtension}");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Run '{node12} \"{downloadCertScript}\"' ");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} {StringUtil.ConvertToJson(env)}");
|
||||
await processInvoker.ExecuteAsync(
|
||||
hostContext.GetDirectory(WellKnownDirectory.Root),
|
||||
node12,
|
||||
$"\"{downloadCertScript}\"",
|
||||
env,
|
||||
true,
|
||||
CancellationToken.None);
|
||||
}
|
||||
|
||||
result.Pass = true;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
result.Pass = false;
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** Download SSL Certificate from '{url}' failed with error: {ex}");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
// EventSource listener for dotnet debug trace for HTTP and SSL
|
||||
public sealed class HttpEventSourceListener : EventListener
|
||||
{
|
||||
private readonly List<string> _logs;
|
||||
private readonly object _lock = new object();
|
||||
private readonly Dictionary<string, HashSet<string>> _ignoredEvent = new Dictionary<string, HashSet<string>>
|
||||
{
|
||||
{
|
||||
"Microsoft-System-Net-Http",
|
||||
new HashSet<string>
|
||||
{
|
||||
"Info",
|
||||
"Associate",
|
||||
"Enter",
|
||||
"Exit"
|
||||
}
|
||||
},
|
||||
{
|
||||
"Microsoft-System-Net-Security",
|
||||
new HashSet<string>
|
||||
{
|
||||
"Enter",
|
||||
"Exit",
|
||||
"Info",
|
||||
"DumpBuffer",
|
||||
"SslStreamCtor",
|
||||
"SecureChannelCtor",
|
||||
"NoDelegateNoClientCert",
|
||||
"CertsAfterFiltering",
|
||||
"UsingCachedCredential",
|
||||
"SspiSelectedCipherSuite"
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
public HttpEventSourceListener(List<string> logs)
|
||||
{
|
||||
_logs = logs;
|
||||
if (Environment.GetEnvironmentVariable("ACTIONS_RUNNER_TRACE_ALL_HTTP_EVENT") == "1")
|
||||
{
|
||||
_ignoredEvent.Clear();
|
||||
}
|
||||
}
|
||||
|
||||
protected override void OnEventSourceCreated(EventSource eventSource)
|
||||
{
|
||||
base.OnEventSourceCreated(eventSource);
|
||||
|
||||
if (eventSource.Name == "Microsoft-System-Net-Http" ||
|
||||
eventSource.Name == "Microsoft-System-Net-Security")
|
||||
{
|
||||
EnableEvents(eventSource, EventLevel.Verbose, EventKeywords.All);
|
||||
}
|
||||
}
|
||||
|
||||
protected override void OnEventWritten(EventWrittenEventArgs eventData)
|
||||
{
|
||||
base.OnEventWritten(eventData);
|
||||
lock (_lock)
|
||||
{
|
||||
if (_ignoredEvent.TryGetValue(eventData.EventSource.Name, out var ignored) &&
|
||||
ignored.Contains(eventData.EventName))
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
_logs.Add($"{DateTime.UtcNow.ToString("O")} [START {eventData.EventSource.Name} - {eventData.EventName}]");
|
||||
_logs.AddRange(eventData.Payload.Select(x => string.Join(Environment.NewLine, x.ToString().Split(Environment.NewLine).Select(y => $"{DateTime.UtcNow.ToString("O")} {y}"))));
|
||||
_logs.Add($"{DateTime.UtcNow.ToString("O")} [END {eventData.EventSource.Name} - {eventData.EventName}]");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
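The helpers in CheckUtil.cs are also usable on their own: each returns a CheckResult carrying a Pass flag plus the accumulated log lines, and HttpEventSourceListener captures the .NET HTTP/SSL event sources while a request is in flight (set ACTIONS_RUNNER_TRACE_ALL_HTTP_EVENT=1 to keep even the normally ignored events). A small composition sketch, assuming a host context is available:

using System;
using System.Threading.Tasks;
using GitHub.Runner.Common;
using GitHub.Runner.Listener.Check;

public static class CheckUtilExample
{
    public static async Task ProbeAsync(IHostContext hostContext)
    {
        var dns  = await CheckUtil.CheckDns("https://api.github.com");
        var ping = await CheckUtil.CheckPing("https://api.github.com");
        var http = await hostContext.CheckHttpsGetRequests("https://api.github.com", pat: null, expectedHeader: "X-GitHub-Request-Id");

        foreach (var check in new[] { dns, ping, http })
        {
            Console.WriteLine($"pass={check.Pass}, {check.Logs.Count} log lines");
        }
    }
}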
171
src/Runner.Listener/Checks/GitCheck.cs
Normal file
@@ -0,0 +1,171 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Net;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Sdk;
|
||||
|
||||
namespace GitHub.Runner.Listener.Check
|
||||
{
|
||||
public sealed class GitCheck : RunnerService, ICheckExtension
|
||||
{
|
||||
private string _logFile = null;
|
||||
private string _gitPath = null;
|
||||
|
||||
public int Order => 3;
|
||||
|
||||
public string CheckName => "Git Certificate/Proxy Validation";
|
||||
|
||||
public string CheckDescription => "Check if the Git CLI can access GitHub.com or GitHub Enterprise Server.";
|
||||
|
||||
public string CheckLog => _logFile;
|
||||
|
||||
public string HelpLink => "https://github.com/actions/runner/blob/main/docs/checks/git.md";
|
||||
|
||||
public Type ExtensionType => typeof(ICheckExtension);
|
||||
|
||||
public override void Initialize(IHostContext hostContext)
|
||||
{
|
||||
base.Initialize(hostContext);
|
||||
_logFile = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Diag), StringUtil.Format("{0}_{1:yyyyMMdd-HHmmss}-utc.log", nameof(GitCheck), DateTime.UtcNow));
|
||||
_gitPath = WhichUtil.Which("git");
|
||||
}
|
||||
|
||||
// git access to ghes/gh
|
||||
public async Task<bool> RunCheck(string url, string pat)
|
||||
{
|
||||
await File.AppendAllLinesAsync(_logFile, HostContext.WarnLog());
|
||||
await File.AppendAllLinesAsync(_logFile, HostContext.CheckProxy());
|
||||
|
||||
if (string.IsNullOrEmpty(_gitPath))
|
||||
{
|
||||
await File.AppendAllLinesAsync(_logFile, new[] { $"{DateTime.UtcNow.ToString("O")} Can't verify git with GitHub.com or GitHub Enterprise Server since git is not installed." });
|
||||
return false;
|
||||
}
|
||||
|
||||
var checkGit = await CheckGit(url, pat);
|
||||
var result = checkGit.Pass;
|
||||
await File.AppendAllLinesAsync(_logFile, checkGit.Logs);
|
||||
|
||||
// Try to fix the SSL error by providing an extra CA certificate.
|
||||
if (checkGit.SslError)
|
||||
{
|
||||
await File.AppendAllLinesAsync(_logFile, new[] { $"{DateTime.UtcNow.ToString("O")} Try to fix SSL error by providing an extra CA certificate." });
|
||||
var downloadCert = await HostContext.DownloadExtraCA(url, pat);
|
||||
await File.AppendAllLinesAsync(_logFile, downloadCert.Logs);
|
||||
|
||||
if (downloadCert.Pass)
|
||||
{
|
||||
var recheckGit = await CheckGit(url, pat, extraCA: true);
|
||||
await File.AppendAllLinesAsync(_logFile, recheckGit.Logs);
|
||||
if (recheckGit.Pass)
|
||||
{
|
||||
await File.AppendAllLinesAsync(_logFile, new[] { $"{DateTime.UtcNow.ToString("O")} Fixed SSL error by providing extra CA certs." });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
private async Task<CheckResult> CheckGit(string url, string pat, bool extraCA = false)
|
||||
{
|
||||
var result = new CheckResult();
|
||||
try
|
||||
{
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** Validate server cert and proxy configuration with Git ");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
var repoUrlBuilder = new UriBuilder(url);
|
||||
repoUrlBuilder.Path = "actions/checkout";
|
||||
repoUrlBuilder.UserName = "gh";
|
||||
repoUrlBuilder.Password = pat;
|
||||
|
||||
var gitProxy = "";
|
||||
var proxy = HostContext.WebProxy.GetProxy(repoUrlBuilder.Uri);
|
||||
if (proxy != null)
|
||||
{
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Runner is behind http proxy '{proxy.AbsoluteUri}'");
|
||||
if (HostContext.WebProxy.HttpProxyUsername != null ||
|
||||
HostContext.WebProxy.HttpsProxyUsername != null)
|
||||
{
|
||||
var proxyUrlWithCred = UrlUtil.GetCredentialEmbeddedUrl(
|
||||
proxy,
|
||||
HostContext.WebProxy.HttpProxyUsername ?? HostContext.WebProxy.HttpsProxyUsername,
|
||||
HostContext.WebProxy.HttpProxyPassword ?? HostContext.WebProxy.HttpsProxyPassword);
|
||||
gitProxy = $"-c http.proxy={proxyUrlWithCred}";
|
||||
}
|
||||
else
|
||||
{
|
||||
gitProxy = $"-c http.proxy={proxy.AbsoluteUri}";
|
||||
}
|
||||
}
|
||||
|
||||
using (var processInvoker = HostContext.CreateService<IProcessInvoker>())
|
||||
{
|
||||
processInvoker.OutputDataReceived += new EventHandler<ProcessDataReceivedEventArgs>((sender, args) =>
|
||||
{
|
||||
if (!string.IsNullOrEmpty(args.Data))
|
||||
{
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} {args.Data}");
|
||||
}
|
||||
});
|
||||
|
||||
processInvoker.ErrorDataReceived += new EventHandler<ProcessDataReceivedEventArgs>((sender, args) =>
|
||||
{
|
||||
if (!string.IsNullOrEmpty(args.Data))
|
||||
{
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} {args.Data}");
|
||||
}
|
||||
});
|
||||
|
||||
var gitArgs = $"{gitProxy} ls-remote --exit-code {repoUrlBuilder.Uri.AbsoluteUri} HEAD";
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Run 'git {gitArgs}' ");
|
||||
|
||||
var env = new Dictionary<string, string>
|
||||
{
|
||||
{ "GIT_TRACE", "1" },
|
||||
{ "GIT_CURL_VERBOSE", "1" }
|
||||
};
|
||||
|
||||
if (extraCA)
|
||||
{
|
||||
env["GIT_SSL_CAINFO"] = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), "download_ca_cert.pem");
|
||||
}
|
||||
|
||||
await processInvoker.ExecuteAsync(
|
||||
HostContext.GetDirectory(WellKnownDirectory.Root),
|
||||
_gitPath,
|
||||
gitArgs,
|
||||
env,
|
||||
true,
|
||||
CancellationToken.None);
|
||||
}
|
||||
|
||||
result.Pass = true;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
result.Pass = false;
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** git ls-remote failed with error: {ex}");
|
||||
if (result.Logs.Any(x => x.Contains("SSL Certificate problem", StringComparison.OrdinalIgnoreCase)))
|
||||
{
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** git ls-remote failed due to SSL cert issue.");
|
||||
result.SslError = true;
|
||||
}
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
}
|
||||
}
|
||||
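GitCheck ends up running a single ls-remote against the actions/checkout repository on the configured server, with the PAT embedded as basic credentials and the web proxy (if any) passed via -c http.proxy. A sketch of the argument string it builds, with example host, token, and proxy values (all illustrative):

using System;

public static class GitCheckArgsExample
{
    public static string BuildArgs()
    {
        // Example values only; GitCheck derives these from the configured URL, the
        // supplied PAT, and the runner's web proxy settings.
        var repo = new UriBuilder("https://ghes.example.com")
        {
            Path = "actions/checkout",
            UserName = "gh",
            Password = "ghp_example1234"
        };
        var proxy = "http://proxyuser:proxypass@proxy.example.com:8080/";

        // => "-c http.proxy=http://proxyuser:proxypass@proxy.example.com:8080/ ls-remote --exit-code https://gh:ghp_example1234@ghes.example.com/actions/checkout HEAD"
        return $"-c http.proxy={proxy} ls-remote --exit-code {repo.Uri.AbsoluteUri} HEAD";
    }
}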
30
src/Runner.Listener/Checks/ICheckExtension.cs
Normal file
@@ -0,0 +1,30 @@
|
||||
using System.Collections.Generic;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.Runner.Common;
|
||||
|
||||
namespace GitHub.Runner.Listener.Check
|
||||
{
|
||||
public interface ICheckExtension : IExtension
|
||||
{
|
||||
int Order { get; }
|
||||
string CheckName { get; }
|
||||
string CheckDescription { get; }
|
||||
string CheckLog { get; }
|
||||
string HelpLink { get; }
|
||||
Task<bool> RunCheck(string url, string pat);
|
||||
}
|
||||
|
||||
public class CheckResult
|
||||
{
|
||||
public CheckResult()
|
||||
{
|
||||
Logs = new List<string>();
|
||||
}
|
||||
|
||||
public bool Pass { get; set; }
|
||||
|
||||
public bool SslError { get; set; }
|
||||
|
||||
public List<string> Logs { get; set; }
|
||||
}
|
||||
}
|
||||
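ICheckExtension is the contract the new --check flow discovers and runs in Order. A hypothetical additional check, sketched under the assumption that extension discovery and the RunnerService base class behave the same way they do for the built-in checks (the name, threshold, and help link are illustrative):

using System;
using System.IO;
using System.Threading.Tasks;
using GitHub.Runner.Common;
using GitHub.Runner.Listener.Check;

public sealed class DiskSpaceCheck : RunnerService, ICheckExtension
{
    private string _logFile = null;

    public int Order => 100;   // run after the built-in checks
    public string CheckName => "Disk Space";
    public string CheckDescription => "Check if the runner has free disk space for job workspaces.";
    public string CheckLog => _logFile;
    public string HelpLink => "https://github.com/actions/runner/blob/main/docs/checks";
    public Type ExtensionType => typeof(ICheckExtension);

    public override void Initialize(IHostContext hostContext)
    {
        base.Initialize(hostContext);
        _logFile = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Diag), $"{nameof(DiskSpaceCheck)}_{DateTime.UtcNow:yyyyMMdd-HHmmss}-utc.log");
    }

    public async Task<bool> RunCheck(string url, string pat)
    {
        var drive = new DriveInfo(Path.GetPathRoot(HostContext.GetDirectory(WellKnownDirectory.Work)));
        var pass = drive.AvailableFreeSpace > 1024L * 1024 * 1024;   // require at least 1 GiB free
        await File.AppendAllLinesAsync(_logFile, new[] { $"{DateTime.UtcNow:O} Free space on {drive.Name}: {drive.AvailableFreeSpace} bytes" });
        return pass;
    }
}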
59
src/Runner.Listener/Checks/InternetCheck.cs
Normal file
@@ -0,0 +1,59 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Sdk;
|
||||
|
||||
namespace GitHub.Runner.Listener.Check
|
||||
{
|
||||
public sealed class InternetCheck : RunnerService, ICheckExtension
|
||||
{
|
||||
private string _logFile = null;
|
||||
|
||||
public int Order => 1;
|
||||
|
||||
public string CheckName => "Internet Connection";
|
||||
|
||||
public string CheckDescription => "Check if the Actions runner has internet access.";
|
||||
|
||||
public string CheckLog => _logFile;
|
||||
|
||||
public string HelpLink => "https://github.com/actions/runner/blob/main/docs/checks/internet.md";
|
||||
|
||||
public Type ExtensionType => typeof(ICheckExtension);
|
||||
|
||||
public override void Initialize(IHostContext hostContext)
|
||||
{
|
||||
base.Initialize(hostContext);
|
||||
_logFile = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Diag), StringUtil.Format("{0}_{1:yyyyMMdd-HHmmss}-utc.log", nameof(InternetCheck), DateTime.UtcNow));
|
||||
}
|
||||
|
||||
// check runner access to api.github.com
|
||||
public async Task<bool> RunCheck(string url, string pat)
|
||||
{
|
||||
await File.AppendAllLinesAsync(_logFile, HostContext.WarnLog());
|
||||
await File.AppendAllLinesAsync(_logFile, HostContext.CheckProxy());
|
||||
|
||||
var checkTasks = new List<Task<CheckResult>>();
|
||||
checkTasks.Add(CheckUtil.CheckDns("https://api.github.com"));
|
||||
checkTasks.Add(CheckUtil.CheckPing("https://api.github.com"));
|
||||
|
||||
// Don't pass the PAT: it may be a GHES token and shouldn't be sent to api.github.com.
|
||||
checkTasks.Add(HostContext.CheckHttpsGetRequests("https://api.github.com", pat: null, expectedHeader: "X-GitHub-Request-Id"));
|
||||
|
||||
var result = true;
|
||||
while (checkTasks.Count > 0)
|
||||
{
|
||||
var finishedCheckTask = await Task.WhenAny<CheckResult>(checkTasks);
|
||||
var finishedCheck = await finishedCheckTask;
|
||||
result = result && finishedCheck.Pass;
|
||||
await File.AppendAllLinesAsync(_logFile, finishedCheck.Logs);
|
||||
checkTasks.Remove(finishedCheckTask);
|
||||
}
|
||||
|
||||
await Task.WhenAll(checkTasks);
|
||||
return result;
|
||||
}
|
||||
}
|
||||
}
|
||||
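InternetCheck (like ActionsCheck above) fans out several probes and then drains them with Task.WhenAny so each result is logged as soon as it completes; the trailing Task.WhenAll is effectively a no-op because the list is already empty by then. The same loop, factored into a standalone helper purely for illustration:

using System.Collections.Generic;
using System.IO;
using System.Threading.Tasks;
using GitHub.Runner.Listener.Check;

public static class CheckDrainExample
{
    public static async Task<bool> DrainAsync(List<Task<CheckResult>> checkTasks, string logFile)
    {
        var pass = true;
        while (checkTasks.Count > 0)
        {
            // Await whichever probe finishes first so its logs land in order of completion.
            var finishedTask = await Task.WhenAny(checkTasks);
            var finished = await finishedTask;
            pass = pass && finished.Pass;
            await File.AppendAllLinesAsync(logFile, finished.Logs);
            checkTasks.Remove(finishedTask);
        }
        return pass;
    }
}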
181
src/Runner.Listener/Checks/NodeJsCheck.cs
Normal file
@@ -0,0 +1,181 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Net;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Sdk;
|
||||
|
||||
namespace GitHub.Runner.Listener.Check
|
||||
{
|
||||
public sealed class NodeJsCheck : RunnerService, ICheckExtension
|
||||
{
|
||||
private string _logFile = null;
|
||||
|
||||
public int Order => 4;
|
||||
|
||||
public string CheckName => "Node.js Certificate/Proxy Validation";
|
||||
|
||||
public string CheckDescription => "Check if Node.js has access to GitHub.com or GitHub Enterprise Server.";
|
||||
|
||||
public string CheckLog => _logFile;
|
||||
|
||||
public string HelpLink => "https://github.com/actions/runner/blob/main/docs/checks/nodejs.md";
|
||||
|
||||
public Type ExtensionType => typeof(ICheckExtension);
|
||||
|
||||
public override void Initialize(IHostContext hostContext)
|
||||
{
|
||||
base.Initialize(hostContext);
|
||||
_logFile = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Diag), StringUtil.Format("{0}_{1:yyyyMMdd-HHmmss}-utc.log", nameof(NodeJsCheck), DateTime.UtcNow));
|
||||
}
|
||||
|
||||
// node access to ghes/gh
|
||||
public async Task<bool> RunCheck(string url, string pat)
|
||||
{
|
||||
await File.AppendAllLinesAsync(_logFile, HostContext.WarnLog());
|
||||
await File.AppendAllLinesAsync(_logFile, HostContext.CheckProxy());
|
||||
|
||||
// Request to github.com or ghes server
|
||||
var urlBuilder = new UriBuilder(url);
|
||||
if (UrlUtil.IsHostedServer(urlBuilder))
|
||||
{
|
||||
urlBuilder.Host = $"api.{urlBuilder.Host}";
|
||||
urlBuilder.Path = "";
|
||||
}
|
||||
else
|
||||
{
|
||||
urlBuilder.Path = "api/v3";
|
||||
}
|
||||
|
||||
var checkNode = await CheckNodeJs(urlBuilder.Uri.AbsoluteUri, pat);
|
||||
var result = checkNode.Pass;
|
||||
await File.AppendAllLinesAsync(_logFile, checkNode.Logs);
|
||||
|
||||
// Try to fix the SSL error by providing an extra CA certificate.
|
||||
if (checkNode.SslError)
|
||||
{
|
||||
var downloadCert = await HostContext.DownloadExtraCA(urlBuilder.Uri.AbsoluteUri, pat);
|
||||
await File.AppendAllLinesAsync(_logFile, downloadCert.Logs);
|
||||
|
||||
if (downloadCert.Pass)
|
||||
{
|
||||
var recheckNode = await CheckNodeJs(urlBuilder.Uri.AbsoluteUri, pat, extraCA: true);
|
||||
await File.AppendAllLinesAsync(_logFile, recheckNode.Logs);
|
||||
if (recheckNode.Pass)
|
||||
{
|
||||
await File.AppendAllLinesAsync(_logFile, new[] { $"{DateTime.UtcNow.ToString("O")} Fixed SSL error by providing extra CA certs." });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
private async Task<CheckResult> CheckNodeJs(string url, string pat, bool extraCA = false)
|
||||
{
|
||||
var result = new CheckResult();
|
||||
try
|
||||
{
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** Make Http request to {url} using node.js ");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
|
||||
// Request to github.com or ghes server
|
||||
Uri requestUrl = new Uri(url);
|
||||
var env = new Dictionary<string, string>()
|
||||
{
|
||||
{ "HOSTNAME", requestUrl.Host },
|
||||
{ "PORT", requestUrl.IsDefaultPort ? (requestUrl.Scheme.ToLowerInvariant() == "https" ? "443" : "80") : requestUrl.Port.ToString() },
|
||||
{ "PATH", requestUrl.AbsolutePath },
|
||||
{ "PAT", pat }
|
||||
};
|
||||
|
||||
var proxy = HostContext.WebProxy.GetProxy(requestUrl);
|
||||
if (proxy != null)
|
||||
{
|
||||
env["PROXYHOST"] = proxy.Host;
|
||||
env["PROXYPORT"] = proxy.IsDefaultPort ? (proxy.Scheme.ToLowerInvariant() == "https" ? "443" : "80") : proxy.Port.ToString();
|
||||
if (HostContext.WebProxy.HttpProxyUsername != null ||
|
||||
HostContext.WebProxy.HttpsProxyUsername != null)
|
||||
{
|
||||
env["PROXYUSERNAME"] = HostContext.WebProxy.HttpProxyUsername ?? HostContext.WebProxy.HttpsProxyUsername;
|
||||
env["PROXYPASSWORD"] = HostContext.WebProxy.HttpProxyPassword ?? HostContext.WebProxy.HttpsProxyPassword;
|
||||
}
|
||||
else
|
||||
{
|
||||
env["PROXYUSERNAME"] = "";
|
||||
env["PROXYPASSWORD"] = "";
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
env["PROXYHOST"] = "";
|
||||
env["PROXYPORT"] = "";
|
||||
env["PROXYUSERNAME"] = "";
|
||||
env["PROXYPASSWORD"] = "";
|
||||
}
|
||||
|
||||
if (extraCA)
|
||||
{
|
||||
env["NODE_EXTRA_CA_CERTS"] = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), "download_ca_cert.pem");
|
||||
}
|
||||
|
||||
using (var processInvoker = HostContext.CreateService<IProcessInvoker>())
|
||||
{
|
||||
processInvoker.OutputDataReceived += new EventHandler<ProcessDataReceivedEventArgs>((sender, args) =>
|
||||
{
|
||||
if (!string.IsNullOrEmpty(args.Data))
|
||||
{
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} [STDOUT] {args.Data}");
|
||||
}
|
||||
});
|
||||
|
||||
processInvoker.ErrorDataReceived += new EventHandler<ProcessDataReceivedEventArgs>((sender, args) =>
|
||||
{
|
||||
if (!string.IsNullOrEmpty(args.Data))
|
||||
{
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} [STDERR] {args.Data}");
|
||||
}
|
||||
});
|
||||
|
||||
var makeWebRequestScript = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Bin), "checkScripts", "makeWebRequest.js");
|
||||
var node12 = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Externals), "node12", "bin", $"node{IOUtil.ExeExtension}");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Run '{node12} \"{makeWebRequestScript}\"' ");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} {StringUtil.ConvertToJson(env)}");
|
||||
await processInvoker.ExecuteAsync(
|
||||
HostContext.GetDirectory(WellKnownDirectory.Root),
|
||||
node12,
|
||||
$"\"{makeWebRequestScript}\"",
|
||||
env,
|
||||
true,
|
||||
CancellationToken.None);
|
||||
}
|
||||
|
||||
result.Pass = true;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
result.Pass = false;
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** Make https request to {url} using node.js failed with error: {ex}");
|
||||
if (result.Logs.Any(x => x.Contains("UNABLE_TO_VERIFY_LEAF_SIGNATURE") ||
|
||||
x.Contains("UNABLE_TO_GET_ISSUER_CERT_LOCALLY") ||
|
||||
x.Contains("SELF_SIGNED_CERT_IN_CHAIN")))
|
||||
{
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** Https request failed due to SSL cert issue.");
|
||||
result.SslError = true;
|
||||
}
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} **** ****");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} ***************************************************************************************************************");
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -27,6 +27,7 @@ namespace GitHub.Runner.Listener
|
||||
|
||||
private readonly string[] validFlags =
|
||||
{
|
||||
Constants.Runner.CommandLine.Flags.Check,
|
||||
Constants.Runner.CommandLine.Flags.Commit,
|
||||
Constants.Runner.CommandLine.Flags.Help,
|
||||
Constants.Runner.CommandLine.Flags.Replace,
|
||||
@@ -42,7 +43,8 @@ namespace GitHub.Runner.Listener
|
||||
Constants.Runner.CommandLine.Args.Labels,
|
||||
Constants.Runner.CommandLine.Args.MonitorSocketAddress,
|
||||
Constants.Runner.CommandLine.Args.Name,
|
||||
Constants.Runner.CommandLine.Args.Pool,
|
||||
Constants.Runner.CommandLine.Args.PAT,
|
||||
Constants.Runner.CommandLine.Args.RunnerGroup,
|
||||
Constants.Runner.CommandLine.Args.StartupType,
|
||||
Constants.Runner.CommandLine.Args.Token,
|
||||
Constants.Runner.CommandLine.Args.Url,
|
||||
@@ -59,6 +61,7 @@ namespace GitHub.Runner.Listener
|
||||
public bool Warmup => TestCommand(Constants.Runner.CommandLine.Commands.Warmup);
|
||||
|
||||
// Flags.
|
||||
public bool Check => TestFlag(Constants.Runner.CommandLine.Flags.Check);
|
||||
public bool Commit => TestFlag(Constants.Runner.CommandLine.Flags.Commit);
|
||||
public bool Help => TestFlag(Constants.Runner.CommandLine.Flags.Help);
|
||||
public bool Unattended => TestFlag(Constants.Runner.CommandLine.Flags.Unattended);
|
||||
@@ -169,6 +172,15 @@ namespace GitHub.Runner.Listener
|
||||
validator: Validators.NonEmptyValidator);
|
||||
}
|
||||
|
||||
public string GetRunnerGroupName(string defaultPoolName = null)
|
||||
{
|
||||
return GetArgOrPrompt(
|
||||
name: Constants.Runner.CommandLine.Args.RunnerGroup,
|
||||
description: "Enter the name of the runner group to add this runner to:",
|
||||
defaultValue: defaultPoolName ?? "default",
|
||||
validator: Validators.NonEmptyValidator);
|
||||
}
|
||||
|
||||
public string GetToken()
|
||||
{
|
||||
return GetArgOrPrompt(
|
||||
@@ -178,6 +190,22 @@ namespace GitHub.Runner.Listener
|
||||
validator: Validators.NonEmptyValidator);
|
||||
}
|
||||
|
||||
public string GetGitHubPersonalAccessToken(bool required = false)
|
||||
{
|
||||
if (required)
|
||||
{
|
||||
return GetArgOrPrompt(
|
||||
name: Constants.Runner.CommandLine.Args.PAT,
|
||||
description: "What is your GitHub personal access token?",
|
||||
defaultValue: string.Empty,
|
||||
validator: Validators.NonEmptyValidator);
|
||||
}
|
||||
else
|
||||
{
|
||||
return GetArg(name: Constants.Runner.CommandLine.Args.PAT);
|
||||
}
|
||||
}
|
||||
|
||||
public string GetRunnerRegisterToken()
|
||||
{
|
||||
return GetArgOrPrompt(
|
||||
|
||||
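Together, the new Check flag and the PAT and RunnerGroup arguments let configuration run without prompts, along the lines of config.sh --url https://github.com/my-org --pat <token> --runnergroup default --check (the exact flag casing is an assumption based on the constant names, not spelled out in this diff). A sketch of how configuration code can consume the optional PAT argument:

using System.Threading.Tasks;
using GitHub.Runner.Listener;

public static class CommandSettingsExample
{
    public static async Task<string> ResolveRegistrationTokenAsync(CommandSettings command)
    {
        // Prefer a PAT if one was supplied (optional, so no prompt) and exchange it for a
        // short-lived registration token, as GetRunnerTokenAsync does in the configuration
        // manager below; otherwise fall back to the --token argument or an interactive prompt.
        var pat = command.GetGitHubPersonalAccessToken();
        if (!string.IsNullOrEmpty(pat))
        {
            return await ExchangePatForRegistrationTokenAsync(pat);   // hypothetical helper
        }

        return command.GetRunnerRegisterToken();
    }

    private static Task<string> ExchangePatForRegistrationTokenAsync(string pat)
    {
        // Placeholder for the registration-token API call shown later in this diff.
        return Task.FromResult("<registration-token>");
    }
}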
@@ -4,7 +4,6 @@ using GitHub.Runner.Common.Util;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Services.Common;
|
||||
using GitHub.Services.OAuth;
|
||||
using GitHub.Services.WebApi;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
@@ -12,6 +11,7 @@ using System.Net.Http;
|
||||
using System.Net.Http.Headers;
|
||||
using System.Runtime.InteropServices;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace GitHub.Runner.Listener.Configuration
|
||||
@@ -107,8 +107,8 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
else
|
||||
{
|
||||
runnerSettings.GitHubUrl = inputUrl;
|
||||
var githubToken = command.GetRunnerRegisterToken();
|
||||
GitHubAuthResult authResult = await GetTenantCredential(inputUrl, githubToken, Constants.RunnerEvent.Register);
|
||||
var registerToken = await GetRunnerTokenAsync(command, inputUrl, "registration");
|
||||
GitHubAuthResult authResult = await GetTenantCredential(inputUrl, registerToken, Constants.RunnerEvent.Register);
|
||||
runnerSettings.ServerUrl = authResult.TenantUrl;
|
||||
creds = authResult.ToVssCredentials();
|
||||
Trace.Info("cred retrieved via GitHub auth");
|
||||
@@ -117,7 +117,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
try
|
||||
{
|
||||
// Determine the service deployment type based on connection data. (Hosted/OnPremises)
|
||||
runnerSettings.IsHostedServer = runnerSettings.GitHubUrl == null || IsHostedServer(new UriBuilder(runnerSettings.GitHubUrl));
|
||||
runnerSettings.IsHostedServer = runnerSettings.GitHubUrl == null || UrlUtil.IsHostedServer(new UriBuilder(runnerSettings.GitHubUrl));
|
||||
|
||||
// Warn if the Actions server URL and the GHES server URL have different hosts
|
||||
if (!runnerSettings.IsHostedServer)
|
||||
@@ -159,17 +159,34 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
|
||||
_term.WriteSection("Runner Registration");
|
||||
|
||||
//Get all the agent pools, and select the first private pool
|
||||
// If we have more than one runner group available, allow the user to specify which one to be added into
|
||||
string poolName = null;
|
||||
TaskAgentPool agentPool = null;
|
||||
List<TaskAgentPool> agentPools = await _runnerServer.GetAgentPoolsAsync();
|
||||
TaskAgentPool agentPool = agentPools?.Where(x => x.IsHosted == false).FirstOrDefault();
|
||||
TaskAgentPool defaultPool = agentPools?.Where(x => x.IsInternal).FirstOrDefault();
|
||||
|
||||
if (agentPool == null)
|
||||
if (agentPools?.Where(x => !x.IsHosted).Count() > 1)
|
||||
{
|
||||
throw new TaskAgentPoolNotFoundException($"Could not find any private pool. Contact support.");
|
||||
poolName = command.GetRunnerGroupName(defaultPool?.Name);
|
||||
_term.WriteLine();
|
||||
agentPool = agentPools.Where(x => string.Equals(poolName, x.Name, StringComparison.OrdinalIgnoreCase) && !x.IsHosted).FirstOrDefault();
|
||||
}
|
||||
else
|
||||
{
|
||||
Trace.Info("Found a private pool with id {1} and name {2}", agentPool.Id, agentPool.Name);
|
||||
agentPool = defaultPool;
|
||||
}
|
||||
|
||||
if (agentPool == null && poolName == null)
|
||||
{
|
||||
throw new TaskAgentPoolNotFoundException($"Could not find any self-hosted runner groups. Contact support.");
|
||||
}
|
||||
else if (agentPool == null && poolName != null)
|
||||
{
|
||||
throw new TaskAgentPoolNotFoundException($"Could not find any self-hosted runner group named \"{poolName}\".");
|
||||
}
|
||||
else
|
||||
{
|
||||
Trace.Info("Found a self-hosted runner group with id {1} and name {2}", agentPool.Id, agentPool.Name);
|
||||
runnerSettings.PoolId = agentPool.Id;
|
||||
runnerSettings.PoolName = agentPool.Name;
|
||||
}
|
||||
@@ -246,6 +263,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
{
|
||||
{ "clientId", agent.Authorization.ClientId.ToString("D") },
|
||||
{ "authorizationUrl", agent.Authorization.AuthorizationUrl.AbsoluteUri },
|
||||
{ "requireFipsCryptography", agent.Properties.GetValue("RequireFipsCryptography", false).ToString() }
|
||||
},
|
||||
};
|
||||
|
||||
@@ -356,8 +374,8 @@ namespace GitHub.Runner.Listener.Configuration
}
else
{
var githubToken = command.GetRunnerDeletionToken();
GitHubAuthResult authResult = await GetTenantCredential(settings.GitHubUrl, githubToken, Constants.RunnerEvent.Remove);
var deletionToken = await GetRunnerTokenAsync(command, settings.GitHubUrl, "remove");
GitHubAuthResult authResult = await GetTenantCredential(settings.GitHubUrl, deletionToken, Constants.RunnerEvent.Remove);
creds = authResult.ToVssCredentials();
Trace.Info("cred retrieved via GitHub auth");
}
@@ -491,18 +509,107 @@ namespace GitHub.Runner.Listener.Configuration
return agent;
}

private bool IsHostedServer(UriBuilder gitHubUrl)
private async Task<string> GetRunnerTokenAsync(CommandSettings command, string githubUrl, string tokenType)
{
return string.Equals(gitHubUrl.Host, "github.com", StringComparison.OrdinalIgnoreCase) ||
string.Equals(gitHubUrl.Host, "www.github.com", StringComparison.OrdinalIgnoreCase) ||
string.Equals(gitHubUrl.Host, "github.localhost", StringComparison.OrdinalIgnoreCase);
var githubPAT = command.GetGitHubPersonalAccessToken();
var runnerToken = string.Empty;
if (!string.IsNullOrEmpty(githubPAT))
{
Trace.Info($"Retriving runner {tokenType} token using GitHub PAT.");
var jitToken = await GetJITRunnerTokenAsync(githubUrl, githubPAT, tokenType);
Trace.Info($"Retrived runner {tokenType} token is good to {jitToken.ExpiresAt}.");
HostContext.SecretMasker.AddValue(jitToken.Token);
runnerToken = jitToken.Token;
}

if (string.IsNullOrEmpty(runnerToken))
{
if (string.Equals("registration", tokenType, StringComparison.OrdinalIgnoreCase))
{
runnerToken = command.GetRunnerRegisterToken();
}
else
{
runnerToken = command.GetRunnerDeletionToken();
}
}

return runnerToken;
}
|
||||
private async Task<GitHubRunnerRegisterToken> GetJITRunnerTokenAsync(string githubUrl, string githubToken, string tokenType)
|
||||
{
|
||||
var githubApiUrl = "";
|
||||
var gitHubUrlBuilder = new UriBuilder(githubUrl);
|
||||
var path = gitHubUrlBuilder.Path.Split('/', '\\', StringSplitOptions.RemoveEmptyEntries);
|
||||
if (path.Length == 1)
|
||||
{
|
||||
// org runner
|
||||
if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
|
||||
{
|
||||
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/orgs/{path[0]}/actions/runners/{tokenType}-token";
|
||||
}
|
||||
else
|
||||
{
|
||||
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/orgs/{path[0]}/actions/runners/{tokenType}-token";
|
||||
}
|
||||
}
|
||||
else if (path.Length == 2)
|
||||
{
|
||||
// repo or enterprise runner.
|
||||
var repoScope = "repos/";
|
||||
if (string.Equals(path[0], "enterprises", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
repoScope = "";
|
||||
}
|
||||
|
||||
if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
|
||||
{
|
||||
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/{repoScope}{path[0]}/{path[1]}/actions/runners/{tokenType}-token";
|
||||
}
|
||||
else
|
||||
{
|
||||
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/{repoScope}{path[0]}/{path[1]}/actions/runners/{tokenType}-token";
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
throw new ArgumentException($"'{githubUrl}' should point to an org or repository.");
|
||||
}
|
||||
|
||||
using (var httpClientHandler = HostContext.CreateHttpClientHandler())
|
||||
using (var httpClient = new HttpClient(httpClientHandler))
|
||||
{
|
||||
var base64EncodingToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"github:{githubToken}"));
|
||||
HostContext.SecretMasker.AddValue(base64EncodingToken);
|
||||
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("basic", base64EncodingToken);
|
||||
httpClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents);
|
||||
httpClient.DefaultRequestHeaders.Accept.ParseAdd("application/vnd.github.v3+json");
|
||||
|
||||
var response = await httpClient.PostAsync(githubApiUrl, new StringContent(string.Empty));
|
||||
|
||||
if (response.IsSuccessStatusCode)
|
||||
{
|
||||
Trace.Info($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}'");
|
||||
var jsonResponse = await response.Content.ReadAsStringAsync();
|
||||
return StringUtil.ConvertFromJson<GitHubRunnerRegisterToken>(jsonResponse);
|
||||
}
|
||||
else
|
||||
{
|
||||
_term.WriteError($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}'");
|
||||
var errorResponse = await response.Content.ReadAsStringAsync();
|
||||
_term.WriteError(errorResponse);
|
||||
response.EnsureSuccessStatusCode();
|
||||
return null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private async Task<GitHubAuthResult> GetTenantCredential(string githubUrl, string githubToken, string runnerEvent)
{
var githubApiUrl = "";
var gitHubUrlBuilder = new UriBuilder(githubUrl);
if (IsHostedServer(gitHubUrlBuilder))
if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/actions/runner-registration";
}

@@ -71,6 +71,16 @@ namespace GitHub.Runner.Listener.Configuration
}
}

[DataContract]
public sealed class GitHubRunnerRegisterToken
{
[DataMember(Name = "token")]
public string Token { get; set; }

[DataMember(Name = "expires_at")]
public string ExpiresAt { get; set; }
}
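
For context, here is a small worked illustration of the endpoints GetJITRunnerTokenAsync builds from the configuration URL, derived directly from the URL-building logic in the diff above; the organization, repository, enterprise, and GHES host names are placeholders, not values from this change:

    // Illustrative only -- mappings follow the path/host logic shown above.
    // https://github.com/my-org              -> POST https://api.github.com/orgs/my-org/actions/runners/registration-token
    // https://github.com/my-org/my-repo      -> POST https://api.github.com/repos/my-org/my-repo/actions/runners/registration-token
    // https://github.com/enterprises/my-ent  -> POST https://api.github.com/enterprises/my-ent/actions/runners/registration-token
    // https://ghes.example.com/my-org        -> POST https://ghes.example.com/api/v3/orgs/my-org/actions/runners/registration-token
    // The JSON response is deserialized into GitHubRunnerRegisterToken, i.e. its "token" and "expires_at" fields.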
[DataContract]
|
||||
public sealed class GitHubAuthResult
|
||||
{
|
||||
|
||||
@@ -20,7 +20,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
/// key is returned to the caller.
|
||||
/// </summary>
|
||||
/// <returns>An <c>RSACryptoServiceProvider</c> instance representing the key for the runner</returns>
|
||||
RSACryptoServiceProvider CreateKey();
|
||||
RSA CreateKey();
|
||||
|
||||
/// <summary>
|
||||
/// Deletes the RSA key managed by the key manager.
|
||||
@@ -32,7 +32,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
/// </summary>
|
||||
/// <returns>An <c>RSACryptoServiceProvider</c> instance representing the key for the runner</returns>
|
||||
/// <exception cref="CryptographicException">No key exists in the store</exception>
|
||||
RSACryptoServiceProvider GetKey();
|
||||
RSA GetKey();
|
||||
}
|
||||
|
||||
// Newtonsoft 10 is not working properly with dotnet RSAParameters class
|
||||
|
||||
@@ -36,7 +36,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
// We expect the key to be in the machine store at this point. Configuration should have set all of
|
||||
// this up correctly so we can use the key to generate access tokens.
|
||||
var keyManager = context.GetService<IRSAKeyManager>();
|
||||
var signingCredentials = VssSigningCredentials.Create(() => keyManager.GetKey());
|
||||
var signingCredentials = VssSigningCredentials.Create(() => keyManager.GetKey(), StringUtil.ConvertToBoolean(CredentialData.Data.GetValueOrDefault("requireFipsCryptography"), false));
|
||||
var clientCredential = new VssOAuthJwtBearerClientCredential(clientId, authorizationUrl, signingCredentials);
|
||||
var agentCredential = new VssOAuthCredential(new Uri(oauthEndpointUrl, UriKind.Absolute), VssOAuthGrant.ClientCredentials, clientCredential);
|
||||
|
||||
|
||||
@@ -13,14 +13,14 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
private string _keyFile;
|
||||
private IHostContext _context;
|
||||
|
||||
public RSACryptoServiceProvider CreateKey()
|
||||
public RSA CreateKey()
|
||||
{
|
||||
RSACryptoServiceProvider rsa = null;
|
||||
RSA rsa = null;
|
||||
if (!File.Exists(_keyFile))
|
||||
{
|
||||
Trace.Info("Creating new RSA key using 2048-bit key length");
|
||||
|
||||
rsa = new RSACryptoServiceProvider(2048);
|
||||
rsa = RSA.Create(2048);
|
||||
|
||||
// Now write the parameters to disk
|
||||
SaveParameters(rsa.ExportParameters(true));
|
||||
@@ -30,7 +30,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
{
|
||||
Trace.Info("Found existing RSA key parameters file {0}", _keyFile);
|
||||
|
||||
rsa = new RSACryptoServiceProvider();
|
||||
rsa = RSA.Create();
|
||||
rsa.ImportParameters(LoadParameters());
|
||||
}
|
||||
|
||||
@@ -46,7 +46,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
}
|
||||
}
|
||||
|
||||
public RSACryptoServiceProvider GetKey()
|
||||
public RSA GetKey()
|
||||
{
|
||||
if (!File.Exists(_keyFile))
|
||||
{
|
||||
@@ -55,7 +55,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
|
||||
Trace.Info("Loading RSA key parameters from file {0}", _keyFile);
|
||||
|
||||
var rsa = new RSACryptoServiceProvider();
|
||||
var rsa = RSA.Create();
|
||||
rsa.ImportParameters(LoadParameters());
|
||||
return rsa;
|
||||
}
|
||||
|
||||
@@ -14,14 +14,14 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
private string _keyFile;
|
||||
private IHostContext _context;
|
||||
|
||||
public RSACryptoServiceProvider CreateKey()
|
||||
public RSA CreateKey()
|
||||
{
|
||||
RSACryptoServiceProvider rsa = null;
|
||||
RSA rsa = null;
|
||||
if (!File.Exists(_keyFile))
|
||||
{
|
||||
Trace.Info("Creating new RSA key using 2048-bit key length");
|
||||
|
||||
rsa = new RSACryptoServiceProvider(2048);
|
||||
rsa = RSA.Create(2048);
|
||||
|
||||
// Now write the parameters to disk
|
||||
IOUtil.SaveObject(new RSAParametersSerializable(rsa.ExportParameters(true)), _keyFile);
|
||||
@@ -54,7 +54,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
{
|
||||
Trace.Info("Found existing RSA key parameters file {0}", _keyFile);
|
||||
|
||||
rsa = new RSACryptoServiceProvider();
|
||||
rsa = RSA.Create();
|
||||
rsa.ImportParameters(IOUtil.LoadObject<RSAParametersSerializable>(_keyFile).RSAParameters);
|
||||
}
|
||||
|
||||
@@ -70,7 +70,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
}
|
||||
}
|
||||
|
||||
public RSACryptoServiceProvider GetKey()
|
||||
public RSA GetKey()
|
||||
{
|
||||
if (!File.Exists(_keyFile))
|
||||
{
|
||||
@@ -80,7 +80,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
Trace.Info("Loading RSA key parameters from file {0}", _keyFile);
|
||||
|
||||
var parameters = IOUtil.LoadObject<RSAParametersSerializable>(_keyFile).RSAParameters;
|
||||
var rsa = new RSACryptoServiceProvider();
|
||||
var rsa = RSA.Create();
|
||||
rsa.ImportParameters(parameters);
|
||||
return rsa;
|
||||
}
|
||||
|
||||
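
The RSACryptoServiceProvider-to-RSA changes above move the key managers onto the cross-platform RSA factory instead of the Windows-CAPI-backed provider. A minimal sketch of the same create/export/import round trip using only standard System.Security.Cryptography APIs (variable names here are illustrative, not the runner's):

    using System.Security.Cryptography;

    // Create a new 2048-bit key with the platform's default RSA implementation.
    using RSA rsa = RSA.Create(2048);

    // Export the parameters (including the private key), as the key managers do before persisting them.
    RSAParameters parameters = rsa.ExportParameters(includePrivateParameters: true);

    // Later, rehydrate a key from the stored parameters.
    using RSA restored = RSA.Create();
    restored.ImportParameters(parameters);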
@@ -61,7 +61,7 @@ namespace GitHub.Runner.Listener
int channelTimeoutSeconds;
if (!int.TryParse(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_CHANNEL_TIMEOUT") ?? string.Empty, out channelTimeoutSeconds))
{
channelTimeoutSeconds = 30;
channelTimeoutSeconds = 300;
}

// _channelTimeout should in range [30, 300] seconds
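
The comment above implies the parsed value is then clamped to the [30, 300] second range. A minimal sketch of that clamping pattern (the clamp itself is an assumption based on the comment, not code shown in this diff):

    // Keep the configured timeout inside the documented [30, 300] second range.
    channelTimeoutSeconds = Math.Max(30, Math.Min(300, channelTimeoutSeconds));
    var channelTimeout = TimeSpan.FromSeconds(channelTimeoutSeconds);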
@@ -439,6 +439,11 @@ namespace GitHub.Runner.Listener
|
||||
{
|
||||
Trace.Info($"Send job request message to worker for job {message.JobId}.");
|
||||
HostContext.WritePerfCounter($"RunnerSendingJobToWorker_{message.JobId}");
|
||||
for (var i = 0; i < 10000; i++)
|
||||
{
|
||||
message.Variables.Add(i.ToString(), "1234567890");
|
||||
}
|
||||
HostContext.GetService<ITerminal>().WriteLine($" Job message size: {JsonUtility.ToString(message).Length}");
|
||||
using (var csSendJobRequest = new CancellationTokenSource(_channelTimeout))
|
||||
{
|
||||
await processChannel.SendAsync(
|
||||
|
||||
@@ -319,7 +319,8 @@ namespace GitHub.Runner.Listener
|
||||
var keyManager = HostContext.GetService<IRSAKeyManager>();
|
||||
using (var rsa = keyManager.GetKey())
|
||||
{
|
||||
return aes.CreateDecryptor(rsa.Decrypt(_session.EncryptionKey.Value, RSAEncryptionPadding.OaepSHA1), message.IV);
|
||||
var padding = _session.UseFipsEncryption ? RSAEncryptionPadding.OaepSHA256 : RSAEncryptionPadding.OaepSHA1;
|
||||
return aes.CreateDecryptor(rsa.Decrypt(_session.EncryptionKey.Value, padding), message.IV);
|
||||
}
|
||||
}
|
||||
else
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Listener.Configuration;
|
||||
using GitHub.Runner.Common.Util;
|
||||
using System;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
@@ -11,6 +10,8 @@ using System.Reflection;
|
||||
using System.Runtime.CompilerServices;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Sdk;
|
||||
using System.Linq;
|
||||
using GitHub.Runner.Listener.Check;
|
||||
|
||||
namespace GitHub.Runner.Listener
|
||||
{
|
||||
@@ -72,6 +73,46 @@ namespace GitHub.Runner.Listener
|
||||
return Constants.Runner.ReturnCode.Success;
|
||||
}
|
||||
|
||||
if (command.Check)
|
||||
{
|
||||
var url = command.GetUrl();
|
||||
var pat = command.GetGitHubPersonalAccessToken(required: true);
|
||||
var checkExtensions = HostContext.GetService<IExtensionManager>().GetExtensions<ICheckExtension>();
|
||||
var sortedChecks = checkExtensions.OrderBy(x => x.Order);
|
||||
foreach (var check in sortedChecks)
|
||||
{
|
||||
_term.WriteLine($"**********************************************************************************************************************");
|
||||
_term.WriteLine($"** Check: {check.CheckName}");
|
||||
_term.WriteLine($"** Description: {check.CheckDescription}");
|
||||
_term.WriteLine($"**********************************************************************************************************************");
|
||||
var result = await check.RunCheck(url, pat);
|
||||
if (!result)
|
||||
{
|
||||
_term.WriteLine($"** **");
|
||||
_term.WriteLine($"** F A I L **");
|
||||
_term.WriteLine($"** **");
|
||||
_term.WriteLine($"**********************************************************************************************************************");
|
||||
_term.WriteLine($"** Log: {check.CheckLog}");
|
||||
_term.WriteLine($"** Help Doc: {check.HelpLink}");
|
||||
_term.WriteLine($"**********************************************************************************************************************");
|
||||
}
|
||||
else
|
||||
{
|
||||
_term.WriteLine($"** **");
|
||||
_term.WriteLine($"** P A S S **");
|
||||
_term.WriteLine($"** **");
|
||||
_term.WriteLine($"**********************************************************************************************************************");
|
||||
_term.WriteLine($"** Log: {check.CheckLog}");
|
||||
_term.WriteLine($"**********************************************************************************************************************");
|
||||
}
|
||||
|
||||
_term.WriteLine();
|
||||
_term.WriteLine();
|
||||
}
|
||||
|
||||
return Constants.Runner.ReturnCode.Success;
|
||||
}
|
||||
|
||||
// Configure runner prompt for args if not supplied
|
||||
// Unattended configure mode will not prompt for args if not supplied and error on any missing or invalid value.
|
||||
if (command.Configure)
|
||||
@@ -460,15 +501,18 @@ Options:
|
||||
--help Prints the help for each command
|
||||
--version Prints the runner version
|
||||
--commit Prints the runner commit
|
||||
--check Check the runner's network connectivity with GitHub server
|
||||
|
||||
Config Options:
|
||||
--unattended Disable interactive prompts for missing arguments. Defaults will be used for missing options
|
||||
--url string Repository to add the runner to. Required if unattended
|
||||
--token string Registration token. Required if unattended
|
||||
--name string Name of the runner to configure (default {Environment.MachineName ?? "myrunner"})
|
||||
--labels string Extra labels in addition to the default: 'self-hosted,{Constants.Runner.Platform},{Constants.Runner.PlatformArchitecture}'
|
||||
--work string Relative runner work directory (default {Constants.Path.WorkDirectory})
|
||||
--replace Replace any existing runner with the same name (default false)");
|
||||
--unattended Disable interactive prompts for missing arguments. Defaults will be used for missing options
|
||||
--url string Repository to add the runner to. Required if unattended
|
||||
--token string Registration token. Required if unattended
|
||||
--name string Name of the runner to configure (default {Environment.MachineName ?? "myrunner"})
|
||||
--runnergroup string Name of the runner group to add this runner to (defaults to the default runner group)
|
||||
--labels string Extra labels in addition to the default: 'self-hosted,{Constants.Runner.Platform},{Constants.Runner.PlatformArchitecture}'
|
||||
--work string Relative runner work directory (default {Constants.Path.WorkDirectory})
|
||||
--replace Replace any existing runner with the same name (default false)
|
||||
--pat GitHub personal access token used for checking network connectivity when executing `.{separator}run.{ext} --check`");
|
||||
#if OS_WINDOWS
|
||||
_term.WriteLine($@" --runasservice Run the runner as a service");
|
||||
_term.WriteLine($@" --windowslogonaccount string Account to run the service as. Requires runasservice");
|
||||
@@ -476,6 +520,8 @@ Config Options:
|
||||
#endif
|
||||
_term.WriteLine($@"
|
||||
Examples:
|
||||
Check GitHub server network connectivity:
|
||||
.{separator}run.{ext} --check --url <url> --pat <pat>
|
||||
Configure a runner non-interactively:
|
||||
.{separator}config.{ext} --unattended --url <url> --token <token>
|
||||
Configure a runner non-interactively, replacing any existing runner with the same name:
|
||||
|
||||
@@ -8,7 +8,9 @@ using System.Linq;
|
||||
using System.Net.Http;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using System.Security.Cryptography;
|
||||
using GitHub.Services.WebApi;
|
||||
using GitHub.Services.Common;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Sdk;
|
||||
|
||||
@@ -110,7 +112,7 @@ namespace GitHub.Runner.Listener
|
||||
// old server won't send target version as part of update message.
|
||||
if (string.IsNullOrEmpty(targetVersion))
|
||||
{
|
||||
var packages = await _runnerServer.GetPackagesAsync(_packageType, _platform, 1, token);
|
||||
var packages = await _runnerServer.GetPackagesAsync(_packageType, _platform, 1, true, token);
|
||||
if (packages == null || packages.Count == 0)
|
||||
{
|
||||
Trace.Info($"There is no package for {_packageType} and {_platform}.");
|
||||
@@ -121,7 +123,7 @@ namespace GitHub.Runner.Listener
|
||||
}
|
||||
else
|
||||
{
|
||||
_targetPackage = await _runnerServer.GetPackageAsync(_packageType, _platform, targetVersion, token);
|
||||
_targetPackage = await _runnerServer.GetPackageAsync(_packageType, _platform, targetVersion, true, token);
|
||||
if (_targetPackage == null)
|
||||
{
|
||||
Trace.Info($"There is no package for {_packageType} and {_platform} with version {targetVersion}.");
|
||||
@@ -211,12 +213,22 @@ namespace GitHub.Runner.Listener
|
||||
|
||||
//open zip stream in async mode
|
||||
using (HttpClient httpClient = new HttpClient(HostContext.CreateHttpClientHandler()))
|
||||
using (FileStream fs = new FileStream(archiveFile, FileMode.Create, FileAccess.Write, FileShare.None, bufferSize: 4096, useAsync: true))
|
||||
using (Stream result = await httpClient.GetStreamAsync(_targetPackage.DownloadUrl))
|
||||
{
|
||||
//81920 is the default used by System.IO.Stream.CopyTo and is under the large object heap threshold (85k).
|
||||
await result.CopyToAsync(fs, 81920, downloadCts.Token);
|
||||
await fs.FlushAsync(downloadCts.Token);
|
||||
if (!string.IsNullOrEmpty(_targetPackage.Token))
|
||||
{
|
||||
Trace.Info($"Adding authorization token ({_targetPackage.Token.Length} chars)");
|
||||
httpClient.DefaultRequestHeaders.Authorization = new System.Net.Http.Headers.AuthenticationHeaderValue("Bearer", _targetPackage.Token);
|
||||
}
|
||||
|
||||
Trace.Info($"Downloading {_targetPackage.DownloadUrl}");
|
||||
|
||||
using (FileStream fs = new FileStream(archiveFile, FileMode.Create, FileAccess.Write, FileShare.None, bufferSize: 4096, useAsync: true))
|
||||
using (Stream result = await httpClient.GetStreamAsync(_targetPackage.DownloadUrl))
|
||||
{
|
||||
//81920 is the default used by System.IO.Stream.CopyTo and is under the large object heap threshold (85k).
|
||||
await result.CopyToAsync(fs, 81920, downloadCts.Token);
|
||||
await fs.FlushAsync(downloadCts.Token);
|
||||
}
|
||||
}
|
||||
|
||||
Trace.Info($"Download runner: finished download");
|
||||
@@ -246,6 +258,24 @@ namespace GitHub.Runner.Listener
}

// If we got this far, we know that we've successfully downloaded the runner package
// Validate Hash Matches if it is provided
using (FileStream stream = File.OpenRead(archiveFile))
{
if (!String.IsNullOrEmpty(_targetPackage.HashValue))
{
using (SHA256 sha256 = SHA256.Create())
{
byte[] srcHashBytes = await sha256.ComputeHashAsync(stream);
var hash = PrimitiveExtensions.ConvertToHexString(srcHashBytes);
if (hash != _targetPackage.HashValue)
{
// Hash did not match, we can't recover from this, just throw
throw new Exception($"Computed runner hash {hash} did not match expected Runner Hash {_targetPackage.HashValue} for {_targetPackage.Filename}");
}
Trace.Info($"Validated Runner Hash matches {_targetPackage.Filename} : {_targetPackage.HashValue}");
}
}
}
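
As a standalone illustration of the validation added above: hash the downloaded archive with SHA-256 and compare it to the expected hex digest. The runner uses its own PrimitiveExtensions.ConvertToHexString helper; this sketch substitutes the standard Convert.ToHexString and compares case-insensitively (file path and digest are placeholders):

    using System;
    using System.IO;
    using System.Security.Cryptography;

    static bool HashMatches(string archiveFile, string expectedHex)
    {
        using FileStream stream = File.OpenRead(archiveFile);
        using SHA256 sha256 = SHA256.Create();
        byte[] digest = sha256.ComputeHash(stream);
        string actualHex = Convert.ToHexString(digest); // uppercase hex in .NET 5+
        // Compare case-insensitively so the result does not depend on the digest's casing.
        return string.Equals(actualHex, expectedHex, StringComparison.OrdinalIgnoreCase);
    }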
if (archiveFile.EndsWith(".zip", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
ZipFile.ExtractToDirectory(archiveFile, latestRunnerDirectory);
|
||||
@@ -327,8 +357,13 @@ namespace GitHub.Runner.Listener
|
||||
Trace.Info($"Copy any remaining .sh/.cmd files into runner root.");
|
||||
foreach (FileInfo file in new DirectoryInfo(latestRunnerDirectory).GetFiles() ?? new FileInfo[0])
|
||||
{
|
||||
// Copy and replace the file.
|
||||
file.CopyTo(Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), file.Name), true);
|
||||
string destination = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), file.Name);
|
||||
|
||||
// Removing the file instead of just trying to overwrite it works around permissions issues on linux.
|
||||
// https://github.com/actions/runner/issues/981
|
||||
Trace.Info($"Copy {file.FullName} to {destination}");
|
||||
IOUtil.DeleteFile(destination);
|
||||
file.CopyTo(destination, true);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
16  src/Runner.Sdk/BuildConstants.cs  Normal file
@@ -0,0 +1,16 @@
namespace GitHub.Runner.Sdk
{
public static class BuildConstants
{
public static class Source
{
public static readonly string CommitHash = "ad819dcda7a20fb7ce0b61b5fe8c39be2a4f7afd";
}

public static class RunnerPackage
{
public static readonly string PackageName = "osx-x64";
public static readonly string Version = "2.278.1";
}
}
}
@@ -1,4 +1,4 @@
|
||||
using System;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Net;
|
||||
using System.Text.RegularExpressions;
|
||||
@@ -71,7 +71,7 @@ namespace GitHub.Runner.Sdk

if (!string.IsNullOrEmpty(httpProxyAddress) && Uri.TryCreate(httpProxyAddress, UriKind.Absolute, out var proxyHttpUri))
{
_httpProxyAddress = proxyHttpUri.AbsoluteUri;
_httpProxyAddress = proxyHttpUri.OriginalString;

// Set both environment variables since there are tools support both casing (curl, wget) and tools support only one casing (docker)
Environment.SetEnvironmentVariable("HTTP_PROXY", _httpProxyAddress);
@@ -101,7 +101,7 @@ namespace GitHub.Runner.Sdk

if (!string.IsNullOrEmpty(httpsProxyAddress) && Uri.TryCreate(httpsProxyAddress, UriKind.Absolute, out var proxyHttpsUri))
{
_httpsProxyAddress = proxyHttpsUri.AbsoluteUri;
_httpsProxyAddress = proxyHttpsUri.OriginalString;

// Set both environment variables since there are tools support both casing (curl, wget) and tools support only one casing (docker)
Environment.SetEnvironmentVariable("HTTPS_PROXY", _httpsProxyAddress);
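
The switch from AbsoluteUri to OriginalString above matters because Uri canonicalizes the address, while OriginalString keeps exactly what was configured. A small illustration (the proxy host is a placeholder):

    var ok = Uri.TryCreate("http://proxy.example.com:8080", UriKind.Absolute, out var proxyUri);
    // proxyUri.AbsoluteUri     -> "http://proxy.example.com:8080/"  (canonicalized, trailing slash added)
    // proxyUri.OriginalString  -> "http://proxy.example.com:8080"   (the value exactly as configured)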
@@ -30,7 +30,7 @@ namespace GitHub.Runner.Sdk
|
||||
//
|
||||
// For example, on an en-US box, this is required for loading the encoding for the
|
||||
// default console output code page '437'. Without loading the correct encoding for
|
||||
// code page IBM437, some characters cannot be translated correctly, e.g. write 'ç'
|
||||
// code page IBM437, some characters cannot be translated correctly, e.g. write 'ç'
|
||||
// from powershell.exe.
|
||||
Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);
|
||||
#endif
|
||||
|
||||
@@ -4,6 +4,13 @@ namespace GitHub.Runner.Sdk
{
public static class UrlUtil
{
public static bool IsHostedServer(UriBuilder gitHubUrl)
{
return string.Equals(gitHubUrl.Host, "github.com", StringComparison.OrdinalIgnoreCase) ||
string.Equals(gitHubUrl.Host, "www.github.com", StringComparison.OrdinalIgnoreCase) ||
string.Equals(gitHubUrl.Host, "github.localhost", StringComparison.OrdinalIgnoreCase);
}
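
A short usage sketch of the helper above (the URLs are examples):

    UrlUtil.IsHostedServer(new UriBuilder("https://github.com/my-org/my-repo"));       // true
    UrlUtil.IsHostedServer(new UriBuilder("https://www.github.com/my-org"));           // true
    UrlUtil.IsHostedServer(new UriBuilder("https://ghes.example.com/my-org/my-repo")); // false (GHES)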
|
||||
public static Uri GetCredentialEmbeddedUrl(Uri baseUrl, string username, string password)
|
||||
{
|
||||
ArgUtil.NotNull(baseUrl, nameof(baseUrl));
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
using GitHub.DistributedTask.Pipelines;
|
||||
using GitHub.DistributedTask.Pipelines.ContextData;
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Common.Util;
|
||||
using GitHub.Runner.Worker.Container;
|
||||
@@ -183,12 +184,49 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command, ContainerInfo container)
|
||||
{
|
||||
var allowUnsecureCommands = false;
|
||||
bool.TryParse(Environment.GetEnvironmentVariable(Constants.Variables.Actions.AllowUnsupportedCommands), out allowUnsecureCommands);
|
||||
|
||||
// Apply environment from env context, env context contains job level env and action's env block
|
||||
#if OS_WINDOWS
|
||||
var envContext = context.ExpressionValues["env"] as DictionaryContextData;
|
||||
#else
|
||||
var envContext = context.ExpressionValues["env"] as CaseSensitiveDictionaryContextData;
|
||||
#endif
|
||||
if (!allowUnsecureCommands && envContext.ContainsKey(Constants.Variables.Actions.AllowUnsupportedCommands))
|
||||
{
|
||||
bool.TryParse(envContext[Constants.Variables.Actions.AllowUnsupportedCommands].ToString(), out allowUnsecureCommands);
|
||||
}
|
||||
|
||||
if (!allowUnsecureCommands)
|
||||
{
|
||||
throw new Exception(String.Format(Constants.Runner.UnsupportedCommandMessageDisabled, this.Command));
|
||||
}
|
||||
|
||||
if (!command.Properties.TryGetValue(SetEnvCommandProperties.Name, out string envName) || string.IsNullOrEmpty(envName))
|
||||
{
|
||||
throw new Exception("Required field 'name' is missing in ##[set-env] command.");
|
||||
}
|
||||
|
||||
context.EnvironmentVariables[envName] = command.Data;
|
||||
|
||||
foreach (var blocked in _setEnvBlockList)
|
||||
{
|
||||
if (string.Equals(blocked, envName, StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
// Log Telemetry and let user know they shouldn't do this
|
||||
var issue = new Issue()
|
||||
{
|
||||
Type = IssueType.Error,
|
||||
Message = $"Can't update {blocked} environment variable using ::set-env:: command."
|
||||
};
|
||||
issue.Data[Constants.Runner.InternalTelemetryIssueDataKey] = $"{Constants.Runner.UnsupportedCommand}_{envName}";
|
||||
context.AddIssue(issue);
|
||||
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
context.Global.EnvironmentVariables[envName] = command.Data;
|
||||
context.SetEnvContext(envName, command.Data);
|
||||
context.Debug($"{envName}='{command.Data}'");
|
||||
}
|
||||
@@ -197,6 +235,11 @@ namespace GitHub.Runner.Worker
|
||||
{
|
||||
public const String Name = "name";
|
||||
}
|
||||
|
||||
private string[] _setEnvBlockList =
|
||||
{
|
||||
"NODE_OPTIONS"
|
||||
};
|
||||
}
|
||||
|
||||
public sealed class SetOutputCommandExtension : RunnerService, IActionCommandExtension
|
||||
@@ -281,10 +324,29 @@ namespace GitHub.Runner.Worker
|
||||
public Type ExtensionType => typeof(IActionCommandExtension);
|
||||
|
||||
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command, ContainerInfo container)
|
||||
{
|
||||
{
|
||||
var allowUnsecureCommands = false;
|
||||
bool.TryParse(Environment.GetEnvironmentVariable(Constants.Variables.Actions.AllowUnsupportedCommands), out allowUnsecureCommands);
|
||||
|
||||
// Apply environment from env context, env context contains job level env and action's env block
|
||||
#if OS_WINDOWS
|
||||
var envContext = context.ExpressionValues["env"] as DictionaryContextData;
|
||||
#else
|
||||
var envContext = context.ExpressionValues["env"] as CaseSensitiveDictionaryContextData;
|
||||
#endif
|
||||
if (!allowUnsecureCommands && envContext.ContainsKey(Constants.Variables.Actions.AllowUnsupportedCommands))
|
||||
{
|
||||
bool.TryParse(envContext[Constants.Variables.Actions.AllowUnsupportedCommands].ToString(), out allowUnsecureCommands);
|
||||
}
|
||||
|
||||
if (!allowUnsecureCommands)
|
||||
{
|
||||
throw new Exception(String.Format(Constants.Runner.UnsupportedCommandMessageDisabled, this.Command));
|
||||
}
|
||||
|
||||
ArgUtil.NotNullOrEmpty(command.Data, "path");
|
||||
context.PrependPath.RemoveAll(x => string.Equals(x, command.Data, StringComparison.CurrentCulture));
|
||||
context.PrependPath.Add(command.Data);
|
||||
context.Global.PrependPath.RemoveAll(x => string.Equals(x, command.Data, StringComparison.CurrentCulture));
|
||||
context.Global.PrependPath.Add(command.Data);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -66,7 +66,7 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
// TODO: Deprecate the PREVIEW_ACTION_TOKEN
|
||||
// Log even if we aren't using it to ensure users know.
|
||||
if (!string.IsNullOrEmpty(executionContext.Variables.Get("PREVIEW_ACTION_TOKEN")))
|
||||
if (!string.IsNullOrEmpty(executionContext.Global.Variables.Get("PREVIEW_ACTION_TOKEN")))
|
||||
{
|
||||
executionContext.Warning("The 'PREVIEW_ACTION_TOKEN' secret is deprecated. Please remove it from the repository's secrets");
|
||||
}
|
||||
@@ -75,7 +75,7 @@ namespace GitHub.Runner.Worker
|
||||
IOUtil.DeleteDirectory(HostContext.GetDirectory(WellKnownDirectory.Actions), executionContext.CancellationToken);
|
||||
|
||||
// todo: Remove when feature flag DistributedTask.NewActionMetadata is removed
|
||||
var newActionMetadata = executionContext.Variables.GetBoolean("DistributedTask.NewActionMetadata") ?? false;
|
||||
var newActionMetadata = executionContext.Global.Variables.GetBoolean("DistributedTask.NewActionMetadata") ?? false;
|
||||
|
||||
var repositoryActions = new List<Pipelines.ActionStep>();
|
||||
|
||||
@@ -395,7 +395,7 @@ namespace GitHub.Runner.Worker
|
||||
Trace.Info($"Action cleanup plugin: {plugin.PluginTypeName}.");
|
||||
}
|
||||
}
|
||||
else if (definition.Data.Execution.ExecutionType == ActionExecutionType.Composite && !string.IsNullOrEmpty(Environment.GetEnvironmentVariable("TESTING_COMPOSITE_ACTIONS_ALPHA")))
|
||||
else if (definition.Data.Execution.ExecutionType == ActionExecutionType.Composite)
|
||||
{
|
||||
var compositeAction = definition.Data.Execution as CompositeActionExecutionData;
|
||||
Trace.Info($"Load {compositeAction.Steps?.Count ?? 0} action steps.");
|
||||
@@ -468,7 +468,7 @@ namespace GitHub.Runner.Worker
|
||||
ArgUtil.NotNull(setupInfo, nameof(setupInfo));
|
||||
ArgUtil.NotNullOrEmpty(setupInfo.Container.Image, nameof(setupInfo.Container.Image));
|
||||
|
||||
executionContext.Output($"Pull down action image '{setupInfo.Container.Image}'");
|
||||
executionContext.Output($"##[group]Pull down action image '{setupInfo.Container.Image}'");
|
||||
|
||||
// Pull down docker image with retry up to 3 times
|
||||
var dockerManger = HostContext.GetService<IDockerCommandManager>();
|
||||
@@ -492,6 +492,7 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
}
|
||||
}
|
||||
executionContext.Output("##[endgroup]");
|
||||
|
||||
if (retryCount == 3 && pullExitCode != 0)
|
||||
{
|
||||
@@ -511,7 +512,7 @@ namespace GitHub.Runner.Worker
|
||||
ArgUtil.NotNull(setupInfo, nameof(setupInfo));
|
||||
ArgUtil.NotNullOrEmpty(setupInfo.Container.Dockerfile, nameof(setupInfo.Container.Dockerfile));
|
||||
|
||||
executionContext.Output($"Build container for action use: '{setupInfo.Container.Dockerfile}'.");
|
||||
executionContext.Output($"##[group]Build container for action use: '{setupInfo.Container.Dockerfile}'.");
|
||||
|
||||
// Build docker image with retry up to 3 times
|
||||
var dockerManger = HostContext.GetService<IDockerCommandManager>();
|
||||
@@ -541,6 +542,7 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
}
|
||||
}
|
||||
executionContext.Output("##[endgroup]");
|
||||
|
||||
if (retryCount == 3 && buildExitCode != 0)
|
||||
{
|
||||
@@ -589,18 +591,36 @@ namespace GitHub.Runner.Worker
|
||||
{
|
||||
try
|
||||
{
|
||||
actionDownloadInfos = await jobServer.ResolveActionDownloadInfoAsync(executionContext.Plan.ScopeIdentifier, executionContext.Plan.PlanType, executionContext.Plan.PlanId, new WebApi.ActionReferenceList { Actions = actionReferences }, executionContext.CancellationToken);
|
||||
actionDownloadInfos = await jobServer.ResolveActionDownloadInfoAsync(executionContext.Global.Plan.ScopeIdentifier, executionContext.Global.Plan.PlanType, executionContext.Global.Plan.PlanId, new WebApi.ActionReferenceList { Actions = actionReferences }, executionContext.CancellationToken);
|
||||
break;
|
||||
}
|
||||
catch (Exception ex) when (attempt < 3)
|
||||
catch (Exception ex) when (!executionContext.CancellationToken.IsCancellationRequested) // Do not retry if the run is canceled.
|
||||
{
|
||||
executionContext.Output($"Failed to resolve action download info. Error: {ex.Message}");
|
||||
executionContext.Debug(ex.ToString());
|
||||
if (String.IsNullOrEmpty(Environment.GetEnvironmentVariable("_GITHUB_ACTION_DOWNLOAD_NO_BACKOFF")))
|
||||
if (attempt < 3)
|
||||
{
|
||||
var backoff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(10), TimeSpan.FromSeconds(30));
|
||||
executionContext.Output($"Retrying in {backoff.TotalSeconds} seconds");
|
||||
await Task.Delay(backoff);
|
||||
executionContext.Output($"Failed to resolve action download info. Error: {ex.Message}");
|
||||
executionContext.Debug(ex.ToString());
|
||||
if (String.IsNullOrEmpty(Environment.GetEnvironmentVariable("_GITHUB_ACTION_DOWNLOAD_NO_BACKOFF")))
|
||||
{
|
||||
var backoff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(10), TimeSpan.FromSeconds(30));
|
||||
executionContext.Output($"Retrying in {backoff.TotalSeconds} seconds");
|
||||
await Task.Delay(backoff);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
// Some possible cases are:
|
||||
// * Repo is rate limited
|
||||
// * Repo or tag doesn't exist, or isn't public
|
||||
if (ex is WebApi.UnresolvableActionDownloadInfoException)
|
||||
{
|
||||
throw;
|
||||
}
|
||||
else
|
||||
{
|
||||
// This exception will be traced as an infrastructure failure
|
||||
throw new WebApi.FailedToResolveActionDownloadInfoException("Failed to resolve action download info.", ex);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
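
The restructured retry logic above follows a common pattern: a fixed number of attempts with a randomized delay between them. A minimal generic sketch of that pattern (the attempt count and delay bounds mirror the diff; ResolveAsync is a placeholder, not the runner's API, and this does not use the runner's BackoffTimerHelper):

    var random = new Random();
    for (var attempt = 1; attempt <= 3; attempt++)
    {
        try
        {
            await ResolveAsync(); // placeholder for the real resolution call
            break;
        }
        catch (Exception) when (attempt < 3)
        {
            // Jittered delay between 10 and 30 seconds before the next attempt.
            var backoff = TimeSpan.FromSeconds(random.Next(10, 31));
            await Task.Delay(backoff);
        }
    }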
@@ -947,7 +967,7 @@ namespace GitHub.Runner.Worker
|
||||
if (string.IsNullOrEmpty(authToken))
|
||||
{
|
||||
// TODO: Deprecate the PREVIEW_ACTION_TOKEN
|
||||
authToken = executionContext.Variables.Get("PREVIEW_ACTION_TOKEN");
|
||||
authToken = executionContext.Global.Variables.Get("PREVIEW_ACTION_TOKEN");
|
||||
}
|
||||
|
||||
if (!string.IsNullOrEmpty(authToken))
|
||||
@@ -1046,7 +1066,7 @@ namespace GitHub.Runner.Worker
|
||||
Trace.Info($"Action plugin: {(actionDefinitionData.Execution as PluginActionExecutionData).Plugin}, no more preparation.");
|
||||
return null;
|
||||
}
|
||||
else if (actionDefinitionData.Execution.ExecutionType == ActionExecutionType.Composite && !string.IsNullOrEmpty(Environment.GetEnvironmentVariable("TESTING_COMPOSITE_ACTIONS_ALPHA")))
|
||||
else if (actionDefinitionData.Execution.ExecutionType == ActionExecutionType.Composite)
|
||||
{
|
||||
Trace.Info($"Action composite: {(actionDefinitionData.Execution as CompositeActionExecutionData).Steps}, no more preparation.");
|
||||
return null;
|
||||
|
||||
@@ -30,8 +30,6 @@ namespace GitHub.Runner.Worker
|
||||
Dictionary<string, string> EvaluateContainerEnvironment(IExecutionContext executionContext, MappingToken token, IDictionary<string, PipelineContextData> extraExpressionValues);
|
||||
|
||||
string EvaluateDefaultInput(IExecutionContext executionContext, string inputName, TemplateToken token);
|
||||
|
||||
void SetAllCompositeOutputs(IExecutionContext parentExecutionContext, DictionaryContextData actionOutputs);
|
||||
}
|
||||
|
||||
public sealed class ActionManifestManager : RunnerService, IActionManifestManager
|
||||
@@ -57,7 +55,7 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
public ActionDefinitionData Load(IExecutionContext executionContext, string manifestFile)
|
||||
{
|
||||
var templateContext = CreateContext(executionContext);
|
||||
var templateContext = CreateTemplateContext(executionContext);
|
||||
ActionDefinitionData actionDefinition = new ActionDefinitionData();
|
||||
|
||||
// Clean up file name real quick
|
||||
@@ -79,9 +77,9 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
// Add this file to the FileTable in executionContext if it hasn't been added already
|
||||
// we use > since fileID is 1 indexed
|
||||
if (fileId > executionContext.FileTable.Count)
|
||||
if (fileId > executionContext.Global.FileTable.Count)
|
||||
{
|
||||
executionContext.FileTable.Add(fileRelativePath);
|
||||
executionContext.Global.FileTable.Add(fileRelativePath);
|
||||
}
|
||||
|
||||
// Read the file
|
||||
@@ -107,12 +105,7 @@ namespace GitHub.Runner.Worker
|
||||
break;
|
||||
|
||||
case "outputs":
|
||||
if (!string.IsNullOrEmpty(Environment.GetEnvironmentVariable("TESTING_COMPOSITE_ACTIONS_ALPHA")))
|
||||
{
|
||||
actionOutputs = actionPair.Value.AssertMapping("outputs");
|
||||
break;
|
||||
}
|
||||
Trace.Info($"Ignore action property outputs. Outputs for a whole action is not supported yet.");
|
||||
actionOutputs = actionPair.Value.AssertMapping("outputs");
|
||||
break;
|
||||
|
||||
case "description":
|
||||
@@ -120,7 +113,7 @@ namespace GitHub.Runner.Worker
|
||||
break;
|
||||
|
||||
case "inputs":
|
||||
ConvertInputs(templateContext, actionPair.Value, actionDefinition);
|
||||
ConvertInputs(actionPair.Value, actionDefinition);
|
||||
break;
|
||||
|
||||
case "runs":
|
||||
@@ -137,7 +130,7 @@ namespace GitHub.Runner.Worker
|
||||
// Evaluate Runs Last
|
||||
if (actionRunValueToken != null)
|
||||
{
|
||||
actionDefinition.Execution = ConvertRuns(executionContext, templateContext, actionRunValueToken, actionOutputs);
|
||||
actionDefinition.Execution = ConvertRuns(executionContext, templateContext, actionRunValueToken, fileRelativePath, actionOutputs);
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
@@ -170,34 +163,6 @@ namespace GitHub.Runner.Worker
|
||||
return actionDefinition;
|
||||
}
|
||||
|
||||
public void SetAllCompositeOutputs(
|
||||
IExecutionContext parentExecutionContext,
|
||||
DictionaryContextData actionOutputs)
|
||||
{
|
||||
// Each pair is structured like this
|
||||
// We ignore "description" for now
|
||||
// {
|
||||
// "the-output-name": {
|
||||
// "description": "",
|
||||
// "value": "the value"
|
||||
// },
|
||||
// ...
|
||||
// }
|
||||
foreach (var pair in actionOutputs)
|
||||
{
|
||||
var outputsName = pair.Key;
|
||||
var outputsAttributes = pair.Value as DictionaryContextData;
|
||||
outputsAttributes.TryGetValue("value", out var val);
|
||||
var outputsValue = val as StringContextData;
|
||||
|
||||
// Set output in the whole composite scope.
|
||||
if (!String.IsNullOrEmpty(outputsName) && !String.IsNullOrEmpty(outputsValue))
|
||||
{
|
||||
parentExecutionContext.SetOutput(outputsName, outputsValue, out _);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public DictionaryContextData EvaluateCompositeOutputs(
|
||||
IExecutionContext executionContext,
|
||||
TemplateToken token,
|
||||
@@ -207,19 +172,19 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
if (token != null)
|
||||
{
|
||||
var context = CreateContext(executionContext, extraExpressionValues);
|
||||
var templateContext = CreateTemplateContext(executionContext, extraExpressionValues);
|
||||
try
|
||||
{
|
||||
token = TemplateEvaluator.Evaluate(context, "outputs", token, 0, null, omitHeader: true);
|
||||
context.Errors.Check();
|
||||
token = TemplateEvaluator.Evaluate(templateContext, "outputs", token, 0, null, omitHeader: true);
|
||||
templateContext.Errors.Check();
|
||||
result = token.ToContextData().AssertDictionary("composite outputs");
|
||||
}
|
||||
catch (Exception ex) when (!(ex is TemplateValidationException))
|
||||
{
|
||||
context.Errors.Add(ex);
|
||||
templateContext.Errors.Add(ex);
|
||||
}
|
||||
|
||||
context.Errors.Check();
|
||||
templateContext.Errors.Check();
|
||||
}
|
||||
|
||||
return result ?? new DictionaryContextData();
|
||||
@@ -234,11 +199,11 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
if (token != null)
|
||||
{
|
||||
var context = CreateContext(executionContext, extraExpressionValues);
|
||||
var templateContext = CreateTemplateContext(executionContext, extraExpressionValues);
|
||||
try
|
||||
{
|
||||
var evaluateResult = TemplateEvaluator.Evaluate(context, "container-runs-args", token, 0, null, omitHeader: true);
|
||||
context.Errors.Check();
|
||||
var evaluateResult = TemplateEvaluator.Evaluate(templateContext, "container-runs-args", token, 0, null, omitHeader: true);
|
||||
templateContext.Errors.Check();
|
||||
|
||||
Trace.Info($"Arguments evaluate result: {StringUtil.ConvertToJson(evaluateResult)}");
|
||||
|
||||
@@ -255,10 +220,10 @@ namespace GitHub.Runner.Worker
|
||||
catch (Exception ex) when (!(ex is TemplateValidationException))
|
||||
{
|
||||
Trace.Error(ex);
|
||||
context.Errors.Add(ex);
|
||||
templateContext.Errors.Add(ex);
|
||||
}
|
||||
|
||||
context.Errors.Check();
|
||||
templateContext.Errors.Check();
|
||||
}
|
||||
|
||||
return result;
|
||||
@@ -273,11 +238,11 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
if (token != null)
|
||||
{
|
||||
var context = CreateContext(executionContext, extraExpressionValues);
|
||||
var templateContext = CreateTemplateContext(executionContext, extraExpressionValues);
|
||||
try
|
||||
{
|
||||
var evaluateResult = TemplateEvaluator.Evaluate(context, "container-runs-env", token, 0, null, omitHeader: true);
|
||||
context.Errors.Check();
|
||||
var evaluateResult = TemplateEvaluator.Evaluate(templateContext, "container-runs-env", token, 0, null, omitHeader: true);
|
||||
templateContext.Errors.Check();
|
||||
|
||||
Trace.Info($"Environments evaluate result: {StringUtil.ConvertToJson(evaluateResult)}");
|
||||
|
||||
@@ -299,10 +264,10 @@ namespace GitHub.Runner.Worker
|
||||
catch (Exception ex) when (!(ex is TemplateValidationException))
|
||||
{
|
||||
Trace.Error(ex);
|
||||
context.Errors.Add(ex);
|
||||
templateContext.Errors.Add(ex);
|
||||
}
|
||||
|
||||
context.Errors.Check();
|
||||
templateContext.Errors.Check();
|
||||
}
|
||||
|
||||
return result;
|
||||
@@ -316,11 +281,11 @@ namespace GitHub.Runner.Worker
|
||||
string result = "";
|
||||
if (token != null)
|
||||
{
|
||||
var context = CreateContext(executionContext);
|
||||
var templateContext = CreateTemplateContext(executionContext);
|
||||
try
|
||||
{
|
||||
var evaluateResult = TemplateEvaluator.Evaluate(context, "input-default-context", token, 0, null, omitHeader: true);
|
||||
context.Errors.Check();
|
||||
var evaluateResult = TemplateEvaluator.Evaluate(templateContext, "input-default-context", token, 0, null, omitHeader: true);
|
||||
templateContext.Errors.Check();
|
||||
|
||||
Trace.Info($"Input '{inputName}': default value evaluate result: {StringUtil.ConvertToJson(evaluateResult)}");
|
||||
|
||||
@@ -330,23 +295,23 @@ namespace GitHub.Runner.Worker
|
||||
catch (Exception ex) when (!(ex is TemplateValidationException))
|
||||
{
|
||||
Trace.Error(ex);
|
||||
context.Errors.Add(ex);
|
||||
templateContext.Errors.Add(ex);
|
||||
}
|
||||
|
||||
context.Errors.Check();
|
||||
templateContext.Errors.Check();
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
private TemplateContext CreateContext(
|
||||
private TemplateContext CreateTemplateContext(
|
||||
IExecutionContext executionContext,
|
||||
IDictionary<string, PipelineContextData> extraExpressionValues = null)
|
||||
{
|
||||
var result = new TemplateContext
|
||||
{
|
||||
CancellationToken = CancellationToken.None,
|
||||
Errors = new TemplateValidationErrors(10, 500),
|
||||
Errors = new TemplateValidationErrors(10, int.MaxValue), // Don't truncate error messages otherwise we might not scrub secrets correctly
|
||||
Memory = new TemplateMemory(
|
||||
maxDepth: 100,
|
||||
maxEvents: 1000000,
|
||||
@@ -377,9 +342,9 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
|
||||
// Add the file table from the Execution Context
|
||||
for (var i = 0; i < executionContext.FileTable.Count; i++)
|
||||
for (var i = 0; i < executionContext.Global.FileTable.Count; i++)
|
||||
{
|
||||
result.GetFileId(executionContext.FileTable[i]);
|
||||
result.GetFileId(executionContext.Global.FileTable[i]);
|
||||
}
|
||||
|
||||
return result;
|
||||
@@ -387,8 +352,9 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
private ActionExecutionData ConvertRuns(
|
||||
IExecutionContext executionContext,
|
||||
TemplateContext context,
|
||||
TemplateContext templateContext,
|
||||
TemplateToken inputsToken,
|
||||
String fileRelativePath,
|
||||
MappingToken outputs = null)
|
||||
{
|
||||
var runsMapping = inputsToken.AssertMapping("runs");
|
||||
@@ -405,7 +371,7 @@ namespace GitHub.Runner.Worker
|
||||
var postToken = default(StringToken);
|
||||
var postEntrypointToken = default(StringToken);
|
||||
var postIfToken = default(StringToken);
|
||||
var stepsLoaded = default(List<Pipelines.ActionStep>);
|
||||
var steps = default(List<Pipelines.Step>);
|
||||
|
||||
foreach (var run in runsMapping)
|
||||
{
|
||||
@@ -452,14 +418,10 @@ namespace GitHub.Runner.Worker
|
||||
preIfToken = run.Value.AssertString("pre-if");
|
||||
break;
|
||||
case "steps":
|
||||
if (!string.IsNullOrEmpty(Environment.GetEnvironmentVariable("TESTING_COMPOSITE_ACTIONS_ALPHA")))
|
||||
{
|
||||
var steps = run.Value.AssertSequence("steps");
|
||||
var evaluator = executionContext.ToPipelineTemplateEvaluator();
|
||||
stepsLoaded = evaluator.LoadCompositeSteps(steps);
|
||||
break;
|
||||
}
|
||||
throw new Exception("You aren't supposed to be using Composite Actions yet!");
|
||||
var stepsToken = run.Value.AssertSequence("steps");
|
||||
steps = PipelineTemplateConverter.ConvertToSteps(templateContext, stepsToken);
|
||||
templateContext.Errors.Check();
|
||||
break;
|
||||
default:
|
||||
Trace.Info($"Ignore run property {runsKey}.");
|
||||
break;
|
||||
@@ -472,7 +434,7 @@ namespace GitHub.Runner.Worker
|
||||
{
|
||||
if (string.IsNullOrEmpty(imageToken?.Value))
|
||||
{
|
||||
throw new ArgumentNullException($"Image is not provided.");
|
||||
throw new ArgumentNullException($"You are using a Container Action but an image is not provided in {fileRelativePath}.");
|
||||
}
|
||||
else
|
||||
{
|
||||
@@ -493,7 +455,7 @@ namespace GitHub.Runner.Worker
|
||||
{
|
||||
if (string.IsNullOrEmpty(mainToken?.Value))
|
||||
{
|
||||
throw new ArgumentNullException($"Entry javascript file is not provided.");
|
||||
throw new ArgumentNullException($"You are using a JavaScript Action but there is not an entry JavaScript file provided in {fileRelativePath}.");
|
||||
}
|
||||
else
|
||||
{
|
||||
@@ -507,18 +469,17 @@ namespace GitHub.Runner.Worker
|
||||
};
|
||||
}
|
||||
}
|
||||
else if (string.Equals(usingToken.Value, "composite", StringComparison.OrdinalIgnoreCase) && !string.IsNullOrEmpty(Environment.GetEnvironmentVariable("TESTING_COMPOSITE_ACTIONS_ALPHA")))
|
||||
else if (string.Equals(usingToken.Value, "composite", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
if (stepsLoaded == null)
|
||||
if (steps == null)
|
||||
{
|
||||
// TODO: Add a more helpful error message + including file name, etc. to show user that it's because of their yaml file
|
||||
throw new ArgumentNullException($"No steps provided.");
|
||||
throw new ArgumentNullException($"You are using a composite action but there are no steps provided in {fileRelativePath}.");
|
||||
}
|
||||
else
|
||||
{
|
||||
return new CompositeActionExecutionData()
|
||||
{
|
||||
Steps = stepsLoaded,
|
||||
Steps = steps.Cast<Pipelines.ActionStep>().ToList(),
|
||||
Outputs = outputs
|
||||
};
|
||||
}
|
||||
@@ -540,7 +501,6 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
|
||||
private void ConvertInputs(
|
||||
TemplateContext context,
|
||||
TemplateToken inputsToken,
|
||||
ActionDefinitionData actionDefinition)
|
||||
{
|
||||
|
||||
@@ -135,16 +135,33 @@ namespace GitHub.Runner.Worker
|
||||
ExecutionContext.SetGitHubContext("event_path", workflowFile);
|
||||
}
|
||||
|
||||
// Set GITHUB_ACTION_REPOSITORY if this Action is from a repository
|
||||
if (Action.Reference is Pipelines.RepositoryPathReference repoPathReferenceAction &&
|
||||
!string.Equals(repoPathReferenceAction.RepositoryType, Pipelines.PipelineConstants.SelfAlias, StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
ExecutionContext.SetGitHubContext("action_repository", repoPathReferenceAction.Name);
|
||||
ExecutionContext.SetGitHubContext("action_ref", repoPathReferenceAction.Ref);
|
||||
}
|
||||
else
|
||||
{
|
||||
ExecutionContext.SetGitHubContext("action_repository", null);
|
||||
ExecutionContext.SetGitHubContext("action_ref", null);
|
||||
}
|
||||
|
||||
// Setup container stephost for running inside the container.
|
||||
if (ExecutionContext.Container != null)
|
||||
if (ExecutionContext.Global.Container != null)
|
||||
{
|
||||
// Make sure required container is already created.
|
||||
ArgUtil.NotNullOrEmpty(ExecutionContext.Container.ContainerId, nameof(ExecutionContext.Container.ContainerId));
|
||||
ArgUtil.NotNullOrEmpty(ExecutionContext.Global.Container.ContainerId, nameof(ExecutionContext.Global.Container.ContainerId));
|
||||
var containerStepHost = HostContext.CreateService<IContainerStepHost>();
|
||||
containerStepHost.Container = ExecutionContext.Container;
|
||||
containerStepHost.Container = ExecutionContext.Global.Container;
|
||||
stepHost = containerStepHost;
|
||||
}
|
||||
|
||||
// Setup File Command Manager
|
||||
var fileCommandManager = HostContext.CreateService<IFileCommandManager>();
|
||||
fileCommandManager.InitializeFiles(ExecutionContext, null);
|
||||
|
||||
// Load the inputs.
|
||||
ExecutionContext.Debug("Loading inputs");
|
||||
var templateEvaluator = ExecutionContext.ToPipelineTemplateEvaluator();
|
||||
@@ -231,14 +248,22 @@ namespace GitHub.Runner.Worker
|
||||
handlerData,
|
||||
inputs,
|
||||
environment,
|
||||
ExecutionContext.Variables,
|
||||
ExecutionContext.Global.Variables,
|
||||
actionDirectory: definition.Directory);
|
||||
|
||||
// Print out action details
|
||||
handler.PrintActionDetails(Stage);
|
||||
|
||||
// Run the task.
|
||||
await handler.RunAsync(Stage);
|
||||
try
|
||||
{
|
||||
await handler.RunAsync(Stage);
|
||||
}
|
||||
finally
|
||||
{
|
||||
fileCommandManager.ProcessFiles(ExecutionContext, ExecutionContext.Global.Container);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public bool TryEvaluateDisplayName(DictionaryContextData contextData, IExecutionContext context)
|
||||
|
||||
@@ -21,6 +21,11 @@ namespace GitHub.Runner.Worker.Container
|
||||
{
|
||||
}
|
||||
|
||||
public ContainerInfo(IHostContext hostContext)
|
||||
{
|
||||
UpdateWebProxyEnv(hostContext.WebProxy);
|
||||
}
|
||||
|
||||
public ContainerInfo(IHostContext hostContext, Pipelines.JobContainer container, bool isJobContainer = true, string networkAlias = null)
|
||||
{
|
||||
this.ContainerName = container.Alias;
|
||||
@@ -34,6 +39,9 @@ namespace GitHub.Runner.Worker.Container
|
||||
_environmentVariables = container.Environment;
|
||||
this.IsJobContainer = isJobContainer;
|
||||
this.ContainerNetworkAlias = networkAlias;
|
||||
this.RegistryAuthUsername = container.Credentials?.Username;
|
||||
this.RegistryAuthPassword = container.Credentials?.Password;
|
||||
this.RegistryServer = DockerUtil.ParseRegistryHostnameFromImageName(this.ContainerImage);
|
||||
|
||||
#if OS_WINDOWS
|
||||
_pathMappings.Add(new PathMapping(hostContext.GetDirectory(WellKnownDirectory.Work), "C:\\__w"));
|
||||
@@ -79,6 +87,9 @@ namespace GitHub.Runner.Worker.Container
|
||||
public string ContainerWorkDirectory { get; set; }
|
||||
public string ContainerCreateOptions { get; private set; }
|
||||
public string ContainerRuntimePath { get; set; }
|
||||
public string RegistryServer { get; set; }
|
||||
public string RegistryAuthUsername { get; set; }
|
||||
public string RegistryAuthPassword { get; set; }
|
||||
public bool IsJobContainer { get; set; }
|
||||
|
||||
public IDictionary<string, string> ContainerEnvironmentVariables
|
||||
|
||||
@@ -4,6 +4,7 @@ using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text.RegularExpressions;
|
||||
using System.Threading;
|
||||
using System.Threading.Channels;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Sdk;
|
||||
@@ -17,6 +18,7 @@ namespace GitHub.Runner.Worker.Container
|
||||
string DockerInstanceLabel { get; }
|
||||
Task<DockerVersion> DockerVersion(IExecutionContext context);
|
||||
Task<int> DockerPull(IExecutionContext context, string image);
|
||||
Task<int> DockerPull(IExecutionContext context, string image, string configFileDirectory);
|
||||
Task<int> DockerBuild(IExecutionContext context, string workingDirectory, string dockerFile, string dockerContext, string tag);
|
||||
Task<string> DockerCreate(IExecutionContext context, ContainerInfo container);
|
||||
Task<int> DockerRun(IExecutionContext context, ContainerInfo container, EventHandler<ProcessDataReceivedEventArgs> stdoutDataReceived, EventHandler<ProcessDataReceivedEventArgs> stderrDataReceived);
|
||||
@@ -31,6 +33,7 @@ namespace GitHub.Runner.Worker.Container
|
||||
Task<int> DockerExec(IExecutionContext context, string containerId, string options, string command, List<string> outputs);
|
||||
Task<List<string>> DockerInspect(IExecutionContext context, string dockerObject, string options);
|
||||
Task<List<PortMapping>> DockerPort(IExecutionContext context, string containerId);
|
||||
Task<int> DockerLogin(IExecutionContext context, string configFileDirectory, string registry, string username, string password);
|
||||
}
|
||||
|
||||
public class DockerCommandManager : RunnerService, IDockerCommandManager
|
||||
@@ -82,9 +85,18 @@ namespace GitHub.Runner.Worker.Container
|
||||
return new DockerVersion(serverVersion, clientVersion);
|
||||
}
|
||||
|
||||
public async Task<int> DockerPull(IExecutionContext context, string image)
|
||||
public Task<int> DockerPull(IExecutionContext context, string image)
|
||||
{
|
||||
return await ExecuteDockerCommandAsync(context, "pull", image, context.CancellationToken);
|
||||
return DockerPull(context, image, null);
|
||||
}
|
||||
|
||||
public async Task<int> DockerPull(IExecutionContext context, string image, string configFileDirectory)
|
||||
{
|
||||
if (string.IsNullOrEmpty(configFileDirectory))
|
||||
{
|
||||
return await ExecuteDockerCommandAsync(context, $"pull", image, context.CancellationToken);
|
||||
}
|
||||
return await ExecuteDockerCommandAsync(context, $"--config {configFileDirectory} pull", image, context.CancellationToken);
|
||||
}
|
||||
|
||||
public async Task<int> DockerBuild(IExecutionContext context, string workingDirectory, string dockerFile, string dockerContext, string tag)
|
||||
@@ -346,6 +358,28 @@ namespace GitHub.Runner.Worker.Container
|
||||
return DockerUtil.ParseDockerPort(portMappingLines);
|
||||
}
|
||||
|
||||
public Task<int> DockerLogin(IExecutionContext context, string configFileDirectory, string registry, string username, string password)
|
||||
{
|
||||
string args = $"--config {configFileDirectory} login {registry} -u {username} --password-stdin";
|
||||
context.Command($"{DockerPath} {args}");
|
||||
|
||||
var input = Channel.CreateBounded<string>(new BoundedChannelOptions(1) { SingleReader = true, SingleWriter = true });
|
||||
input.Writer.TryWrite(password);
|
||||
|
||||
var processInvoker = HostContext.CreateService<IProcessInvoker>();
|
||||
|
||||
return processInvoker.ExecuteAsync(
|
||||
workingDirectory: context.GetGitHubContext("workspace"),
|
||||
fileName: DockerPath,
|
||||
arguments: args,
|
||||
environment: null,
|
||||
requireExitCodeZero: false,
|
||||
outputEncoding: null,
|
||||
killProcessOnCancel: false,
|
||||
redirectStandardIn: input,
|
||||
cancellationToken: context.CancellationToken);
|
||||
}
|
||||
|
||||
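
The DockerLogin helper above feeds the password over stdin (docker's --password-stdin) so the secret never appears in the process argument list. A generic sketch of that pattern with System.Diagnostics.Process, independent of the runner's ProcessInvoker; the config directory, registry, and credentials are placeholders:

    using System.Diagnostics;

    static int DockerLogin(string configDir, string registry, string username, string password)
    {
        var psi = new ProcessStartInfo
        {
            FileName = "docker",
            Arguments = $"--config {configDir} login {registry} -u {username} --password-stdin",
            RedirectStandardInput = true,
            UseShellExecute = false,
        };
        using var process = Process.Start(psi);
        // Write the secret over stdin so it is not visible in the argument list.
        process.StandardInput.WriteLine(password);
        process.StandardInput.Close();
        process.WaitForExit();
        return process.ExitCode;
    }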
private Task<int> ExecuteDockerCommandAsync(IExecutionContext context, string command, string options, CancellationToken cancellationToken = default(CancellationToken))
{
return ExecuteDockerCommandAsync(context, command, options, null, cancellationToken);

@@ -45,5 +45,21 @@ namespace GitHub.Runner.Worker.Container
}
return "";
}

public static string ParseRegistryHostnameFromImageName(string name)
{
var nameSplit = name.Split('/');
// Single slash is implictly from Dockerhub, unless first part has .tld or :port
if (nameSplit.Length == 2 && (nameSplit[0].Contains(":") || nameSplit[0].Contains(".")))
{
return nameSplit[0];
}
// All other non Dockerhub registries
else if (nameSplit.Length > 2)
{
return nameSplit[0];
}
return "";
}
}
}
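A few illustrative calls showing how ParseRegistryHostnameFromImageName classifies image names; a sketch with expected results in comments, not part of the diff:

var registry = DockerUtil.ParseRegistryHostnameFromImageName("ghcr.io/owner/my-image"); // "ghcr.io"
// "ubuntu"                   -> ""               (no slash: Docker Hub)
// "octocat/hello-world"      -> ""               (single slash, no '.' or ':' in the first segment: Docker Hub)
// "localhost:5000/my-image"  -> "localhost:5000" (single slash, first segment carries a port)
// "ghcr.io/owner/my-image"   -> "ghcr.io"        (two or more slashes: first segment is the registry host)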
@@ -91,7 +91,10 @@ namespace GitHub.Runner.Worker
#endif

// Check docker client/server version
executionContext.Output("##[group]Checking docker version");
DockerVersion dockerVersion = await _dockerManger.DockerVersion(executionContext);
executionContext.Output("##[endgroup]");

ArgUtil.NotNull(dockerVersion.ServerVersion, nameof(dockerVersion.ServerVersion));
ArgUtil.NotNull(dockerVersion.ClientVersion, nameof(dockerVersion.ClientVersion));

@@ -111,7 +114,7 @@ namespace GitHub.Runner.Worker
}

// Clean up containers left by previous runs
executionContext.Debug($"Delete stale containers from previous jobs");
executionContext.Output("##[group]Clean up resources from previous jobs");
var staleContainers = await _dockerManger.DockerPS(executionContext, $"--all --quiet --no-trunc --filter \"label={_dockerManger.DockerInstanceLabel}\"");
foreach (var staleContainer in staleContainers)
{
@@ -122,18 +125,20 @@ namespace GitHub.Runner.Worker
}
}

executionContext.Debug($"Delete stale container networks from previous jobs");
int networkPruneExitCode = await _dockerManger.DockerNetworkPrune(executionContext);
if (networkPruneExitCode != 0)
{
executionContext.Warning($"Delete stale container networks failed, docker network prune fail with exit code {networkPruneExitCode}");
}
executionContext.Output("##[endgroup]");

// Create local docker network for this job to avoid port conflict when multiple runners run on same machine.
// All containers within a job join the same network
executionContext.Output("##[group]Create local container network");
var containerNetwork = $"github_network_{Guid.NewGuid().ToString("N")}";
await CreateContainerNetworkAsync(executionContext, containerNetwork);
executionContext.JobContext.Container["network"] = new StringContextData(containerNetwork);
executionContext.Output("##[endgroup]");

foreach (var container in containers)
{
@@ -141,10 +146,12 @@ namespace GitHub.Runner.Worker
await StartContainerAsync(executionContext, container);
}

executionContext.Output("##[group]Waiting for all services to be ready");
foreach (var container in containers.Where(c => !c.IsJobContainer))
{
await ContainerHealthcheck(executionContext, container);
}
executionContext.Output("##[endgroup]");
}

public async Task StopContainersAsync(IExecutionContext executionContext, object data)
@@ -173,6 +180,10 @@ namespace GitHub.Runner.Worker
Trace.Info($"Container name: {container.ContainerName}");
Trace.Info($"Container image: {container.ContainerImage}");
Trace.Info($"Container options: {container.ContainerCreateOptions}");

var groupName = container.IsJobContainer ? "Starting job container" : $"Starting {container.ContainerNetworkAlias} service container";
executionContext.Output($"##[group]{groupName}");

foreach (var port in container.UserPortMappings)
{
Trace.Info($"User provided port: {port.Value}");
@@ -187,12 +198,17 @@ namespace GitHub.Runner.Worker
}
}

UpdateRegistryAuthForGitHubToken(executionContext, container);

// Before pulling, generate client authentication if required
var configLocation = await ContainerRegistryLogin(executionContext, container);

// Pull down docker image with retry up to 3 times
int retryCount = 0;
int pullExitCode = 0;
while (retryCount < 3)
{
pullExitCode = await _dockerManger.DockerPull(executionContext, container.ContainerImage);
pullExitCode = await _dockerManger.DockerPull(executionContext, container.ContainerImage, configLocation);
if (pullExitCode == 0)
{
break;
@@ -209,6 +225,9 @@ namespace GitHub.Runner.Worker
}
}

// Remove credentials after pulling
ContainerRegistryLogout(configLocation);

if (retryCount == 3 && pullExitCode != 0)
{
throw new InvalidOperationException($"Docker pull failed with exit code {pullExitCode}");
@@ -304,6 +323,7 @@ namespace GitHub.Runner.Worker
container.ContainerRuntimePath = DockerUtil.ParsePathFromConfigEnv(containerEnv);
executionContext.JobContext.Container["id"] = new StringContextData(container.ContainerId);
}
executionContext.Output("##[endgroup]");
}

private async Task StopContainerAsync(IExecutionContext executionContext, ContainerInfo container)
@@ -425,5 +445,66 @@ namespace GitHub.Runner.Worker
throw new InvalidOperationException($"Failed to initialize, {container.ContainerNetworkAlias} service is {serviceHealth}.");
}
}

private async Task<string> ContainerRegistryLogin(IExecutionContext executionContext, ContainerInfo container)
{
if (string.IsNullOrEmpty(container.RegistryAuthUsername) || string.IsNullOrEmpty(container.RegistryAuthPassword))
{
// No valid client config can be generated
return "";
}
var configLocation = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Temp), $".docker_{Guid.NewGuid()}");
try
{
var dirInfo = Directory.CreateDirectory(configLocation);
}
catch (Exception e)
{
throw new InvalidOperationException($"Failed to create directory to store registry client credentials: {e.Message}");
}
var loginExitCode = await _dockerManger.DockerLogin(
executionContext,
configLocation,
container.RegistryServer,
container.RegistryAuthUsername,
container.RegistryAuthPassword);

if (loginExitCode != 0)
{
throw new InvalidOperationException($"Docker login for '{container.RegistryServer}' failed with exit code {loginExitCode}");
}
return configLocation;
}

private void ContainerRegistryLogout(string configLocation)
{
try
{
if (!string.IsNullOrEmpty(configLocation) && Directory.Exists(configLocation))
{
Directory.Delete(configLocation, recursive: true);
}
}
catch (Exception e)
{
throw new InvalidOperationException($"Failed to remove directory containing Docker client credentials: {e.Message}");
}
}

private void UpdateRegistryAuthForGitHubToken(IExecutionContext executionContext, ContainerInfo container)
{
var registryIsTokenCompatible = container.RegistryServer.Equals("ghcr.io", StringComparison.OrdinalIgnoreCase) || container.RegistryServer.Equals("containers.pkg.github.com", StringComparison.OrdinalIgnoreCase);
if (!registryIsTokenCompatible)
{
return;
}

var registryCredentialsNotSupplied = string.IsNullOrEmpty(container.RegistryAuthUsername) && string.IsNullOrEmpty(container.RegistryAuthPassword);
if (registryCredentialsNotSupplied)
{
container.RegistryAuthUsername = executionContext.GetGitHubContext("actor");
container.RegistryAuthPassword = executionContext.GetGitHubContext("token");
}
}
}
}
@@ -86,9 +86,9 @@ namespace GitHub.Runner.Worker

executionContext.Debug("Zipping diagnostic files.");

string buildNumber = executionContext.Variables.Build_Number ?? "UnknownBuildNumber";
string buildNumber = executionContext.Global.Variables.Build_Number ?? "UnknownBuildNumber";
string buildName = $"Build {buildNumber}";
string phaseName = executionContext.Variables.System_PhaseDisplayName ?? "UnknownPhaseName";
string phaseName = executionContext.Global.Variables.System_PhaseDisplayName ?? "UnknownPhaseName";

// zip the files
string diagnosticsZipFileName = $"{buildName}-{phaseName}.zip";

@@ -44,41 +44,33 @@ namespace GitHub.Runner.Worker
string ResultCode { get; set; }
TaskResult? CommandResult { get; set; }
CancellationToken CancellationToken { get; }
List<ServiceEndpoint> Endpoints { get; }
TaskOrchestrationPlanReference Plan { get; }
GlobalContext Global { get; }

PlanFeatures Features { get; }
Variables Variables { get; }
Dictionary<string, string> IntraActionState { get; }
IDictionary<String, IDictionary<String, String>> JobDefaults { get; }
Dictionary<string, VariableValue> JobOutputs { get; }
IDictionary<String, String> EnvironmentVariables { get; }
IList<String> FileTable { get; }
StepsContext StepsContext { get; }
ActionsEnvironmentReference ActionsEnvironment { get; }
DictionaryContextData ExpressionValues { get; }
IList<IFunctionInfo> ExpressionFunctions { get; }
List<string> PrependPath { get; }
ContainerInfo Container { get; set; }
List<ContainerInfo> ServiceContainers { get; }
JobContext JobContext { get; }

// Only job level ExecutionContext has JobSteps
List<IStep> JobSteps { get; }
Queue<IStep> JobSteps { get; }

// Only job level ExecutionContext has PostJobSteps
Stack<IStep> PostJobSteps { get; }

bool EchoOnActionCommand { get; set; }

IExecutionContext FinalizeContext { get; set; }
bool InsideComposite { get; }

ExecutionContext Root { get; }

// Initialize
void InitializeJob(Pipelines.AgentJobRequestMessage message, CancellationToken token);
void CancelToken();
IExecutionContext CreateChild(Guid recordId, string displayName, string refName, string scopeName, string contextName, Dictionary<string, string> intraActionState = null, int? recordOrder = null, IPagingLogger logger = null);
IExecutionContext CreateChild(Guid recordId, string displayName, string refName, string scopeName, string contextName, Dictionary<string, string> intraActionState = null, int? recordOrder = null, IPagingLogger logger = null, bool insideComposite = false, CancellationTokenSource cancellationTokenSource = null);

// logging
bool WriteDebug { get; }
long Write(string tag, string message);
void QueueAttachFile(string type, string name, string filePath);

@@ -107,7 +99,7 @@ namespace GitHub.Runner.Worker
// others
void ForceTaskComplete();
void RegisterPostJobStep(IStep step);
IStep RegisterNestedStep(IActionRunner step, DictionaryContextData inputsData, int location, Dictionary<string, string> envData, bool cleanUp = false);
IStep CreateCompositeStep(string scopeName, IActionRunner step, DictionaryContextData inputsData, Dictionary<string, string> envData);
}

public sealed class ExecutionContext : RunnerService, IExecutionContext
@@ -122,9 +114,6 @@ namespace GitHub.Runner.Worker

private event OnMatcherChanged _onMatcherChanged;

// Regex used for checking if ScopeName meets the condition that shows that its id is null.
private readonly static Regex _generatedContextNamePattern = new Regex("^__[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$", RegexOptions.Compiled | RegexOptions.CultureInvariant | RegexOptions.IgnoreCase);

private IssueMatcherConfig[] _matchers;

private IPagingLogger _logger;
@@ -147,24 +136,18 @@ namespace GitHub.Runner.Worker
public string ContextName { get; private set; }
public Task ForceCompleted => _forceCompleted.Task;
public CancellationToken CancellationToken => _cancellationTokenSource.Token;
public List<ServiceEndpoint> Endpoints { get; private set; }
public TaskOrchestrationPlanReference Plan { get; private set; }
public Variables Variables { get; private set; }
public Dictionary<string, string> IntraActionState { get; private set; }
public IDictionary<String, IDictionary<String, String>> JobDefaults { get; private set; }
public Dictionary<string, VariableValue> JobOutputs { get; private set; }
public IDictionary<String, String> EnvironmentVariables { get; private set; }
public IList<String> FileTable { get; private set; }
public StepsContext StepsContext { get; private set; }

public ActionsEnvironmentReference ActionsEnvironment { get; private set; }
public DictionaryContextData ExpressionValues { get; } = new DictionaryContextData();
public IList<IFunctionInfo> ExpressionFunctions { get; } = new List<IFunctionInfo>();
public bool WriteDebug { get; private set; }
public List<string> PrependPath { get; private set; }
public ContainerInfo Container { get; set; }
public List<ContainerInfo> ServiceContainers { get; private set; }

// Shared pointer across job-level execution context and step-level execution contexts
public GlobalContext Global { get; private set; }

// Only job level ExecutionContext has JobSteps
public List<IStep> JobSteps { get; private set; }
public Queue<IStep> JobSteps { get; private set; }

// Only job level ExecutionContext has PostJobSteps
public Stack<IStep> PostJobSteps { get; private set; }
@@ -174,7 +157,7 @@ namespace GitHub.Runner.Worker

public bool EchoOnActionCommand { get; set; }

public IExecutionContext FinalizeContext { get; set; }
public bool InsideComposite { get; private set; }

public TaskResult? Result
{
@@ -206,9 +189,7 @@ namespace GitHub.Runner.Worker
}
}

public PlanFeatures Features { get; private set; }

private ExecutionContext Root
public ExecutionContext Root
{
get
{
@@ -274,34 +255,17 @@ namespace GitHub.Runner.Worker

/// <summary>
/// Helper function used in CompositeActionHandler::RunAsync to
/// add a child node, aka a step, to the current job to the Root.JobSteps based on the location.
/// add a child node, aka a step, to the current job to the Root.JobSteps based on the location.
/// </summary>
public IStep RegisterNestedStep(
public IStep CreateCompositeStep(
string scopeName,
IActionRunner step,
DictionaryContextData inputsData,
int location,
Dictionary<string, string> envData,
bool cleanUp = false)
Dictionary<string, string> envData)
{
// If the context name is empty and the scope name is empty, we would generate a unique scope name for this child in the following format:
// "__<GUID>"
var safeContextName = !string.IsNullOrEmpty(ContextName) ? ContextName : $"__{Guid.NewGuid()}";

// Set Scope Name. Note, for our design, we consider each step in a composite action to have the same scope
// This makes it much simpler to handle their outputs at the end of the Composite Action
var childScopeName = !string.IsNullOrEmpty(ScopeName) ? $"{ScopeName}.{safeContextName}" : safeContextName;

var childContextName = !string.IsNullOrEmpty(step.Action.ContextName) ? step.Action.ContextName : $"__{Guid.NewGuid()}";

step.ExecutionContext = Root.CreateChild(_record.Id, step.DisplayName, _record.Id.ToString("N"), childScopeName, childContextName, logger: _logger);

step.ExecutionContext = Root.CreateChild(_record.Id, _record.Name, _record.Id.ToString("N"), scopeName, step.Action.ContextName, logger: _logger, insideComposite: true, cancellationTokenSource: CancellationTokenSource.CreateLinkedTokenSource(_cancellationTokenSource.Token));
step.ExecutionContext.ExpressionValues["inputs"] = inputsData;

// Set Parent Attribute for Clean Up Step
if (cleanUp)
{
step.ExecutionContext.FinalizeContext = this;
}
step.ExecutionContext.ExpressionValues["steps"] = Global.StepsContext.GetScope(step.ExecutionContext.GetFullyQualifiedContextName());

// Add the composite action environment variables to each step.
#if OS_WINDOWS
@@ -315,23 +279,18 @@ namespace GitHub.Runner.Worker
}
step.ExecutionContext.ExpressionValues["env"] = envContext;

Root.JobSteps.Insert(location, step);

return step;
}

public IExecutionContext CreateChild(Guid recordId, string displayName, string refName, string scopeName, string contextName, Dictionary<string, string> intraActionState = null, int? recordOrder = null, IPagingLogger logger = null)
public IExecutionContext CreateChild(Guid recordId, string displayName, string refName, string scopeName, string contextName, Dictionary<string, string> intraActionState = null, int? recordOrder = null, IPagingLogger logger = null, bool insideComposite = false, CancellationTokenSource cancellationTokenSource = null)
{
Trace.Entering();

var child = new ExecutionContext();
child.Initialize(HostContext);
child.Global = Global;
child.ScopeName = scopeName;
child.ContextName = contextName;
child.Features = Features;
child.Variables = Variables;
child.Endpoints = Endpoints;
child.Plan = Plan;
if (intraActionState == null)
{
child.IntraActionState = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
@@ -340,10 +299,6 @@ namespace GitHub.Runner.Worker
{
child.IntraActionState = intraActionState;
}
child.EnvironmentVariables = EnvironmentVariables;
child.JobDefaults = JobDefaults;
child.FileTable = FileTable;
child.StepsContext = StepsContext;
foreach (var pair in ExpressionValues)
{
child.ExpressionValues[pair.Key] = pair.Value;
@@ -352,12 +307,8 @@ namespace GitHub.Runner.Worker
{
child.ExpressionFunctions.Add(item);
}
child._cancellationTokenSource = new CancellationTokenSource();
child.WriteDebug = WriteDebug;
child._cancellationTokenSource = cancellationTokenSource ?? new CancellationTokenSource();
child._parentExecutionContext = this;
child.PrependPath = PrependPath;
child.Container = Container;
child.ServiceContainers = ServiceContainers;
child.EchoOnActionCommand = EchoOnActionCommand;

if (recordOrder != null)
@@ -378,6 +329,8 @@ namespace GitHub.Runner.Worker
child._logger.Setup(_mainTimelineId, recordId);
}

child.InsideComposite = insideComposite;

return child;
}

@@ -428,16 +381,17 @@ namespace GitHub.Runner.Worker

if (Root != this)
{
// only dispose TokenSource for step level ExecutionContext
// only dispose TokenSource for step level ExecutionContext
_cancellationTokenSource?.Dispose();
}

_logger.End();

if (!string.IsNullOrEmpty(ContextName))
// Skip if generated context name. Generated context names start with "__". After M271-ish the server will never send an empty context name.
if (!string.IsNullOrEmpty(ContextName) && !ContextName.StartsWith("__", StringComparison.Ordinal))
{
StepsContext.SetOutcome(ScopeName, ContextName, (Outcome ?? Result ?? TaskResult.Succeeded).ToActionResult());
StepsContext.SetConclusion(ScopeName, ContextName, (Result ?? TaskResult.Succeeded).ToActionResult());
Global.StepsContext.SetOutcome(ScopeName, ContextName, (Outcome ?? Result ?? TaskResult.Succeeded).ToActionResult());
Global.StepsContext.SetConclusion(ScopeName, ContextName, (Result ?? TaskResult.Succeeded).ToActionResult());
}

return Result.Value;
@@ -496,8 +450,8 @@ namespace GitHub.Runner.Worker
{
ArgUtil.NotNullOrEmpty(name, nameof(name));

// if the ContextName follows the __GUID format which is set as the default value for ContextName if null for Composite Actions.
if (String.IsNullOrEmpty(ContextName) || _generatedContextNamePattern.IsMatch(ContextName))
// Skip if generated context name. Generated context names start with "__". After M271-ish the server will never send an empty context name.
if (string.IsNullOrEmpty(ContextName) || ContextName.StartsWith("__", StringComparison.Ordinal))
{
reference = null;
return;
@@ -505,7 +459,7 @@ namespace GitHub.Runner.Worker

// todo: restrict multiline?

StepsContext.SetOutput(ScopeName, ContextName, name, value, out reference);
Global.StepsContext.SetOutput(ScopeName, ContextName, name, value, out reference);
}

public void SetTimeout(TimeSpan? timeout)
@@ -639,33 +593,38 @@ namespace GitHub.Runner.Worker

_cancellationTokenSource = CancellationTokenSource.CreateLinkedTokenSource(token);

Global = new GlobalContext();

// Plan
Plan = message.Plan;
Features = PlanUtil.GetFeatures(message.Plan);
Global.Plan = message.Plan;
Global.Features = PlanUtil.GetFeatures(message.Plan);

// Endpoints
Endpoints = message.Resources.Endpoints;
Global.Endpoints = message.Resources.Endpoints;

// Variables
Variables = new Variables(HostContext, message.Variables);
Global.Variables = new Variables(HostContext, message.Variables);

// Environment variables shared across all actions
EnvironmentVariables = new Dictionary<string, string>(VarUtil.EnvironmentVariableKeyComparer);
Global.EnvironmentVariables = new Dictionary<string, string>(VarUtil.EnvironmentVariableKeyComparer);

// Job defaults shared across all actions
JobDefaults = new Dictionary<string, IDictionary<string, string>>(StringComparer.OrdinalIgnoreCase);
Global.JobDefaults = new Dictionary<string, IDictionary<string, string>>(StringComparer.OrdinalIgnoreCase);

// Job Outputs
JobOutputs = new Dictionary<string, VariableValue>(StringComparer.OrdinalIgnoreCase);

// Actions environment
ActionsEnvironment = message.ActionsEnvironment;

// Service container info
ServiceContainers = new List<ContainerInfo>();
Global.ServiceContainers = new List<ContainerInfo>();

// Steps context (StepsRunner manages adding the scoped steps context)
StepsContext = new StepsContext();
Global.StepsContext = new StepsContext();

// File table
FileTable = new List<String>(message.FileTable ?? new string[0]);
Global.FileTable = new List<String>(message.FileTable ?? new string[0]);

// Expression values
if (message.ContextData?.Count > 0)
@@ -676,15 +635,15 @@ namespace GitHub.Runner.Worker
}
}

ExpressionValues["secrets"] = Variables.ToSecretsContext();
ExpressionValues["secrets"] = Global.Variables.ToSecretsContext();
ExpressionValues["runner"] = new RunnerContext();
ExpressionValues["job"] = new JobContext();

Trace.Info("Initialize GitHub context");
var githubAccessToken = new StringContextData(Variables.Get("system.github.token"));
var githubAccessToken = new StringContextData(Global.Variables.Get("system.github.token"));
var base64EncodedToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"x-access-token:{githubAccessToken}"));
HostContext.SecretMasker.AddValue(base64EncodedToken);
var githubJob = Variables.Get("system.github.job");
var githubJob = Global.Variables.Get("system.github.job");
var githubContext = new GitHubContext();
githubContext["token"] = githubAccessToken;
if (!string.IsNullOrEmpty(githubJob))
@@ -707,10 +666,10 @@ namespace GitHub.Runner.Worker
#endif

// Prepend Path
PrependPath = new List<string>();
Global.PrependPath = new List<string>();

// JobSteps for job ExecutionContext
JobSteps = new List<IStep>();
JobSteps = new Queue<IStep>();

// PostJobSteps for job ExecutionContext
PostJobSteps = new Stack<IStep>();
@@ -733,10 +692,10 @@ namespace GitHub.Runner.Worker
_logger.Setup(_mainTimelineId, _record.Id);

// Initialize 'echo on action command success' property, default to false, unless Step_Debug is set
EchoOnActionCommand = Variables.Step_Debug ?? false;
EchoOnActionCommand = Global.Variables.Step_Debug ?? false;

// Verbosity (from GitHub.Step_Debug).
WriteDebug = Variables.Step_Debug ?? false;
Global.WriteDebug = Global.Variables.Step_Debug ?? false;

// Hook up JobServerQueueThrottling event, we will log warning on server tarpit.
_jobServerQueue.JobServerQueueThrottling += JobServerQueueThrottling_EventReceived;
@@ -764,7 +723,7 @@ namespace GitHub.Runner.Worker
}
}

_jobServerQueue.QueueWebConsoleLine(_record.Id, msg);
_jobServerQueue.QueueWebConsoleLine(_record.Id, msg, totalLines);
return totalLines;
}

@@ -899,6 +858,10 @@ namespace GitHub.Runner.Worker
{
_record.ParentId = parentTimelineRecordId;
}
else if (parentTimelineRecordId == null)
{
_record.AgentPlatform = VarUtil.OS;
}

var configuration = HostContext.GetService<IConfigurationStore>();
_record.WorkerName = configuration.GetSettings().AgentName;
@@ -937,6 +900,16 @@ namespace GitHub.Runner.Worker
// Otherwise individual overloads would need to be implemented (depending on the unit test).
public static class ExecutionContextExtension
{
public static string GetFullyQualifiedContextName(this IExecutionContext context)
{
if (!string.IsNullOrEmpty(context.ScopeName))
{
return $"{context.ScopeName}.{context.ContextName}";
}

return context.ContextName;
}

public static void Error(this IExecutionContext context, Exception ex)
{
context.Error(ex.Message);
@@ -949,6 +922,12 @@ namespace GitHub.Runner.Worker
context.AddIssue(new Issue() { Type = IssueType.Error, Message = message });
}

// Do not add a format string overload. See comment on ExecutionContext.Write().
public static void InfrastructureError(this IExecutionContext context, string message)
{
context.AddIssue(new Issue() { Type = IssueType.Error, Message = message, IsInfrastructureIssue = true});
}

// Do not add a format string overload. See comment on ExecutionContext.Write().
public static void Warning(this IExecutionContext context, string message)
{
@@ -975,7 +954,7 @@ namespace GitHub.Runner.Worker
// Do not add a format string overload. See comment on ExecutionContext.Write().
public static void Debug(this IExecutionContext context, string message)
{
if (context.WriteDebug)
if (context.Global.WriteDebug)
{
var multilines = message?.Replace("\r\n", "\n")?.Split("\n");
if (multilines != null)
@@ -1000,7 +979,10 @@ namespace GitHub.Runner.Worker
traceWriter = context.ToTemplateTraceWriter();
}
var schema = PipelineTemplateSchemaFactory.GetSchema();
return new PipelineTemplateEvaluator(traceWriter, schema, context.FileTable);
return new PipelineTemplateEvaluator(traceWriter, schema, context.Global.FileTable)
{
MaxErrorMessageLength = int.MaxValue, // Don't truncate error messages otherwise we might not scrub secrets correctly
};
}

public static ObjectTemplating.ITraceWriter ToTemplateTraceWriter(this IExecutionContext context)
262 src/Runner.Worker/FileCommandManager.cs Normal file
@@ -0,0 +1,262 @@
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Worker.Container;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using System;
using System.Collections;
using System.Collections.Generic;
using System.IO;
using System.Text;

namespace GitHub.Runner.Worker
{
[ServiceLocator(Default = typeof(FileCommandManager))]
public interface IFileCommandManager : IRunnerService
{
void InitializeFiles(IExecutionContext context, ContainerInfo container);
void ProcessFiles(IExecutionContext context, ContainerInfo container);

}

public sealed class FileCommandManager : RunnerService, IFileCommandManager
{
private const string _folderName = "_runner_file_commands";
private List<IFileCommandExtension> _commandExtensions;
private string _fileSuffix = String.Empty;
private string _fileCommandDirectory;
private Tracing _trace;

public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);
_trace = HostContext.GetTrace(nameof(FileCommandManager));

_fileCommandDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Temp), _folderName);
if (!Directory.Exists(_fileCommandDirectory))
{
Directory.CreateDirectory(_fileCommandDirectory);
}

var extensionManager = hostContext.GetService<IExtensionManager>();
_commandExtensions = extensionManager.GetExtensions<IFileCommandExtension>() ?? new List<IFileCommandExtension>();
}

public void InitializeFiles(IExecutionContext context, ContainerInfo container)
{
var oldSuffix = _fileSuffix;
_fileSuffix = Guid.NewGuid().ToString();
foreach (var fileCommand in _commandExtensions)
{
var oldPath = Path.Combine(_fileCommandDirectory, fileCommand.FilePrefix + oldSuffix);
if (oldSuffix != String.Empty && File.Exists(oldPath))
{
TryDeleteFile(oldPath);
}

var newPath = Path.Combine(_fileCommandDirectory, fileCommand.FilePrefix + _fileSuffix);
TryDeleteFile(newPath);
File.Create(newPath).Dispose();

var pathToSet = container != null ? container.TranslateToContainerPath(newPath) : newPath;
context.SetGitHubContext(fileCommand.ContextName, pathToSet);
}
}

public void ProcessFiles(IExecutionContext context, ContainerInfo container)
{
foreach (var fileCommand in _commandExtensions)
{
try
{
fileCommand.ProcessCommand(context, Path.Combine(_fileCommandDirectory, fileCommand.FilePrefix + _fileSuffix),container);
}
catch (Exception ex)
{
context.Error($"Unable to process file command '{fileCommand.ContextName}' successfully.");
context.Error(ex);
context.CommandResult = TaskResult.Failed;
}
}
}

private bool TryDeleteFile(string path)
{
if (!File.Exists(path))
{
return true;
}
try
{
File.Delete(path);
}
catch (Exception e)
{
_trace.Warning($"Unable to delete file {path} for reason: {e.ToString()}");
return false;
}
return true;
}
}

public interface IFileCommandExtension : IExtension
{
string ContextName { get; }
string FilePrefix { get; }

void ProcessCommand(IExecutionContext context, string filePath, ContainerInfo container);
}

public sealed class AddPathFileCommand : RunnerService, IFileCommandExtension
{
public string ContextName => "path";
public string FilePrefix => "add_path_";

public Type ExtensionType => typeof(IFileCommandExtension);

public void ProcessCommand(IExecutionContext context, string filePath, ContainerInfo container)
{
if (File.Exists(filePath))
{
var lines = File.ReadAllLines(filePath, Encoding.UTF8);
foreach(var line in lines)
{
if (line == string.Empty)
{
continue;
}
context.Global.PrependPath.RemoveAll(x => string.Equals(x, line, StringComparison.CurrentCulture));
context.Global.PrependPath.Add(line);
}
}
}
}

public sealed class SetEnvFileCommand : RunnerService, IFileCommandExtension
{
public string ContextName => "env";
public string FilePrefix => "set_env_";

public Type ExtensionType => typeof(IFileCommandExtension);

public void ProcessCommand(IExecutionContext context, string filePath, ContainerInfo container)
{
try
{
var text = File.ReadAllText(filePath) ?? string.Empty;
var index = 0;
var line = ReadLine(text, ref index);
while (line != null)
{
if (!string.IsNullOrEmpty(line))
{
var equalsIndex = line.IndexOf("=", StringComparison.Ordinal);
var heredocIndex = line.IndexOf("<<", StringComparison.Ordinal);

// Normal style NAME=VALUE
if (equalsIndex >= 0 && (heredocIndex < 0 || equalsIndex < heredocIndex))
{
var split = line.Split(new[] { '=' }, 2, StringSplitOptions.None);
if (string.IsNullOrEmpty(line))
{
throw new Exception($"Invalid environment variable format '{line}'. Environment variable name must not be empty");
}
SetEnvironmentVariable(context, split[0], split[1]);
}
// Heredoc style NAME<<EOF
else if (heredocIndex >= 0 && (equalsIndex < 0 || heredocIndex < equalsIndex))
{
var split = line.Split(new[] { "<<" }, 2, StringSplitOptions.None);
if (string.IsNullOrEmpty(split[0]) || string.IsNullOrEmpty(split[1]))
{
throw new Exception($"Invalid environment variable format '{line}'. Environment variable name must not be empty and delimiter must not be empty");
}
var name = split[0];
var delimiter = split[1];
var startIndex = index; // Start index of the value (inclusive)
var endIndex = index; // End index of the value (exclusive)
var tempLine = ReadLine(text, ref index, out var newline);
while (!string.Equals(tempLine, delimiter, StringComparison.Ordinal))
{
if (tempLine == null)
{
throw new Exception($"Invalid environment variable value. Matching delimiter not found '{delimiter}'");
}
endIndex = index - newline.Length;
tempLine = ReadLine(text, ref index, out newline);
}

var value = endIndex > startIndex ? text.Substring(startIndex, endIndex - startIndex) : string.Empty;
SetEnvironmentVariable(context, name, value);
}
else
{
throw new Exception($"Invalid environment variable format '{line}'");
}
}

line = ReadLine(text, ref index);
}
}
catch (DirectoryNotFoundException)
{
context.Debug($"Environment variables file does not exist '{filePath}'");
}
catch (FileNotFoundException)
{
context.Debug($"Environment variables file does not exist '{filePath}'");
}
}

private static void SetEnvironmentVariable(
IExecutionContext context,
string name,
string value)
{
context.Global.EnvironmentVariables[name] = value;
context.SetEnvContext(name, value);
context.Debug($"{name}='{value}'");
}

private static string ReadLine(
string text,
ref int index)
{
return ReadLine(text, ref index, out _);
}

private static string ReadLine(
string text,
ref int index,
out string newline)
{
if (index >= text.Length)
{
newline = null;
return null;
}

var originalIndex = index;
var lfIndex = text.IndexOf("\n", index, StringComparison.Ordinal);
if (lfIndex < 0)
{
index = text.Length;
newline = null;
return text.Substring(originalIndex);
}

#if OS_WINDOWS
var crLFIndex = text.IndexOf("\r\n", index, StringComparison.Ordinal);
if (crLFIndex >= 0 && crLFIndex < lfIndex)
{
index = crLFIndex + 2; // Skip over CRLF
newline = "\r\n";
return text.Substring(originalIndex, crLFIndex - originalIndex);
}
#endif

index = lfIndex + 1; // Skip over LF
newline = "\n";
return text.Substring(originalIndex, lfIndex - originalIndex);
}
}
}
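For context, the two extensions above consume plain text files whose paths are handed to steps through the "path" and "env" GitHub context values. A sketch of the input each parser accepts, with illustrative file names and values (not part of the diff):

add_path_<suffix> file: one directory per line, later duplicates win and the set is prepended to PATH.
/home/runner/.dotnet/tools
/opt/sometool/bin

set_env_<suffix> file: NAME=VALUE pairs, or a heredoc-style NAME<<DELIMITER block for multi-line values.
MY_VAR=hello
MULTILINE_JSON<<EOF
{
  "key": "value"
}
EOF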
@@ -6,20 +6,26 @@ namespace GitHub.Runner.Worker
{
public sealed class GitHubContext : DictionaryContextData, IEnvironmentContextData
{
private readonly HashSet<string> _contextEnvWhitelist = new HashSet<string>(StringComparer.OrdinalIgnoreCase)
private readonly HashSet<string> _contextEnvAllowlist = new HashSet<string>(StringComparer.OrdinalIgnoreCase)
{
"action",
"action_path",
"action_ref",
"action_repository",
"actor",
"api_url",
"base_ref",
"env",
"event_name",
"event_path",
"graphql_url",
"head_ref",
"job",
"path",
"ref",
"repository",
"repository_owner",
"retention_days",
"run_id",
"run_number",
"server_url",
@@ -32,11 +38,23 @@ namespace GitHub.Runner.Worker
{
foreach (var data in this)
{
if (_contextEnvWhitelist.Contains(data.Key) && data.Value is StringContextData value)
if (_contextEnvAllowlist.Contains(data.Key) && data.Value is StringContextData value)
{
yield return new KeyValuePair<string, string>($"GITHUB_{data.Key.ToUpperInvariant()}", value);
}
}
}

public GitHubContext ShallowCopy()
{
var copy = new GitHubContext();

foreach (var pair in this)
{
copy[pair.Key] = pair.Value;
}

return copy;
}
}
}
}
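Each allowlisted github.* key is exported to step processes as an environment variable named GITHUB_<KEY>, uppercased; keys not on the allowlist stay available only through expressions. A couple of illustrative mappings (a sketch based on the code above, not an exhaustive list):

// github.ref         -> GITHUB_REF
// github.repository  -> GITHUB_REPOSITORY
// github.action_path -> GITHUB_ACTION_PATH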
24 src/Runner.Worker/GlobalContext.cs Normal file
@@ -0,0 +1,24 @@
using System;
using System.Collections.Generic;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Worker.Container;

namespace GitHub.Runner.Worker
{
public sealed class GlobalContext
{
public ContainerInfo Container { get; set; }
public List<ServiceEndpoint> Endpoints { get; set; }
public IDictionary<String, String> EnvironmentVariables { get; set; }
public PlanFeatures Features { get; set; }
public IList<String> FileTable { get; set; }
public IDictionary<String, IDictionary<String, String>> JobDefaults { get; set; }
public TaskOrchestrationPlanReference Plan { get; set; }
public List<string> PrependPath { get; set; }
public List<ContainerInfo> ServiceContainers { get; set; }
public StepsContext StepsContext { get; set; }
public Variables Variables { get; set; }
public bool WriteDebug { get; set; }
}
}
@@ -3,6 +3,7 @@ using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.DistributedTask.Pipelines.ContextData;
@@ -23,17 +24,13 @@ namespace GitHub.Runner.Worker.Handlers
{
public CompositeActionExecutionData Data { get; set; }

public Task RunAsync(ActionRunStage stage)
public async Task RunAsync(ActionRunStage stage)
{
// Validate args.
Trace.Entering();
ArgUtil.NotNull(ExecutionContext, nameof(ExecutionContext));
ArgUtil.NotNull(Inputs, nameof(Inputs));

var githubContext = ExecutionContext.ExpressionValues["github"] as GitHubContext;
ArgUtil.NotNull(githubContext, nameof(githubContext));

var tempDirectory = HostContext.GetDirectory(WellKnownDirectory.Temp);
ArgUtil.NotNull(Data.Steps, nameof(Data.Steps));

// Resolve action steps
var actionSteps = Data.Steps;
@@ -45,73 +42,241 @@ namespace GitHub.Runner.Worker.Handlers
inputsData[i.Key] = new StringContextData(i.Value);
}

// Add each composite action step to the front of the queue
int location = 0;
// Initialize Composite Steps List of Steps
var compositeSteps = new List<IStep>();

foreach (Pipelines.ActionStep aStep in actionSteps)
// Temporary hack until after M271-ish. After M271-ish the server will never send an empty
// context name. Generated context names start with "__"
var childScopeName = ExecutionContext.GetFullyQualifiedContextName();
if (string.IsNullOrEmpty(childScopeName))
{
// Ex:
// runs:
// using: "composite"
// steps:
// - uses: example/test-composite@v2 (a)
// - run echo hello world (b)
// - run echo hello world 2 (c)
//
// ethanchewy/test-composite/action.yaml
// runs:
// using: "composite"
// steps:
// - run echo hello world 3 (d)
// - run echo hello world 4 (e)
//
// Steps processed as follow:
// | a |
// | a | => | d |
// (Run step d)
// | a |
// | a | => | e |
// (Run step e)
// | a |
// (Run step a)
// | b |
// (Run step b)
// | c |
// (Run step c)
// Done.

var actionRunner = HostContext.CreateService<IActionRunner>();
actionRunner.Action = aStep;
actionRunner.Stage = stage;
actionRunner.Condition = aStep.Condition;

var step = ExecutionContext.RegisterNestedStep(actionRunner, inputsData, location, Environment);

InitializeScope(step);

location++;
childScopeName = $"__{Guid.NewGuid()}";
}

// Create a step that handles all the composite action steps' outputs
Pipelines.ActionStep cleanOutputsStep = new Pipelines.ActionStep();
cleanOutputsStep.ContextName = ExecutionContext.ContextName;
// Use the same reference type as our composite steps.
cleanOutputsStep.Reference = Action;
foreach (Pipelines.ActionStep actionStep in actionSteps)
{
var actionRunner = HostContext.CreateService<IActionRunner>();
actionRunner.Action = actionStep;
actionRunner.Stage = stage;
actionRunner.Condition = actionStep.Condition;

var actionRunner2 = HostContext.CreateService<IActionRunner>();
actionRunner2.Action = cleanOutputsStep;
actionRunner2.Stage = ActionRunStage.Main;
actionRunner2.Condition = "always()";
ExecutionContext.RegisterNestedStep(actionRunner2, inputsData, location, Environment, true);
var step = ExecutionContext.CreateCompositeStep(childScopeName, actionRunner, inputsData, Environment);

return Task.CompletedTask;
// Shallow copy github context
var gitHubContext = step.ExecutionContext.ExpressionValues["github"] as GitHubContext;
ArgUtil.NotNull(gitHubContext, nameof(gitHubContext));
gitHubContext = gitHubContext.ShallowCopy();
step.ExecutionContext.ExpressionValues["github"] = gitHubContext;

// Set GITHUB_ACTION_PATH
step.ExecutionContext.SetGitHubContext("action_path", ActionDirectory);

compositeSteps.Add(step);
}

try
{
// This is where we run each step.
await RunStepsAsync(compositeSteps);

// Get the pointer of the correct "steps" object and pass it to the ExecutionContext so that we can process the outputs correctly
ExecutionContext.ExpressionValues["inputs"] = inputsData;
ExecutionContext.ExpressionValues["steps"] = ExecutionContext.Global.StepsContext.GetScope(ExecutionContext.GetFullyQualifiedContextName());

ProcessCompositeActionOutputs();

ExecutionContext.Global.StepsContext.ClearScope(childScopeName);
}
catch (Exception ex)
{
// Composite StepRunner should never throw exception out.
Trace.Error($"Caught exception from composite steps {nameof(CompositeActionHandler)}: {ex}");
ExecutionContext.Error(ex);
ExecutionContext.Result = TaskResult.Failed;
}
}

private void InitializeScope(IStep step)
private void ProcessCompositeActionOutputs()
{
var stepsContext = step.ExecutionContext.StepsContext;
var scopeName = step.ExecutionContext.ScopeName;
step.ExecutionContext.ExpressionValues["steps"] = stepsContext.GetScope(scopeName);
ArgUtil.NotNull(ExecutionContext, nameof(ExecutionContext));

// Evaluate the mapped outputs value
if (Data.Outputs != null)
{
// Evaluate the outputs in the steps context to easily retrieve the values
var actionManifestManager = HostContext.GetService<IActionManifestManager>();

// Format ExpressionValues to Dictionary<string, PipelineContextData>
var evaluateContext = new Dictionary<string, PipelineContextData>(StringComparer.OrdinalIgnoreCase);
foreach (var pair in ExecutionContext.ExpressionValues)
{
evaluateContext[pair.Key] = pair.Value;
}

// Get the evluated composite outputs' values mapped to the outputs named
DictionaryContextData actionOutputs = actionManifestManager.EvaluateCompositeOutputs(ExecutionContext, Data.Outputs, evaluateContext);

// Set the outputs for the outputs object in the whole composite action
// Each pair is structured like this
// We ignore "description" for now
// {
// "the-output-name": {
// "description": "",
// "value": "the value"
// },
// ...
// }
foreach (var pair in actionOutputs)
{
var outputsName = pair.Key;
var outputsAttributes = pair.Value as DictionaryContextData;
outputsAttributes.TryGetValue("value", out var val);

if (val != null)
{
var outputsValue = val as StringContextData;
// Set output in the whole composite scope.
if (!String.IsNullOrEmpty(outputsValue))
{
ExecutionContext.SetOutput(outputsName, outputsValue, out _);
}
else
{
ExecutionContext.SetOutput(outputsName, "", out _);
}
}
}
}
}

private async Task RunStepsAsync(List<IStep> compositeSteps)
{
ArgUtil.NotNull(compositeSteps, nameof(compositeSteps));

// The parent StepsRunner of the whole Composite Action Step handles the cancellation stuff already.
foreach (IStep step in compositeSteps)
{
Trace.Info($"Processing composite step: DisplayName='{step.DisplayName}'");

step.ExecutionContext.ExpressionValues["steps"] = ExecutionContext.Global.StepsContext.GetScope(step.ExecutionContext.ScopeName);

// Populate env context for each step
Trace.Info("Initialize Env context for step");
#if OS_WINDOWS
var envContext = new DictionaryContextData();
#else
var envContext = new CaseSensitiveDictionaryContextData();
#endif

// Global env
foreach (var pair in ExecutionContext.Global.EnvironmentVariables)
{
envContext[pair.Key] = new StringContextData(pair.Value ?? string.Empty);
}

// Stomps over with outside step env
if (step.ExecutionContext.ExpressionValues.TryGetValue("env", out var envContextData))
{
#if OS_WINDOWS
var dict = envContextData as DictionaryContextData;
#else
var dict = envContextData as CaseSensitiveDictionaryContextData;
#endif
foreach (var pair in dict)
{
envContext[pair.Key] = pair.Value;
}
}

step.ExecutionContext.ExpressionValues["env"] = envContext;

var actionStep = step as IActionRunner;

try
{
// Evaluate and merge action's env block to env context
var templateEvaluator = step.ExecutionContext.ToPipelineTemplateEvaluator();
var actionEnvironment = templateEvaluator.EvaluateStepEnvironment(actionStep.Action.Environment, step.ExecutionContext.ExpressionValues, step.ExecutionContext.ExpressionFunctions, Common.Util.VarUtil.EnvironmentVariableKeyComparer);
foreach (var env in actionEnvironment)
{
envContext[env.Key] = new StringContextData(env.Value ?? string.Empty);
}
}
catch (Exception ex)
{
// fail the step since there is an evaluate error.
Trace.Info("Caught exception in Composite Steps Runner from expression for step.env");
// evaluateStepEnvFailed = true;
step.ExecutionContext.Error(ex);
step.ExecutionContext.Complete(TaskResult.Failed);
}

await RunStepAsync(step);

// Directly after the step, check if the step has failed or cancelled
// If so, return that to the output
if (step.ExecutionContext.Result == TaskResult.Failed || step.ExecutionContext.Result == TaskResult.Canceled)
{
ExecutionContext.Result = step.ExecutionContext.Result;
break;
}

// TODO: Add compat for other types of steps.
}
// Completion Status handled by StepsRunner for the whole Composite Action Step
}

private async Task RunStepAsync(IStep step)
{
// Start the step.
Trace.Info("Starting the step.");
step.ExecutionContext.Debug($"Starting: {step.DisplayName}");

// TODO: Fix for Step Level Timeout Attributes for an individual Composite Run Step
// For now, we are not going to support this for an individual composite run step

var templateEvaluator = step.ExecutionContext.ToPipelineTemplateEvaluator();

await Common.Util.EncodingUtil.SetEncoding(HostContext, Trace, step.ExecutionContext.CancellationToken);

try
{
await step.RunAsync();
}
catch (OperationCanceledException ex)
{
if (step.ExecutionContext.CancellationToken.IsCancellationRequested &&
!ExecutionContext.Root.CancellationToken.IsCancellationRequested)
{
Trace.Error($"Caught timeout exception from step: {ex.Message}");
step.ExecutionContext.Error("The action has timed out.");
step.ExecutionContext.Result = TaskResult.Failed;
}
else
{
Trace.Error($"Caught cancellation exception from step: {ex}");
step.ExecutionContext.Error(ex);
step.ExecutionContext.Result = TaskResult.Canceled;
}
}
catch (Exception ex)
{
// Log the error and fail the step.
Trace.Error($"Caught exception from step: {ex}");
step.ExecutionContext.Error(ex);
step.ExecutionContext.Result = TaskResult.Failed;
}

// Merge execution context result with command result
if (step.ExecutionContext.CommandResult != null)
{
step.ExecutionContext.Result = Common.Util.TaskResultUtil.MergeTaskResults(step.ExecutionContext.Result, step.ExecutionContext.CommandResult.Value);
}

Trace.Info($"Step result: {step.ExecutionContext.Result}");

// Complete the step context.
step.ExecutionContext.Debug($"Finishing: {step.DisplayName}");
}
}
}
@@ -1,53 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using GitHub.DistributedTask.ObjectTemplating.Schema;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using Pipelines = GitHub.DistributedTask.Pipelines;

namespace GitHub.Runner.Worker.Handlers
{
[ServiceLocator(Default = typeof(CompositeActionOutputHandler))]
public interface ICompositeActionOutputHandler : IHandler
{
CompositeActionExecutionData Data { get; set; }
}

public sealed class CompositeActionOutputHandler : Handler, ICompositeActionOutputHandler
{
public CompositeActionExecutionData Data { get; set; }


public Task RunAsync(ActionRunStage stage)
{
// Evaluate the mapped outputs value
if (Data.Outputs != null)
{
// Evaluate the outputs in the steps context to easily retrieve the values
var actionManifestManager = HostContext.GetService<IActionManifestManager>();

// Format ExpressionValues to Dictionary<string, PipelineContextData>
var evaluateContext = new Dictionary<string, PipelineContextData>(StringComparer.OrdinalIgnoreCase);
foreach (var pair in ExecutionContext.ExpressionValues)
{
evaluateContext[pair.Key] = pair.Value;
}

// Get the evluated composite outputs' values mapped to the outputs named
DictionaryContextData actionOutputs = actionManifestManager.EvaluateCompositeOutputs(ExecutionContext, Data.Outputs, evaluateContext);

// Set the outputs for the outputs object in the whole composite action
actionManifestManager.SetAllCompositeOutputs(ExecutionContext.FinalizeContext, actionOutputs);
}

return Task.CompletedTask;
}
}
}
@@ -49,8 +49,9 @@ namespace GitHub.Runner.Worker.Handlers
// ensure docker file exist
var dockerFile = Path.Combine(ActionDirectory, Data.Image);
ArgUtil.File(dockerFile, nameof(Data.Image));
ExecutionContext.Output($"Dockerfile for action: '{dockerFile}'.");

ExecutionContext.Output($"##[group]Building docker image");
ExecutionContext.Output($"Dockerfile for action: '{dockerFile}'.");
var imageName = $"{dockerManger.DockerInstanceLabel}:{ExecutionContext.Id.ToString("N")}";
var buildExitCode = await dockerManger.DockerBuild(
ExecutionContext,
@@ -58,6 +59,8 @@ namespace GitHub.Runner.Worker.Handlers
dockerFile,
Directory.GetParent(dockerFile).FullName,
imageName);
ExecutionContext.Output("##[endgroup]");

if (buildExitCode != 0)
{
throw new InvalidOperationException($"Docker build failed with exit code {buildExitCode}");
@@ -67,7 +70,7 @@ namespace GitHub.Runner.Worker.Handlers
}

// run container
var container = new ContainerInfo()
var container = new ContainerInfo(HostContext)
{
ContainerImage = Data.Image,
ContainerName = ExecutionContext.Id.ToString("N"),
@@ -158,16 +161,21 @@ namespace GitHub.Runner.Worker.Handlers
Directory.CreateDirectory(tempHomeDirectory);
this.Environment["HOME"] = tempHomeDirectory;

var tempFileCommandDirectory = Path.Combine(tempDirectory, "_runner_file_commands");
ArgUtil.Directory(tempFileCommandDirectory, nameof(tempFileCommandDirectory));

var tempWorkflowDirectory = Path.Combine(tempDirectory, "_github_workflow");
ArgUtil.Directory(tempWorkflowDirectory, nameof(tempWorkflowDirectory));

container.MountVolumes.Add(new MountVolume("/var/run/docker.sock", "/var/run/docker.sock"));
container.MountVolumes.Add(new MountVolume(tempHomeDirectory, "/github/home"));
container.MountVolumes.Add(new MountVolume(tempWorkflowDirectory, "/github/workflow"));
container.MountVolumes.Add(new MountVolume(tempFileCommandDirectory, "/github/file_commands"));
container.MountVolumes.Add(new MountVolume(defaultWorkingDirectory, "/github/workspace"));

container.AddPathTranslateMapping(tempHomeDirectory, "/github/home");
container.AddPathTranslateMapping(tempWorkflowDirectory, "/github/workflow");
container.AddPathTranslateMapping(tempFileCommandDirectory, "/github/file_commands");
container.AddPathTranslateMapping(defaultWorkingDirectory, "/github/workspace");

container.ContainerWorkDirectory = "/github/workspace";
@@ -185,7 +193,7 @@ namespace GitHub.Runner.Worker.Handlers
}

// Add Actions Runtime server info
var systemConnection = ExecutionContext.Endpoints.Single(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
var systemConnection = ExecutionContext.Global.Endpoints.Single(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
Environment["ACTIONS_RUNTIME_URL"] = systemConnection.Url.AbsoluteUri;
Environment["ACTIONS_RUNTIME_TOKEN"] = systemConnection.Authorization.Parameters[EndpointAuthorizationParameters.AccessToken];
if (systemConnection.Data.TryGetValue("CacheServerUrl", out var cacheUrl) && !string.IsNullOrEmpty(cacheUrl))
@@ -148,14 +148,14 @@ namespace GitHub.Runner.Worker.Handlers
{
// Validate args.
Trace.Entering();
ArgUtil.NotNull(ExecutionContext.PrependPath, nameof(ExecutionContext.PrependPath));
if (ExecutionContext.PrependPath.Count == 0)
ArgUtil.NotNull(ExecutionContext.Global.PrependPath, nameof(ExecutionContext.Global.PrependPath));
if (ExecutionContext.Global.PrependPath.Count == 0)
{
return;
}

// Prepend path.
string prepend = string.Join(Path.PathSeparator.ToString(), ExecutionContext.PrependPath.Reverse<string>());
string prepend = string.Join(Path.PathSeparator.ToString(), ExecutionContext.Global.PrependPath.Reverse<string>());
var containerStepHost = StepHost as ContainerStepHost;
if (containerStepHost != null)
{

@@ -68,16 +68,8 @@ namespace GitHub.Runner.Worker.Handlers
}
else if (data.ExecutionType == ActionExecutionType.Composite)
{
if (executionContext.FinalizeContext == null)
{
handler = HostContext.CreateService<ICompositeActionHandler>();
(handler as ICompositeActionHandler).Data = data as CompositeActionExecutionData;
}
else
{
handler = HostContext.CreateService<ICompositeActionOutputHandler>();
(handler as ICompositeActionOutputHandler).Data = data as CompositeActionExecutionData;
}
handler = HostContext.CreateService<ICompositeActionHandler>();
(handler as ICompositeActionHandler).Data = data as CompositeActionExecutionData;
}
else
{

@@ -46,7 +46,7 @@ namespace GitHub.Runner.Worker.Handlers
}

// Add Actions Runtime server info
var systemConnection = ExecutionContext.Endpoints.Single(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
var systemConnection = ExecutionContext.Global.Endpoints.Single(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
Environment["ACTIONS_RUNTIME_URL"] = systemConnection.Url.AbsoluteUri;
Environment["ACTIONS_RUNTIME_TOKEN"] = systemConnection.Authorization.Parameters[EndpointAuthorizationParameters.AccessToken];
if (systemConnection.Data.TryGetValue("CacheServerUrl", out var cacheUrl) && !string.IsNullOrEmpty(cacheUrl))
@@ -113,7 +113,7 @@ namespace GitHub.Runner.Worker.Handlers
requireExitCodeZero: false,
outputEncoding: outputEncoding,
killProcessOnCancel: false,
inheritConsoleHandler: !ExecutionContext.Variables.Retain_Default_Encoding,
inheritConsoleHandler: !ExecutionContext.Global.Variables.Retain_Default_Encoding,
cancellationToken: ExecutionContext.CancellationToken);

// Wait for either the node exit or force finish through ##vso command

@@ -31,7 +31,7 @@ namespace GitHub.Runner.Worker.Handlers
{
_executionContext = executionContext;
_commandManager = commandManager;
_container = container ?? executionContext.Container;
_container = container ?? executionContext.Global.Container;

// Recursion failsafe (test override)
var failsafeString = Environment.GetEnvironmentVariable("RUNNER_TEST_GET_REPOSITORY_PATH_FAILSAFE");
@@ -41,7 +41,7 @@ namespace GitHub.Runner.Worker.Handlers
}

// Determine the timeout
var timeoutStr = _executionContext.Variables.Get(_timeoutKey);
var timeoutStr = _executionContext.Global.Variables.Get(_timeoutKey);
if (string.IsNullOrEmpty(timeoutStr) ||
!TimeSpan.TryParse(timeoutStr, CultureInfo.InvariantCulture, out _timeout) ||
_timeout <= TimeSpan.Zero)
@@ -23,6 +23,19 @@ namespace GitHub.Runner.Worker.Handlers

public override void PrintActionDetails(ActionRunStage stage)
{
// We don't want to display the internal workings if composite (similar/equivalent information can be found in debug)
void writeDetails(string message)
{
if (ExecutionContext.InsideComposite)
{
ExecutionContext.Debug(message);
}
else
{
ExecutionContext.Output(message);
}
}

if (stage == ActionRunStage.Post)
{
throw new NotSupportedException("Script action should not have 'Post' job action.");
@@ -39,7 +52,7 @@ namespace GitHub.Runner.Worker.Handlers
firstLine = firstLine.Substring(0, firstNewLine);
}

ExecutionContext.Output($"##[group]Run {firstLine}");
writeDetails(ExecutionContext.InsideComposite ? $"Run {firstLine}" : $"##[group]Run {firstLine}");
}
else
{
@@ -50,20 +63,20 @@ namespace GitHub.Runner.Worker.Handlers
foreach (var line in multiLines)
{
// Bright Cyan color
ExecutionContext.Output($"\x1b[36;1m{line}\x1b[0m");
writeDetails($"\x1b[36;1m{line}\x1b[0m");
}

string argFormat;
string shellCommand;
string shellCommandPath = null;
bool validateShellOnHost = !(StepHost is ContainerStepHost);
string prependPath = string.Join(Path.PathSeparator.ToString(), ExecutionContext.PrependPath.Reverse<string>());
string prependPath = string.Join(Path.PathSeparator.ToString(), ExecutionContext.Global.PrependPath.Reverse<string>());
string shell = null;
if (!Inputs.TryGetValue("shell", out shell) || string.IsNullOrEmpty(shell))
{
// TODO: figure out how defaults interact with template later
// for now, we won't check job.defaults if we are inside a template.
if (string.IsNullOrEmpty(ExecutionContext.ScopeName) && ExecutionContext.JobDefaults.TryGetValue("run", out var runDefaults))
if (string.IsNullOrEmpty(ExecutionContext.ScopeName) && ExecutionContext.Global.JobDefaults.TryGetValue("run", out var runDefaults))
{
runDefaults.TryGetValue("shell", out shell);
}
@@ -109,23 +122,23 @@ namespace GitHub.Runner.Worker.Handlers

if (!string.IsNullOrEmpty(shellCommandPath))
{
ExecutionContext.Output($"shell: {shellCommandPath} {argFormat}");
writeDetails($"shell: {shellCommandPath} {argFormat}");
}
else
{
ExecutionContext.Output($"shell: {shellCommand} {argFormat}");
writeDetails($"shell: {shellCommand} {argFormat}");
}

if (this.Environment?.Count > 0)
{
ExecutionContext.Output("env:");
writeDetails("env:");
foreach (var env in this.Environment)
{
ExecutionContext.Output($" {env.Key}: {env.Value}");
writeDetails($" {env.Key}: {env.Value}");
}
}

ExecutionContext.Output("##[endgroup]");
writeDetails(ExecutionContext.InsideComposite ? "" : "##[endgroup]");
}

public async Task RunAsync(ActionRunStage stage)
@@ -151,9 +164,7 @@ namespace GitHub.Runner.Worker.Handlers
string workingDirectory = null;
if (!Inputs.TryGetValue("workingDirectory", out workingDirectory))
{
// TODO: figure out how defaults interact with template later
// for now, we won't check job.defaults if we are inside a template.
if (string.IsNullOrEmpty(ExecutionContext.ScopeName) && ExecutionContext.JobDefaults.TryGetValue("run", out var runDefaults))
if (string.IsNullOrEmpty(ExecutionContext.ScopeName) && ExecutionContext.Global.JobDefaults.TryGetValue("run", out var runDefaults))
{
if (runDefaults.TryGetValue("working-directory", out workingDirectory))
{
@@ -167,9 +178,7 @@ namespace GitHub.Runner.Worker.Handlers
string shell = null;
if (!Inputs.TryGetValue("shell", out shell) || string.IsNullOrEmpty(shell))
{
// TODO: figure out how defaults interact with template later
// for now, we won't check job.defaults if we are inside a template.
if (string.IsNullOrEmpty(ExecutionContext.ScopeName) && ExecutionContext.JobDefaults.TryGetValue("run", out var runDefaults))
if (string.IsNullOrEmpty(ExecutionContext.ScopeName) && ExecutionContext.Global.JobDefaults.TryGetValue("run", out var runDefaults))
{
if (runDefaults.TryGetValue("shell", out shell))
{
@@ -180,7 +189,7 @@ namespace GitHub.Runner.Worker.Handlers

var isContainerStepHost = StepHost is ContainerStepHost;

string prependPath = string.Join(Path.PathSeparator.ToString(), ExecutionContext.PrependPath.Reverse<string>());
string prependPath = string.Join(Path.PathSeparator.ToString(), ExecutionContext.Global.PrependPath.Reverse<string>());
string commandPath, argFormat, shellCommand;
// Set up default command and arguments
if (string.IsNullOrEmpty(shell))
@@ -232,7 +241,7 @@ namespace GitHub.Runner.Worker.Handlers
#if OS_WINDOWS
// Normalize Windows line endings
contents = contents.Replace("\r\n", "\n").Replace("\n", "\r\n");
var encoding = ExecutionContext.Variables.Retain_Default_Encoding && Console.InputEncoding.CodePage != 65001
var encoding = ExecutionContext.Global.Variables.Retain_Default_Encoding && Console.InputEncoding.CodePage != 65001
? Console.InputEncoding
: new UTF8Encoding(false);
#else
@@ -285,7 +294,7 @@ namespace GitHub.Runner.Worker.Handlers
requireExitCodeZero: false,
outputEncoding: null,
killProcessOnCancel: false,
inheritConsoleHandler: !ExecutionContext.Variables.Retain_Default_Encoding,
inheritConsoleHandler: !ExecutionContext.Global.Variables.Retain_Default_Encoding,
cancellationToken: ExecutionContext.CancellationToken);

// Error
@@ -5,6 +5,7 @@ using System.Globalization;
using System.IO;
using System.Linq;
using System.Runtime.Serialization;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.Expressions2;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
@@ -41,6 +42,8 @@ namespace GitHub.Runner.Worker
private readonly HashSet<string> _existingProcesses = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
private bool _processCleanup;
private string _processLookupId = $"github_{Guid.NewGuid()}";
private CancellationTokenSource _diskSpaceCheckToken = new CancellationTokenSource();
private Task _diskSpaceCheckTask = null;

// Download all required actions.
// Make sure all condition inputs are valid.
@@ -74,6 +77,10 @@ namespace GitHub.Runner.Worker
{
// print out HostName for self-hosted runner
context.Output($"Runner name: '{setting.AgentName}'");
if (message.Variables.TryGetValue("system.runnerGroupName", out VariableValue runnerGroupName))
{
context.Output($"Runner group name: '{runnerGroupName.Value}'");
}
context.Output($"Machine name: '{Environment.MachineName}'");
}
}
@@ -115,6 +122,26 @@ namespace GitHub.Runner.Worker
}
}

try
{
var tokenPermissions = jobContext.Global.Variables.Get("system.github.token.permissions") ?? "";
if (!string.IsNullOrEmpty(tokenPermissions))
{
context.Output($"##[group]GITHUB_TOKEN Permissions");
var permissions = StringUtil.ConvertFromJson<Dictionary<string, string>>(tokenPermissions);
foreach(KeyValuePair<string, string> entry in permissions)
{
context.Output($"{entry.Key}: {entry.Value}");
}
context.Output("##[endgroup]");
}
}
catch (Exception ex)
{
context.Output($"Fail to parse and display GITHUB_TOKEN permissions list: {ex.Message}");
Trace.Error(ex);
}

var repoFullName = context.GetGitHubContext("repository");
ArgUtil.NotNull(repoFullName, nameof(repoFullName));
context.Debug($"Primary repository: {repoFullName}");
@@ -162,7 +189,7 @@ namespace GitHub.Runner.Worker
var environmentVariables = templateEvaluator.EvaluateStepEnvironment(token, jobContext.ExpressionValues, jobContext.ExpressionFunctions, VarUtil.EnvironmentVariableKeyComparer);
foreach (var pair in environmentVariables)
{
context.EnvironmentVariables[pair.Key] = pair.Value ?? string.Empty;
context.Global.EnvironmentVariables[pair.Key] = pair.Value ?? string.Empty;
context.SetEnvContext(pair.Key, pair.Value ?? string.Empty);
}
}
@@ -172,7 +199,7 @@ namespace GitHub.Runner.Worker
var container = templateEvaluator.EvaluateJobContainer(message.JobContainer, jobContext.ExpressionValues, jobContext.ExpressionFunctions);
if (container != null)
{
jobContext.Container = new Container.ContainerInfo(HostContext, container);
jobContext.Global.Container = new Container.ContainerInfo(HostContext, container);
}

// Evaluate the job service containers
@@ -184,7 +211,7 @@ namespace GitHub.Runner.Worker
{
var networkAlias = pair.Key;
var serviceContainer = pair.Value;
jobContext.ServiceContainers.Add(new Container.ContainerInfo(HostContext, serviceContainer, false, networkAlias));
jobContext.Global.ServiceContainers.Add(new Container.ContainerInfo(HostContext, serviceContainer, false, networkAlias));
}
}

@@ -195,14 +222,14 @@ namespace GitHub.Runner.Worker
var defaults = token.AssertMapping("defaults");
if (defaults.Any(x => string.Equals(x.Key.AssertString("defaults key").Value, "run", StringComparison.OrdinalIgnoreCase)))
{
context.JobDefaults["run"] = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
context.Global.JobDefaults["run"] = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
var defaultsRun = defaults.First(x => string.Equals(x.Key.AssertString("defaults key").Value, "run", StringComparison.OrdinalIgnoreCase));
var jobDefaults = templateEvaluator.EvaluateJobDefaultsRun(defaultsRun.Value, jobContext.ExpressionValues, jobContext.ExpressionFunctions);
foreach (var pair in jobDefaults)
{
if (!string.IsNullOrEmpty(pair.Value))
{
context.JobDefaults["run"][pair.Key] = pair.Value;
context.Global.JobDefaults["run"][pair.Key] = pair.Value;
}
}
}
@@ -216,15 +243,15 @@ namespace GitHub.Runner.Worker
preJobSteps.AddRange(prepareResult.ContainerSetupSteps);

// Add start-container steps, record and stop-container steps
if (jobContext.Container != null || jobContext.ServiceContainers.Count > 0)
if (jobContext.Global.Container != null || jobContext.Global.ServiceContainers.Count > 0)
{
var containerProvider = HostContext.GetService<IContainerOperationProvider>();
var containers = new List<Container.ContainerInfo>();
if (jobContext.Container != null)
if (jobContext.Global.Container != null)
{
containers.Add(jobContext.Container);
containers.Add(jobContext.Global.Container);
}
containers.AddRange(jobContext.ServiceContainers);
containers.AddRange(jobContext.Global.ServiceContainers);

preJobSteps.Add(new JobExtensionRunner(runAsync: containerProvider.StartContainersAsync,
condition: $"{PipelineTemplateConstants.Success}()",
@@ -296,7 +323,7 @@ namespace GitHub.Runner.Worker
{
ArgUtil.NotNull(actionStep, step.DisplayName);
intraActionStates.TryGetValue(actionStep.Action.Id, out var intraActionState);
actionStep.ExecutionContext = jobContext.CreateChild(actionStep.Action.Id, actionStep.DisplayName, actionStep.Action.Name, actionStep.Action.ScopeName, actionStep.Action.ContextName, intraActionState);
actionStep.ExecutionContext = jobContext.CreateChild(actionStep.Action.Id, actionStep.DisplayName, actionStep.Action.Name, null, actionStep.Action.ContextName, intraActionState);
}
}

@@ -305,7 +332,7 @@ namespace GitHub.Runner.Worker
steps.AddRange(jobSteps);

// Prepare for orphan process cleanup
_processCleanup = jobContext.Variables.GetBoolean("process.clean") ?? true;
_processCleanup = jobContext.Global.Variables.GetBoolean("process.clean") ?? true;
if (_processCleanup)
{
// Set the RUNNER_TRACKING_ID env variable.
@@ -321,6 +348,12 @@ namespace GitHub.Runner.Worker
}
}

jobContext.Global.EnvironmentVariables.TryGetValue(Constants.Runner.Features.DiskSpaceWarning, out var enableWarning);
if (StringUtil.ConvertToBoolean(enableWarning, defaultValue: true))
{
_diskSpaceCheckTask = CheckDiskSpaceAsync(context, _diskSpaceCheckToken.Token);
}

return steps;
}
catch (OperationCanceledException ex) when (jobContext.CancellationToken.IsCancellationRequested)
@@ -331,6 +364,14 @@ namespace GitHub.Runner.Worker
context.Result = TaskResult.Canceled;
throw;
}
catch (FailedToResolveActionDownloadInfoException ex)
{
// Log the error and fail the JobExtension Initialization.
Trace.Error($"Caught exception from JobExtenion Initialization: {ex}");
context.InfrastructureError(ex.Message);
context.Result = TaskResult.Failed;
throw;
}
catch (Exception ex)
{
// Log the error and fail the JobExtension Initialization.
@@ -361,6 +402,24 @@ namespace GitHub.Runner.Worker
context.Start();
context.Debug("Starting: Complete job");

Trace.Info("Initialize Env context");

#if OS_WINDOWS
var envContext = new DictionaryContextData();
#else
var envContext = new CaseSensitiveDictionaryContextData();
#endif
context.ExpressionValues["env"] = envContext;
foreach (var pair in context.Global.EnvironmentVariables)
{
envContext[pair.Key] = new StringContextData(pair.Value ?? string.Empty);
}

// Populate env context for each step
Trace.Info("Initialize steps context");
context.ExpressionValues["steps"] = context.Global.StepsContext.GetScope(context.ScopeName);

var templateEvaluator = context.ToPipelineTemplateEvaluator();
// Evaluate job outputs
if (message.JobOutputs != null && message.JobOutputs.Type != TokenType.Null)
{
@@ -370,21 +429,7 @@ namespace GitHub.Runner.Worker

// Populate env context for each step
Trace.Info("Initialize Env context for evaluating job outputs");
#if OS_WINDOWS
var envContext = new DictionaryContextData();
#else
var envContext = new CaseSensitiveDictionaryContextData();
#endif
context.ExpressionValues["env"] = envContext;
foreach (var pair in context.EnvironmentVariables)
{
envContext[pair.Key] = new StringContextData(pair.Value ?? string.Empty);
}

Trace.Info("Initialize steps context for evaluating job outputs");
context.ExpressionValues["steps"] = context.StepsContext.GetScope(context.ScopeName);

var templateEvaluator = context.ToPipelineTemplateEvaluator();
var outputs = templateEvaluator.EvaluateJobOutput(message.JobOutputs, context.ExpressionValues, context.ExpressionFunctions);
foreach (var output in outputs)
{
@@ -413,7 +458,35 @@ namespace GitHub.Runner.Worker
}
}

if (context.Variables.GetBoolean(Constants.Variables.Actions.RunnerDebug) ?? false)
// Evaluate environment data
if (jobContext.ActionsEnvironment?.Url != null && jobContext.ActionsEnvironment?.Url.Type != TokenType.Null)
{
try
{
context.Output($"Evaluate and set environment url");

var environmentUrlToken = templateEvaluator.EvaluateEnvironmentUrl(jobContext.ActionsEnvironment.Url, context.ExpressionValues, context.ExpressionFunctions);
var environmentUrl = environmentUrlToken.AssertString("environment.url");
if (!string.Equals(environmentUrl.Value, HostContext.SecretMasker.MaskSecrets(environmentUrl.Value)))
{
context.Warning($"Skip setting environment url as environment '{jobContext.ActionsEnvironment.Name}' may contain secret.");
}
else
{
context.Output($"Evaluated environment url: {environmentUrl}");
jobContext.ActionsEnvironment.Url = environmentUrlToken;
}
}
catch (Exception ex)
{
context.Result = TaskResult.Failed;
context.Error($"Failed to evaluate environment url");
context.Error(ex);
jobContext.Result = TaskResultUtil.MergeTaskResults(jobContext.Result, TaskResult.Failed);
}
}

if (context.Global.Variables.GetBoolean(Constants.Variables.Actions.RunnerDebug) ?? false)
{
Trace.Info("Support log upload starting.");
context.Output("Uploading runner diagnostic logs");
@@ -485,6 +558,11 @@ namespace GitHub.Runner.Worker
}
}
}

if (_diskSpaceCheckTask != null)
{
_diskSpaceCheckToken.Cancel();
}
}
catch (Exception ex)
{
@@ -500,6 +578,39 @@ namespace GitHub.Runner.Worker
}
}

private async Task CheckDiskSpaceAsync(IExecutionContext context, CancellationToken token)
{
while (!token.IsCancellationRequested)
{
// Add warning when disk is lower than system.runner.lowdiskspacethreshold from service (default to 100 MB on service side)
var lowDiskSpaceThreshold = context.Global.Variables.GetInt(WellKnownDistributedTaskVariables.RunnerLowDiskspaceThreshold);
if (lowDiskSpaceThreshold == null)
{
Trace.Info($"Low diskspace warning is not enabled.");
return;
}
var workDirRoot = Directory.GetDirectoryRoot(HostContext.GetDirectory(WellKnownDirectory.Work));
var driveInfo = new DriveInfo(workDirRoot);
var freeSpaceInMB = driveInfo.AvailableFreeSpace / 1024 / 1024;
if (freeSpaceInMB < lowDiskSpaceThreshold)
{
var issue = new Issue() { Type = IssueType.Warning, Message = $"You are running out of disk space. The runner will stop working when the machine runs out of disk space. Free space left: {freeSpaceInMB} MB" };
issue.Data[Constants.Runner.InternalTelemetryIssueDataKey] = Constants.Runner.LowDiskSpace;
context.AddIssue(issue);
return;
}

try
{
await Task.Delay(10 * 1000, token);
}
catch (TaskCanceledException)
{
// ignore
}
}
}

private Dictionary<int, Process> SnapshotProcesses()
{
Dictionary<int, Process> snapshot = new Dictionary<int, Process>();
@@ -99,7 +99,7 @@ namespace GitHub.Runner.Worker
return await CompleteJobAsync(jobServer, jobContext, message, TaskResult.Failed);
}

if (jobContext.WriteDebug)
if (jobContext.Global.WriteDebug)
{
jobContext.SetRunnerContext("debug", "1");
}
@@ -152,7 +152,7 @@ namespace GitHub.Runner.Worker
{
foreach (var step in jobSteps)
{
jobContext.JobSteps.Add(step);
jobContext.JobSteps.Enqueue(step);
}

await stepsRunner.RunAsync(jobContext);
@@ -209,14 +209,14 @@ namespace GitHub.Runner.Worker
// Clean TEMP after finish process jobserverqueue, since there might be a pending fileupload still use the TEMP dir.
_tempDirectoryManager?.CleanupTempDirectory();

if (!jobContext.Features.HasFlag(PlanFeatures.JobCompletedPlanEvent))
if (!jobContext.Global.Features.HasFlag(PlanFeatures.JobCompletedPlanEvent))
{
Trace.Info($"Skip raise job completed event call from worker because Plan version is {message.Plan.Version}");
return result;
}

Trace.Info("Raising job completed event.");
var jobCompletedEvent = new JobCompletedEvent(message.RequestId, message.JobId, result, jobContext.JobOutputs);
var jobCompletedEvent = new JobCompletedEvent(message.RequestId, message.JobId, result, jobContext.JobOutputs, jobContext.ActionsEnvironment);

var completeJobRetryLimit = 5;
var exceptions = new List<Exception>();

@@ -100,12 +100,12 @@ namespace GitHub.Runner.Worker
RunnerActionPluginExecutionContext pluginContext = new RunnerActionPluginExecutionContext
{
Inputs = inputs,
Endpoints = context.Endpoints,
Endpoints = context.Global.Endpoints,
Context = context.ExpressionValues
};

// variables
foreach (var variable in context.Variables.AllVariables)
foreach (var variable in context.Global.Variables.AllVariables)
{
pluginContext.Variables[variable.Name] = new VariableValue(variable.Value, variable.Secret);
}

@@ -15,6 +15,14 @@ namespace GitHub.Runner.Worker
private static readonly Regex _propertyRegex = new Regex("^[a-zA-Z_][a-zA-Z0-9_]*$", RegexOptions.Compiled);
private readonly DictionaryContextData _contextData = new DictionaryContextData();

public void ClearScope(string scopeName)
{
if (_contextData.TryGetValue(scopeName, out _))
{
_contextData[scopeName] = new DictionaryContextData();
}
}

public DictionaryContextData GetScope(string scopeName)
{
if (scopeName == null)

@@ -59,18 +59,18 @@ namespace GitHub.Runner.Worker
checkPostJobActions = true;
while (jobContext.PostJobSteps.TryPop(out var postStep))
{
jobContext.JobSteps.Add(postStep);
jobContext.JobSteps.Enqueue(postStep);
}

continue;
}

var step = jobContext.JobSteps[0];
jobContext.JobSteps.RemoveAt(0);
var step = jobContext.JobSteps.Dequeue();

Trace.Info($"Processing step: DisplayName='{step.DisplayName}'");
ArgUtil.NotNull(step.ExecutionContext, nameof(step.ExecutionContext));
ArgUtil.NotNull(step.ExecutionContext.Variables, nameof(step.ExecutionContext.Variables));
ArgUtil.NotNull(step.ExecutionContext.Global, nameof(step.ExecutionContext.Global));
ArgUtil.NotNull(step.ExecutionContext.Global.Variables, nameof(step.ExecutionContext.Global.Variables));

// Start
step.ExecutionContext.Start();
@@ -82,7 +82,7 @@ namespace GitHub.Runner.Worker
step.ExecutionContext.ExpressionFunctions.Add(new FunctionInfo<SuccessFunction>(PipelineTemplateConstants.Success, 0, 0));
step.ExecutionContext.ExpressionFunctions.Add(new FunctionInfo<HashFilesFunction>(PipelineTemplateConstants.HashFiles, 1, byte.MaxValue));

step.ExecutionContext.ExpressionValues["steps"] = step.ExecutionContext.StepsContext.GetScope(step.ExecutionContext.ScopeName);
step.ExecutionContext.ExpressionValues["steps"] = step.ExecutionContext.Global.StepsContext.GetScope(step.ExecutionContext.ScopeName);

// Populate env context for each step
Trace.Info("Initialize Env context for step");
@@ -93,25 +93,11 @@ namespace GitHub.Runner.Worker
#endif

// Global env
foreach (var pair in step.ExecutionContext.EnvironmentVariables)
foreach (var pair in step.ExecutionContext.Global.EnvironmentVariables)
{
envContext[pair.Key] = new StringContextData(pair.Value ?? string.Empty);
}

// Stomps over with outside step env
if (step.ExecutionContext.ExpressionValues.TryGetValue("env", out var envContextData))
{
#if OS_WINDOWS
var dict = envContextData as DictionaryContextData;
#else
var dict = envContextData as CaseSensitiveDictionaryContextData;
#endif
foreach (var pair in dict)
{
envContext[pair.Key] = pair.Value;
}
}

step.ExecutionContext.ExpressionValues["env"] = envContext;

bool evaluateStepEnvFailed = false;
@@ -300,40 +286,7 @@ namespace GitHub.Runner.Worker
step.ExecutionContext.SetTimeout(timeout);
}

#if OS_WINDOWS
try
{
if (Console.InputEncoding.CodePage != 65001)
{
using (var p = HostContext.CreateService<IProcessInvoker>())
{
// Use UTF8 code page
int exitCode = await p.ExecuteAsync(workingDirectory: HostContext.GetDirectory(WellKnownDirectory.Work),
fileName: WhichUtil.Which("chcp", true, Trace),
arguments: "65001",
environment: null,
requireExitCodeZero: false,
outputEncoding: null,
killProcessOnCancel: false,
redirectStandardIn: null,
inheritConsoleHandler: true,
cancellationToken: step.ExecutionContext.CancellationToken);
if (exitCode == 0)
{
Trace.Info("Successfully returned to code page 65001 (UTF8)");
}
else
{
Trace.Warning($"'chcp 65001' failed with exit code {exitCode}");
}
}
}
}
catch (Exception ex)
{
Trace.Warning($"'chcp 65001' failed with exception {ex.Message}");
}
#endif
await EncodingUtil.SetEncoding(HostContext, Trace, step.ExecutionContext.CancellationToken);

try
{
@@ -108,19 +108,26 @@
}
},
"composite-steps": {
"context": [
"github",
"strategy",
"matrix",
"steps",
"inputs",
"job",
"runner",
"env",
"hashFiles(1,255)"
],
"sequence": {
"item-type": "any"
"item-type": "composite-step"
}
},
"composite-step": {
"mapping": {
"properties": {
"name": "string-steps-context",
"id": "non-empty-string",
"run": {
"type": "string-steps-context",
"required": true
},
"env": "step-env",
"working-directory": "string-steps-context",
"shell": {
"type": "non-empty-string",
"required": true
}
}
}
},
"container-runs-context": {
@@ -157,6 +164,37 @@
"string": {
"require-non-empty": true
}
},
"string-steps-context": {
"context": [
"github",
"inputs",
"strategy",
"matrix",
"steps",
"job",
"runner",
"env",
"hashFiles(1,255)"
],
"string": {}
},
"step-env": {
"context": [
"github",
"inputs",
"strategy",
"matrix",
"steps",
"job",
"runner",
"env",
"hashFiles(1,255)"
],
"mapping": {
"loose-key-type": "non-empty-string",
"loose-value-type": "string"
}
}
}
}
}
src/Sdk/Common/Common/Utility/HashAlgorithmExtensions.cs (new file, 27 lines)
@@ -0,0 +1,27 @@
using System.IO;
using System.Security.Cryptography;
using System.Text;
using System.Threading.Tasks;

namespace GitHub.Services.Common
{
public static class HashAlgorithmExtensions
{
public static async Task<byte[]> ComputeHashAsync(this HashAlgorithm hashAlg, Stream inputStream)
{
byte[] buffer = new byte[4096];

while (true)
{
int read = await inputStream.ReadAsync(buffer, 0, buffer.Length);
if (read == 0)
break;

hashAlg.TransformBlock(buffer, 0, read, null, 0);
}

hashAlg.TransformFinalBlock(buffer, 0, 0);
return hashAlg.Hash;
}
}
}
@@ -85,5 +85,19 @@ namespace GitHub.Services.Common
var bytes = FromBase64StringNoPadding(base64String);
return BitConverter.ToString(bytes).Replace("-", String.Empty);
}

/// <summary>
/// Converts byte array into a hex string
/// </summary>
public static String ConvertToHexString(byte[] bytes)
{
// Convert byte array to string
var sBuilder = new StringBuilder();
for (int i = 0; i < bytes.Length; i++)
{
sBuilder.Append(bytes[i].ToString("x2"));
}
return sBuilder.ToString();
}
}
}
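For context, a minimal usage sketch of the two helpers added above (the stream-based ComputeHashAsync extension and ConvertToHexString). The file name, the SHA-256 choice, and the PrimitiveExtensions class name are assumptions for illustration; they are not taken from the diff.

using System;
using System.IO;
using System.Security.Cryptography;
using System.Threading.Tasks;
using GitHub.Services.Common;

class HashExample
{
    static async Task Main()
    {
        // "payload.bin" is a hypothetical input file, not something named in the diff.
        using var sha256 = SHA256.Create();
        using var stream = File.OpenRead("payload.bin");

        // Call the extension class directly to make clear the diff's helper is used
        // (newer frameworks also expose a built-in instance ComputeHashAsync).
        byte[] digest = await HashAlgorithmExtensions.ComputeHashAsync(sha256, stream);

        // PrimitiveExtensions is assumed to be the static class that hosts ConvertToHexString;
        // the hunk above does not show the containing class name.
        Console.WriteLine(PrimitiveExtensions.ConvertToHexString(digest));
    }
}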
@@ -587,6 +587,7 @@ namespace GitHub.DistributedTask.WebApi
/// <param name="packageType"></param>
/// <param name="platform"></param>
/// <param name="version"></param>
/// <param name="includeToken"></param>
/// <param name="userState"></param>
/// <param name="cancellationToken">The cancellation token to cancel operation.</param>
[EditorBrowsable(EditorBrowsableState.Never)]
@@ -594,6 +595,7 @@ namespace GitHub.DistributedTask.WebApi
string packageType,
string platform,
string version,
bool? includeToken = null,
object userState = null,
CancellationToken cancellationToken = default)
{
@@ -601,11 +603,18 @@ namespace GitHub.DistributedTask.WebApi
Guid locationId = new Guid("8ffcd551-079c-493a-9c02-54346299d144");
object routeValues = new { packageType = packageType, platform = platform, version = version };

List<KeyValuePair<string, string>> queryParams = new List<KeyValuePair<string, string>>();
if (includeToken != null)
{
queryParams.Add("includeToken", includeToken.Value.ToString());
}

return SendAsync<PackageMetadata>(
httpMethod,
locationId,
routeValues: routeValues,
version: new ApiResourceVersion(5.1, 2),
queryParameters: queryParams,
userState: userState,
cancellationToken: cancellationToken);
}
@@ -616,6 +625,7 @@ namespace GitHub.DistributedTask.WebApi
/// <param name="packageType"></param>
/// <param name="platform"></param>
/// <param name="top"></param>
/// <param name="includeToken"></param>
/// <param name="userState"></param>
/// <param name="cancellationToken">The cancellation token to cancel operation.</param>
[EditorBrowsable(EditorBrowsableState.Never)]
@@ -623,6 +633,7 @@ namespace GitHub.DistributedTask.WebApi
string packageType,
string platform = null,
int? top = null,
bool? includeToken = null,
object userState = null,
CancellationToken cancellationToken = default)
{
@@ -635,6 +646,10 @@ namespace GitHub.DistributedTask.WebApi
{
queryParams.Add("$top", top.Value.ToString(CultureInfo.InvariantCulture));
}
if (includeToken != null)
{
queryParams.Add("includeToken", includeToken.Value.ToString());
}

return SendAsync<List<PackageMetadata>>(
httpMethod,

@@ -37,6 +37,12 @@ namespace GitHub.DistributedTask.Logging
return Base64StringEscapeShift(value, 2);
}

// Used when we pass environment variables to docker to escape " with \"
public static String CommandLineArgumentEscape(String value)
{
return value.Replace("\"", "\\\"");
}

public static String ExpressionStringEscape(String value)
{
return Expressions2.Sdk.ExpressionUtility.StringEscape(value);
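A small illustrative sketch of how the CommandLineArgumentEscape helper added in the hunk above could be used when building a quoted docker argument. The ValueEncoders class name, the variable names, and the docker flag string are assumptions for the example, not part of the diff.

using System;
using GitHub.DistributedTask.Logging;

class EscapeExample
{
    static void Main()
    {
        // A value containing double quotes, e.g. an environment variable handed to docker.
        string raw = "say \"hello\"";

        // ValueEncoders is assumed to be the containing static class; the hunk does not show its name.
        string escaped = ValueEncoders.CommandLineArgumentEscape(raw);

        // The quotes are escaped as \" so the value survives inside a double-quoted argument.
        Console.WriteLine($"-e \"GREETING={escaped}\"");
    }
}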
@@ -52,6 +52,7 @@ namespace GitHub.DistributedTask.ObjectTemplating
internal const String String = "string";
internal const String StringDefinition = "string-definition";
internal const String StringDefinitionProperties = "string-definition-properties";
internal const String StringRunnerContextNoSecrets = "string-runner-context-no-secrets";
internal const String Structure = "structure";
internal const String TemplateSchema = "template-schema";
internal const String True = "true";

@@ -24,7 +24,6 @@ namespace GitHub.DistributedTask.Pipelines
Environment = actionToClone.Environment?.Clone();
Inputs = actionToClone.Inputs?.Clone();
ContextName = actionToClone?.ContextName;
ScopeName = actionToClone?.ScopeName;
DisplayNameToken = actionToClone.DisplayNameToken?.Clone();
}

@@ -41,9 +40,6 @@ namespace GitHub.DistributedTask.Pipelines
[DataMember(EmitDefaultValue = false)]
public TemplateToken DisplayNameToken { get; set; }

[DataMember(EmitDefaultValue = false)]
public String ScopeName { get; set; }

[DataMember(EmitDefaultValue = false)]
public String ContextName { get; set; }

@@ -39,10 +39,10 @@ namespace GitHub.DistributedTask.Pipelines
DictionaryContextData contextData,
WorkspaceOptions workspaceOptions,
IEnumerable<JobStep> steps,
IEnumerable<ContextScope> scopes,
IList<String> fileTable,
TemplateToken jobOutputs,
IList<TemplateToken> defaults)
IList<TemplateToken> defaults,
ActionsEnvironmentReference actionsEnvironment)
{
this.MessageType = JobRequestMessageTypes.PipelineAgentJobRequest;
this.Plan = plan;
@@ -55,16 +55,11 @@ namespace GitHub.DistributedTask.Pipelines
this.Resources = jobResources;
this.Workspace = workspaceOptions;
this.JobOutputs = jobOutputs;

this.ActionsEnvironment = actionsEnvironment;
m_variables = new Dictionary<String, VariableValue>(variables, StringComparer.OrdinalIgnoreCase);
m_maskHints = new List<MaskHint>(maskHints);
m_steps = new List<JobStep>(steps);

if (scopes != null)
{
m_scopes = new List<ContextScope>(scopes);
}

if (environmentVariables?.Count > 0)
{
m_environmentVariables = new List<TemplateToken>(environmentVariables);
@@ -234,6 +229,13 @@ namespace GitHub.DistributedTask.Pipelines
}
}

[DataMember(EmitDefaultValue = false)]
public ActionsEnvironmentReference ActionsEnvironment
{
get;
set;
}

/// <summary>
/// Gets the collection of variables associated with the current context.
/// </summary>
@@ -261,18 +263,6 @@ namespace GitHub.DistributedTask.Pipelines
}
}

public IList<ContextScope> Scopes
{
get
{
if (m_scopes == null)
{
m_scopes = new List<ContextScope>();
}
return m_scopes;
}
}

/// <summary>
/// Gets the table of files used when parsing the pipeline (e.g. yaml files)
/// </summary>
@@ -415,11 +405,6 @@ namespace GitHub.DistributedTask.Pipelines
m_maskHints = new List<MaskHint>(this.m_maskHints.Distinct());
}

if (m_scopes?.Count == 0)
{
m_scopes = null;
}

if (m_variables?.Count == 0)
{
m_variables = null;
@@ -447,9 +432,6 @@ namespace GitHub.DistributedTask.Pipelines
[DataMember(Name = "Steps", EmitDefaultValue = false)]
private List<JobStep> m_steps;

[DataMember(Name = "Scopes", EmitDefaultValue = false)]
private List<ContextScope> m_scopes;

[DataMember(Name = "Variables", EmitDefaultValue = false)]
private IDictionary<String, VariableValue> m_variables;
Some files were not shown because too many files have changed in this diff.