Mirror of https://github.com/actions/runner.git (synced 2025-12-10 12:36:23 +00:00)

Compare commits: users/tihu ... fhammerl/d (88 commits)
Commits in this comparison (SHA1):

079ee2afef, 46258428cd, eb9a604b63, 8792d8e5ee, 87e86e3d72, 48b6cd9a42, d081289ed5, 7d5e9cd70f,
98aa9c1152, ddc700e9eb, a0458aebfe, b2c6d093b2, 292a2e0ab3, 29cee52276, ad0d0c4d0a, 2c6064a655,
af6c8e6edd, c15d3f10b2, bdf1e90503, 100c99f263, e8ccafea63, 02d2cb8fcd, 0cbf3351f4, 6abef8199f,
ec9830836b, 460c32a337, 934027da60, 28f0027938, 17153c9b29, a65ac083b4, 882f36dcf8, f2578529b0,
bd77ccf34e, cb19da9638, d64190927f, 101b74cab6, c06da82ccd, 374989b280, 47fee8cd64, 85dcd93d98,
bac91075f4, 9240a1cf6c, 2946801fb6, 1a0d588d3a, 192ebfeccf, f2347b7a59, 8f160bc084, 47ba1203c9,
dc8b1b685f, 8eacbdc79f, 6b4a95cdb1, c95d5eae30, ea67ff9647, d7d38e173e, ac31fd10b2, d8251bf912,
715bb7cca8, 47dfebdf48, 7cb198a554, 7616e9b7aa, 3b8475de3e, ba9766d544, 29da60a538, f2e210e5f3,
fa32fcf2a1, 46da23edb1, 9bfbc48f45, ead1826afb, 9de17f197c, 45decac397, 55ed60b9fc, 698d3a2e66,
d0ab54ce45, 3e65909b81, 3ec20e989d, 231fdcb19d, bef164a12f, a519f96a41, b1ecffd707, 801a02ec89,
6332f9a42f, 5b8ff174c6, e3e977fd84, 4dc8a09db3, dcc5d34ad1, 3e34fb10c1, 23a693aa2c, eb36db8ff9
.github/ISSUE_TEMPLATE/bug_report.md (vendored, 10 changed lines)

@@ -1,12 +1,18 @@
---
name: Bug report
about: Create a report to help us improve
name: 🛑 Report a bug in the runner application
about: If you have issues with GitHub Actions, please follow the "support for GitHub Actions" link, below.
title: ''
labels: bug
assignees: ''

---

<!--
👋 You're opening a bug report against the GitHub Actions **runner application**.

🛑 Please stop if you're not certain that the bug you're seeing is in the runner application - if you have general problems with actions, workflows, or runners, please see the [GitHub Community Support Forum](https://github.community/c/code-to-cloud/52) which is actively monitored. Using the forum ensures that we route your problem to the correct team. 😃
-->

**Describe the bug**
A clear and concise description of what the bug is.
.github/ISSUE_TEMPLATE/config.yml (vendored, new file, 11 lines)

@@ -0,0 +1,11 @@
blank_issues_enabled: false
contact_links:
- name: ✅ Support for GitHub Actions
  url: https://github.community/c/code-to-cloud/52
  about: If you have questions about GitHub Actions or need support writing workflows, please ask in the GitHub Community Support forum.
- name: ✅ Feedback and suggestions for GitHub Actions
  url: https://github.com/github/feedback/discussions/categories/actions-and-packages-feedback
  about: If you have feedback or suggestions about GitHub Actions, please open a discussion (or add to an existing one) in the GitHub Actions Feedback. GitHub Actions Product Managers and Engineers monitor the feedback forum.
- name: ‼️ GitHub Security Bug Bounty
  url: https://bounty.github.com/
  about: Please report security vulnerabilities here.
.github/ISSUE_TEMPLATE/enhancement_request.md (vendored, 13 changed lines)

@@ -1,19 +1,24 @@
---
name: Feature Request
about: Create a request to help us improve
name: 🛑 Request a feature in the runner application
about: If you have feature requests for GitHub Actions, please use the "feedback and suggestions for GitHub Actions" link below.
title: ''
labels: enhancement
assignees: ''

---

Thank you 🙇♀ for wanting to create a feature in this repository. Before you do, please ensure you are filing the issue in the right place. Issues should only be opened on if the issue **relates to code in this repository**.
<!--
👋 You're opening a request for an enhancement in the GitHub Actions **runner application**.

🛑 Please stop if you're not certain that the feature you want is in the runner application - if you have a suggestion for improving GitHub Actions, please see the [GitHub Actions Feedback](https://github.com/github/feedback/discussions/categories/actions-and-packages-feedback) discussion forum which is actively monitored. Using the forum ensures that we route your problem to the correct team. 😃

Some additional useful links:
* If you have found a security issue [please submit it here](https://hackerone.com/github)
* If you have questions or issues with the service, writing workflows or actions, then please [visit the GitHub Community Forum's Actions Board](https://github.community/t5/GitHub-Actions/bd-p/actions)
* If you are having an issue or question about GitHub Actions then please [contact customer support](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/about-github-actions#contacting-support)
* If you are having an issue or have a question about GitHub Actions then please [contact customer support](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/about-github-actions#contacting-support)

If you have a feature request that is relevant to this repository, the runner, then please include the information below:
-->

**Describe the enhancement**
A clear and concise description of what the features or enhancement you need.
.github/workflows/build.yml (vendored, 43 changed lines)

@@ -18,7 +18,7 @@ jobs:
build:
strategy:
matrix:
runtime: [ linux-x64, linux-musl-x64, linux-arm64, linux-arm, win-x64, osx-x64 ]
runtime: [ linux-x64, linux-arm64, linux-arm, win-x64, osx-x64 ]
include:
- runtime: linux-x64
  os: ubuntu-latest
@@ -32,21 +32,17 @@ jobs:
  os: ubuntu-latest
  devScript: ./dev.sh

- runtime: linux-musl-x64
  os: ubuntu-latest
  devScript: ./dev.sh

- runtime: osx-x64
  os: macOS-latest
  devScript: ./dev.sh

- runtime: win-x64
  os: windows-latest
  os: windows-2019
  devScript: ./dev

runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v1
- uses: actions/checkout@v2

# Build runner layout
- name: Build & Layout Release
@@ -54,12 +50,35 @@ jobs:
    ${{ matrix.devScript }} layout Release ${{ matrix.runtime }}
  working-directory: src

# Check runtime/externals hash
- name: Compute/Compare runtime and externals Hash
  shell: bash
  run: |
    echo "Current dotnet runtime hash result: $DOTNET_RUNTIME_HASH"
    echo "Current Externals hash result: $EXTERNALS_HASH"

    NeedUpdate=0
    if [ "$EXTERNALS_HASH" != "$(cat ./src/Misc/contentHash/externals/${{ matrix.runtime }})" ] ;then
      echo Hash mismatch, Update ./src/Misc/contentHash/externals/${{ matrix.runtime }} to $EXTERNALS_HASH
      NeedUpdate=1
    fi

    if [ "$DOTNET_RUNTIME_HASH" != "$(cat ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }})" ] ;then
      echo Hash mismatch, Update ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }} to $DOTNET_RUNTIME_HASH
      NeedUpdate=1
    fi

    exit $NeedUpdate
  env:
    DOTNET_RUNTIME_HASH: ${{hashFiles('**/_layout_trims/runtime/**/*')}}
    EXTERNALS_HASH: ${{hashFiles('**/_layout_trims/externals/**/*')}}

# Run tests
- name: L0
  run: |
    ${{ matrix.devScript }} test
  working-directory: src
  if: matrix.runtime != 'linux-arm64' && matrix.runtime != 'linux-arm' && matrix.runtime != 'linux-musl-x64'
  if: matrix.runtime != 'linux-arm64' && matrix.runtime != 'linux-arm'

# Create runner package tar.gz/zip
- name: Package Release
@@ -71,7 +90,11 @@ jobs:
# Upload runner package tar.gz/zip as artifact
- name: Publish Artifact
  if: github.event_name != 'pull_request'
  uses: actions/upload-artifact@v1
  uses: actions/upload-artifact@v2
  with:
    name: runner-package-${{ matrix.runtime }}
    path: _package
    path: |
      _package
      _package_trims/trim_externals
      _package_trims/trim_runtime
      _package_trims/trim_runtime_externals
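The hash-check step above compares the `hashFiles()` result for the trimmed layout directories against the values committed under `src/Misc/contentHash/`. Purely as an illustration, a rough local equivalent of that comparison for one runtime might look like the sketch below; the directory names are taken from the workflow, but the hashing command is an assumption and does not reproduce the exact algorithm of the built-in `hashFiles()` expression.

```bash
#!/usr/bin/env bash
# Assumption-based sketch of the CI content-hash comparison for one runtime.
RUNTIME=linux-x64
# Hash every file, then hash the sorted list of per-file hashes.
computed=$(find "_layout_trims/externals" -type f -print0 | sort -z \
  | xargs -0 sha256sum | sha256sum | awk '{ print $1 }')
expected=$(cat "./src/Misc/contentHash/externals/${RUNTIME}")

if [ "$computed" != "$expected" ]; then
  echo "Hash mismatch: update ./src/Misc/contentHash/externals/${RUNTIME} to ${computed}"
  exit 1
fi
```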
.github/workflows/codeql.yml (vendored, 5 changed lines)

@@ -1,7 +1,12 @@
name: "Code Scanning - Action"

permissions:
  security-events: write

on:
  push:
    branches:
      - main
  pull_request:
  schedule:
    - cron: '0 0 * * 0'
.github/workflows/release.yml (vendored, 354 changed lines)

@@ -51,6 +51,21 @@ jobs:
linux-arm-sha: ${{ steps.sha.outputs.linux-arm-sha256 }}
win-x64-sha: ${{ steps.sha.outputs.win-x64-sha256 }}
osx-x64-sha: ${{ steps.sha.outputs.osx-x64-sha256 }}
linux-x64-sha-noexternals: ${{ steps.sha_noexternals.outputs.linux-x64-sha256 }}
linux-arm64-sha-noexternals: ${{ steps.sha_noexternals.outputs.linux-arm64-sha256 }}
linux-arm-sha-noexternals: ${{ steps.sha_noexternals.outputs.linux-arm-sha256 }}
win-x64-sha-noexternals: ${{ steps.sha_noexternals.outputs.win-x64-sha256 }}
osx-x64-sha-noexternals: ${{ steps.sha_noexternals.outputs.osx-x64-sha256 }}
linux-x64-sha-noruntime: ${{ steps.sha_noruntime.outputs.linux-x64-sha256 }}
linux-arm64-sha-noruntime: ${{ steps.sha_noruntime.outputs.linux-arm64-sha256 }}
linux-arm-sha-noruntime: ${{ steps.sha_noruntime.outputs.linux-arm-sha256 }}
win-x64-sha-noruntime: ${{ steps.sha_noruntime.outputs.win-x64-sha256 }}
osx-x64-sha-noruntime: ${{ steps.sha_noruntime.outputs.osx-x64-sha256 }}
linux-x64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.linux-x64-sha256 }}
linux-arm64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.linux-arm64-sha256 }}
linux-arm-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.linux-arm-sha256 }}
win-x64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.win-x64-sha256 }}
osx-x64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.osx-x64-sha256 }}
strategy:
  matrix:
    runtime: [ linux-x64, linux-arm64, linux-arm, win-x64, osx-x64 ]
@@ -72,12 +87,12 @@ jobs:
  devScript: ./dev.sh

- runtime: win-x64
  os: windows-latest
  os: windows-2019
  devScript: ./dev

runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v1
- uses: actions/checkout@v2

# Build runner layout
- name: Build & Layout Release
@@ -99,14 +114,6 @@ jobs:
    ${{ matrix.devScript }} package Release ${{ matrix.runtime }}
  working-directory: src

# Upload runner package tar.gz/zip as artifact.
# Since each package name is unique, so we don't need to put ${{matrix}} info into artifact name
- name: Publish Artifact
  if: github.event_name != 'pull_request'
  uses: actions/upload-artifact@v1
  with:
    name: runner-packages
    path: _package
# compute shas and set as job outputs to use in release notes
- run: brew install coreutils #needed for shasum util
  if: ${{ matrix.os == 'macOS-latest' }}
@@ -120,6 +127,91 @@ jobs:
  id: sha
  name: Compute SHA256
  working-directory: _package
- run: |
    file=$(ls)
    sha=$(sha256sum $file | awk '{ print $1 }')
    echo "Computed sha256: $sha for $file"
    echo "::set-output name=${{matrix.runtime}}-sha256::$sha"
    echo "::set-output name=sha256::$sha"
  shell: bash
  id: sha_noexternals
  name: Compute SHA256
  working-directory: _package_trims/trim_externals
- run: |
    file=$(ls)
    sha=$(sha256sum $file | awk '{ print $1 }')
    echo "Computed sha256: $sha for $file"
    echo "::set-output name=${{matrix.runtime}}-sha256::$sha"
    echo "::set-output name=sha256::$sha"
  shell: bash
  id: sha_noruntime
  name: Compute SHA256
  working-directory: _package_trims/trim_runtime
- run: |
    file=$(ls)
    sha=$(sha256sum $file | awk '{ print $1 }')
    echo "Computed sha256: $sha for $file"
    echo "::set-output name=${{matrix.runtime}}-sha256::$sha"
    echo "::set-output name=sha256::$sha"
  shell: bash
  id: sha_noruntime_noexternals
  name: Compute SHA256
  working-directory: _package_trims/trim_runtime_externals

- name: Create trimmedpackages.json for ${{ matrix.runtime }}
  if: matrix.runtime == 'win-x64'
  uses: actions/github-script@0.3.0
  with:
    github-token: ${{secrets.GITHUB_TOKEN}}
    script: |
      const core = require('@actions/core')
      const fs = require('fs');
      const runnerVersion = fs.readFileSync('src/runnerversion', 'utf8').replace(/\n$/g, '')
      var trimmedPackages = fs.readFileSync('src/Misc/trimmedpackages_zip.json', 'utf8').replace(/<RUNNER_VERSION>/g, runnerVersion).replace(/<RUNNER_PLATFORM>/g, '${{ matrix.runtime }}')
      trimmedPackages = trimmedPackages.replace(/<RUNTIME_HASH>/g, '${{hashFiles('**/_layout_trims/runtime/**/*')}}')
      trimmedPackages = trimmedPackages.replace(/<EXTERNALS_HASH>/g, '${{hashFiles('**/_layout_trims/externals/**/*')}}')

      trimmedPackages = trimmedPackages.replace(/<NO_RUNTIME_EXTERNALS_HASH>/g, '${{steps.sha_noruntime_noexternals.outputs.sha256}}')
      trimmedPackages = trimmedPackages.replace(/<NO_RUNTIME_HASH>/g, '${{steps.sha_noruntime.outputs.sha256}}')
      trimmedPackages = trimmedPackages.replace(/<NO_EXTERNALS_HASH>/g, '${{steps.sha_noexternals.outputs.sha256}}')

      console.log(trimmedPackages)
      fs.writeFileSync('${{ matrix.runtime }}-trimmedpackages.json', trimmedPackages)

- name: Create trimmedpackages.json for ${{ matrix.runtime }}
  if: matrix.runtime != 'win-x64'
  uses: actions/github-script@0.3.0
  with:
    github-token: ${{secrets.GITHUB_TOKEN}}
    script: |
      const core = require('@actions/core')
      const fs = require('fs');
      const runnerVersion = fs.readFileSync('src/runnerversion', 'utf8').replace(/\n$/g, '')
      var trimmedPackages = fs.readFileSync('src/Misc/trimmedpackages_targz.json', 'utf8').replace(/<RUNNER_VERSION>/g, runnerVersion).replace(/<RUNNER_PLATFORM>/g, '${{ matrix.runtime }}')
      trimmedPackages = trimmedPackages.replace(/<RUNTIME_HASH>/g, '${{hashFiles('**/_layout_trims/runtime/**/*')}}')
      trimmedPackages = trimmedPackages.replace(/<EXTERNALS_HASH>/g, '${{hashFiles('**/_layout_trims/externals/**/*')}}')

      trimmedPackages = trimmedPackages.replace(/<NO_RUNTIME_EXTERNALS_HASH>/g, '${{steps.sha_noruntime_noexternals.outputs.sha256}}')
      trimmedPackages = trimmedPackages.replace(/<NO_RUNTIME_HASH>/g, '${{steps.sha_noruntime.outputs.sha256}}')
      trimmedPackages = trimmedPackages.replace(/<NO_EXTERNALS_HASH>/g, '${{steps.sha_noexternals.outputs.sha256}}')

      console.log(trimmedPackages)
      fs.writeFileSync('${{ matrix.runtime }}-trimmedpackages.json', trimmedPackages)

# Upload runner package tar.gz/zip as artifact.
# Since each package name is unique, so we don't need to put ${{matrix}} info into artifact name
- name: Publish Artifact
  if: github.event_name != 'pull_request'
  uses: actions/upload-artifact@v2
  with:
    name: runner-packages
    path: |
      _package
      _package_trims/trim_externals
      _package_trims/trim_runtime
      _package_trims/trim_runtime_externals
      ${{ matrix.runtime }}-trimmedpackages.json

release:
  needs: build
  runs-on: ubuntu-latest
@@ -150,9 +242,35 @@ jobs:
releaseNote = releaseNote.replace(/<LINUX_X64_SHA>/g, '${{needs.build.outputs.linux-x64-sha}}')
releaseNote = releaseNote.replace(/<LINUX_ARM_SHA>/g, '${{needs.build.outputs.linux-arm-sha}}')
releaseNote = releaseNote.replace(/<LINUX_ARM64_SHA>/g, '${{needs.build.outputs.linux-arm64-sha}}')
releaseNote = releaseNote.replace(/<WIN_X64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.win-x64-sha-noexternals}}')
releaseNote = releaseNote.replace(/<OSX_X64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.osx-x64-sha-noexternals}}')
releaseNote = releaseNote.replace(/<LINUX_X64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.linux-x64-sha-noexternals}}')
releaseNote = releaseNote.replace(/<LINUX_ARM_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.linux-arm-sha-noexternals}}')
releaseNote = releaseNote.replace(/<LINUX_ARM64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.linux-arm64-sha-noexternals}}')
releaseNote = releaseNote.replace(/<WIN_X64_SHA_NORUNTIME>/g, '${{needs.build.outputs.win-x64-sha-noruntime}}')
releaseNote = releaseNote.replace(/<OSX_X64_SHA_NORUNTIME>/g, '${{needs.build.outputs.osx-x64-sha-noruntime}}')
releaseNote = releaseNote.replace(/<LINUX_X64_SHA_NORUNTIME>/g, '${{needs.build.outputs.linux-x64-sha-noruntime}}')
releaseNote = releaseNote.replace(/<LINUX_ARM_SHA_NORUNTIME>/g, '${{needs.build.outputs.linux-arm-sha-noruntime}}')
releaseNote = releaseNote.replace(/<LINUX_ARM64_SHA_NORUNTIME>/g, '${{needs.build.outputs.linux-arm64-sha-noruntime}}')
releaseNote = releaseNote.replace(/<WIN_X64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.win-x64-sha-noruntime-noexternals}}')
releaseNote = releaseNote.replace(/<OSX_X64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.osx-x64-sha-noruntime-noexternals}}')
releaseNote = releaseNote.replace(/<LINUX_X64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.linux-x64-sha-noruntime-noexternals}}')
releaseNote = releaseNote.replace(/<LINUX_ARM_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.linux-arm-sha-noruntime-noexternals}}')
releaseNote = releaseNote.replace(/<LINUX_ARM64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.linux-arm64-sha-noruntime-noexternals}}')
console.log(releaseNote)
core.setOutput('version', runnerVersion);
core.setOutput('note', releaseNote);

- name: Validate Packages HASH
  working-directory: _package
  run: |
    ls -l
    echo "${{needs.build.outputs.win-x64-sha}} actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}.zip" | shasum -a 256 -c
    echo "${{needs.build.outputs.osx-x64-sha}} actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}.tar.gz" | shasum -a 256 -c
    echo "${{needs.build.outputs.linux-x64-sha}} actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}.tar.gz" | shasum -a 256 -c
    echo "${{needs.build.outputs.linux-arm-sha}} actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}.tar.gz" | shasum -a 256 -c
    echo "${{needs.build.outputs.linux-arm64-sha}} actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz" | shasum -a 256 -c

# Create GitHub release
- uses: actions/create-release@master
  id: createRelease
@@ -165,14 +283,14 @@ jobs:
    body: |
      ${{ steps.releaseNote.outputs.note }}

# Upload release assets
# Upload release assets (full runner packages)
- name: Upload Release Asset (win-x64)
  uses: actions/upload-release-asset@v1.0.1
  env:
    GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  with:
    upload_url: ${{ steps.createRelease.outputs.upload_url }}
    asset_path: ${{ github.workspace }}/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}.zip
    asset_path: ${{ github.workspace }}/_package/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}.zip
    asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}.zip
    asset_content_type: application/octet-stream
@@ -182,7 +300,7 @@ jobs:
    GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  with:
    upload_url: ${{ steps.createRelease.outputs.upload_url }}
    asset_path: ${{ github.workspace }}/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
    asset_path: ${{ github.workspace }}/_package/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
    asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
    asset_content_type: application/octet-stream

@@ -192,7 +310,7 @@ jobs:
    GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  with:
    upload_url: ${{ steps.createRelease.outputs.upload_url }}
    asset_path: ${{ github.workspace }}/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
    asset_path: ${{ github.workspace }}/_package/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
    asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
    asset_content_type: application/octet-stream

@@ -202,7 +320,7 @@ jobs:
    GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  with:
    upload_url: ${{ steps.createRelease.outputs.upload_url }}
    asset_path: ${{ github.workspace }}/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}.tar.gz
    asset_path: ${{ github.workspace }}/_package/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}.tar.gz
    asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}.tar.gz
    asset_content_type: application/octet-stream
@@ -212,6 +330,210 @@ jobs:
    GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  with:
    upload_url: ${{ steps.createRelease.outputs.upload_url }}
    asset_path: ${{ github.workspace }}/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
    asset_path: ${{ github.workspace }}/_package/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
    asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
    asset_content_type: application/octet-stream

# Upload release assets (trim externals)
- name: Upload Release Asset (win-x64-noexternals)
  uses: actions/upload-release-asset@v1.0.1
  env:
    GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  with:
    upload_url: ${{ steps.createRelease.outputs.upload_url }}
    asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noexternals.zip
    asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noexternals.zip
    asset_content_type: application/octet-stream

- name: Upload Release Asset (linux-x64-noexternals)
  uses: actions/upload-release-asset@v1.0.1
  env:
    GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  with:
    upload_url: ${{ steps.createRelease.outputs.upload_url }}
    asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
    asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
    asset_content_type: application/octet-stream

- name: Upload Release Asset (osx-x64-noexternals)
  uses: actions/upload-release-asset@v1.0.1
  env:
    GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  with:
    upload_url: ${{ steps.createRelease.outputs.upload_url }}
    asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
    asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
    asset_content_type: application/octet-stream

- name: Upload Release Asset (linux-arm-noexternals)
  uses: actions/upload-release-asset@v1.0.1
  env:
    GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  with:
    upload_url: ${{ steps.createRelease.outputs.upload_url }}
    asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
    asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
    asset_content_type: application/octet-stream

- name: Upload Release Asset (linux-arm64-noexternals)
  uses: actions/upload-release-asset@v1.0.1
  env:
    GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  with:
    upload_url: ${{ steps.createRelease.outputs.upload_url }}
    asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
    asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
    asset_content_type: application/octet-stream
# Upload release assets (trim runtime)
- name: Upload Release Asset (win-x64-noruntime)
  uses: actions/upload-release-asset@v1.0.1
  env:
    GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  with:
    upload_url: ${{ steps.createRelease.outputs.upload_url }}
    asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noruntime.zip
    asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noruntime.zip
    asset_content_type: application/octet-stream

- name: Upload Release Asset (linux-x64-noruntime)
  uses: actions/upload-release-asset@v1.0.1
  env:
    GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  with:
    upload_url: ${{ steps.createRelease.outputs.upload_url }}
    asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
    asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
    asset_content_type: application/octet-stream

- name: Upload Release Asset (osx-x64-noruntime)
  uses: actions/upload-release-asset@v1.0.1
  env:
    GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  with:
    upload_url: ${{ steps.createRelease.outputs.upload_url }}
    asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
    asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
    asset_content_type: application/octet-stream

- name: Upload Release Asset (linux-arm-noruntime)
  uses: actions/upload-release-asset@v1.0.1
  env:
    GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  with:
    upload_url: ${{ steps.createRelease.outputs.upload_url }}
    asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
    asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
    asset_content_type: application/octet-stream

- name: Upload Release Asset (linux-arm64-noruntime)
  uses: actions/upload-release-asset@v1.0.1
  env:
    GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  with:
    upload_url: ${{ steps.createRelease.outputs.upload_url }}
    asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
    asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
    asset_content_type: application/octet-stream
# Upload release assets (trim runtime and externals)
- name: Upload Release Asset (win-x64-noruntime-noexternals)
  uses: actions/upload-release-asset@v1.0.1
  env:
    GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  with:
    upload_url: ${{ steps.createRelease.outputs.upload_url }}
    asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.zip
    asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.zip
    asset_content_type: application/octet-stream

- name: Upload Release Asset (linux-x64-noruntime-noexternals)
  uses: actions/upload-release-asset@v1.0.1
  env:
    GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  with:
    upload_url: ${{ steps.createRelease.outputs.upload_url }}
    asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
    asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
    asset_content_type: application/octet-stream

- name: Upload Release Asset (osx-x64-noruntime-noexternals)
  uses: actions/upload-release-asset@v1.0.1
  env:
    GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  with:
    upload_url: ${{ steps.createRelease.outputs.upload_url }}
    asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
    asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
    asset_content_type: application/octet-stream

- name: Upload Release Asset (linux-arm-noruntime-noexternals)
  uses: actions/upload-release-asset@v1.0.1
  env:
    GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  with:
    upload_url: ${{ steps.createRelease.outputs.upload_url }}
    asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
    asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
    asset_content_type: application/octet-stream

- name: Upload Release Asset (linux-arm64-noruntime-noexternals)
  uses: actions/upload-release-asset@v1.0.1
  env:
    GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  with:
    upload_url: ${{ steps.createRelease.outputs.upload_url }}
    asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
    asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
    asset_content_type: application/octet-stream
# Upload release assets (trimmedpackages.json)
- name: Upload Release Asset (win-x64-trimmedpackages.json)
  uses: actions/upload-release-asset@v1.0.1
  env:
    GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  with:
    upload_url: ${{ steps.createRelease.outputs.upload_url }}
    asset_path: ${{ github.workspace }}/win-x64-trimmedpackages.json
    asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
    asset_content_type: application/octet-stream

- name: Upload Release Asset (linux-x64-trimmedpackages.json)
  uses: actions/upload-release-asset@v1.0.1
  env:
    GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  with:
    upload_url: ${{ steps.createRelease.outputs.upload_url }}
    asset_path: ${{ github.workspace }}/linux-x64-trimmedpackages.json
    asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
    asset_content_type: application/octet-stream

- name: Upload Release Asset (osx-x64-trimmedpackages.json)
  uses: actions/upload-release-asset@v1.0.1
  env:
    GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  with:
    upload_url: ${{ steps.createRelease.outputs.upload_url }}
    asset_path: ${{ github.workspace }}/osx-x64-trimmedpackages.json
    asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
    asset_content_type: application/octet-stream

- name: Upload Release Asset (linux-arm-trimmedpackages.json)
  uses: actions/upload-release-asset@v1.0.1
  env:
    GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  with:
    upload_url: ${{ steps.createRelease.outputs.upload_url }}
    asset_path: ${{ github.workspace }}/linux-arm-trimmedpackages.json
    asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
    asset_content_type: application/octet-stream

- name: Upload Release Asset (linux-arm64-trimmedpackages.json)
  uses: actions/upload-release-asset@v1.0.1
  env:
    GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  with:
    upload_url: ${{ steps.createRelease.outputs.upload_url }}
    asset_path: ${{ github.workspace }}/linux-arm64-trimmedpackages.json
    asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
    asset_content_type: application/octet-stream
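A side note on the `::set-output` workflow commands used in the SHA-computation steps above: newer runner versions replace that command with the `$GITHUB_OUTPUT` environment file. Purely as an illustration, and not part of this diff, the same step outputs could be written as:

```bash
# Equivalent of `::set-output name=...::$sha` using the newer $GITHUB_OUTPUT mechanism.
echo "${{matrix.runtime}}-sha256=$sha" >> "$GITHUB_OUTPUT"
echo "sha256=$sha" >> "$GITHUB_OUTPUT"
```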
.gitignore (vendored, 2 changed lines)

@@ -19,7 +19,9 @@
node_modules
_downloads
_layout
_layout_trims
_package
_package_trims
_dotnetsdk
TestResults
TestLogs
.vscode/launch.json (vendored, 6 changed lines)

@@ -13,6 +13,7 @@
"cwd": "${workspaceFolder}/src",
"console": "integratedTerminal",
"requireExactSource": false,
"targetArchitecture": "x86_64"
},
{
"name": "Run",
@@ -25,6 +26,7 @@
"cwd": "${workspaceFolder}/src",
"console": "integratedTerminal",
"requireExactSource": false,
"targetArchitecture": "x86_64"
},
{
"name": "Configure",
@@ -38,6 +40,7 @@
"cwd": "${workspaceFolder}/src",
"console": "integratedTerminal",
"requireExactSource": false,
"targetArchitecture": "x86_64"
},
{
"name": "Debug Worker",
@@ -45,6 +48,7 @@
"request": "attach",
"processName": "Runner.Worker",
"requireExactSource": false,
"targetArchitecture": "x86_64"
},
{
"name": "Attach Debugger",
@@ -52,6 +56,8 @@
"request": "attach",
"processId": "${command:pickProcess}",
"requireExactSource": false,
"targetArchitecture": "x86_64"
},
],
}
@@ -5,7 +5,6 @@
# GitHub Actions Runner

[](https://github.com/actions/runner/actions)
[](https://github.com/actions/runner/actions)

The runner is the application that runs a job from a GitHub Actions workflow. It is used by GitHub Actions in the [hosted virtual environments](https://github.com/actions/virtual-environments), or you can [self-host the runner](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/about-self-hosted-runners) in your own environment.
@@ -10,7 +10,7 @@ Compilation failures during a CI build should surface good error messages.

For example, the actual compile errors from the typescript compiler should bubble as issues in the UI. And not simply "tsc exited with exit code 1".

VSCode has an extensible model for solving this type of problem. VSCode allows users to configure which problems matchers to use, when scanning output. For example, a user can apply the `tsc` problem matcher to receive a rich error output experience in VSCode, when compiling their typescript project.
VSCode has an extensible model for solving this type of problem. VSCode allows users to configure which [problems matchers](https://code.visualstudio.com/docs/editor/tasks#_defining-a-problem-matcher) to use, when scanning output. For example, a user can apply the `tsc` problem matcher to receive a rich error output experience in VSCode, when compiling their typescript project.

The problem-matcher concept fits well with "setup" actions. For example, the `setup-nodejs` action will download node.js, add it to the PATH, and register the `tsc` problem matcher. For the duration of the job, the `tsc` problem matcher will be applied against the output.

@@ -18,21 +18,23 @@ The problem-matcher concept fits well with "setup" actions. For example, the `se

### Registration

#### Using `##` command
#### Using `::` command

`##[add-matcher]path-to-problem-matcher-config.json`
`::add-matcher::path-to-problem-matcher-config.json`

Using a `##` command allows for flexibility:
Using a `::` command allows for flexibility:
- Ad hoc scripts can register problem matchers
- Allows problem matchers to be conditionally registered

Note, if a matcher with the same name is registered a second time, it will clobber the first instance.

#### Unregister using `##` command
Note, at some point the syntax changed from `##` to `::`.

#### Unregister using `::` command

A way out for rare cases where scoping is a problem.

`##[remove-matcher]owner`
`::remove-matcher::owner`

For this to be usable, the `owner` needs to be discoverable. Therefore, debug print the owner on registration.

@@ -104,7 +106,7 @@ message: ; expected
fromPath: C:\myrepo\myproject\ConsoleApp1\ClassLibrary1\ClassLibrary1.csproj
```

Additionally the line will appear red in the web UI (prefix with `##[error]`).
Additionally the line will appear red in the web UI (prefix with `::error`).

Note, an error does not imply task failure. Exit codes communicate failure.
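For context on what the config file referenced by `::add-matcher::` contains, a minimal problem-matcher might look like the sketch below, written as a shell heredoc so it can be created and registered in one step. The `owner` name, the file name, and the single-pattern regexp are illustrative assumptions and are not part of this ADR; the general shape follows the VSCode problem-matcher format the document links to, and the register/unregister commands use the syntax described above.

```bash
# Hypothetical example: write a minimal matcher config, then register it.
cat > my-matcher.json <<'EOF'
{
  "problemMatcher": [
    {
      "owner": "my-compiler",
      "pattern": [
        {
          "regexp": "^(.+):(\\d+):(\\d+): (error|warning): (.+)$",
          "file": 1, "line": 2, "column": 3, "severity": 4, "message": 5
        }
      ]
    }
  ]
}
EOF
echo "::add-matcher::my-matcher.json"
# Later, if scoping becomes a problem, unregister by owner (syntax from the ADR above).
echo "::remove-matcher::my-compiler"
```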
@@ -24,7 +24,7 @@ The runner will look for a file `.setup_info` under the runner's root directory,
}
]
```
The runner will use `##[group]` and `##[endgroup]` to fold all detail info into an expandable group.
The runner will use `::group` and `::endgroup` to fold all detail info into an expandable group.

Both [virtual-environments](https://github.com/actions/virtual-environments) and self-hosted runners can use this mechanism to add extra logging info to the `Set up job` step's log.
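As a quick illustration of the folding commands mentioned above (not part of this diff, and with an arbitrary example label), a step's output can be grouped like this:

```bash
# Everything between the two commands is collapsed into one expandable group in the job log.
echo "::group::Runner image details"
echo "Image build: 20220101.1"
echo "::endgroup::"
```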
@@ -6,13 +6,35 @@
Make sure the runner has access to actions service for GitHub.com or GitHub Enterprise Server

- For GitHub.com
  - The runner needs to access https://api.github.com for downloading actions.
  - The runner needs to access https://vstoken.actions.githubusercontent.com/_apis/.../ for requesting an access token.
  - The runner needs to access https://pipelines.actions.githubusercontent.com/_apis/.../ for receiving workflow jobs.
  - The runner needs to access `https://api.github.com` for downloading actions.
  - The runner needs to access `https://vstoken.actions.githubusercontent.com/_apis/.../` for requesting an access token.
  - The runner needs to access `https://pipelines.actions.githubusercontent.com/_apis/.../` for receiving workflow jobs.

These can be tested by running the following `curl` commands from your self-hosted runner machine:

```
curl -v https://api.github.com/api/v3/zen
curl -v https://vstoken.actions.githubusercontent.com/_apis/health
curl -v https://pipelines.actions.githubusercontent.com/_apis/health
```

- For GitHub Enterprise Server
  - The runner needs to access https://myGHES.com/api/v3 for downloading actions.
  - The runner needs to access https://myGHES.com/_services/vstoken/_apis/.../ for requesting an access token.
  - The runner needs to access https://myGHES.com/_services/pipelines/_apis/.../ for receiving workflow jobs.
  - The runner needs to access `https://[hostname]/api/v3` for downloading actions.
  - The runner needs to access `https://[hostname]/_services/vstoken/_apis/.../` for requesting an access token.
  - The runner needs to access `https://[hostname]/_services/pipelines/_apis/.../` for receiving workflow jobs.

These can be tested by running the following `curl` commands from your self-hosted runner machine, replacing `[hostname]` with the hostname of your appliance, for instance `github.example.com`:

```
curl -v https://[hostname]/api/v3/zen
curl -v https://[hostname]/_services/vstoken/_apis/health
curl -v https://[hostname]/_services/pipelines/_apis/health
```

A common cause of these connectivity issues is if your GitHub Enterprise Server appliance is using [the self-signed certificate that is enabled the first time](https://docs.github.com/en/enterprise-server/admin/configuration/configuring-network-settings/configuring-tls) your appliance is started. As self-signed certificates are not trusted by web browsers and Git clients, these clients (including the GitHub Actions runner) will report certificate warnings.

We recommend [uploading a certificate signed by a trusted authority](https://docs.github.com/en/enterprise-server/admin/configuration/configuring-network-settings/configuring-tls) to GitHub Enterprise Server, or enabling the built-in [Let's Encrypt support](https://docs.github.com/en/enterprise-server/admin/configuration/configuring-network-settings/configuring-tls).


## What is checked?

@@ -42,4 +64,4 @@ Make sure the runner has access to actions service for GitHub.com or GitHub Ente

## Still not working?

Contact GitHub customer service or log an issue at https://github.com/actions/runner if you think it's a runner issue.
Contact [GitHub Support](https://support.github.com) if you have further questions, or log an issue at https://github.com/actions/runner if you think it's a runner issue.
@@ -4,9 +4,9 @@

Make sure the built-in node.js has access to GitHub.com or GitHub Enterprise Server.

The runner carries it's own copy of node.js executable under `<runner_root>/externals/node12/`.
The runner carries its own copy of node.js executable under `<runner_root>/externals/node16/`.

All javascript base Actions will get executed by the built-in `node` at `<runner_root>/externals/node12/`.
All javascript base Actions will get executed by the built-in `node` at `<runner_root>/externals/node16/`.

> Not the `node` from `$PATH`
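To confirm which copy of node a self-hosted runner will use, and that it can reach the service, one possible check from the runner's root directory is sketched below; the `bin/node` path inside the bundled externals directory is an assumption about the layout, not something this doc states.

```bash
# Print the version of the bundled node and try a simple HTTPS request with it.
./externals/node16/bin/node -v
./externals/node16/bin/node -e 'require("https").get("https://api.github.com", r => console.log(r.statusCode))'
```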
@@ -23,6 +23,10 @@ An ADR is an Architectural Decision Record. This allows consensus on the direction.

Git for Windows and Linux [Install Here](https://git-scm.com/downloads) (needed for dev sh script)

cURL [Install here](https://curl.se/download.html) (needed for external sh script)

Visual Studio 2017 or newer [Install here](https://visualstudio.microsoft.com) (needed for dev sh script)

## Quickstart: Run a job from a real repository

If you just want to get from building the source code to using it to execute an action, you will need:
Binary file not shown (image changed; size 158 KiB before, 138 KiB after).
@@ -23,8 +23,8 @@ You might see something like this which indicate a dependency's missing.
./config.sh
libunwind.so.8 => not found
libunwind-x86_64.so.8 => not found
Dependencies is missing for Dotnet Core 3.0
Execute ./bin/installdependencies.sh to install any missing Dotnet Core 3.0 dependencies.
Dependencies is missing for Dotnet Core 6.0
Execute ./bin/installdependencies.sh to install any missing Dotnet Core 6.0 dependencies.
```
You can easily correct the problem by executing `./bin/installdependencies.sh`.
The `installdependencies.sh` script should install all required dependencies on all supported Linux versions.
@@ -1,19 +1,12 @@
## Features

- Expose GITHUB_REF_* as environment variable (#1314)
- Add arch to runner context (#1372)
- Support Conditional Steps in Composite Actions (#1438)
- Log current runner version in terminal (#1441)

## Bugs

- Makes the user keychains available to the service (#847)
- Use Actions Service health and api.github.com endpoints after connection failure on Actions Server and Hosted (#1385)
- Fix an issue where nested local composite actions did not correctly register post steps (#1433)
- Fixed a crash on runner startup (#1770)

## Misc

- Cleanup Older versions on MacOS now that we recreate node versions as needed (#1410)
- Clarified the type of step running when running job started or completed hooks (#1769)


## Windows x64
We recommend configuring the runner in a root folder of the Windows drive (e.g. "C:\actions-runner"). This will help avoid issues related to service identity folder permissions and long file path restrictions on Windows.
@@ -85,3 +78,21 @@ The SHA-256 checksums for the packages included in this build are shown below:
- actions-runner-linux-x64-<RUNNER_VERSION>.tar.gz <!-- BEGIN SHA linux-x64 --><LINUX_X64_SHA><!-- END SHA linux-x64 -->
- actions-runner-linux-arm64-<RUNNER_VERSION>.tar.gz <!-- BEGIN SHA linux-arm64 --><LINUX_ARM64_SHA><!-- END SHA linux-arm64 -->
- actions-runner-linux-arm-<RUNNER_VERSION>.tar.gz <!-- BEGIN SHA linux-arm --><LINUX_ARM_SHA><!-- END SHA linux-arm -->

- actions-runner-win-x64-<RUNNER_VERSION>-noexternals.zip <!-- BEGIN SHA win-x64_noexternals --><WIN_X64_SHA_NOEXTERNALS><!-- END SHA win-x64_noexternals -->
- actions-runner-osx-x64-<RUNNER_VERSION>-noexternals.tar.gz <!-- BEGIN SHA osx-x64_noexternals --><OSX_X64_SHA_NOEXTERNALS><!-- END SHA osx-x64_noexternals -->
- actions-runner-linux-x64-<RUNNER_VERSION>-noexternals.tar.gz <!-- BEGIN SHA linux-x64_noexternals --><LINUX_X64_SHA_NOEXTERNALS><!-- END SHA linux-x64_noexternals -->
- actions-runner-linux-arm64-<RUNNER_VERSION>-noexternals.tar.gz <!-- BEGIN SHA linux-arm64_noexternals --><LINUX_ARM64_SHA_NOEXTERNALS><!-- END SHA linux-arm64_noexternals -->
- actions-runner-linux-arm-<RUNNER_VERSION>-noexternals.tar.gz <!-- BEGIN SHA linux-arm_noexternals --><LINUX_ARM_SHA_NOEXTERNALS><!-- END SHA linux-arm_noexternals -->

- actions-runner-win-x64-<RUNNER_VERSION>-noruntime.zip <!-- BEGIN SHA win-x64_noruntime --><WIN_X64_SHA_NORUNTIME><!-- END SHA win-x64_noruntime -->
- actions-runner-osx-x64-<RUNNER_VERSION>-noruntime.tar.gz <!-- BEGIN SHA osx-x64_noruntime --><OSX_X64_SHA_NORUNTIME><!-- END SHA osx-x64_noruntime -->
- actions-runner-linux-x64-<RUNNER_VERSION>-noruntime.tar.gz <!-- BEGIN SHA linux-x64_noruntime --><LINUX_X64_SHA_NORUNTIME><!-- END SHA linux-x64_noruntime -->
- actions-runner-linux-arm64-<RUNNER_VERSION>-noruntime.tar.gz <!-- BEGIN SHA linux-arm64_noruntime --><LINUX_ARM64_SHA_NORUNTIME><!-- END SHA linux-arm64_noruntime -->
- actions-runner-linux-arm-<RUNNER_VERSION>-noruntime.tar.gz <!-- BEGIN SHA linux-arm_noruntime --><LINUX_ARM_SHA_NORUNTIME><!-- END SHA linux-arm_noruntime -->

- actions-runner-win-x64-<RUNNER_VERSION>-noruntime-noexternals.zip <!-- BEGIN SHA win-x64_noruntime_noexternals --><WIN_X64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA win-x64_noruntime_noexternals -->
- actions-runner-osx-x64-<RUNNER_VERSION>-noruntime-noexternals.tar.gz <!-- BEGIN SHA osx-x64_noruntime_noexternals --><OSX_X64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA osx-x64_noruntime_noexternals -->
- actions-runner-linux-x64-<RUNNER_VERSION>-noruntime-noexternals.tar.gz <!-- BEGIN SHA linux-x64_noruntime_noexternals --><LINUX_X64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA linux-x64_noruntime_noexternals -->
- actions-runner-linux-arm64-<RUNNER_VERSION>-noruntime-noexternals.tar.gz <!-- BEGIN SHA linux-arm64_noruntime_noexternals --><LINUX_ARM64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA linux-arm64_noruntime_noexternals -->
- actions-runner-linux-arm-<RUNNER_VERSION>-noruntime-noexternals.tar.gz <!-- BEGIN SHA linux-arm_noruntime_noexternals --><LINUX_ARM_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA linux-arm_noruntime_noexternals -->
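The checksum placeholders above are filled in by the release workflow, and the release job itself validates them with `shasum -a 256 -c` (see the `Validate Packages HASH` step earlier in this compare). A user downloading a package can run the same kind of check; the version and checksum below are placeholders taken from the template, not real values:

```bash
# Verify a downloaded runner package against the SHA-256 published in the release notes.
echo "<LINUX_X64_SHA>  actions-runner-linux-x64-<RUNNER_VERSION>.tar.gz" | shasum -a 256 -c
```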
@@ -13,7 +13,7 @@ set -e

flags_found=false

while getopts 's:g:n:u:l:' opt; do
while getopts 's:g:n:r:u:l:' opt; do
flags_found=true

case $opt in
@@ -26,6 +26,9 @@ while getopts 's:g:n:u:l:' opt; do
n)
  runner_name=$OPTARG
  ;;
r)
  runner_group=$OPTARG
  ;;
u)
  svc_user=$OPTARG
  ;;
@@ -44,6 +47,7 @@ Usage:
-s required scope: repo (:owner/:repo) or org (:organization)
-g optional ghe_hostname: the fully qualified domain name of your GitHub Enterprise Server deployment
-n optional name of the runner, defaults to hostname
-r optional name of the runner group to add the runner to, defaults to the Default group
-u optional user svc will run as, defaults to current
-l optional list of labels (split by comma) applied on the runner"
exit 0
@@ -59,6 +63,7 @@ if ! "$flags_found"; then
runner_name=${3:-$(hostname)}
svc_user=${4:-$USER}
labels=${5}
runner_group=${6}
fi

# apply defaults
@@ -164,8 +169,8 @@ fi

echo
echo "Configuring ${runner_name} @ $runner_url"
echo "./config.sh --unattended --url $runner_url --token *** --name $runner_name --labels $labels"
sudo -E -u ${svc_user} ./config.sh --unattended --url $runner_url --token $RUNNER_TOKEN --name $runner_name --labels $labels
echo "./config.sh --unattended --url $runner_url --token *** --name $runner_name ${labels:+--labels $labels} ${runner_group:+--runnergroup \"$runner_group\"}"
sudo -E -u ${svc_user} ./config.sh --unattended --url $runner_url --token $RUNNER_TOKEN --name $runner_name ${labels:+--labels $labels} ${runner_group:+--runnergroup "$runner_group"}

#---------------------------------------
# Configuring as a service
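For reference, the flag-based invocation this change enables would look roughly like the following. The repository, runner name, group name and labels are made-up examples, and `RUNNER_CFG_PAT` is the token variable such install scripts typically expect, which is an assumption since the variable is not shown in this hunk:

```bash
# Hypothetical invocation of the updated service-install script with a runner group.
export RUNNER_CFG_PAT=<personal access token>
./create-latest-svc.sh -s my-org/my-repo -n build-box-01 -r "Build Machines" -l self-hosted,linux
```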
@@ -29,7 +29,7 @@
<DefineConstants>$(DefineConstants);X64</DefineConstants>
</PropertyGroup>

<PropertyGroup Condition="'$(BUILD_OS)' == 'Linux' AND ('$(PackageRuntime)' == 'linux-x64' OR '$(PackageRuntime)' == 'linux-musl-x64' OR '$(PackageRuntime)' == '')">
<PropertyGroup Condition="'$(BUILD_OS)' == 'Linux' AND ('$(PackageRuntime)' == 'linux-x64' OR '$(PackageRuntime)' == '')">
<DefineConstants>$(DefineConstants);X64</DefineConstants>
</PropertyGroup>
<PropertyGroup Condition="'$(BUILD_OS)' == 'Linux' AND '$(PackageRuntime)' == 'linux-arm'">
New content-hash files under src/Misc/contentHash/ (one SHA-256 per file; the externals entries are marked vendored):

| File | SHA-256 |
|---|---|
| dotnetRuntime/linux-arm | de62d296708908cfd1236e58869aebbc2bae8a8c3d629276968542626c508e37 |
| dotnetRuntime/linux-arm64 | 44fcd0422dd98ed17d2c8e9057ff2260c50165f20674236a4ae7d2645a07df25 |
| dotnetRuntime/linux-x64 | e57652cf322ee16ce3af4f9e58f80858746b9e1e60279e991a3b3d9a6baf8d79 |
| dotnetRuntime/osx-x64 | bdd247b2ff3f51095524412e2ac588e7a87af805e114d6caf2368366ee7be1ea |
| dotnetRuntime/win-x64 | d23a0cb9f20c0aa1cddb7a39567cd097020cdeb06a1e952940601d1a405c53b8 |
| externals/linux-arm | 6ed30a2c1ee403a610d63e82bb230b9ba846a9c25cec9e4ea8672fb6ed4e1a51 |
| externals/linux-arm64 | 711c30c51ec52c9b7a9a2eb399d6ab2ab5ee1dc72de11879f2f36f919f163d78 |
| externals/linux-x64 | a49479ca4b4988a06c097e8d22c51fd08a11c13f40807366236213d0e008cf6a |
| externals/osx-x64 | 8e97df75230b843462a9b4c578ccec604ee4b4a1066120c85b04374317fa372b |
| externals/win-x64 | f75a671e5a188c76680739689aa75331a2c09d483dce9c80023518c48fd67a18 |
723
src/Misc/dotnet-install.ps1
vendored
723
src/Misc/dotnet-install.ps1
vendored
@@ -16,27 +16,15 @@
|
||||
- LTS - most current supported release
|
||||
- 2-part version in a format A.B - represents a specific release
|
||||
examples: 2.0, 1.0
|
||||
- 3-part version in a format A.B.Cxx - represents a specific SDK release
|
||||
examples: 5.0.1xx, 5.0.2xx
|
||||
Supported since 5.0 release
|
||||
Note: The version parameter overrides the channel parameter when any version other than 'latest' is used.
|
||||
.PARAMETER Quality
|
||||
Download the latest build of specified quality in the channel. The possible values are: daily, signed, validated, preview, GA.
|
||||
Works only in combination with channel. Not applicable for current and LTS channels and will be ignored if those channels are used.
|
||||
For SDK use channel in A.B.Cxx format: using quality together with channel in A.B format is not supported.
|
||||
Supported since 5.0 release.
|
||||
Note: The version parameter overrides the channel parameter when any version other than 'latest' is used, and therefore overrides the quality.
|
||||
- Branch name
|
||||
examples: release/2.0.0, Master
|
||||
Note: The version parameter overrides the channel parameter.
|
||||
.PARAMETER Version
|
||||
Default: latest
|
||||
Represents a build version on specific channel. Possible values:
|
||||
- latest - most latest build on specific channel
|
||||
- 3-part version in a format A.B.C - represents specific version of build
|
||||
examples: 2.0.0-preview2-006120, 1.1.0
|
||||
.PARAMETER Internal
|
||||
Download internal builds. Requires providing credentials via -FeedCredential parameter.
|
||||
.PARAMETER FeedCredential
|
||||
Token to access Azure feed. Used as a query string to append to the Azure feed.
|
||||
This parameter typically is not specified.
|
||||
.PARAMETER InstallDir
|
||||
Default: %LocalAppData%\Microsoft\dotnet
|
||||
Path to where to install dotnet. Note that binaries will be placed directly in a given directory.
|
||||
@@ -71,6 +59,9 @@
.PARAMETER UncachedFeed
This parameter typically is not changed by the user.
It allows changing the URL for the Uncached feed used by this installer.
.PARAMETER FeedCredential
Used as a query string to append to the Azure feed.
It allows changing the URL to use non-public blob storage accounts.
.PARAMETER ProxyAddress
If set, the installer will use the proxy when making web requests
.PARAMETER ProxyUseDefaultCredentials
@@ -86,19 +77,15 @@
.PARAMETER JSonFile
Determines the SDK version from a user specified global.json file
Note: global.json must have a value for 'SDK:Version'
.PARAMETER DownloadTimeout
Determines timeout duration in seconds for dowloading of the SDK file
Default: 1200 seconds (20 minutes)
#>
[cmdletbinding()]
param(
[string]$Channel="LTS",
[string]$Quality,
[string]$Version="Latest",
[switch]$Internal,
[string]$JSonFile,
[Alias('i')][string]$InstallDir="<auto>",
[string]$InstallDir="<auto>",
[string]$Architecture="<auto>",
[ValidateSet("dotnet", "aspnetcore", "windowsdesktop", IgnoreCase = $false)]
[string]$Runtime,
[Obsolete("This parameter may be removed in a future version of this script. The recommended alternative is '-Runtime dotnet'.")]
[switch]$SharedRuntime,
@@ -111,8 +98,7 @@ param(
[switch]$ProxyUseDefaultCredentials,
[string[]]$ProxyBypassList=@(),
[switch]$SkipNonVersionedFiles,
[switch]$NoCdn,
[int]$DownloadTimeout=1200
[switch]$NoCdn
)

Set-StrictMode -Version Latest
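For reference only (an editor's sketch, not part of the vendored script's diff): given the parameters documented above, a CI step would typically invoke the script along these lines; the channel, architecture, and install directory values are illustrative assumptions.

# Dry run: print the resolved payload URLs and a repeatable command without installing anything
.\dotnet-install.ps1 -Channel LTS -Runtime dotnet -DryRun

# Install a 5.0 SDK into a workspace-local folder instead of the default %LocalAppData%\Microsoft\dotnet
.\dotnet-install.ps1 -Channel 5.0 -InstallDir ".\.dotnet" -Architecture x64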
@@ -181,7 +167,7 @@ function Say-Invocation($Invocation) {
Say-Verbose "$command $args"
}

function Invoke-With-Retry([ScriptBlock]$ScriptBlock, [System.Threading.CancellationToken]$cancellationToken = [System.Threading.CancellationToken]::None, [int]$MaxAttempts = 3, [int]$SecondsBetweenAttempts = 1) {
function Invoke-With-Retry([ScriptBlock]$ScriptBlock, [int]$MaxAttempts = 3, [int]$SecondsBetweenAttempts = 1) {
$Attempts = 0

while ($true) {
@@ -190,7 +176,7 @@ function Invoke-With-Retry([ScriptBlock]$ScriptBlock, [System.Threading.Cancella
}
catch {
$Attempts++
if (($Attempts -lt $MaxAttempts) -and -not $cancellationToken.IsCancellationRequested) {
if ($Attempts -lt $MaxAttempts) {
Start-Sleep $SecondsBetweenAttempts
}
else {
@@ -219,7 +205,7 @@ function Get-Machine-Architecture() {
function Get-CLIArchitecture-From-Architecture([string]$Architecture) {
Say-Invocation $MyInvocation

switch ($Architecture.ToLowerInvariant()) {
switch ($Architecture.ToLower()) {
{ $_ -eq "<auto>" } { return Get-CLIArchitecture-From-Architecture $(Get-Machine-Architecture) }
{ ($_ -eq "amd64") -or ($_ -eq "x64") } { return "x64" }
{ $_ -eq "x86" } { return "x86" }
@@ -229,53 +215,6 @@ function Get-CLIArchitecture-From-Architecture([string]$Architecture) {
}
}


function Get-NormalizedQuality([string]$Quality) {
Say-Invocation $MyInvocation

if ([string]::IsNullOrEmpty($Quality)) {
return ""
}

switch ($Quality) {
{ @("daily", "signed", "validated", "preview") -contains $_ } { return $Quality.ToLowerInvariant() }
#ga quality is available without specifying quality, so normalizing it to empty
{ $_ -eq "ga" } { return "" }
default { throw "'$Quality' is not a supported value for -Quality option. Supported values are: daily, signed, validated, preview, ga. If you think this is a bug, report it at https://github.com/dotnet/install-scripts/issues." }
}
}

function Get-NormalizedChannel([string]$Channel) {
Say-Invocation $MyInvocation

if ([string]::IsNullOrEmpty($Channel)) {
return ""
}

if ($Channel.StartsWith('release/')) {
Say-Warning 'Using branch name with -Channel option is no longer supported with newer releases. Use -Quality option with a channel in X.Y format instead, such as "-Channel 5.0 -Quality Daily."'
}

switch ($Channel) {
{ $_ -eq "lts" } { return "LTS" }
{ $_ -eq "current" } { return "current" }
default { return $Channel.ToLowerInvariant() }
}
}

function Get-NormalizedProduct([string]$Runtime) {
Say-Invocation $MyInvocation

switch ($Runtime) {
{ $_ -eq "dotnet" } { return "dotnet-runtime" }
{ $_ -eq "aspnetcore" } { return "aspnetcore-runtime" }
{ $_ -eq "windowsdesktop" } { return "windowsdesktop-runtime" }
{ [string]::IsNullOrEmpty($_) } { return "dotnet-sdk" }
default { throw "'$Runtime' is not a supported value for -Runtime option, supported values are: dotnet, aspnetcore, windowsdesktop. If you think this is a bug, report it at https://github.com/dotnet/install-scripts/issues." }
}
}

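To make the effect of the three normalization helpers above concrete, here is a hedged sketch of what they return for a few sample inputs (the inputs are arbitrary examples and assume the whole script has been dot-sourced so the helpers and Say-Invocation are available):

Get-NormalizedQuality 'GA'          # -> ''  (ga is the default quality, normalized to empty)
Get-NormalizedQuality 'Preview'     # -> 'preview'
Get-NormalizedChannel 'lts'         # -> 'LTS'
Get-NormalizedChannel '5.0'         # -> '5.0'
Get-NormalizedProduct 'aspnetcore'  # -> 'aspnetcore-runtime'
Get-NormalizedProduct ''            # -> 'dotnet-sdk'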
# The version text returned from the feeds is a 1-line or 2-line string:
|
||||
# For the SDK and the dotnet runtime (2 lines):
|
||||
# Line 1: # commit_hash
|
||||
@@ -304,11 +243,10 @@ function Load-Assembly([string] $Assembly) {
|
||||
}
|
||||
}
|
||||
|
||||
function GetHTTPResponse([Uri] $Uri, [bool]$HeaderOnly, [bool]$DisableRedirect, [bool]$DisableFeedCredential)
|
||||
function GetHTTPResponse([Uri] $Uri)
|
||||
{
|
||||
$cts = New-Object System.Threading.CancellationTokenSource
|
||||
|
||||
$downloadScript = {
|
||||
Invoke-With-Retry(
|
||||
{
|
||||
|
||||
$HttpClient = $null
|
||||
|
||||
@@ -332,53 +270,32 @@ function GetHTTPResponse([Uri] $Uri, [bool]$HeaderOnly, [bool]$DisableRedirect,
|
||||
}
|
||||
}
|
||||
|
||||
$HttpClientHandler = New-Object System.Net.Http.HttpClientHandler
|
||||
if($ProxyAddress) {
|
||||
$HttpClientHandler = New-Object System.Net.Http.HttpClientHandler
|
||||
$HttpClientHandler.Proxy = New-Object System.Net.WebProxy -Property @{
|
||||
Address=$ProxyAddress;
|
||||
UseDefaultCredentials=$ProxyUseDefaultCredentials;
|
||||
BypassList = $ProxyBypassList;
|
||||
}
|
||||
}
|
||||
if ($DisableRedirect)
|
||||
{
|
||||
$HttpClientHandler.AllowAutoRedirect = $false
|
||||
$HttpClient = New-Object System.Net.Http.HttpClient -ArgumentList $HttpClientHandler
|
||||
}
|
||||
$HttpClient = New-Object System.Net.Http.HttpClient -ArgumentList $HttpClientHandler
|
||||
else {
|
||||
|
||||
$HttpClient = New-Object System.Net.Http.HttpClient
|
||||
}
|
||||
# Default timeout for HttpClient is 100s. For a 50 MB download this assumes 500 KB/s average, any less will time out
|
||||
# Defaulting to 20 minutes allows it to work over much slower connections.
|
||||
$HttpClient.Timeout = New-TimeSpan -Seconds $DownloadTimeout
|
||||
|
||||
if ($HeaderOnly){
|
||||
$completionOption = [System.Net.Http.HttpCompletionOption]::ResponseHeadersRead
|
||||
}
|
||||
else {
|
||||
$completionOption = [System.Net.Http.HttpCompletionOption]::ResponseContentRead
|
||||
}
|
||||
|
||||
if ($DisableFeedCredential) {
|
||||
$UriWithCredential = $Uri
|
||||
}
|
||||
else {
|
||||
$UriWithCredential = "${Uri}${FeedCredential}"
|
||||
}
|
||||
|
||||
$Task = $HttpClient.GetAsync("$UriWithCredential", $completionOption).ConfigureAwait("false");
|
||||
# 20 minutes allows it to work over much slower connections.
|
||||
$HttpClient.Timeout = New-TimeSpan -Minutes 20
|
||||
$Task = $HttpClient.GetAsync("${Uri}${FeedCredential}").ConfigureAwait("false");
|
||||
$Response = $Task.GetAwaiter().GetResult();
|
||||
|
||||
if (($null -eq $Response) -or ((-not $HeaderOnly) -and (-not ($Response.IsSuccessStatusCode)))) {
|
||||
if (($null -eq $Response) -or (-not ($Response.IsSuccessStatusCode))) {
|
||||
# The feed credential is potentially sensitive info. Do not log FeedCredential to console output.
|
||||
$DownloadException = [System.Exception] "Unable to download $Uri."
|
||||
|
||||
if ($null -ne $Response) {
|
||||
$DownloadException.Data["StatusCode"] = [int] $Response.StatusCode
|
||||
$DownloadException.Data["ErrorMessage"] = "Unable to download $Uri. Returned HTTP status code: " + $DownloadException.Data["StatusCode"]
|
||||
|
||||
if (404 -eq [int] $Response.StatusCode)
|
||||
{
|
||||
$cts.Cancel()
|
||||
}
|
||||
}
|
||||
|
||||
throw $DownloadException
|
||||
@@ -406,22 +323,11 @@ function GetHTTPResponse([Uri] $Uri, [bool]$HeaderOnly, [bool]$DisableRedirect,
|
||||
throw $DownloadException
|
||||
}
|
||||
finally {
|
||||
if ($null -ne $HttpClient) {
|
||||
if ($HttpClient -ne $null) {
|
||||
$HttpClient.Dispose()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
return Invoke-With-Retry $downloadScript $cts.Token
|
||||
}
|
||||
finally
|
||||
{
|
||||
if ($null -ne $cts)
|
||||
{
|
||||
$cts.Dispose()
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
function Get-Latest-Version-Info([string]$AzureFeed, [string]$Channel) {
|
||||
@@ -443,9 +349,6 @@ function Get-Latest-Version-Info([string]$AzureFeed, [string]$Channel) {
|
||||
else {
|
||||
throw "Invalid value for `$Runtime"
|
||||
}
|
||||
|
||||
Say-Verbose "Constructed latest.version URL: $VersionFileUrl"
|
||||
|
||||
try {
|
||||
$Response = GetHTTPResponse -Uri $VersionFileUrl
|
||||
}
|
||||
@@ -508,7 +411,7 @@ function Get-Specific-Version-From-Version([string]$AzureFeed, [string]$Channel,
|
||||
Say-Invocation $MyInvocation
|
||||
|
||||
if (-not $JSonFile) {
|
||||
if ($Version.ToLowerInvariant() -eq "latest") {
|
||||
if ($Version.ToLower() -eq "latest") {
|
||||
$LatestVersionInfo = Get-Latest-Version-Info -AzureFeed $AzureFeed -Channel $Channel
|
||||
return $LatestVersionInfo.Version
|
||||
}
|
||||
@@ -575,116 +478,58 @@ function Get-LegacyDownload-Link([string]$AzureFeed, [string]$SpecificVersion, [
|
||||
return $PayloadURL
|
||||
}
|
||||
|
||||
function Get-Product-Version([string]$AzureFeed, [string]$SpecificVersion, [string]$PackageDownloadLink) {
|
||||
function Get-Product-Version([string]$AzureFeed, [string]$SpecificVersion) {
|
||||
Say-Invocation $MyInvocation
|
||||
|
||||
# Try to get the version number, using the productVersion.txt file located next to the installer file.
|
||||
$ProductVersionTxtURLs = (Get-Product-Version-Url $AzureFeed $SpecificVersion $PackageDownloadLink -Flattened $true),
|
||||
(Get-Product-Version-Url $AzureFeed $SpecificVersion $PackageDownloadLink -Flattened $false)
|
||||
|
||||
Foreach ($ProductVersionTxtURL in $ProductVersionTxtURLs) {
|
||||
Say-Verbose "Checking for the existence of $ProductVersionTxtURL"
|
||||
|
||||
try {
|
||||
$productVersionResponse = GetHTTPResponse($productVersionTxtUrl)
|
||||
|
||||
if ($productVersionResponse.StatusCode -eq 200) {
|
||||
$productVersion = $productVersionResponse.Content.ReadAsStringAsync().Result.Trim()
|
||||
if ($productVersion -ne $SpecificVersion)
|
||||
{
|
||||
Say "Using alternate version $productVersion found in $ProductVersionTxtURL"
|
||||
}
|
||||
return $productVersion
|
||||
}
|
||||
else {
|
||||
Say-Verbose "Got StatusCode $($productVersionResponse.StatusCode) when trying to get productVersion.txt at $productVersionTxtUrl."
|
||||
}
|
||||
}
|
||||
catch {
|
||||
Say-Verbose "Could not read productVersion.txt at $productVersionTxtUrl (Exception: '$($_.Exception.Message)'. )"
|
||||
}
|
||||
if ($Runtime -eq "dotnet") {
|
||||
$ProductVersionTxtURL = "$AzureFeed/Runtime/$SpecificVersion/productVersion.txt"
|
||||
}
|
||||
|
||||
# Getting the version number with productVersion.txt has failed. Try parsing the download link for a version number.
|
||||
if ([string]::IsNullOrEmpty($PackageDownloadLink))
|
||||
{
|
||||
Say-Verbose "Using the default value '$SpecificVersion' as the product version."
|
||||
return $SpecificVersion
|
||||
elseif ($Runtime -eq "aspnetcore") {
|
||||
$ProductVersionTxtURL = "$AzureFeed/aspnetcore/Runtime/$SpecificVersion/productVersion.txt"
|
||||
}
|
||||
|
||||
$productVersion = Get-ProductVersionFromDownloadLink $PackageDownloadLink $SpecificVersion
|
||||
return $productVersion
|
||||
}
|
||||
|
||||
function Get-Product-Version-Url([string]$AzureFeed, [string]$SpecificVersion, [string]$PackageDownloadLink, [bool]$Flattened) {
|
||||
Say-Invocation $MyInvocation
|
||||
|
||||
$majorVersion=$null
|
||||
if ($SpecificVersion -match '^(\d+)\.(.*)') {
|
||||
$majorVersion = $Matches[1] -as[int]
|
||||
}
|
||||
|
||||
$pvFileName='productVersion.txt'
|
||||
if($Flattened) {
|
||||
if(-not $Runtime) {
|
||||
$pvFileName='sdk-productVersion.txt'
|
||||
}
|
||||
elseif($Runtime -eq "dotnet") {
|
||||
$pvFileName='runtime-productVersion.txt'
|
||||
}
|
||||
else {
|
||||
$pvFileName="$Runtime-productVersion.txt"
|
||||
}
|
||||
}
|
||||
|
||||
if ([string]::IsNullOrEmpty($PackageDownloadLink)) {
|
||||
if ($Runtime -eq "dotnet") {
|
||||
$ProductVersionTxtURL = "$AzureFeed/Runtime/$SpecificVersion/$pvFileName"
|
||||
}
|
||||
elseif ($Runtime -eq "aspnetcore") {
|
||||
$ProductVersionTxtURL = "$AzureFeed/aspnetcore/Runtime/$SpecificVersion/$pvFileName"
|
||||
}
|
||||
elseif ($Runtime -eq "windowsdesktop") {
|
||||
# The windows desktop runtime is part of the core runtime layout prior to 5.0
|
||||
$ProductVersionTxtURL = "$AzureFeed/Runtime/$SpecificVersion/$pvFileName"
|
||||
if ($majorVersion -ne $null -and $majorVersion -ge 5) {
|
||||
$ProductVersionTxtURL = "$AzureFeed/WindowsDesktop/$SpecificVersion/$pvFileName"
|
||||
elseif ($Runtime -eq "windowsdesktop") {
|
||||
# The windows desktop runtime is part of the core runtime layout prior to 5.0
|
||||
$ProductVersionTxtURL = "$AzureFeed/Runtime/$SpecificVersion/productVersion.txt"
|
||||
if ($SpecificVersion -match '^(\d+)\.(.*)')
|
||||
{
|
||||
$majorVersion = [int]$Matches[1]
|
||||
if ($majorVersion -ge 5)
|
||||
{
|
||||
$ProductVersionTxtURL = "$AzureFeed/WindowsDesktop/$SpecificVersion/productVersion.txt"
|
||||
}
|
||||
}
|
||||
elseif (-not $Runtime) {
|
||||
$ProductVersionTxtURL = "$AzureFeed/Sdk/$SpecificVersion/$pvFileName"
|
||||
}
|
||||
else {
|
||||
throw "Invalid value '$Runtime' specified for `$Runtime"
|
||||
}
|
||||
}
|
||||
elseif (-not $Runtime) {
|
||||
$ProductVersionTxtURL = "$AzureFeed/Sdk/$SpecificVersion/productVersion.txt"
|
||||
}
|
||||
else {
|
||||
$ProductVersionTxtURL = $PackageDownloadLink.Substring(0, $PackageDownloadLink.LastIndexOf("/")) + "/$pvFileName"
|
||||
throw "Invalid value '$Runtime' specified for `$Runtime"
|
||||
}
|
||||
|
||||
Say-Verbose "Constructed productVersion link: $ProductVersionTxtURL"
|
||||
Say-Verbose "Checking for existence of $ProductVersionTxtURL"
|
||||
|
||||
return $ProductVersionTxtURL
|
||||
}
|
||||
try {
|
||||
$productVersionResponse = GetHTTPResponse($productVersionTxtUrl)
|
||||
|
||||
function Get-ProductVersionFromDownloadLink([string]$PackageDownloadLink, [string]$SpecificVersion)
|
||||
{
|
||||
Say-Invocation $MyInvocation
|
||||
if ($productVersionResponse.StatusCode -eq 200) {
|
||||
$productVersion = $productVersionResponse.Content.ReadAsStringAsync().Result.Trim()
|
||||
if ($productVersion -ne $SpecificVersion)
|
||||
{
|
||||
Say "Using alternate version $productVersion found in $ProductVersionTxtURL"
|
||||
}
|
||||
|
||||
#product specific version follows the product name
|
||||
#for filename 'dotnet-sdk-3.1.404-win-x64.zip': the product version is 3.1.400
|
||||
$filename = $PackageDownloadLink.Substring($PackageDownloadLink.LastIndexOf("/") + 1)
|
||||
$filenameParts = $filename.Split('-')
|
||||
if ($filenameParts.Length -gt 2)
|
||||
{
|
||||
$productVersion = $filenameParts[2]
|
||||
Say-Verbose "Extracted product version '$productVersion' from download link '$PackageDownloadLink'."
|
||||
}
|
||||
else {
|
||||
Say-Verbose "Using the default value '$SpecificVersion' as the product version."
|
||||
return $productVersion
|
||||
}
|
||||
else {
|
||||
Say-Verbose "Got StatusCode $($productVersionResponse.StatusCode) trying to get productVersion.txt at $productVersionTxtUrl, so using default value of $SpecificVersion"
|
||||
$productVersion = $SpecificVersion
|
||||
}
|
||||
} catch {
|
||||
Say-Verbose "Could not read productVersion.txt at $productVersionTxtUrl, so using default value of $SpecificVersion (Exception: '$($_.Exception.Message)' )"
|
||||
$productVersion = $SpecificVersion
|
||||
}
|
||||
return $productVersion
|
||||
|
||||
return $productVersion
|
||||
}
|
||||
|
||||
function Get-User-Share-Path() {
|
||||
@@ -857,150 +702,15 @@ function Prepend-Sdk-InstallRoot-To-Path([string]$InstallRoot, [string]$BinFolde
|
||||
}
|
||||
}
|
||||
|
||||
function Get-AkaMSDownloadLink([string]$Channel, [string]$Quality, [bool]$Internal, [string]$Product, [string]$Architecture) {
|
||||
Say-Invocation $MyInvocation
|
||||
|
||||
#quality is not supported for LTS or current channel
|
||||
if (![string]::IsNullOrEmpty($Quality) -and (@("LTS", "current") -contains $Channel)) {
|
||||
$Quality = ""
|
||||
Say-Warning "Specifying quality for current or LTS channel is not supported, the quality will be ignored."
|
||||
}
|
||||
Say-Verbose "Retrieving primary payload URL from aka.ms link for channel: '$Channel', quality: '$Quality' product: '$Product', os: 'win', architecture: '$Architecture'."
|
||||
|
||||
#construct aka.ms link
|
||||
$akaMsLink = "https://aka.ms/dotnet"
|
||||
if ($Internal) {
|
||||
$akaMsLink += "/internal"
|
||||
}
|
||||
$akaMsLink += "/$Channel"
|
||||
if (-not [string]::IsNullOrEmpty($Quality)) {
|
||||
$akaMsLink +="/$Quality"
|
||||
}
|
||||
$akaMsLink +="/$Product-win-$Architecture.zip"
|
||||
Say-Verbose "Constructed aka.ms link: '$akaMsLink'."
|
||||
$akaMsDownloadLink=$null
|
||||
|
||||
for ($maxRedirections = 9; $maxRedirections -ge 0; $maxRedirections--)
|
||||
{
|
||||
#get HTTP response
|
||||
#do not pass credentials as a part of the $akaMsLink and do not apply credentials in the GetHTTPResponse function
|
||||
#otherwise the redirect link would have credentials as well
|
||||
#it would result in applying credentials twice to the resulting link and thus breaking it, and in echoing credentials to the output as a part of redirect link
|
||||
$Response= GetHTTPResponse -Uri $akaMsLink -HeaderOnly $true -DisableRedirect $true -DisableFeedCredential $true
|
||||
Say-Verbose "Received response:`n$Response"
|
||||
|
||||
if ([string]::IsNullOrEmpty($Response)) {
|
||||
Say-Verbose "The link '$akaMsLink' is not valid: failed to get redirect location. The resource is not available."
|
||||
return $null
|
||||
}
|
||||
|
||||
#if HTTP code is 301 (Moved Permanently), the redirect link exists
|
||||
if ($Response.StatusCode -eq 301)
|
||||
{
|
||||
try {
|
||||
$akaMsDownloadLink = $Response.Headers.GetValues("Location")[0]
|
||||
|
||||
if ([string]::IsNullOrEmpty($akaMsDownloadLink)) {
|
||||
Say-Verbose "The link '$akaMsLink' is not valid: server returned 301 (Moved Permanently), but the headers do not contain the redirect location."
|
||||
return $null
|
||||
}
|
||||
|
||||
Say-Verbose "The redirect location retrieved: '$akaMsDownloadLink'."
|
||||
# This may yet be a link to another redirection. Attempt to retrieve the page again.
|
||||
$akaMsLink = $akaMsDownloadLink
|
||||
continue
|
||||
}
|
||||
catch {
|
||||
Say-Verbose "The link '$akaMsLink' is not valid: failed to get redirect location."
|
||||
return $null
|
||||
}
|
||||
}
|
||||
elseif ((($Response.StatusCode -lt 300) -or ($Response.StatusCode -ge 400)) -and (-not [string]::IsNullOrEmpty($akaMsDownloadLink)))
|
||||
{
|
||||
# Redirections have ended.
|
||||
return $akaMsDownloadLink
|
||||
}
|
||||
|
||||
Say-Verbose "The link '$akaMsLink' is not valid: failed to retrieve the redirection location."
|
||||
return $null
|
||||
}
|
||||
|
||||
Say-Verbose "Aka.ms links have redirected more than the maximum allowed redirections. This may be caused by a cyclic redirection of aka.ms links."
|
||||
return $null
|
||||
|
||||
}
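As a worked illustration of the aka.ms link construction above (the channel and quality values here are assumptions, not taken from this diff): with -Channel 5.0, -Quality daily and -Runtime dotnet on an x64 host, the product normalizes to 'dotnet-runtime' and the primary link is assembled as:

#   https://aka.ms/dotnet/5.0/daily/dotnet-runtime-win-x64.zip
# with -Internal, an '/internal' segment is inserted right after 'dotnet':
#   https://aka.ms/dotnet/internal/5.0/daily/dotnet-runtime-win-x64.zip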
|
||||
|
||||
Say "Note that the intended use of this script is for Continuous Integration (CI) scenarios, where:"
|
||||
Say "- The SDK needs to be installed without user interaction and without admin rights."
|
||||
Say "- The SDK installation doesn't need to persist across multiple CI runs."
|
||||
Say "To set up a development environment or to run apps, use installers rather than this script. Visit https://dotnet.microsoft.com/download to get the installer.`r`n"
|
||||
|
||||
if ($Internal -and [string]::IsNullOrWhitespace($FeedCredential)) {
|
||||
$message = "Provide credentials via -FeedCredential parameter."
|
||||
if ($DryRun) {
|
||||
Say-Warning "$message"
|
||||
} else {
|
||||
throw "$message"
|
||||
}
|
||||
}
|
||||
|
||||
#FeedCredential should start with "?", for it to be added to the end of the link.
|
||||
#adding "?" at the beginning of the FeedCredential if needed.
|
||||
if ((![string]::IsNullOrWhitespace($FeedCredential)) -and ($FeedCredential[0] -ne '?')) {
|
||||
$FeedCredential = "?" + $FeedCredential
|
||||
}
|
||||
|
||||
$CLIArchitecture = Get-CLIArchitecture-From-Architecture $Architecture
|
||||
$NormalizedQuality = Get-NormalizedQuality $Quality
|
||||
Say-Verbose "Normalized quality: '$NormalizedQuality'"
|
||||
$NormalizedChannel = Get-NormalizedChannel $Channel
|
||||
Say-Verbose "Normalized channel: '$NormalizedChannel'"
|
||||
$NormalizedProduct = Get-NormalizedProduct $Runtime
|
||||
Say-Verbose "Normalized product: '$NormalizedProduct'"
|
||||
$DownloadLink = $null
|
||||
|
||||
#try to get download location from aka.ms link
|
||||
#not applicable when exact version is specified via command or json file
|
||||
if ([string]::IsNullOrEmpty($JSonFile) -and ($Version -eq "latest")) {
|
||||
$AkaMsDownloadLink = Get-AkaMSDownloadLink -Channel $NormalizedChannel -Quality $NormalizedQuality -Internal $Internal -Product $NormalizedProduct -Architecture $CLIArchitecture
|
||||
|
||||
if ([string]::IsNullOrEmpty($AkaMsDownloadLink)){
|
||||
if (-not [string]::IsNullOrEmpty($NormalizedQuality)) {
|
||||
# if quality is specified - exit with error - there is no fallback approach
|
||||
Say-Error "Failed to locate the latest version in the channel '$NormalizedChannel' with '$NormalizedQuality' quality for '$NormalizedProduct', os: 'win', architecture: '$CLIArchitecture'."
|
||||
Say-Error "Refer to: https://aka.ms/dotnet-os-lifecycle for information on .NET Core support."
|
||||
throw "aka.ms link resolution failure"
|
||||
}
|
||||
Say-Verbose "Falling back to latest.version file approach."
|
||||
}
|
||||
else {
|
||||
Say-Verbose "Retrieved primary named payload URL from aka.ms link: '$AkaMsDownloadLink'."
|
||||
$DownloadLink = $AkaMsDownloadLink
|
||||
Say-Verbose "Downloading using legacy url will not be attempted."
|
||||
$LegacyDownloadLink = $null
|
||||
|
||||
#get version from the path
|
||||
$pathParts = $DownloadLink.Split('/')
|
||||
if ($pathParts.Length -ge 2) {
|
||||
$SpecificVersion = $pathParts[$pathParts.Length - 2]
|
||||
Say-Verbose "Version: '$SpecificVersion'."
|
||||
}
|
||||
else {
|
||||
Say-Error "Failed to extract the version from download link '$DownloadLink'."
|
||||
}
|
||||
|
||||
#retrieve effective (product) version
|
||||
$EffectiveVersion = Get-Product-Version -AzureFeed $AzureFeed -SpecificVersion $SpecificVersion -PackageDownloadLink $DownloadLink
|
||||
Say-Verbose "Product version: '$EffectiveVersion'."
|
||||
}
|
||||
}
|
||||
|
||||
if ([string]::IsNullOrEmpty($DownloadLink)) {
|
||||
$SpecificVersion = Get-Specific-Version-From-Version -AzureFeed $AzureFeed -Channel $Channel -Version $Version -JSonFile $JSonFile
|
||||
$DownloadLink, $EffectiveVersion = Get-Download-Link -AzureFeed $AzureFeed -SpecificVersion $SpecificVersion -CLIArchitecture $CLIArchitecture
|
||||
$LegacyDownloadLink = Get-LegacyDownload-Link -AzureFeed $AzureFeed -SpecificVersion $SpecificVersion -CLIArchitecture $CLIArchitecture
|
||||
}
|
||||
|
||||
$SpecificVersion = Get-Specific-Version-From-Version -AzureFeed $AzureFeed -Channel $Channel -Version $Version -JSonFile $JSonFile
|
||||
$DownloadLink, $EffectiveVersion = Get-Download-Link -AzureFeed $AzureFeed -SpecificVersion $SpecificVersion -CLIArchitecture $CLIArchitecture
|
||||
$LegacyDownloadLink = Get-LegacyDownload-Link -AzureFeed $AzureFeed -SpecificVersion $SpecificVersion -CLIArchitecture $CLIArchitecture
|
||||
|
||||
$InstallRoot = Resolve-Installation-Path $InstallDir
|
||||
Say-Verbose "InstallRoot: $InstallRoot"
|
||||
@@ -1008,9 +718,9 @@ $ScriptName = $MyInvocation.MyCommand.Name
|
||||
|
||||
if ($DryRun) {
|
||||
Say "Payload URLs:"
|
||||
Say "Primary named payload URL: ${DownloadLink}"
|
||||
Say "Primary named payload URL: $DownloadLink"
|
||||
if ($LegacyDownloadLink) {
|
||||
Say "Legacy named payload URL: ${LegacyDownloadLink}"
|
||||
Say "Legacy named payload URL: $LegacyDownloadLink"
|
||||
}
|
||||
$RepeatableCommand = ".\$ScriptName -Version `"$SpecificVersion`" -InstallDir `"$InstallRoot`" -Architecture `"$CLIArchitecture`""
|
||||
if ($Runtime -eq "dotnet") {
|
||||
@@ -1019,20 +729,11 @@ if ($DryRun) {
|
||||
elseif ($Runtime -eq "aspnetcore") {
|
||||
$RepeatableCommand+=" -Runtime `"aspnetcore`""
|
||||
}
|
||||
|
||||
if (-not [string]::IsNullOrEmpty($NormalizedQuality))
|
||||
{
|
||||
$RepeatableCommand+=" -Quality `"$NormalizedQuality`""
|
||||
}
|
||||
|
||||
foreach ($key in $MyInvocation.BoundParameters.Keys) {
|
||||
if (-not (@("Architecture","Channel","DryRun","InstallDir","Runtime","SharedRuntime","Version","Quality","FeedCredential") -contains $key)) {
|
||||
if (-not (@("Architecture","Channel","DryRun","InstallDir","Runtime","SharedRuntime","Version") -contains $key)) {
|
||||
$RepeatableCommand+=" -$key `"$($MyInvocation.BoundParameters[$key])`""
|
||||
}
|
||||
}
|
||||
if ($MyInvocation.BoundParameters.Keys -contains "FeedCredential") {
|
||||
$RepeatableCommand+=" -FeedCredential `"<feedCredential>`""
|
||||
}
|
||||
Say "Repeatable invocation: $RepeatableCommand"
|
||||
if ($SpecificVersion -ne $EffectiveVersion)
|
||||
{
|
||||
@@ -1078,16 +779,9 @@ if ($isAssetInstalled) {
|
||||
|
||||
New-Item -ItemType Directory -Force -Path $InstallRoot | Out-Null
|
||||
|
||||
$installDrive = $((Get-Item $InstallRoot -Force).PSDrive.Name);
|
||||
$diskInfo = $null
|
||||
try{
|
||||
$diskInfo = Get-PSDrive -Name $installDrive
|
||||
}
|
||||
catch{
|
||||
Say-Warning "Failed to check the disk space. Installation will continue, but it may fail if you do not have enough disk space."
|
||||
}
|
||||
|
||||
if ( ($diskInfo -ne $null) -and ($diskInfo.Free / 1MB -le 100)) {
|
||||
$installDrive = $((Get-Item $InstallRoot).PSDrive.Name);
|
||||
$diskInfo = Get-PSDrive -Name $installDrive
|
||||
if ($diskInfo.Free / 1MB -le 100) {
|
||||
throw "There is not enough disk space on drive ${installDrive}:"
|
||||
}
|
||||
|
||||
@@ -1207,44 +901,43 @@ Prepend-Sdk-InstallRoot-To-Path -InstallRoot $InstallRoot -BinFolderRelativePath
|
||||
Say "Note that the script does not resolve dependencies during installation."
|
||||
Say "To check the list of dependencies, go to https://docs.microsoft.com/dotnet/core/install/windows#dependencies"
|
||||
Say "Installation finished"
|
||||
|
||||
# SIG # Begin signature block
|
||||
# MIIjhgYJKoZIhvcNAQcCoIIjdzCCI3MCAQExDzANBglghkgBZQMEAgEFADB5Bgor
|
||||
# MIIjjwYJKoZIhvcNAQcCoIIjgDCCI3wCAQExDzANBglghkgBZQMEAgEFADB5Bgor
|
||||
# BgEEAYI3AgEEoGswaTA0BgorBgEEAYI3AgEeMCYCAwEAAAQQH8w7YFlLCE63JNLG
|
||||
# KX7zUQIBAAIBAAIBAAIBAAIBADAxMA0GCWCGSAFlAwQCAQUABCDO8obeUp97UA1H
|
||||
# qy3NSLDwxxLLlEbGxeCzMOcKVT/3eKCCDYEwggX/MIID56ADAgECAhMzAAAB32vw
|
||||
# LpKnSrTQAAAAAAHfMA0GCSqGSIb3DQEBCwUAMH4xCzAJBgNVBAYTAlVTMRMwEQYD
|
||||
# KX7zUQIBAAIBAAIBAAIBAAIBADAxMA0GCWCGSAFlAwQCAQUABCCNsnhcJvx/hXmM
|
||||
# w8KjuvvIMDBFonhg9XJFc1QwfTyH4aCCDYEwggX/MIID56ADAgECAhMzAAABh3IX
|
||||
# chVZQMcJAAAAAAGHMA0GCSqGSIb3DQEBCwUAMH4xCzAJBgNVBAYTAlVTMRMwEQYD
|
||||
# VQQIEwpXYXNoaW5ndG9uMRAwDgYDVQQHEwdSZWRtb25kMR4wHAYDVQQKExVNaWNy
|
||||
# b3NvZnQgQ29ycG9yYXRpb24xKDAmBgNVBAMTH01pY3Jvc29mdCBDb2RlIFNpZ25p
|
||||
# bmcgUENBIDIwMTEwHhcNMjAxMjE1MjEzMTQ1WhcNMjExMjAyMjEzMTQ1WjB0MQsw
|
||||
# bmcgUENBIDIwMTEwHhcNMjAwMzA0MTgzOTQ3WhcNMjEwMzAzMTgzOTQ3WjB0MQsw
|
||||
# CQYDVQQGEwJVUzETMBEGA1UECBMKV2FzaGluZ3RvbjEQMA4GA1UEBxMHUmVkbW9u
|
||||
# ZDEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMR4wHAYDVQQDExVNaWNy
|
||||
# b3NvZnQgQ29ycG9yYXRpb24wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB
|
||||
# AQC2uxlZEACjqfHkuFyoCwfL25ofI9DZWKt4wEj3JBQ48GPt1UsDv834CcoUUPMn
|
||||
# s/6CtPoaQ4Thy/kbOOg/zJAnrJeiMQqRe2Lsdb/NSI2gXXX9lad1/yPUDOXo4GNw
|
||||
# PjXq1JZi+HZV91bUr6ZjzePj1g+bepsqd/HC1XScj0fT3aAxLRykJSzExEBmU9eS
|
||||
# yuOwUuq+CriudQtWGMdJU650v/KmzfM46Y6lo/MCnnpvz3zEL7PMdUdwqj/nYhGG
|
||||
# 3UVILxX7tAdMbz7LN+6WOIpT1A41rwaoOVnv+8Ua94HwhjZmu1S73yeV7RZZNxoh
|
||||
# EegJi9YYssXa7UZUUkCCA+KnAgMBAAGjggF+MIIBejAfBgNVHSUEGDAWBgorBgEE
|
||||
# AYI3TAgBBggrBgEFBQcDAzAdBgNVHQ4EFgQUOPbML8IdkNGtCfMmVPtvI6VZ8+Mw
|
||||
# AQDOt8kLc7P3T7MKIhouYHewMFmnq8Ayu7FOhZCQabVwBp2VS4WyB2Qe4TQBT8aB
|
||||
# znANDEPjHKNdPT8Xz5cNali6XHefS8i/WXtF0vSsP8NEv6mBHuA2p1fw2wB/F0dH
|
||||
# sJ3GfZ5c0sPJjklsiYqPw59xJ54kM91IOgiO2OUzjNAljPibjCWfH7UzQ1TPHc4d
|
||||
# weils8GEIrbBRb7IWwiObL12jWT4Yh71NQgvJ9Fn6+UhD9x2uk3dLj84vwt1NuFQ
|
||||
# itKJxIV0fVsRNR3abQVOLqpDugbr0SzNL6o8xzOHL5OXiGGwg6ekiXA1/2XXY7yV
|
||||
# Fc39tledDtZjSjNbex1zzwSXAgMBAAGjggF+MIIBejAfBgNVHSUEGDAWBgorBgEE
|
||||
# AYI3TAgBBggrBgEFBQcDAzAdBgNVHQ4EFgQUhov4ZyO96axkJdMjpzu2zVXOJcsw
|
||||
# UAYDVR0RBEkwR6RFMEMxKTAnBgNVBAsTIE1pY3Jvc29mdCBPcGVyYXRpb25zIFB1
|
||||
# ZXJ0byBSaWNvMRYwFAYDVQQFEw0yMzAwMTIrNDYzMDA5MB8GA1UdIwQYMBaAFEhu
|
||||
# ZXJ0byBSaWNvMRYwFAYDVQQFEw0yMzAwMTIrNDU4Mzg1MB8GA1UdIwQYMBaAFEhu
|
||||
# ZOVQBdOCqhc3NyK1bajKdQKVMFQGA1UdHwRNMEswSaBHoEWGQ2h0dHA6Ly93d3cu
|
||||
# bWljcm9zb2Z0LmNvbS9wa2lvcHMvY3JsL01pY0NvZFNpZ1BDQTIwMTFfMjAxMS0w
|
||||
# Ny0wOC5jcmwwYQYIKwYBBQUHAQEEVTBTMFEGCCsGAQUFBzAChkVodHRwOi8vd3d3
|
||||
# Lm1pY3Jvc29mdC5jb20vcGtpb3BzL2NlcnRzL01pY0NvZFNpZ1BDQTIwMTFfMjAx
|
||||
# MS0wNy0wOC5jcnQwDAYDVR0TAQH/BAIwADANBgkqhkiG9w0BAQsFAAOCAgEAnnqH
|
||||
# tDyYUFaVAkvAK0eqq6nhoL95SZQu3RnpZ7tdQ89QR3++7A+4hrr7V4xxmkB5BObS
|
||||
# 0YK+MALE02atjwWgPdpYQ68WdLGroJZHkbZdgERG+7tETFl3aKF4KpoSaGOskZXp
|
||||
# TPnCaMo2PXoAMVMGpsQEQswimZq3IQ3nRQfBlJ0PoMMcN/+Pks8ZTL1BoPYsJpok
|
||||
# t6cql59q6CypZYIwgyJ892HpttybHKg1ZtQLUlSXccRMlugPgEcNZJagPEgPYni4
|
||||
# b11snjRAgf0dyQ0zI9aLXqTxWUU5pCIFiPT0b2wsxzRqCtyGqpkGM8P9GazO8eao
|
||||
# mVItCYBcJSByBx/pS0cSYwBBHAZxJODUqxSXoSGDvmTfqUJXntnWkL4okok1FiCD
|
||||
# Z4jpyXOQunb6egIXvkgQ7jb2uO26Ow0m8RwleDvhOMrnHsupiOPbozKroSa6paFt
|
||||
# VSh89abUSooR8QdZciemmoFhcWkEwFg4spzvYNP4nIs193261WyTaRMZoceGun7G
|
||||
# CT2Rl653uUj+F+g94c63AhzSq4khdL4HlFIP2ePv29smfUnHtGq6yYFDLnT0q/Y+
|
||||
# Di3jwloF8EWkkHRtSuXlFUbTmwr/lDDgbpZiKhLS7CBTDj32I0L5i532+uHczw82
|
||||
# oZDmYmYmIUSMbZOgS65h797rj5JJ6OkeEUJoAVwwggd6MIIFYqADAgECAgphDpDS
|
||||
# MS0wNy0wOC5jcnQwDAYDVR0TAQH/BAIwADANBgkqhkiG9w0BAQsFAAOCAgEAixmy
|
||||
# S6E6vprWD9KFNIB9G5zyMuIjZAOuUJ1EK/Vlg6Fb3ZHXjjUwATKIcXbFuFC6Wr4K
|
||||
# NrU4DY/sBVqmab5AC/je3bpUpjtxpEyqUqtPc30wEg/rO9vmKmqKoLPT37svc2NV
|
||||
# BmGNl+85qO4fV/w7Cx7J0Bbqk19KcRNdjt6eKoTnTPHBHlVHQIHZpMxacbFOAkJr
|
||||
# qAVkYZdz7ikNXTxV+GRb36tC4ByMNxE2DF7vFdvaiZP0CVZ5ByJ2gAhXMdK9+usx
|
||||
# zVk913qKde1OAuWdv+rndqkAIm8fUlRnr4saSCg7cIbUwCCf116wUJ7EuJDg0vHe
|
||||
# yhnCeHnBbyH3RZkHEi2ofmfgnFISJZDdMAeVZGVOh20Jp50XBzqokpPzeZ6zc1/g
|
||||
# yILNyiVgE+RPkjnUQshd1f1PMgn3tns2Cz7bJiVUaqEO3n9qRFgy5JuLae6UweGf
|
||||
# AeOo3dgLZxikKzYs3hDMaEtJq8IP71cX7QXe6lnMmXU/Hdfz2p897Zd+kU+vZvKI
|
||||
# 3cwLfuVQgK2RZ2z+Kc3K3dRPz2rXycK5XCuRZmvGab/WbrZiC7wJQapgBodltMI5
|
||||
# GMdFrBg9IeF7/rP4EqVQXeKtevTlZXjpuNhhjuR+2DMt/dWufjXpiW91bo3aH6Ea
|
||||
# jOALXmoxgltCp1K7hrS6gmsvj94cLRf50QQ4U8Qwggd6MIIFYqADAgECAgphDpDS
|
||||
# AAAAAAADMA0GCSqGSIb3DQEBCwUAMIGIMQswCQYDVQQGEwJVUzETMBEGA1UECBMK
|
||||
# V2FzaGluZ3RvbjEQMA4GA1UEBxMHUmVkbW9uZDEeMBwGA1UEChMVTWljcm9zb2Z0
|
||||
# IENvcnBvcmF0aW9uMTIwMAYDVQQDEylNaWNyb3NvZnQgUm9vdCBDZXJ0aWZpY2F0
|
||||
@@ -1284,119 +977,119 @@ Say "Installation finished"
|
||||
# xw4o7t5lL+yX9qFcltgA1qFGvVnzl6UJS0gQmYAf0AApxbGbpT9Fdx41xtKiop96
|
||||
# eiL6SJUfq/tHI4D1nvi/a7dLl+LrdXga7Oo3mXkYS//WsyNodeav+vyL6wuA6mk7
|
||||
# r/ww7QRMjt/fdW1jkT3RnVZOT7+AVyKheBEyIXrvQQqxP/uozKRdwaGIm1dxVk5I
|
||||
# RcBCyZt2WwqASGv9eZ/BvW1taslScxMNelDNMYIVWzCCFVcCAQEwgZUwfjELMAkG
|
||||
# RcBCyZt2WwqASGv9eZ/BvW1taslScxMNelDNMYIVZDCCFWACAQEwgZUwfjELMAkG
|
||||
# A1UEBhMCVVMxEzARBgNVBAgTCldhc2hpbmd0b24xEDAOBgNVBAcTB1JlZG1vbmQx
|
||||
# HjAcBgNVBAoTFU1pY3Jvc29mdCBDb3Jwb3JhdGlvbjEoMCYGA1UEAxMfTWljcm9z
|
||||
# b2Z0IENvZGUgU2lnbmluZyBQQ0EgMjAxMQITMwAAAd9r8C6Sp0q00AAAAAAB3zAN
|
||||
# b2Z0IENvZGUgU2lnbmluZyBQQ0EgMjAxMQITMwAAAYdyF3IVWUDHCQAAAAABhzAN
|
||||
# BglghkgBZQMEAgEFAKCBrjAZBgkqhkiG9w0BCQMxDAYKKwYBBAGCNwIBBDAcBgor
|
||||
# BgEEAYI3AgELMQ4wDAYKKwYBBAGCNwIBFTAvBgkqhkiG9w0BCQQxIgQg7EExOCvj
|
||||
# ZDkub0Fc7HAVivJIlz+Omt11fROkTFKR+KQwQgYKKwYBBAGCNwIBDDE0MDKgFIAS
|
||||
# BgEEAYI3AgELMQ4wDAYKKwYBBAGCNwIBFTAvBgkqhkiG9w0BCQQxIgQgpT/bxWwe
|
||||
# aW0EinKMWCAzDXUjwXkIHldYzR6lw4/1Pc0wQgYKKwYBBAGCNwIBDDE0MDKgFIAS
|
||||
# AE0AaQBjAHIAbwBzAG8AZgB0oRqAGGh0dHA6Ly93d3cubWljcm9zb2Z0LmNvbTAN
|
||||
# BgkqhkiG9w0BAQEFAASCAQBPvflyU+HQEIM4XvImAK2eQp74ABDaxeWVbl21ZF0L
|
||||
# cKf5GkIzdvlCoKxforXBkRIBtLxU9Sb4ff+AO3jiw2M36ZLwZQRYcn003FTR3BAY
|
||||
# 3XVGdxb64vCEMUb6Dzh9Hb1lmJjyn2NHsB514ZuUez4v/UIsV9e3TV731kY6gcQV
|
||||
# 3ridn3IKj3RMJGtPD5qxVMZohvVIy1Xiz4n/9BpBkk4HHsSuGPmBj08jWnIcI5TF
|
||||
# eXYC4LonTOFZLcsKiW4eMRaDt0fXxJjhQF8DSJX3PAxCC73YkPrJJyXAkGNHcXCg
|
||||
# 2Lyi9Yd6oV0nbJlCSLWK7jcn5lxUIgsrb+U6F8yXoNsXoYIS5TCCEuEGCisGAQQB
|
||||
# gjcDAwExghLRMIISzQYJKoZIhvcNAQcCoIISvjCCEroCAQMxDzANBglghkgBZQME
|
||||
# AgEFADCCAVEGCyqGSIb3DQEJEAEEoIIBQASCATwwggE4AgEBBgorBgEEAYRZCgMB
|
||||
# MDEwDQYJYIZIAWUDBAIBBQAEIEwFz8qgqbYKKOXyaZMOTE0/ve/rpdJjnq1eyDVG
|
||||
# mWB6AgZhHpsDT9wYEzIwMjEwODIzMTUxNzA2LjAyMVowBIACAfSggdCkgc0wgcox
|
||||
# BgkqhkiG9w0BAQEFAASCAQCHd7sSQVq0YDg8QDx6/kLWn3s6jtvvIDCCgsO9spHM
|
||||
# quPd4FPbG67DCsKDClekQs52qrtRO3Zo+JMnCw4j3bS+gZHzeJr2shbftOrpsFoD
|
||||
# l7OPcUmtrqul9dkQCOp8t0MP3ls0n96/YyNy6lz4BAlTdkdDx957uAxalKaCIBzb
|
||||
# R9QyppOKIfNFvwD4EI5KI6tpmSy/uH8SrRg7ZExAYZl6J6R18WkL7KHn649lPoAQ
|
||||
# ujwrIXH10xOJops45ILGzKWQcHmCzLJGYapL4VHUuK+73nT+9ZROGHdk/PyvIcdw
|
||||
# iERa+C06v305t3DA+CuHFy1tvyw7IFF6RVbLZPwxrJjToYIS7jCCEuoGCisGAQQB
|
||||
# gjcDAwExghLaMIIS1gYJKoZIhvcNAQcCoIISxzCCEsMCAQMxDzANBglghkgBZQME
|
||||
# AgEFADCCAVUGCyqGSIb3DQEJEAEEoIIBRASCAUAwggE8AgEBBgorBgEEAYRZCgMB
|
||||
# MDEwDQYJYIZIAWUDBAIBBQAEIOCaTmvM1AP0WaEVqzKaaCu/R+bTlR4kCrM/ZXsb
|
||||
# /eNOAgZgGeLsMwsYEzIwMjEwMjAzMjExNzQ5LjU5MVowBIACAfSggdSkgdEwgc4x
|
||||
# CzAJBgNVBAYTAlVTMRMwEQYDVQQIEwpXYXNoaW5ndG9uMRAwDgYDVQQHEwdSZWRt
|
||||
# b25kMR4wHAYDVQQKExVNaWNyb3NvZnQgQ29ycG9yYXRpb24xJTAjBgNVBAsTHE1p
|
||||
# Y3Jvc29mdCBBbWVyaWNhIE9wZXJhdGlvbnMxJjAkBgNVBAsTHVRoYWxlcyBUU1Mg
|
||||
# RVNOOjhBODItRTM0Ri05RERBMSUwIwYDVQQDExxNaWNyb3NvZnQgVGltZS1TdGFt
|
||||
# cCBTZXJ2aWNloIIOPDCCBPEwggPZoAMCAQICEzMAAAFLT7KmSNXkwlEAAAAAAUsw
|
||||
# DQYJKoZIhvcNAQELBQAwfDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCldhc2hpbmd0
|
||||
# b24xEDAOBgNVBAcTB1JlZG1vbmQxHjAcBgNVBAoTFU1pY3Jvc29mdCBDb3Jwb3Jh
|
||||
# dGlvbjEmMCQGA1UEAxMdTWljcm9zb2Z0IFRpbWUtU3RhbXAgUENBIDIwMTAwHhcN
|
||||
# MjAxMTEyMTgyNTU5WhcNMjIwMjExMTgyNTU5WjCByjELMAkGA1UEBhMCVVMxEzAR
|
||||
# BgNVBAgTCldhc2hpbmd0b24xEDAOBgNVBAcTB1JlZG1vbmQxHjAcBgNVBAoTFU1p
|
||||
# Y3Jvc29mdCBDb3Jwb3JhdGlvbjElMCMGA1UECxMcTWljcm9zb2Z0IEFtZXJpY2Eg
|
||||
# T3BlcmF0aW9uczEmMCQGA1UECxMdVGhhbGVzIFRTUyBFU046OEE4Mi1FMzRGLTlE
|
||||
# REExJTAjBgNVBAMTHE1pY3Jvc29mdCBUaW1lLVN0YW1wIFNlcnZpY2UwggEiMA0G
|
||||
# CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQChNnpQx3YuJr/ivobPoLtpQ9egUFl8
|
||||
# THdWZ6SAKIJdtP3L24D3/d63ommmjZjCyrQm+j/1tHDAwjQGuOwYvn79ecPCQfAB
|
||||
# 91JnEp/wP4BMF2SXyMf8k9R84RthIdfGHPXTWqzpCCfNWolVEcUVm8Ad/r1LrikR
|
||||
# O+4KKo6slDQJKsgKApfBU/9J7Rudvhw1rEQw0Nk1BRGWjrIp7/uWoUIfR4rcl6U1
|
||||
# utOiYIonC87PPpAJQXGRsDdKnVFF4NpWvMiyeuksn5t/Otwz82sGlne/HNQpmMzi
|
||||
# gR8cZ8eXEDJJNIZxov9WAHHj28gUE29D8ivAT706ihxvTv50ZY8W51uxAgMBAAGj
|
||||
# ggEbMIIBFzAdBgNVHQ4EFgQUUqpqftASlue6K3LePlTTn01K68YwHwYDVR0jBBgw
|
||||
# FoAU1WM6XIoxkPNDe3xGG8UzaFqFbVUwVgYDVR0fBE8wTTBLoEmgR4ZFaHR0cDov
|
||||
# L2NybC5taWNyb3NvZnQuY29tL3BraS9jcmwvcHJvZHVjdHMvTWljVGltU3RhUENB
|
||||
# XzIwMTAtMDctMDEuY3JsMFoGCCsGAQUFBwEBBE4wTDBKBggrBgEFBQcwAoY+aHR0
|
||||
# cDovL3d3dy5taWNyb3NvZnQuY29tL3BraS9jZXJ0cy9NaWNUaW1TdGFQQ0FfMjAx
|
||||
# MC0wNy0wMS5jcnQwDAYDVR0TAQH/BAIwADATBgNVHSUEDDAKBggrBgEFBQcDCDAN
|
||||
# BgkqhkiG9w0BAQsFAAOCAQEAFtq51Zc/O1AfJK4tEB2Nr8bGEVD5qQ8l8gXIQMrM
|
||||
# ZYtddHH+cGiqgF/4GmvmPfl5FAYh+gf/8Yd3q4/iD2+K4LtJbs/3v6mpyBl1mQ4v
|
||||
# usK65dAypWmiT1W3FiXjsmCIkjSDDsKLFBYH5yGFnNFOEMgL+O7u4osH42f80nc2
|
||||
# WdnZV6+OvW035XPV6ZttUBfFWHdIbUkdOG1O2n4yJm10OfacItZ08fzgMMqE+f/S
|
||||
# TgVWNCHbR2EYqTWayrGP69jMwtVD9BGGTWti1XjpvE6yKdO8H9nuRi3L+C6jYntf
|
||||
# aEmBTbnTFEV+kRx1CNcpSb9os86CAUehZU1aRzQ6CQ/pjzCCBnEwggRZoAMCAQIC
|
||||
# CmEJgSoAAAAAAAIwDQYJKoZIhvcNAQELBQAwgYgxCzAJBgNVBAYTAlVTMRMwEQYD
|
||||
# VQQIEwpXYXNoaW5ndG9uMRAwDgYDVQQHEwdSZWRtb25kMR4wHAYDVQQKExVNaWNy
|
||||
# b3NvZnQgQ29ycG9yYXRpb24xMjAwBgNVBAMTKU1pY3Jvc29mdCBSb290IENlcnRp
|
||||
# ZmljYXRlIEF1dGhvcml0eSAyMDEwMB4XDTEwMDcwMTIxMzY1NVoXDTI1MDcwMTIx
|
||||
# NDY1NVowfDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCldhc2hpbmd0b24xEDAOBgNV
|
||||
# BAcTB1JlZG1vbmQxHjAcBgNVBAoTFU1pY3Jvc29mdCBDb3Jwb3JhdGlvbjEmMCQG
|
||||
# A1UEAxMdTWljcm9zb2Z0IFRpbWUtU3RhbXAgUENBIDIwMTAwggEiMA0GCSqGSIb3
|
||||
# DQEBAQUAA4IBDwAwggEKAoIBAQCpHQ28dxGKOiDs/BOX9fp/aZRrdFQQ1aUKAIKF
|
||||
# ++18aEssX8XD5WHCdrc+Zitb8BVTJwQxH0EbGpUdzgkTjnxhMFmxMEQP8WCIhFRD
|
||||
# DNdNuDgIs0Ldk6zWczBXJoKjRQ3Q6vVHgc2/JGAyWGBG8lhHhjKEHnRhZ5FfgVSx
|
||||
# z5NMksHEpl3RYRNuKMYa+YaAu99h/EbBJx0kZxJyGiGKr0tkiVBisV39dx898Fd1
|
||||
# rL2KQk1AUdEPnAY+Z3/1ZsADlkR+79BL/W7lmsqxqPJ6Kgox8NpOBpG2iAg16Hgc
|
||||
# sOmZzTznL0S6p/TcZL2kAcEgCZN4zfy8wMlEXV4WnAEFTyJNAgMBAAGjggHmMIIB
|
||||
# 4jAQBgkrBgEEAYI3FQEEAwIBADAdBgNVHQ4EFgQU1WM6XIoxkPNDe3xGG8UzaFqF
|
||||
# bVUwGQYJKwYBBAGCNxQCBAweCgBTAHUAYgBDAEEwCwYDVR0PBAQDAgGGMA8GA1Ud
|
||||
# EwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAU1fZWy4/oolxiaNE9lJBb186aGMQwVgYD
|
||||
# VR0fBE8wTTBLoEmgR4ZFaHR0cDovL2NybC5taWNyb3NvZnQuY29tL3BraS9jcmwv
|
||||
# cHJvZHVjdHMvTWljUm9vQ2VyQXV0XzIwMTAtMDYtMjMuY3JsMFoGCCsGAQUFBwEB
|
||||
# BE4wTDBKBggrBgEFBQcwAoY+aHR0cDovL3d3dy5taWNyb3NvZnQuY29tL3BraS9j
|
||||
# ZXJ0cy9NaWNSb29DZXJBdXRfMjAxMC0wNi0yMy5jcnQwgaAGA1UdIAEB/wSBlTCB
|
||||
# kjCBjwYJKwYBBAGCNy4DMIGBMD0GCCsGAQUFBwIBFjFodHRwOi8vd3d3Lm1pY3Jv
|
||||
# c29mdC5jb20vUEtJL2RvY3MvQ1BTL2RlZmF1bHQuaHRtMEAGCCsGAQUFBwICMDQe
|
||||
# MiAdAEwAZQBnAGEAbABfAFAAbwBsAGkAYwB5AF8AUwB0AGEAdABlAG0AZQBuAHQA
|
||||
# LiAdMA0GCSqGSIb3DQEBCwUAA4ICAQAH5ohRDeLG4Jg/gXEDPZ2joSFvs+umzPUx
|
||||
# vs8F4qn++ldtGTCzwsVmyWrf9efweL3HqJ4l4/m87WtUVwgrUYJEEvu5U4zM9GAS
|
||||
# inbMQEBBm9xcF/9c+V4XNZgkVkt070IQyK+/f8Z/8jd9Wj8c8pl5SpFSAK84Dxf1
|
||||
# L3mBZdmptWvkx872ynoAb0swRCQiPM/tA6WWj1kpvLb9BOFwnzJKJ/1Vry/+tuWO
|
||||
# M7tiX5rbV0Dp8c6ZZpCM/2pif93FSguRJuI57BlKcWOdeyFtw5yjojz6f32WapB4
|
||||
# pm3S4Zz5Hfw42JT0xqUKloakvZ4argRCg7i1gJsiOCC1JeVk7Pf0v35jWSUPei45
|
||||
# V3aicaoGig+JFrphpxHLmtgOR5qAxdDNp9DvfYPw4TtxCd9ddJgiCGHasFAeb73x
|
||||
# 4QDf5zEHpJM692VHeOj4qEir995yfmFrb3epgcunCaw5u+zGy9iCtHLNHfS4hQEe
|
||||
# gPsbiSpUObJb2sgNVZl6h3M7COaYLeqN4DMuEin1wC9UJyH3yKxO2ii4sanblrKn
|
||||
# QqLJzxlBTeCG+SqaoxFmMNO7dDJL32N79ZmKLxvHIa9Zta7cRDyXUHHXodLFVeNp
|
||||
# 3lfB0d4wwP3M5k37Db9dT+mdHhk4L7zPWAUu7w2gUDXa7wknHNWzfjUeCLraNtvT
|
||||
# X4/edIhJEqGCAs4wggI3AgEBMIH4oYHQpIHNMIHKMQswCQYDVQQGEwJVUzETMBEG
|
||||
# A1UECBMKV2FzaGluZ3RvbjEQMA4GA1UEBxMHUmVkbW9uZDEeMBwGA1UEChMVTWlj
|
||||
# cm9zb2Z0IENvcnBvcmF0aW9uMSUwIwYDVQQLExxNaWNyb3NvZnQgQW1lcmljYSBP
|
||||
# cGVyYXRpb25zMSYwJAYDVQQLEx1UaGFsZXMgVFNTIEVTTjo4QTgyLUUzNEYtOURE
|
||||
# QTElMCMGA1UEAxMcTWljcm9zb2Z0IFRpbWUtU3RhbXAgU2VydmljZaIjCgEBMAcG
|
||||
# BSsOAwIaAxUAkToz97fseHxNOUSQ5O/bBVSF+e6ggYMwgYCkfjB8MQswCQYDVQQG
|
||||
# EwJVUzETMBEGA1UECBMKV2FzaGluZ3RvbjEQMA4GA1UEBxMHUmVkbW9uZDEeMBwG
|
||||
# A1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMSYwJAYDVQQDEx1NaWNyb3NvZnQg
|
||||
# VGltZS1TdGFtcCBQQ0EgMjAxMDANBgkqhkiG9w0BAQUFAAIFAOTNtrcwIhgPMjAy
|
||||
# MTA4MjMxMzU1MDNaGA8yMDIxMDgyNDEzNTUwM1owdzA9BgorBgEEAYRZCgQBMS8w
|
||||
# LTAKAgUA5M22twIBADAKAgEAAgIcfAIB/zAHAgEAAgIQOTAKAgUA5M8INwIBADA2
|
||||
# BgorBgEEAYRZCgQCMSgwJjAMBgorBgEEAYRZCgMCoAowCAIBAAIDB6EgoQowCAIB
|
||||
# AAIDAYagMA0GCSqGSIb3DQEBBQUAA4GBADrUnASEIEovjIFdqxhyYsyiBSfNeD7e
|
||||
# W4qeRl1B4DTAY8Z/U1SpnK8yy7r6oZn0D8og+QJmnWEnCPqNXAreyg8tlFyTF1TV
|
||||
# K6uUlslIY4WCmrAECIFua/DuksTerny66SBw4aSEeQHtGiynnR87WcdqItBJxxRA
|
||||
# pElXzs8QzC0EMYIDDTCCAwkCAQEwgZMwfDELMAkGA1UEBhMCVVMxEzARBgNVBAgT
|
||||
# Cldhc2hpbmd0b24xEDAOBgNVBAcTB1JlZG1vbmQxHjAcBgNVBAoTFU1pY3Jvc29m
|
||||
# dCBDb3Jwb3JhdGlvbjEmMCQGA1UEAxMdTWljcm9zb2Z0IFRpbWUtU3RhbXAgUENB
|
||||
# IDIwMTACEzMAAAFLT7KmSNXkwlEAAAAAAUswDQYJYIZIAWUDBAIBBQCgggFKMBoG
|
||||
# CSqGSIb3DQEJAzENBgsqhkiG9w0BCRABBDAvBgkqhkiG9w0BCQQxIgQg87M/Jiqh
|
||||
# MlaXNWAIAn7CM9CZw6QCPgBx3voUSIH6+qUwgfoGCyqGSIb3DQEJEAIvMYHqMIHn
|
||||
# MIHkMIG9BCBr9u6EInnsZYEts/Fj/rIFv0YZW1ynhXKOP2hVPUU5IzCBmDCBgKR+
|
||||
# MHwxCzAJBgNVBAYTAlVTMRMwEQYDVQQIEwpXYXNoaW5ndG9uMRAwDgYDVQQHEwdS
|
||||
# ZWRtb25kMR4wHAYDVQQKExVNaWNyb3NvZnQgQ29ycG9yYXRpb24xJjAkBgNVBAMT
|
||||
# HU1pY3Jvc29mdCBUaW1lLVN0YW1wIFBDQSAyMDEwAhMzAAABS0+ypkjV5MJRAAAA
|
||||
# AAFLMCIEIGs9Mn51TUGRleeUO5lQCRHZSu0G/8Ir92l3NgMGq1/GMA0GCSqGSIb3
|
||||
# DQEBCwUABIIBAA1oBIiM3guh89/Pvwwcn4hdZGGBsMk56MRMvOir8XQM0Je/rSwP
|
||||
# P11S2VwYUMmBjfFX1QK7r280k3k9xcDWW79ZC4Kn2UsV7jxuUwFHgdVlgDNjg6QL
|
||||
# unGuwFX5lStgNevwUH9DotoASXxgtwNNmtZNDB4NYVCxQLS7RtVSbxP+xFYXbokG
|
||||
# G4DFVHBr3CGPga3OduE/YUBpxIRxtiDF/9WHo0tJEdS95tfdnsyaGo8eHNzyfTdS
|
||||
# kjrNsAlqpJRT0Va/ooBXCV7Uny7PlCkRIQRTSJDy/kSXpsL22CTEHw6mU0jEU99k
|
||||
# OL4uwu+aVnolWFlpbiJjQxBqfT3plBLIg88=
|
||||
# b25kMR4wHAYDVQQKExVNaWNyb3NvZnQgQ29ycG9yYXRpb24xKTAnBgNVBAsTIE1p
|
||||
# Y3Jvc29mdCBPcGVyYXRpb25zIFB1ZXJ0byBSaWNvMSYwJAYDVQQLEx1UaGFsZXMg
|
||||
# VFNTIEVTTjo4OTdBLUUzNTYtMTcwMTElMCMGA1UEAxMcTWljcm9zb2Z0IFRpbWUt
|
||||
# U3RhbXAgU2VydmljZaCCDkEwggT1MIID3aADAgECAhMzAAABLCKvRZd1+RvuAAAA
|
||||
# AAEsMA0GCSqGSIb3DQEBCwUAMHwxCzAJBgNVBAYTAlVTMRMwEQYDVQQIEwpXYXNo
|
||||
# aW5ndG9uMRAwDgYDVQQHEwdSZWRtb25kMR4wHAYDVQQKExVNaWNyb3NvZnQgQ29y
|
||||
# cG9yYXRpb24xJjAkBgNVBAMTHU1pY3Jvc29mdCBUaW1lLVN0YW1wIFBDQSAyMDEw
|
||||
# MB4XDTE5MTIxOTAxMTUwM1oXDTIxMDMxNzAxMTUwM1owgc4xCzAJBgNVBAYTAlVT
|
||||
# MRMwEQYDVQQIEwpXYXNoaW5ndG9uMRAwDgYDVQQHEwdSZWRtb25kMR4wHAYDVQQK
|
||||
# ExVNaWNyb3NvZnQgQ29ycG9yYXRpb24xKTAnBgNVBAsTIE1pY3Jvc29mdCBPcGVy
|
||||
# YXRpb25zIFB1ZXJ0byBSaWNvMSYwJAYDVQQLEx1UaGFsZXMgVFNTIEVTTjo4OTdB
|
||||
# LUUzNTYtMTcwMTElMCMGA1UEAxMcTWljcm9zb2Z0IFRpbWUtU3RhbXAgU2Vydmlj
|
||||
# ZTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAPK1zgSSq+MxAYo3qpCt
|
||||
# QDxSMPPJy6mm/wfEJNjNUnYtLFBwl1BUS5trEk/t41ldxITKehs+ABxYqo4Qxsg3
|
||||
# Gy1ugKiwHAnYiiekfC+ZhptNFgtnDZIn45zC0AlVr/6UfLtsLcHCh1XElLUHfEC0
|
||||
# nBuQcM/SpYo9e3l1qY5NdMgDGxCsmCKdiZfYXIu+U0UYIBhdzmSHnB3fxZOBVcr5
|
||||
# htFHEBBNt/rFJlm/A4yb8oBsp+Uf0p5QwmO/bCcdqB15JpylOhZmWs0sUfJKlK9E
|
||||
# rAhBwGki2eIRFKsQBdkXS9PWpF1w2gIJRvSkDEaCf+lbGTPdSzHSbfREWOF9wY3i
|
||||
# Yj8CAwEAAaOCARswggEXMB0GA1UdDgQWBBRRahZSGfrCQhCyIyGH9DkiaW7L0zAf
|
||||
# BgNVHSMEGDAWgBTVYzpcijGQ80N7fEYbxTNoWoVtVTBWBgNVHR8ETzBNMEugSaBH
|
||||
# hkVodHRwOi8vY3JsLm1pY3Jvc29mdC5jb20vcGtpL2NybC9wcm9kdWN0cy9NaWNU
|
||||
# aW1TdGFQQ0FfMjAxMC0wNy0wMS5jcmwwWgYIKwYBBQUHAQEETjBMMEoGCCsGAQUF
|
||||
# BzAChj5odHRwOi8vd3d3Lm1pY3Jvc29mdC5jb20vcGtpL2NlcnRzL01pY1RpbVN0
|
||||
# YVBDQV8yMDEwLTA3LTAxLmNydDAMBgNVHRMBAf8EAjAAMBMGA1UdJQQMMAoGCCsG
|
||||
# AQUFBwMIMA0GCSqGSIb3DQEBCwUAA4IBAQBPFxHIwi4vAH49w9Svmz6K3tM55RlW
|
||||
# 5pPeULXdut2Rqy6Ys0+VpZsbuaEoxs6Z1C3hMbkiqZFxxyltxJpuHTyGTg61zfNI
|
||||
# F5n6RsYF3s7IElDXNfZznF1/2iWc6uRPZK8rxxUJ/7emYXZCYwuUY0XjsCpP9pbR
|
||||
# RKeJi6r5arSyI+NfKxvgoM21JNt1BcdlXuAecdd/k8UjxCscffanoK2n6LFw1PcZ
|
||||
# lEO7NId7o+soM2C0QY5BYdghpn7uqopB6ixyFIIkDXFub+1E7GmAEwfU6VwEHL7y
|
||||
# 9rNE8bd+JrQs+yAtkkHy9FmXg/PsGq1daVzX1So7CJ6nyphpuHSN3VfTMIIGcTCC
|
||||
# BFmgAwIBAgIKYQmBKgAAAAAAAjANBgkqhkiG9w0BAQsFADCBiDELMAkGA1UEBhMC
|
||||
# VVMxEzARBgNVBAgTCldhc2hpbmd0b24xEDAOBgNVBAcTB1JlZG1vbmQxHjAcBgNV
|
||||
# BAoTFU1pY3Jvc29mdCBDb3Jwb3JhdGlvbjEyMDAGA1UEAxMpTWljcm9zb2Z0IFJv
|
||||
# b3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTAwHhcNMTAwNzAxMjEzNjU1WhcN
|
||||
# MjUwNzAxMjE0NjU1WjB8MQswCQYDVQQGEwJVUzETMBEGA1UECBMKV2FzaGluZ3Rv
|
||||
# bjEQMA4GA1UEBxMHUmVkbW9uZDEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0
|
||||
# aW9uMSYwJAYDVQQDEx1NaWNyb3NvZnQgVGltZS1TdGFtcCBQQ0EgMjAxMDCCASIw
|
||||
# DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKkdDbx3EYo6IOz8E5f1+n9plGt0
|
||||
# VBDVpQoAgoX77XxoSyxfxcPlYcJ2tz5mK1vwFVMnBDEfQRsalR3OCROOfGEwWbEw
|
||||
# RA/xYIiEVEMM1024OAizQt2TrNZzMFcmgqNFDdDq9UeBzb8kYDJYYEbyWEeGMoQe
|
||||
# dGFnkV+BVLHPk0ySwcSmXdFhE24oxhr5hoC732H8RsEnHSRnEnIaIYqvS2SJUGKx
|
||||
# Xf13Hz3wV3WsvYpCTUBR0Q+cBj5nf/VmwAOWRH7v0Ev9buWayrGo8noqCjHw2k4G
|
||||
# kbaICDXoeByw6ZnNPOcvRLqn9NxkvaQBwSAJk3jN/LzAyURdXhacAQVPIk0CAwEA
|
||||
# AaOCAeYwggHiMBAGCSsGAQQBgjcVAQQDAgEAMB0GA1UdDgQWBBTVYzpcijGQ80N7
|
||||
# fEYbxTNoWoVtVTAZBgkrBgEEAYI3FAIEDB4KAFMAdQBiAEMAQTALBgNVHQ8EBAMC
|
||||
# AYYwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBTV9lbLj+iiXGJo0T2UkFvX
|
||||
# zpoYxDBWBgNVHR8ETzBNMEugSaBHhkVodHRwOi8vY3JsLm1pY3Jvc29mdC5jb20v
|
||||
# cGtpL2NybC9wcm9kdWN0cy9NaWNSb29DZXJBdXRfMjAxMC0wNi0yMy5jcmwwWgYI
|
||||
# KwYBBQUHAQEETjBMMEoGCCsGAQUFBzAChj5odHRwOi8vd3d3Lm1pY3Jvc29mdC5j
|
||||
# b20vcGtpL2NlcnRzL01pY1Jvb0NlckF1dF8yMDEwLTA2LTIzLmNydDCBoAYDVR0g
|
||||
# AQH/BIGVMIGSMIGPBgkrBgEEAYI3LgMwgYEwPQYIKwYBBQUHAgEWMWh0dHA6Ly93
|
||||
# d3cubWljcm9zb2Z0LmNvbS9QS0kvZG9jcy9DUFMvZGVmYXVsdC5odG0wQAYIKwYB
|
||||
# BQUHAgIwNB4yIB0ATABlAGcAYQBsAF8AUABvAGwAaQBjAHkAXwBTAHQAYQB0AGUA
|
||||
# bQBlAG4AdAAuIB0wDQYJKoZIhvcNAQELBQADggIBAAfmiFEN4sbgmD+BcQM9naOh
|
||||
# IW+z66bM9TG+zwXiqf76V20ZMLPCxWbJat/15/B4vceoniXj+bzta1RXCCtRgkQS
|
||||
# +7lTjMz0YBKKdsxAQEGb3FwX/1z5Xhc1mCRWS3TvQhDIr79/xn/yN31aPxzymXlK
|
||||
# kVIArzgPF/UveYFl2am1a+THzvbKegBvSzBEJCI8z+0DpZaPWSm8tv0E4XCfMkon
|
||||
# /VWvL/625Y4zu2JfmttXQOnxzplmkIz/amJ/3cVKC5Em4jnsGUpxY517IW3DnKOi
|
||||
# PPp/fZZqkHimbdLhnPkd/DjYlPTGpQqWhqS9nhquBEKDuLWAmyI4ILUl5WTs9/S/
|
||||
# fmNZJQ96LjlXdqJxqgaKD4kWumGnEcua2A5HmoDF0M2n0O99g/DhO3EJ3110mCII
|
||||
# YdqwUB5vvfHhAN/nMQekkzr3ZUd46PioSKv33nJ+YWtvd6mBy6cJrDm77MbL2IK0
|
||||
# cs0d9LiFAR6A+xuJKlQ5slvayA1VmXqHczsI5pgt6o3gMy4SKfXAL1QnIffIrE7a
|
||||
# KLixqduWsqdCosnPGUFN4Ib5KpqjEWYw07t0MkvfY3v1mYovG8chr1m1rtxEPJdQ
|
||||
# cdeh0sVV42neV8HR3jDA/czmTfsNv11P6Z0eGTgvvM9YBS7vDaBQNdrvCScc1bN+
|
||||
# NR4Iuto229Nfj950iEkSoYICzzCCAjgCAQEwgfyhgdSkgdEwgc4xCzAJBgNVBAYT
|
||||
# AlVTMRMwEQYDVQQIEwpXYXNoaW5ndG9uMRAwDgYDVQQHEwdSZWRtb25kMR4wHAYD
|
||||
# VQQKExVNaWNyb3NvZnQgQ29ycG9yYXRpb24xKTAnBgNVBAsTIE1pY3Jvc29mdCBP
|
||||
# cGVyYXRpb25zIFB1ZXJ0byBSaWNvMSYwJAYDVQQLEx1UaGFsZXMgVFNTIEVTTjo4
|
||||
# OTdBLUUzNTYtMTcwMTElMCMGA1UEAxMcTWljcm9zb2Z0IFRpbWUtU3RhbXAgU2Vy
|
||||
# dmljZaIjCgEBMAcGBSsOAwIaAxUADE5OKSMoNx/mYxYWap1RTOohbJ2ggYMwgYCk
|
||||
# fjB8MQswCQYDVQQGEwJVUzETMBEGA1UECBMKV2FzaGluZ3RvbjEQMA4GA1UEBxMH
|
||||
# UmVkbW9uZDEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMSYwJAYDVQQD
|
||||
# Ex1NaWNyb3NvZnQgVGltZS1TdGFtcCBQQ0EgMjAxMDANBgkqhkiG9w0BAQUFAAIF
|
||||
# AOPFChkwIhgPMjAyMTAyMDMxNTQwMDlaGA8yMDIxMDIwNDE1NDAwOVowdDA6Bgor
|
||||
# BgEEAYRZCgQBMSwwKjAKAgUA48UKGQIBADAHAgEAAgIXmDAHAgEAAgIRyTAKAgUA
|
||||
# 48ZbmQIBADA2BgorBgEEAYRZCgQCMSgwJjAMBgorBgEEAYRZCgMCoAowCAIBAAID
|
||||
# B6EgoQowCAIBAAIDAYagMA0GCSqGSIb3DQEBBQUAA4GBAHeeznL2n6HWCjHH94Fl
|
||||
# hcdW6TEXzq4XNgp1Gx1W9F8gJ4x+SwoV7elJZkwgGffcpHomLvIY/VSuzsl1NgtJ
|
||||
# TWM2UxoqSv58BBOrl4eGhH6kkg8Ucy2tdeK5T8cHa8pMkq2j9pFd2mRG/6VMk0dl
|
||||
# Xz7Uy3Z6bZqkcABMyAfuAaGbMYIDDTCCAwkCAQEwgZMwfDELMAkGA1UEBhMCVVMx
|
||||
# EzARBgNVBAgTCldhc2hpbmd0b24xEDAOBgNVBAcTB1JlZG1vbmQxHjAcBgNVBAoT
|
||||
# FU1pY3Jvc29mdCBDb3Jwb3JhdGlvbjEmMCQGA1UEAxMdTWljcm9zb2Z0IFRpbWUt
|
||||
# U3RhbXAgUENBIDIwMTACEzMAAAEsIq9Fl3X5G+4AAAAAASwwDQYJYIZIAWUDBAIB
|
||||
# BQCgggFKMBoGCSqGSIb3DQEJAzENBgsqhkiG9w0BCRABBDAvBgkqhkiG9w0BCQQx
|
||||
# IgQg/QYv7yp+354WTjWUIsXWndTEzXjaYjqwYjcBxCJKjdUwgfoGCyqGSIb3DQEJ
|
||||
# EAIvMYHqMIHnMIHkMIG9BCBbn/0uFFh42hTM5XOoKdXevBaiSxmYK9Ilcn9nu5ZH
|
||||
# 4TCBmDCBgKR+MHwxCzAJBgNVBAYTAlVTMRMwEQYDVQQIEwpXYXNoaW5ndG9uMRAw
|
||||
# DgYDVQQHEwdSZWRtb25kMR4wHAYDVQQKExVNaWNyb3NvZnQgQ29ycG9yYXRpb24x
|
||||
# JjAkBgNVBAMTHU1pY3Jvc29mdCBUaW1lLVN0YW1wIFBDQSAyMDEwAhMzAAABLCKv
|
||||
# RZd1+RvuAAAAAAEsMCIEIIfIM3YbzHswb/Kj/qq1l1cHA6QBl+gEXYanUNJomrpT
|
||||
# MA0GCSqGSIb3DQEBCwUABIIBAAwdcXssUZGO7ho5+NHLjIxLtQk543aKGo+lrRMY
|
||||
# Q9abE1h/AaaNJl0iGxX4IihNWyfovSfYL3L4eODUBAu68tWSxeceRfWNsb/ZZfUi
|
||||
# v89hpLssI/Gf1BEgNMA4zCuIGQiC8okusVumEpAhhvCEbSiTTTtBdolTnU/CAKui
|
||||
# oxaU3R9XkKh1F4oAM26+dJ1J2BLQXPs5afNvvedDsZWNQUPK1sFF3JRfzxiTrwBW
|
||||
# EJRyflev9gyDoqCHzippgb+6+eti1WTkcA9Q49GIT11S6LOAVqkSC9N7Nqf8ksh8
|
||||
# ARdwT8jigpsm+mj7lrVU9upDkhVYhKeO8oiZq95Q53Zkteo=
|
||||
# SIG # End signature block
428
src/Misc/dotnet-install.sh
vendored
@@ -24,7 +24,7 @@ exec 3>&1
# See if stdout is a terminal
if [ -t 1 ] && command -v tput > /dev/null; then
# see if it supports colors
ncolors=$(tput colors || echo 0)
ncolors=$(tput colors)
if [ -n "$ncolors" ] && [ $ncolors -ge 8 ]; then
bold="$(tput bold || echo)"
normal="$(tput sgr0 || echo)"
@@ -224,7 +224,7 @@ get_legacy_os_name() {
machine_has() {
eval $invocation

command -v "$1" > /dev/null 2>&1
hash "$1" > /dev/null 2>&1
return $?
}

@@ -368,75 +368,6 @@ get_normalized_os() {
|
||||
return 0
|
||||
}
|
||||
|
||||
# args:
|
||||
# quality - $1
|
||||
get_normalized_quality() {
|
||||
eval $invocation
|
||||
|
||||
local quality="$(to_lowercase "$1")"
|
||||
if [ ! -z "$quality" ]; then
|
||||
case "$quality" in
|
||||
daily | signed | validated | preview)
|
||||
echo "$quality"
|
||||
return 0
|
||||
;;
|
||||
ga)
|
||||
#ga quality is available without specifying quality, so normalizing it to empty
|
||||
return 0
|
||||
;;
|
||||
*)
|
||||
say_err "'$quality' is not a supported value for --quality option. Supported values are: daily, signed, validated, preview, ga. If you think this is a bug, report it at https://github.com/dotnet/install-scripts/issues."
|
||||
return 1
|
||||
;;
|
||||
esac
|
||||
fi
|
||||
return 0
|
||||
}
|
||||
|
||||
# args:
|
||||
# channel - $1
|
||||
get_normalized_channel() {
|
||||
eval $invocation
|
||||
|
||||
local channel="$(to_lowercase "$1")"
|
||||
|
||||
if [[ $channel == release/* ]]; then
|
||||
say_warning 'Using branch name with -Channel option is no longer supported with newer releases. Use -Quality option with a channel in X.Y format instead.';
|
||||
fi
|
||||
|
||||
if [ ! -z "$channel" ]; then
|
||||
case "$channel" in
|
||||
lts)
|
||||
echo "LTS"
|
||||
return 0
|
||||
;;
|
||||
*)
|
||||
echo "$channel"
|
||||
return 0
|
||||
;;
|
||||
esac
|
||||
fi
|
||||
|
||||
return 0
|
||||
}
|
||||
|
||||
# args:
|
||||
# runtime - $1
|
||||
get_normalized_product() {
|
||||
eval $invocation
|
||||
|
||||
local runtime="$(to_lowercase "$1")"
|
||||
if [[ "$runtime" == "dotnet" ]]; then
|
||||
product="dotnet-runtime"
|
||||
elif [[ "$runtime" == "aspnetcore" ]]; then
|
||||
product="aspnetcore-runtime"
|
||||
elif [ -z "$runtime" ]; then
|
||||
product="dotnet-sdk"
|
||||
fi
|
||||
echo "$product"
|
||||
return 0
|
||||
}
|
||||
|
||||
# The version text returned from the feeds is a 1-line or 2-line string:
|
||||
# For the SDK and the dotnet runtime (2 lines):
|
||||
# Line 1: # commit_hash
|
||||
@@ -610,131 +541,43 @@ construct_download_link() {
|
||||
# args:
|
||||
# azure_feed - $1
|
||||
# specific_version - $2
|
||||
# download link - $3 (optional)
|
||||
get_specific_product_version() {
|
||||
# If we find a 'productVersion.txt' at the root of any folder, we'll use its contents
|
||||
# to resolve the version of what's in the folder, superseding the specified version.
|
||||
# if 'productVersion.txt' is missing but download link is already available, product version will be taken from download link
|
||||
eval $invocation
|
||||
|
||||
local azure_feed="$1"
|
||||
local specific_version="${2//[$'\t\r\n']}"
|
||||
local package_download_link=""
|
||||
if [ $# -gt 2 ]; then
|
||||
local package_download_link="$3"
|
||||
fi
|
||||
local specific_product_version=null
|
||||
|
||||
# Try to get the version number, using the productVersion.txt file located next to the installer file.
|
||||
local download_links=($(get_specific_product_version_url "$azure_feed" "$specific_version" true "$package_download_link")
|
||||
$(get_specific_product_version_url "$azure_feed" "$specific_version" false "$package_download_link"))
|
||||
|
||||
for download_link in "${download_links[@]}"
|
||||
do
|
||||
say_verbose "Checking for the existence of $download_link"
|
||||
|
||||
if machine_has "curl"
|
||||
then
|
||||
specific_product_version=$(curl -s --fail "${download_link}${feed_credential}")
|
||||
if [ $? = 0 ]; then
|
||||
echo "${specific_product_version//[$'\t\r\n']}"
|
||||
return 0
|
||||
fi
|
||||
elif machine_has "wget"
|
||||
then
|
||||
specific_product_version=$(wget -qO- "${download_link}${feed_credential}")
|
||||
if [ $? = 0 ]; then
|
||||
echo "${specific_product_version//[$'\t\r\n']}"
|
||||
return 0
|
||||
fi
|
||||
fi
|
||||
done
|
||||
|
||||
# Getting the version number with productVersion.txt has failed. Try parsing the download link for a version number.
|
||||
say_verbose "Failed to get the version using productVersion.txt file. Download link will be parsed instead."
|
||||
specific_product_version="$(get_product_specific_version_from_download_link "$package_download_link" "$specific_version")"
|
||||
echo "${specific_product_version//[$'\t\r\n']}"
|
||||
return 0
|
||||
}
|
||||
|
||||
# args:
# azure_feed - $1
# specific_version - $2
# is_flattened - $3
# download link - $4 (optional)
get_specific_product_version_url() {
eval $invocation

local azure_feed="$1"
local specific_version="$2"
local is_flattened="$3"
local package_download_link=""
if [ $# -gt 3 ]; then
local package_download_link="$4"
fi

local pvFileName="productVersion.txt"
if [ "$is_flattened" = true ]; then
if [ -z "$runtime" ]; then
pvFileName="sdk-productVersion.txt"
elif [[ "$runtime" == "dotnet" ]]; then
pvFileName="runtime-productVersion.txt"
else
pvFileName="$runtime-productVersion.txt"
fi
fi
local specific_product_version=$specific_version

local download_link=null
if [[ "$runtime" == "dotnet" ]]; then
download_link="$azure_feed/Runtime/$specific_version/productVersion.txt${feed_credential}"
elif [[ "$runtime" == "aspnetcore" ]]; then
download_link="$azure_feed/aspnetcore/Runtime/$specific_version/productVersion.txt${feed_credential}"
elif [ -z "$runtime" ]; then
download_link="$azure_feed/Sdk/$specific_version/productVersion.txt${feed_credential}"
else
return 1
fi

if [ -z "$package_download_link" ]; then
if [[ "$runtime" == "dotnet" ]]; then
download_link="$azure_feed/Runtime/$specific_version/${pvFileName}"
elif [[ "$runtime" == "aspnetcore" ]]; then
download_link="$azure_feed/aspnetcore/Runtime/$specific_version/${pvFileName}"
elif [ -z "$runtime" ]; then
download_link="$azure_feed/Sdk/$specific_version/${pvFileName}"
else
return 1
if machine_has "curl"
then
specific_product_version=$(curl -s --fail "$download_link")
if [ $? -ne 0 ]
then
specific_product_version=$specific_version
fi
elif machine_has "wget"
then
specific_product_version=$(wget -qO- "$download_link")
if [ $? -ne 0 ]
then
specific_product_version=$specific_version
fi
else
download_link="${package_download_link%/*}/${pvFileName}"
fi
specific_product_version="${specific_product_version//[$'\t\r\n']}"

say_verbose "Constructed productVersion link: $download_link"
echo "$download_link"
return 0
}

# args:
# download link - $1
# specific version - $2
get_product_specific_version_from_download_link()
{
eval $invocation

local download_link="$1"
local specific_version="$2"
local specific_product_version=""

if [ -z "$download_link" ]; then
echo "$specific_version"
return 0
fi

#get filename
filename="${download_link##*/}"

#product specific version follows the product name
#for filename 'dotnet-sdk-3.1.404-linux-x64.tar.gz': the product version is 3.1.404
IFS='-'
read -ra filename_elems <<< "$filename"
count=${#filename_elems[@]}
if [[ "$count" -gt 2 ]]; then
specific_product_version="${filename_elems[2]}"
else
specific_product_version=$specific_version
fi
unset IFS;
echo "$specific_product_version"
return 0
}

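The filename parsing above can be exercised on its own. A minimal sketch, assuming a typical payload name such as dotnet-sdk-3.1.404-linux-x64.tar.gz (the URL used here is illustrative only):

#!/bin/bash
# Split the payload filename on '-' and take the third element as the product version,
# mirroring the IFS='-' logic above; fall back to the requested version for short names.
link="https://example.invalid/Sdk/3.1.404/dotnet-sdk-3.1.404-linux-x64.tar.gz"   # illustrative URL
requested_version="3.1.404"
filename="${link##*/}"
IFS='-' read -ra parts <<< "$filename"
if [[ "${#parts[@]}" -gt 2 ]]; then
  echo "${parts[2]}"          # prints 3.1.404
else
  echo "$requested_version"
fi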
@@ -868,62 +711,19 @@ extract_dotnet_package() {
return 0
}

# args:
# remote_path - $1
# disable_feed_credential - $2
get_http_header()
{
eval $invocation
local remote_path="$1"
local disable_feed_credential="$2"

local failed=false
local response
if machine_has "curl"; then
get_http_header_curl $remote_path $disable_feed_credential || failed=true
elif machine_has "wget"; then
get_http_header_wget $remote_path $disable_feed_credential || failed=true
else
failed=true
fi
if [ "$failed" = true ]; then
say_verbose "Failed to get HTTP header: '$remote_path'."
return 1
fi
return 0
}

# args:
# remote_path - $1
# disable_feed_credential - $2
get_http_header_curl() {
eval $invocation
local remote_path="$1"
local disable_feed_credential="$2"

remote_path_with_credential="$remote_path"
if [ "$disable_feed_credential" = false ]; then
remote_path_with_credential+="$feed_credential"
fi

remote_path_with_credential="${remote_path}${feed_credential}"
curl_options="-I -sSL --retry 5 --retry-delay 2 --connect-timeout 15 "
curl $curl_options "$remote_path_with_credential" || return 1
return 0
}

# args:
# remote_path - $1
# disable_feed_credential - $2
get_http_header_wget() {
eval $invocation
local remote_path="$1"
local disable_feed_credential="$2"

remote_path_with_credential="$remote_path"
if [ "$disable_feed_credential" = false ]; then
remote_path_with_credential+="$feed_credential"
fi

remote_path_with_credential="${remote_path}${feed_credential}"
wget_options="-q -S --spider --tries 5 --waitretry 2 --connect-timeout 15 "
wget $wget_options "$remote_path_with_credential" 2>&1 || return 1
return 0
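Outside the script, the same header probe can be reproduced with a plain curl call. A sketch, assuming curl is available and using an illustrative URL (the real callers append the feed credential as a query string only when it is enabled):

# -I issues a header-only request; -sSL follows redirects quietly but still reports errors.
remote_path="https://dotnetcli.azureedge.net/dotnet/Sdk/6.0.100/productVersion.txt"   # illustrative URL
curl -I -sSL --retry 5 --retry-delay 2 --connect-timeout 15 "$remote_path" || echo "header probe failed"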
@@ -963,7 +763,7 @@ download() {

say "Download attempt #$attempts has failed: $http_code $download_error_msg"
say "Attempt #$((attempts+1)) will start in $((attempts*10)) seconds."
sleep $((attempts*10))
sleep $((attempts*20))
done


@@ -983,7 +783,6 @@ downloadcurl() {
local remote_path="$1"
local out_path="${2:-}"
# Append feed_credential as late as possible before calling curl to avoid logging feed_credential
# Avoid passing URI with credentials to functions: note, most of them echoing parameters of invocation in verbose output.
local remote_path_with_credential="${remote_path}${feed_credential}"
local curl_options="--retry 20 --retry-delay 2 --connect-timeout 15 -sSL -f --create-dirs "
local failed=false
@@ -993,8 +792,7 @@ downloadcurl() {
curl $curl_options -o "$out_path" "$remote_path_with_credential" || failed=true
fi
if [ "$failed" = true ]; then
local disable_feed_credential=false
local response=$(get_http_header_curl $remote_path $disable_feed_credential)
local response=$(get_http_header_curl $remote_path_with_credential)
http_code=$( echo "$response" | awk '/^HTTP/{print $2}' | tail -1 )
download_error_msg="Unable to download $remote_path."
if [[ $http_code != 2* ]]; then
@@ -1024,8 +822,7 @@ downloadwget() {
wget $wget_options -O "$out_path" "$remote_path_with_credential" || failed=true
fi
if [ "$failed" = true ]; then
local disable_feed_credential=false
local response=$(get_http_header_wget $remote_path $disable_feed_credential)
local response=$(get_http_header_wget $remote_path_with_credential)
http_code=$( echo "$response" | awk '/^ HTTP/{print $2}' | tail -1 )
download_error_msg="Unable to download $remote_path."
if [[ $http_code != 2* ]]; then
@@ -1037,115 +834,15 @@ downloadwget() {
return 0
}

get_download_link_from_aka_ms() {
eval $invocation

#quality is not supported for LTS or current channel
if [[ ! -z "$normalized_quality" && ("$normalized_channel" == "LTS" || "$normalized_channel" == "current") ]]; then
normalized_quality=""
say_warning "Specifying quality for current or LTS channel is not supported, the quality will be ignored."
fi

say_verbose "Retrieving primary payload URL from aka.ms for channel: '$normalized_channel', quality: '$normalized_quality', product: '$normalized_product', os: '$normalized_os', architecture: '$normalized_architecture'."

#construct aka.ms link
aka_ms_link="https://aka.ms/dotnet"
if [ "$internal" = true ]; then
aka_ms_link="$aka_ms_link/internal"
fi
aka_ms_link="$aka_ms_link/$normalized_channel"
if [[ ! -z "$normalized_quality" ]]; then
aka_ms_link="$aka_ms_link/$normalized_quality"
fi
aka_ms_link="$aka_ms_link/$normalized_product-$normalized_os-$normalized_architecture.tar.gz"
say_verbose "Constructed aka.ms link: '$aka_ms_link'."

#get HTTP response
#do not pass credentials as a part of the $aka_ms_link and do not apply credentials in the get_http_header function
#otherwise the redirect link would have credentials as well
#it would result in applying credentials twice to the resulting link and thus breaking it, and in echoing credentials to the output as a part of redirect link
disable_feed_credential=true
response="$(get_http_header $aka_ms_link $disable_feed_credential)"

say_verbose "Received response: $response"
# Get results of all the redirects.
http_codes=$( echo "$response" | awk '$1 ~ /^HTTP/ {print $2}' )
# They all need to be 301, otherwise some links are broken (except for the last, which is not a redirect but 200 or 404).
broken_redirects=$( echo "$http_codes" | sed '$d' | grep -v '301' )

# All HTTP codes are 301 (Moved Permanently), the redirect link exists.
if [[ -z "$broken_redirects" ]]; then
aka_ms_download_link=$( echo "$response" | awk '$1 ~ /^Location/{print $2}' | tail -1 | tr -d '\r')

if [[ -z "$aka_ms_download_link" ]]; then
say_verbose "The aka.ms link '$aka_ms_link' is not valid: failed to get redirect location."
return 1
fi

say_verbose "The redirect location retrieved: '$aka_ms_download_link'."
return 0
else
say_verbose "The aka.ms link '$aka_ms_link' is not valid: received HTTP code: $(echo "$broken_redirects" | paste -sd "," -)."
return 1
fi
}

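The aka.ms validation boils down to checking that every hop in the redirect chain is a 301 and then taking the last Location header. A standalone sketch of that check, assuming curl is present and using an illustrative link:

aka_ms_link="https://aka.ms/dotnet/6.0/dotnet-sdk-linux-x64.tar.gz"   # illustrative link
response="$(curl -I -sSL "$aka_ms_link")"
http_codes="$(echo "$response" | awk '$1 ~ /^HTTP/ {print $2}')"
# Every code except the final one must be 301, otherwise the chain is broken.
broken="$(echo "$http_codes" | sed '$d' | grep -v '301')"
if [[ -z "$broken" ]]; then
  echo "$response" | awk '$1 ~ /^Location/ {print $2}' | tail -1 | tr -d '\r'
else
  echo "redirect chain contains unexpected codes: $broken" >&2
fi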
calculate_vars() {
eval $invocation
valid_legacy_download_link=true

#normalize input variables
normalized_architecture="$(get_normalized_architecture_from_architecture "$architecture")"
say_verbose "Normalized architecture: '$normalized_architecture'."
say_verbose "normalized_architecture=$normalized_architecture"

normalized_os="$(get_normalized_os "$user_defined_os")"
say_verbose "Normalized OS: '$normalized_os'."
normalized_quality="$(get_normalized_quality "$quality")"
say_verbose "Normalized quality: '$normalized_quality'."
normalized_channel="$(get_normalized_channel "$channel")"
say_verbose "Normalized channel: '$normalized_channel'."
normalized_product="$(get_normalized_product "$runtime")"
say_verbose "Normalized product: '$normalized_product'."

#try to get download location from aka.ms link
#not applicable when exact version is specified via command or json file
normalized_version="$(to_lowercase "$version")"
if [[ -z "$json_file" && "$normalized_version" == "latest" ]]; then

valid_aka_ms_link=true;
get_download_link_from_aka_ms || valid_aka_ms_link=false

if [ "$valid_aka_ms_link" == false ]; then
# if quality is specified - exit with error - there is no fallback approach
if [ ! -z "$normalized_quality" ]; then
say_err "Failed to locate the latest version in the channel '$normalized_channel' with '$normalized_quality' quality for '$normalized_product', os: '$normalized_os', architecture: '$normalized_architecture'."
say_err "Refer to: https://aka.ms/dotnet-os-lifecycle for information on .NET Core support."
return 1
fi
say_verbose "Falling back to latest.version file approach."
else
say_verbose "Retrieved primary payload URL from aka.ms link: '$aka_ms_download_link'."
download_link=$aka_ms_download_link

say_verbose "Downloading using legacy url will not be attempted."
valid_legacy_download_link=false

#get version from the path
IFS='/'
read -ra pathElems <<< "$download_link"
count=${#pathElems[@]}
specific_version="${pathElems[count-2]}"
unset IFS;
say_verbose "Version: '$specific_version'."

#Retrieve product specific version
specific_product_version="$(get_specific_product_version "$azure_feed" "$specific_version" "$download_link")"
say_verbose "Product specific version: '$specific_product_version'."

install_root="$(resolve_installation_path "$install_dir")"
say_verbose "InstallRoot: '$install_root'."
return
fi
fi
say_verbose "normalized_os=$normalized_os"

specific_version="$(get_specific_version_from_version "$azure_feed" "$channel" "$normalized_architecture" "$version" "$json_file")"
specific_product_version="$(get_specific_product_version "$azure_feed" "$specific_version")"
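When the aka.ms link wins, the specific version is read straight out of the URL path as the second-to-last segment. A small sketch of that extraction, with an illustrative URL:

download_link="https://dotnetcli.azureedge.net/dotnet/Sdk/6.0.100/dotnet-sdk-6.0.100-linux-x64.tar.gz"   # illustrative URL
IFS='/' read -ra pathElems <<< "$download_link"
count=${#pathElems[@]}
echo "${pathElems[count-2]}"   # prints 6.0.100
unset IFS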
@@ -1314,8 +1011,6 @@ feed_credential=""
|
||||
verbose=false
|
||||
runtime=""
|
||||
runtime_id=""
|
||||
quality=""
|
||||
internal=false
|
||||
override_non_versioned_files=true
|
||||
non_dynamic_parameters=""
|
||||
user_defined_os=""
|
||||
@@ -1332,14 +1027,6 @@ do
|
||||
shift
|
||||
version="$1"
|
||||
;;
|
||||
-q|--quality|-[Qq]uality)
|
||||
shift
|
||||
quality="$1"
|
||||
;;
|
||||
--internal|-[Ii]nternal)
|
||||
internal=true
|
||||
non_dynamic_parameters+=" $name"
|
||||
;;
|
||||
-i|--install-dir|-[Ii]nstall[Dd]ir)
|
||||
shift
|
||||
install_dir="$1"
|
||||
@@ -1397,9 +1084,7 @@ do
|
||||
--feed-credential|-[Ff]eed[Cc]redential)
|
||||
shift
|
||||
feed_credential="$1"
|
||||
#feed_credential should start with "?", for it to be added to the end of the link.
|
||||
#adding "?" at the beginning of the feed_credential if needed.
|
||||
[[ -z "$(echo $feed_credential)" ]] || [[ $feed_credential == \?* ]] || feed_credential="?$feed_credential"
|
||||
non_dynamic_parameters+=" $name "\""$1"\"""
|
||||
;;
|
||||
--runtime-id|-[Rr]untime[Ii]d)
|
||||
shift
|
||||
@@ -1431,26 +1116,15 @@ do
|
||||
echo " - LTS - most current supported release"
|
||||
echo " - 2-part version in a format A.B - represents a specific release"
|
||||
echo " examples: 2.0; 1.0"
|
||||
echo " - 3-part version in a format A.B.Cxx - represents a specific SDK release"
|
||||
echo " examples: 5.0.1xx, 5.0.2xx."
|
||||
echo " Supported since 5.0 release"
|
||||
echo " Note: The version parameter overrides the channel parameter when any version other than `latest` is used."
|
||||
echo " - Branch name"
|
||||
echo " examples: release/2.0.0; Master"
|
||||
echo " Note: The version parameter overrides the channel parameter."
|
||||
echo " -v,--version <VERSION> Use specific VERSION, Defaults to \`$version\`."
|
||||
echo " -Version"
|
||||
echo " Possible values:"
|
||||
echo " - latest - most latest build on specific channel"
|
||||
echo " - 3-part version in a format A.B.C - represents specific version of build"
|
||||
echo " examples: 2.0.0-preview2-006120; 1.1.0"
|
||||
echo " -q,--quality <quality> Download the latest build of specified quality in the channel."
|
||||
echo " -Quality"
|
||||
echo " The possible values are: daily, signed, validated, preview, GA."
|
||||
echo " Works only in combination with channel. Not applicable for current and LTS channels and will be ignored if those channels are used."
|
||||
echo " For SDK use channel in A.B.Cxx format. Using quality for SDK together with channel in A.B format is not supported."
|
||||
echo " Supported since 5.0 release."
|
||||
echo " Note: The version parameter overrides the channel parameter when any version other than `latest` is used, and therefore overrides the quality."
|
||||
echo " --internal,-Internal Download internal builds. Requires providing credentials via --feed-credential parameter."
|
||||
echo " --feed-credential <FEEDCREDENTIAL> Token to access Azure feed. Used as a query string to append to the Azure feed."
|
||||
echo " -FeedCredential This parameter typically is not specified."
|
||||
echo " -i,--install-dir <DIR> Install under specified location (see Install Location below)"
|
||||
echo " -InstallDir"
|
||||
echo " --architecture <ARCHITECTURE> Architecture of dotnet binaries to be installed, Defaults to \`$architecture\`."
|
||||
@@ -1471,6 +1145,7 @@ do
|
||||
echo " --verbose,-Verbose Display diagnostics information."
|
||||
echo " --azure-feed,-AzureFeed Azure feed location. Defaults to $azure_feed, This parameter typically is not changed by the user."
|
||||
echo " --uncached-feed,-UncachedFeed Uncached feed location. This parameter typically is not changed by the user."
|
||||
echo " --feed-credential,-FeedCredential Azure feed shared access token. This parameter typically is not specified."
|
||||
echo " --skip-non-versioned-files Skips non-versioned files if they already exist, such as the dotnet executable."
|
||||
echo " -SkipNonVersionedFiles"
|
||||
echo " --no-cdn,-NoCdn Disable downloading from the Azure CDN, and use the uncached feed directly."
|
||||
@@ -1511,44 +1186,23 @@ say "- The SDK needs to be installed without user interaction and without admin
|
||||
say "- The SDK installation doesn't need to persist across multiple CI runs."
|
||||
say "To set up a development environment or to run apps, use installers rather than this script. Visit https://dotnet.microsoft.com/download to get the installer.\n"
|
||||
|
||||
if [ "$internal" = true ] && [ -z "$(echo $feed_credential)" ]; then
|
||||
message="Provide credentials via --feed-credential parameter."
|
||||
if [ "$dry_run" = true ]; then
|
||||
say_warning "$message"
|
||||
else
|
||||
say_err "$message"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
check_min_reqs
|
||||
calculate_vars
|
||||
script_name=$(basename "$0")
|
||||
|
||||
if [ "$dry_run" = true ]; then
|
||||
say "Payload URLs:"
|
||||
say "Primary named payload URL: ${download_link}"
|
||||
say "Primary named payload URL: $download_link"
|
||||
if [ "$valid_legacy_download_link" = true ]; then
|
||||
say "Legacy named payload URL: ${legacy_download_link}"
|
||||
say "Legacy named payload URL: $legacy_download_link"
|
||||
fi
|
||||
repeatable_command="./$script_name --version "\""$specific_version"\"" --install-dir "\""$install_root"\"" --architecture "\""$normalized_architecture"\"" --os "\""$normalized_os"\"""
|
||||
|
||||
if [ ! -z "$normalized_quality" ]; then
|
||||
repeatable_command+=" --quality "\""$normalized_quality"\"""
|
||||
fi
|
||||
|
||||
if [[ "$runtime" == "dotnet" ]]; then
|
||||
repeatable_command+=" --runtime "\""dotnet"\"""
|
||||
elif [[ "$runtime" == "aspnetcore" ]]; then
|
||||
repeatable_command+=" --runtime "\""aspnetcore"\"""
|
||||
fi
|
||||
|
||||
repeatable_command+="$non_dynamic_parameters"
|
||||
|
||||
if [ -n "$feed_credential" ]; then
|
||||
repeatable_command+=" --feed-credential "\""<feed_credential>"\"""
|
||||
fi
|
||||
|
||||
say "Repeatable invocation: $repeatable_command"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"plugins": ["jest", "@typescript-eslint"],
|
||||
"extends": ["plugin:github/es6"],
|
||||
"plugins": ["@typescript-eslint"],
|
||||
"extends": ["plugin:github/recommended"],
|
||||
"parser": "@typescript-eslint/parser",
|
||||
"parserOptions": {
|
||||
"ecmaVersion": 9,
|
||||
@@ -17,13 +17,16 @@
|
||||
"@typescript-eslint/no-require-imports": "error",
|
||||
"@typescript-eslint/array-type": "error",
|
||||
"@typescript-eslint/await-thenable": "error",
|
||||
"@typescript-eslint/ban-ts-ignore": "error",
|
||||
"@typescript-eslint/naming-convention": [
|
||||
"error",
|
||||
{
|
||||
"selector": "default",
|
||||
"format": ["camelCase"]
|
||||
}
|
||||
],
|
||||
"camelcase": "off",
|
||||
"@typescript-eslint/camelcase": "error",
|
||||
"@typescript-eslint/class-name-casing": "error",
|
||||
"@typescript-eslint/explicit-function-return-type": ["error", {"allowExpressions": true}],
|
||||
"@typescript-eslint/func-call-spacing": ["error", "never"],
|
||||
"@typescript-eslint/generic-type-naming": ["error", "^[A-Z][A-Za-z]*$"],
|
||||
"@typescript-eslint/no-array-constructor": "error",
|
||||
"@typescript-eslint/no-empty-interface": "error",
|
||||
"@typescript-eslint/no-explicit-any": "error",
|
||||
@@ -33,7 +36,6 @@
|
||||
"@typescript-eslint/no-misused-new": "error",
|
||||
"@typescript-eslint/no-namespace": "error",
|
||||
"@typescript-eslint/no-non-null-assertion": "warn",
|
||||
"@typescript-eslint/no-object-literal-type-assertion": "error",
|
||||
"@typescript-eslint/no-unnecessary-qualifier": "error",
|
||||
"@typescript-eslint/no-unnecessary-type-assertion": "error",
|
||||
"@typescript-eslint/no-useless-constructor": "error",
|
||||
@@ -41,19 +43,19 @@
|
||||
"@typescript-eslint/prefer-for-of": "warn",
|
||||
"@typescript-eslint/prefer-function-type": "warn",
|
||||
"@typescript-eslint/prefer-includes": "error",
|
||||
"@typescript-eslint/prefer-interface": "error",
|
||||
"@typescript-eslint/prefer-string-starts-ends-with": "error",
|
||||
"@typescript-eslint/promise-function-async": "error",
|
||||
"@typescript-eslint/require-array-sort-compare": "error",
|
||||
"@typescript-eslint/restrict-plus-operands": "error",
|
||||
"semi": "off",
|
||||
"@typescript-eslint/semi": ["error", "never"],
|
||||
"@typescript-eslint/type-annotation-spacing": "error",
|
||||
"@typescript-eslint/unbound-method": "error"
|
||||
"@typescript-eslint/unbound-method": "error",
|
||||
"filenames/match-regex" : "off",
|
||||
"github/no-then" : 1, // warning
|
||||
"semi": "off"
|
||||
},
|
||||
"env": {
|
||||
"node": true,
|
||||
"es6": true,
|
||||
"jest/globals": true
|
||||
"es6": true
|
||||
}
|
||||
}
|
||||
5169 src/Misc/expressionFunc/hashFiles/package-lock.json (generated): file diff suppressed because it is too large
@@ -25,10 +25,10 @@
},
"devDependencies": {
"@types/node": "^12.7.12",
"@typescript-eslint/parser": "^2.8.0",
"@typescript-eslint/parser": "^5.15.0",
"@zeit/ncc": "^0.20.5",
"eslint": "^6.8.0",
"eslint-plugin-github": "^2.0.0",
"eslint": "^8.11.0",
"eslint-plugin-github": "^4.3.5",
"prettier": "^1.19.1",
"typescript": "^3.6.4"
}

@@ -1,9 +1,9 @@
import * as glob from '@actions/glob'
import * as crypto from 'crypto'
import * as fs from 'fs'
import * as glob from '@actions/glob'
import * as path from 'path'
import * as stream from 'stream'
import * as util from 'util'
import * as path from 'path'

async function run(): Promise<void> {
// arg0 -> node
@@ -45,7 +45,7 @@ async function run(): Promise<void> {
result.end()

if (hasMatch) {
console.log(`Find ${count} files to hash.`)
console.log(`Found ${count} files to hash.`)
console.error(`__OUTPUT__${result.digest('hex')}__OUTPUT__`)
} else {
console.error(`__OUTPUT____OUTPUT__`)
@@ -53,3 +53,11 @@ async function run(): Promise<void> {
}

run()
.then(out => {
console.log(out)
process.exit(0)
})
.catch(err => {
console.error(err)
process.exit(1)
})

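The helper prints the combined hash to stderr wrapped in __OUTPUT__ markers so the caller can pick it out of mixed output. A hedged sketch of how a caller might extract it, assuming node and the compiled index.js are on disk; the invocation and glob pattern are illustrative, not the runner's exact calling convention:

# Run the hashFiles helper and pull the value between the __OUTPUT__ markers from stderr.
raw="$(node ./index.js '**/*.lock' 2>&1 >/dev/null)"   # illustrative invocation
hash="$(echo "$raw" | sed -n 's/.*__OUTPUT__\(.*\)__OUTPUT__.*/\1/p')"
echo "hashFiles result: ${hash:-<no match>}"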
@@ -157,10 +157,3 @@ if [[ "$PACKAGERUNTIME" == "linux-arm" ]]; then
acquireExternalTool "$NODE_URL/v${NODE12_VERSION}/node-v${NODE12_VERSION}-linux-armv7l.tar.gz" node12 fix_nested_dir
acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-linux-armv7l.tar.gz" node16 fix_nested_dir
fi

if [[ "$PACKAGERUNTIME" == "linux-musl-x64" ]]; then
acquireExternalTool "$NODE_URL/v${NODE12_VERSION}/node-v${NODE12_VERSION}-linux-x64.tar.gz" node12_glib fix_nested_dir
acquireExternalTool "https://vstsagenttools.blob.core.windows.net/tools/nodejs/${NODE12_VERSION}/alpine/x64/node-v${NODE12_VERSION}-alpine-x64.tar.gz" node12
acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-linux-x64.tar.gz" node16_glib fix_nested_dir
acquireExternalTool "https://vstsagenttools.blob.core.windows.net/tools/nodejs/${NODE16_VERSION}/alpine/x64/node-v${NODE16_VERSION}-alpine-x64.tar.gz" node16
fi
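The musl (Alpine) runtimes pull prebuilt Node binaries from a separate blob store, while glibc builds come from the standard Node.js distribution. A sketch of how such a download URL is assembled for a given version; the version pin is purely illustrative:

NODE_URL="https://nodejs.org/dist"   # standard Node.js distribution, as used above
NODE16_VERSION="16.13.0"             # illustrative pin
echo "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-linux-x64.tar.gz"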
@@ -3,94 +3,135 @@
|
||||
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
|
||||
|
||||
var childProcess = require("child_process");
|
||||
var path = require("path")
|
||||
var path = require("path");
|
||||
|
||||
var supported = ['linux', 'darwin']
|
||||
var supported = ["linux", "darwin"];
|
||||
|
||||
if (supported.indexOf(process.platform) == -1) {
|
||||
console.log('Unsupported platform: ' + process.platform);
|
||||
console.log('Supported platforms are: ' + supported.toString());
|
||||
process.exit(1);
|
||||
console.log("Unsupported platform: " + process.platform);
|
||||
console.log("Supported platforms are: " + supported.toString());
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
var stopping = false;
|
||||
var listener = null;
|
||||
|
||||
var runService = function () {
|
||||
var listenerExePath = path.join(__dirname, '../bin/Runner.Listener');
|
||||
var interactive = process.argv[2] === "interactive";
|
||||
var exitServiceAfterNFailures = Number(
|
||||
process.env.GITHUB_ACTIONS_SERVICE_EXIT_AFTER_N_FAILURES
|
||||
);
|
||||
|
||||
if (!stopping) {
|
||||
try {
|
||||
if (interactive) {
|
||||
console.log('Starting Runner listener interactively');
|
||||
listener = childProcess.spawn(listenerExePath, ['run'], { env: process.env });
|
||||
} else {
|
||||
console.log('Starting Runner listener with startup type: service');
|
||||
listener = childProcess.spawn(listenerExePath, ['run', '--startuptype', 'service'], { env: process.env });
|
||||
}
|
||||
|
||||
console.log(`Started listener process, pid: ${listener.pid}`);
|
||||
|
||||
listener.stdout.on('data', (data) => {
|
||||
process.stdout.write(data.toString('utf8'));
|
||||
});
|
||||
|
||||
listener.stderr.on('data', (data) => {
|
||||
process.stdout.write(data.toString('utf8'));
|
||||
});
|
||||
|
||||
listener.on("error", (err) => {
|
||||
console.log(`Runner listener fail to start with error ${err.message}`);
|
||||
});
|
||||
|
||||
listener.on('close', (code) => {
|
||||
console.log(`Runner listener exited with error code ${code}`);
|
||||
|
||||
if (code === 0) {
|
||||
console.log('Runner listener exit with 0 return code, stop the service, no retry needed.');
|
||||
stopping = true;
|
||||
} else if (code === 1) {
|
||||
console.log('Runner listener exit with terminated error, stop the service, no retry needed.');
|
||||
stopping = true;
|
||||
} else if (code === 2) {
|
||||
console.log('Runner listener exit with retryable error, re-launch runner in 5 seconds.');
|
||||
} else if (code === 3) {
|
||||
console.log('Runner listener exit because of updating, re-launch runner in 5 seconds.');
|
||||
} else {
|
||||
console.log('Runner listener exit with undefined return code, re-launch runner in 5 seconds.');
|
||||
}
|
||||
|
||||
if (!stopping) {
|
||||
setTimeout(runService, 5000);
|
||||
}
|
||||
});
|
||||
|
||||
} catch (ex) {
|
||||
console.log(ex);
|
||||
}
|
||||
}
|
||||
if (exitServiceAfterNFailures <= 0) {
|
||||
exitServiceAfterNFailures = NaN;
|
||||
}
|
||||
|
||||
var consecutiveFailureCount = 0;
|
||||
|
||||
var gracefulShutdown = function () {
|
||||
console.log("Shutting down runner listener");
|
||||
stopping = true;
|
||||
if (listener) {
|
||||
console.log("Sending SIGINT to runner listener to stop");
|
||||
listener.kill("SIGINT");
|
||||
|
||||
console.log("Sending SIGKILL to runner listener");
|
||||
setTimeout(() => listener.kill("SIGKILL"), 30000).unref();
|
||||
}
|
||||
};
|
||||
|
||||
var runService = function () {
|
||||
var listenerExePath = path.join(__dirname, "../bin/Runner.Listener");
|
||||
var interactive = process.argv[2] === "interactive";
|
||||
|
||||
if (!stopping) {
|
||||
try {
|
||||
if (interactive) {
|
||||
console.log("Starting Runner listener interactively");
|
||||
listener = childProcess.spawn(listenerExePath, ["run"], {
|
||||
env: process.env,
|
||||
});
|
||||
} else {
|
||||
console.log("Starting Runner listener with startup type: service");
|
||||
listener = childProcess.spawn(
|
||||
listenerExePath,
|
||||
["run", "--startuptype", "service"],
|
||||
{ env: process.env }
|
||||
);
|
||||
}
|
||||
|
||||
console.log(`Started listener process, pid: ${listener.pid}`);
|
||||
|
||||
listener.stdout.on("data", (data) => {
|
||||
if (data.toString("utf8").includes("Listening for Jobs")) {
|
||||
consecutiveFailureCount = 0;
|
||||
}
|
||||
process.stdout.write(data.toString("utf8"));
|
||||
});
|
||||
|
||||
listener.stderr.on("data", (data) => {
|
||||
process.stdout.write(data.toString("utf8"));
|
||||
});
|
||||
|
||||
listener.on("error", (err) => {
|
||||
console.log(`Runner listener fail to start with error ${err.message}`);
|
||||
});
|
||||
|
||||
listener.on("close", (code) => {
|
||||
console.log(`Runner listener exited with error code ${code}`);
|
||||
|
||||
if (code === 0) {
|
||||
console.log(
|
||||
"Runner listener exit with 0 return code, stop the service, no retry needed."
|
||||
);
|
||||
stopping = true;
|
||||
} else if (code === 1) {
|
||||
console.log(
|
||||
"Runner listener exit with terminated error, stop the service, no retry needed."
|
||||
);
|
||||
stopping = true;
|
||||
} else if (code === 2) {
|
||||
console.log(
|
||||
"Runner listener exit with retryable error, re-launch runner in 5 seconds."
|
||||
);
|
||||
consecutiveFailureCount = 0;
|
||||
} else if (code === 3 || code === 4) {
|
||||
console.log(
|
||||
"Runner listener exit because of updating, re-launch runner in 5 seconds."
|
||||
);
|
||||
consecutiveFailureCount = 0;
|
||||
} else {
|
||||
var messagePrefix = "Runner listener exit with undefined return code";
|
||||
consecutiveFailureCount++;
|
||||
if (
|
||||
!isNaN(exitServiceAfterNFailures) &&
|
||||
consecutiveFailureCount >= exitServiceAfterNFailures
|
||||
) {
|
||||
console.error(
|
||||
`${messagePrefix}, exiting service after ${consecutiveFailureCount} consecutive failures`
|
||||
);
|
||||
gracefulShutdown();
|
||||
return;
|
||||
} else {
|
||||
console.log(`${messagePrefix}, re-launch runner in 5 seconds.`);
|
||||
}
|
||||
}
|
||||
|
||||
if (!stopping) {
|
||||
setTimeout(runService, 5000);
|
||||
}
|
||||
});
|
||||
} catch (ex) {
|
||||
console.log(ex);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
runService();
|
||||
console.log('Started running service');
|
||||
console.log("Started running service");
|
||||
|
||||
var gracefulShutdown = function (code) {
|
||||
console.log('Shutting down runner listener');
|
||||
stopping = true;
|
||||
if (listener) {
|
||||
console.log('Sending SIGINT to runner listener to stop');
|
||||
listener.kill('SIGINT');
|
||||
|
||||
console.log('Sending SIGKILL to runner listener');
|
||||
setTimeout(() => listener.kill('SIGKILL'), 30000);
|
||||
}
|
||||
}
|
||||
|
||||
process.on('SIGINT', () => {
|
||||
gracefulShutdown(0);
|
||||
process.on("SIGINT", () => {
|
||||
gracefulShutdown();
|
||||
});
|
||||
|
||||
process.on('SIGTERM', () => {
|
||||
gracefulShutdown(0);
|
||||
process.on("SIGTERM", () => {
|
||||
gracefulShutdown();
|
||||
});
|
||||
|
||||
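RunnerService.js above reads GITHUB_ACTIONS_SERVICE_EXIT_AFTER_N_FAILURES and shuts the service down once that many consecutive unknown-exit-code failures occur without an intervening "Listening for Jobs" message. A hedged sketch of wiring that up when launching the wrapper by hand; the paths are illustrative and assume a standard runner layout:

# Give up after 5 consecutive unexplained listener failures instead of restarting forever.
export GITHUB_ACTIONS_SERVICE_EXIT_AFTER_N_FAILURES=5
./externals/node16/bin/node ./bin/RunnerService.js &   # same invocation runsvc.sh uses
wait $!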
@@ -17,7 +17,13 @@ RUNNER_ROOT=`pwd`

LAUNCH_PATH="${HOME}/Library/LaunchAgents"
PLIST_PATH="${LAUNCH_PATH}/${SVC_NAME}.plist"
TEMPLATE_PATH=./bin/actions.runner.plist.template
TEMPLATE_PATH=$GITHUB_ACTIONS_RUNNER_SERVICE_TEMPLATE
IS_CUSTOM_TEMPLATE=0
if [[ -z $TEMPLATE_PATH ]]; then
TEMPLATE_PATH=./bin/actions.runner.plist.template
else
IS_CUSTOM_TEMPLATE=1
fi
TEMP_PATH=./bin/actions.runner.plist.temp
CONFIG_PATH=.service

@@ -29,7 +35,11 @@ function failed()
}

if [ ! -f "${TEMPLATE_PATH}" ]; then
failed "Must run from runner root or install is corrupt"
if [[ $IS_CUSTOM_TEMPLATE = 0 ]]; then
failed "Must run from runner root or install is corrupt"
else
failed "Service file at '$GITHUB_ACTIONS_RUNNER_SERVICE_TEMPLATE' using GITHUB_ACTIONS_RUNNER_SERVICE_TEMPLATE env variable is not found"
fi
fi

function install()
@@ -53,7 +63,7 @@ function install()
mkdir -p "${log_path}" || failed "failed to create ${log_path}"

echo Creating ${PLIST_PATH}
sed "s/{{User}}/${SUDO_USER:-$USER}/g; s/{{SvcName}}/$SVC_NAME/g; s@{{RunnerRoot}}@${RUNNER_ROOT}@g; s@{{UserHome}}@$HOME@g;" "${TEMPLATE_PATH}" > "${TEMP_PATH}" || failed "failed to create replacement temp file"
sed "s/{{User}}/${USER:-$SUDO_USER}/g; s/{{SvcName}}/$SVC_NAME/g; s@{{RunnerRoot}}@${RUNNER_ROOT}@g; s@{{UserHome}}@$HOME@g;" "${TEMPLATE_PATH}" > "${TEMP_PATH}" || failed "failed to create replacement temp file"
mv "${TEMP_PATH}" "${PLIST_PATH}" || failed "failed to copy plist"

# Since we started with sudo, runsvc.sh will be owned by root. Change this to current login user.

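Both the macOS and systemd install scripts now honor GITHUB_ACTIONS_RUNNER_SERVICE_TEMPLATE, so a customized template can be substituted without editing the files under ./bin. A usage sketch; the template path is illustrative:

# Point the installer at a custom launchd plist (or systemd unit) template, then install as usual.
export GITHUB_ACTIONS_RUNNER_SERVICE_TEMPLATE="$HOME/runner-templates/actions.runner.plist.template"
./svc.sh install    # fails with a clear message if the template file does not exist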
@@ -43,6 +43,32 @@ module.exports =
|
||||
/************************************************************************/
|
||||
/******/ ({
|
||||
|
||||
/***/ 82:
|
||||
/***/ (function(__unusedmodule, exports) {
|
||||
|
||||
"use strict";
|
||||
|
||||
// We use any as a valid input type
|
||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
/**
|
||||
* Sanitizes an input into a string so it can be passed into issueCommand safely
|
||||
* @param input input to sanitize into a string
|
||||
*/
|
||||
function toCommandValue(input) {
|
||||
if (input === null || input === undefined) {
|
||||
return '';
|
||||
}
|
||||
else if (typeof input === 'string' || input instanceof String) {
|
||||
return input;
|
||||
}
|
||||
return JSON.stringify(input);
|
||||
}
|
||||
exports.toCommandValue = toCommandValue;
|
||||
//# sourceMappingURL=utils.js.map
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 87:
|
||||
/***/ (function(module) {
|
||||
|
||||
@@ -978,6 +1004,42 @@ function regExpEscape (s) {
|
||||
}
|
||||
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 102:
|
||||
/***/ (function(__unusedmodule, exports, __webpack_require__) {
|
||||
|
||||
"use strict";
|
||||
|
||||
// For internal use, subject to change.
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
||||
result["default"] = mod;
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
// We use any as a valid input type
|
||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||
const fs = __importStar(__webpack_require__(747));
|
||||
const os = __importStar(__webpack_require__(87));
|
||||
const utils_1 = __webpack_require__(82);
|
||||
function issueCommand(command, message) {
|
||||
const filePath = process.env[`GITHUB_${command}`];
|
||||
if (!filePath) {
|
||||
throw new Error(`Unable to find environment variable for file command ${command}`);
|
||||
}
|
||||
if (!fs.existsSync(filePath)) {
|
||||
throw new Error(`Missing file at path: ${filePath}`);
|
||||
}
|
||||
fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, {
|
||||
encoding: 'utf8'
|
||||
});
|
||||
}
|
||||
exports.issueCommand = issueCommand;
|
||||
//# sourceMappingURL=file-command.js.map
|
||||
|
||||
/***/ }),
|
||||
|
||||
/***/ 281:
|
||||
@@ -1495,12 +1557,12 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const glob = __importStar(__webpack_require__(281));
|
||||
const crypto = __importStar(__webpack_require__(417));
|
||||
const fs = __importStar(__webpack_require__(747));
|
||||
const glob = __importStar(__webpack_require__(281));
|
||||
const path = __importStar(__webpack_require__(622));
|
||||
const stream = __importStar(__webpack_require__(413));
|
||||
const util = __importStar(__webpack_require__(669));
|
||||
const path = __importStar(__webpack_require__(622));
|
||||
function run() {
|
||||
var e_1, _a;
|
||||
return __awaiter(this, void 0, void 0, function* () {
|
||||
@@ -1551,7 +1613,7 @@ function run() {
|
||||
}
|
||||
result.end();
|
||||
if (hasMatch) {
|
||||
console.log(`Find ${count} files to hash.`);
|
||||
console.log(`Found ${count} files to hash.`);
|
||||
console.error(`__OUTPUT__${result.digest('hex')}__OUTPUT__`);
|
||||
}
|
||||
else {
|
||||
@@ -1559,7 +1621,15 @@ function run() {
|
||||
}
|
||||
});
|
||||
}
|
||||
run();
|
||||
run()
|
||||
.then(out => {
|
||||
console.log(out);
|
||||
process.exit(0);
|
||||
})
|
||||
.catch(err => {
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
|
||||
|
||||
/***/ }),
|
||||
@@ -1687,17 +1757,25 @@ module.exports = require("crypto");
|
||||
|
||||
"use strict";
|
||||
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
||||
result["default"] = mod;
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const os = __webpack_require__(87);
|
||||
const os = __importStar(__webpack_require__(87));
|
||||
const utils_1 = __webpack_require__(82);
|
||||
/**
|
||||
* Commands
|
||||
*
|
||||
* Command Format:
|
||||
* ##[name key=value;key=value]message
|
||||
* ::name key=value,key=value::message
|
||||
*
|
||||
* Examples:
|
||||
* ##[warning]This is the user warning message
|
||||
* ##[set-secret name=mypassword]definitelyNotAPassword!
|
||||
* ::warning::This is the message
|
||||
* ::set-env name=MY_VAR::some value
|
||||
*/
|
||||
function issueCommand(command, properties, message) {
|
||||
const cmd = new Command(command, properties, message);
|
||||
@@ -1722,34 +1800,39 @@ class Command {
|
||||
let cmdStr = CMD_STRING + this.command;
|
||||
if (this.properties && Object.keys(this.properties).length > 0) {
|
||||
cmdStr += ' ';
|
||||
let first = true;
|
||||
for (const key in this.properties) {
|
||||
if (this.properties.hasOwnProperty(key)) {
|
||||
const val = this.properties[key];
|
||||
if (val) {
|
||||
// safely append the val - avoid blowing up when attempting to
|
||||
// call .replace() if message is not a string for some reason
|
||||
cmdStr += `${key}=${escape(`${val || ''}`)},`;
|
||||
if (first) {
|
||||
first = false;
|
||||
}
|
||||
else {
|
||||
cmdStr += ',';
|
||||
}
|
||||
cmdStr += `${key}=${escapeProperty(val)}`;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
cmdStr += CMD_STRING;
|
||||
// safely append the message - avoid blowing up when attempting to
|
||||
// call .replace() if message is not a string for some reason
|
||||
const message = `${this.message || ''}`;
|
||||
cmdStr += escapeData(message);
|
||||
cmdStr += `${CMD_STRING}${escapeData(this.message)}`;
|
||||
return cmdStr;
|
||||
}
|
||||
}
|
||||
function escapeData(s) {
|
||||
return s.replace(/\r/g, '%0D').replace(/\n/g, '%0A');
|
||||
return utils_1.toCommandValue(s)
|
||||
.replace(/%/g, '%25')
|
||||
.replace(/\r/g, '%0D')
|
||||
.replace(/\n/g, '%0A');
|
||||
}
|
||||
function escape(s) {
|
||||
return s
|
||||
function escapeProperty(s) {
|
||||
return utils_1.toCommandValue(s)
|
||||
.replace(/%/g, '%25')
|
||||
.replace(/\r/g, '%0D')
|
||||
.replace(/\n/g, '%0A')
|
||||
.replace(/]/g, '%5D')
|
||||
.replace(/;/g, '%3B');
|
||||
.replace(/:/g, '%3A')
|
||||
.replace(/,/g, '%2C');
|
||||
}
|
||||
//# sourceMappingURL=command.js.map
|
||||
|
||||
@@ -1769,10 +1852,19 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
|
||||
result["default"] = mod;
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const command_1 = __webpack_require__(431);
|
||||
const os = __webpack_require__(87);
|
||||
const path = __webpack_require__(622);
|
||||
const file_command_1 = __webpack_require__(102);
|
||||
const utils_1 = __webpack_require__(82);
|
||||
const os = __importStar(__webpack_require__(87));
|
||||
const path = __importStar(__webpack_require__(622));
|
||||
/**
|
||||
* The code to exit an action
|
||||
*/
|
||||
@@ -1793,11 +1885,21 @@ var ExitCode;
|
||||
/**
|
||||
* Sets env variable for this action and future actions in the job
|
||||
* @param name the name of the variable to set
|
||||
* @param val the value of the variable
|
||||
* @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify
|
||||
*/
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
function exportVariable(name, val) {
|
||||
process.env[name] = val;
|
||||
command_1.issueCommand('set-env', { name }, val);
|
||||
const convertedVal = utils_1.toCommandValue(val);
|
||||
process.env[name] = convertedVal;
|
||||
const filePath = process.env['GITHUB_ENV'] || '';
|
||||
if (filePath) {
|
||||
const delimiter = '_GitHubActionsFileCommandDelimeter_';
|
||||
const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`;
|
||||
file_command_1.issueCommand('ENV', commandValue);
|
||||
}
|
||||
else {
|
||||
command_1.issueCommand('set-env', { name }, convertedVal);
|
||||
}
|
||||
}
|
||||
exports.exportVariable = exportVariable;
|
||||
/**
|
||||
@@ -1813,7 +1915,13 @@ exports.setSecret = setSecret;
|
||||
* @param inputPath
|
||||
*/
|
||||
function addPath(inputPath) {
|
||||
command_1.issueCommand('add-path', {}, inputPath);
|
||||
const filePath = process.env['GITHUB_PATH'] || '';
|
||||
if (filePath) {
|
||||
file_command_1.issueCommand('PATH', inputPath);
|
||||
}
|
||||
else {
|
||||
command_1.issueCommand('add-path', {}, inputPath);
|
||||
}
|
||||
process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;
|
||||
}
|
||||
exports.addPath = addPath;
|
||||
@@ -1836,12 +1944,22 @@ exports.getInput = getInput;
|
||||
* Sets the value of an output.
|
||||
*
|
||||
* @param name name of the output to set
|
||||
* @param value value to store
|
||||
* @param value value to store. Non-string values will be converted to a string via JSON.stringify
|
||||
*/
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
function setOutput(name, value) {
|
||||
command_1.issueCommand('set-output', { name }, value);
|
||||
}
|
||||
exports.setOutput = setOutput;
|
||||
/**
|
||||
* Enables or disables the echoing of commands into stdout for the rest of the step.
|
||||
* Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.
|
||||
*
|
||||
*/
|
||||
function setCommandEcho(enabled) {
|
||||
command_1.issue('echo', enabled ? 'on' : 'off');
|
||||
}
|
||||
exports.setCommandEcho = setCommandEcho;
|
||||
//-----------------------------------------------------------------------
|
||||
// Results
|
||||
//-----------------------------------------------------------------------
|
||||
@@ -1858,6 +1976,13 @@ exports.setFailed = setFailed;
|
||||
//-----------------------------------------------------------------------
|
||||
// Logging Commands
|
||||
//-----------------------------------------------------------------------
|
||||
/**
|
||||
* Gets whether Actions Step Debug is on or not
|
||||
*/
|
||||
function isDebug() {
|
||||
return process.env['RUNNER_DEBUG'] === '1';
|
||||
}
|
||||
exports.isDebug = isDebug;
|
||||
/**
|
||||
* Writes debug message to user log
|
||||
* @param message debug message
|
||||
@@ -1868,18 +1993,18 @@ function debug(message) {
|
||||
exports.debug = debug;
|
||||
/**
|
||||
* Adds an error issue
|
||||
* @param message error issue message
|
||||
* @param message error issue message. Errors will be converted to string via toString()
|
||||
*/
|
||||
function error(message) {
|
||||
command_1.issue('error', message);
|
||||
command_1.issue('error', message instanceof Error ? message.toString() : message);
|
||||
}
|
||||
exports.error = error;
|
||||
/**
|
||||
* Adds an warning issue
|
||||
* @param message warning issue message
|
||||
* @param message warning issue message. Errors will be converted to string via toString()
|
||||
*/
|
||||
function warning(message) {
|
||||
command_1.issue('warning', message);
|
||||
command_1.issue('warning', message instanceof Error ? message.toString() : message);
|
||||
}
|
||||
exports.warning = warning;
|
||||
/**
|
||||
@@ -1937,8 +2062,9 @@ exports.group = group;
|
||||
* Saves state for current action, the state can only be retrieved by this action's post job execution.
|
||||
*
|
||||
* @param name name of the state to store
|
||||
* @param value value to store
|
||||
* @param value value to store. Non-string values will be converted to a string via JSON.stringify
|
||||
*/
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
function saveState(name, value) {
|
||||
command_1.issueCommand('save-state', { name }, value);
|
||||
}
|
||||
|
||||
@@ -10,10 +10,11 @@ if [ -f ".path" ]; then
echo ".path=${PATH}"
fi

# insert anything to setup env when running as a service
nodever=${GITHUB_ACTIONS_RUNNER_FORCED_NODE_VERSION:-node16}

# insert anything to setup env when running as a service
# run the host process which keep the listener alive
./externals/node12/bin/node ./bin/RunnerService.js &
./externals/$nodever/bin/node ./bin/RunnerService.js &
PID=$!
wait $PID
trap - TERM INT

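runsvc.sh now defaults to node16 but lets GITHUB_ACTIONS_RUNNER_FORCED_NODE_VERSION force the older runtime, which can help on hosts where node16 does not run. A usage sketch; the script path is illustrative and must match a folder under ./externals:

# Keep the service host on node12 instead of the node16 default.
export GITHUB_ACTIONS_RUNNER_FORCED_NODE_VERSION=node12
./runsvc.sh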
@@ -10,7 +10,13 @@ arg_2=${2}
RUNNER_ROOT=`pwd`

UNIT_PATH=/etc/systemd/system/${SVC_NAME}
TEMPLATE_PATH=./bin/actions.runner.service.template
TEMPLATE_PATH=$GITHUB_ACTIONS_RUNNER_SERVICE_TEMPLATE
IS_CUSTOM_TEMPLATE=0
if [[ -z $TEMPLATE_PATH ]]; then
TEMPLATE_PATH=./bin/actions.runner.service.template
else
IS_CUSTOM_TEMPLATE=1
fi
TEMP_PATH=./bin/actions.runner.service.temp
CONFIG_PATH=.service

@@ -31,7 +37,11 @@ function failed()
}

if [ ! -f "${TEMPLATE_PATH}" ]; then
failed "Must run from runner root or install is corrupt"
if [[ $IS_CUSTOM_TEMPLATE = 0 ]]; then
failed "Must run from runner root or install is corrupt"
else
failed "Service file at '$GITHUB_ACTIONS_RUNNER_SERVICE_TEMPLATE' using GITHUB_ACTIONS_RUNNER_SERVICE_TEMPLATE env variable is not found"
fi
fi

#check if we run as root

@@ -30,13 +30,13 @@ date "+[%F %T-%4N] Waiting for $runnerprocessname ($runnerpid) to complete" >> "
while [ -e /proc/$runnerpid ]
do
date "+[%F %T-%4N] Process $runnerpid still running" >> "$logfile" 2>&1
sleep 2
"$rootfolder"/safe_sleep.sh 2
done
date "+[%F %T-%4N] Process $runnerpid finished running" >> "$logfile" 2>&1

# start re-organize folders
date "+[%F %T-%4N] Sleep 1 more second to make sure process exited" >> "$logfile" 2>&1
sleep 1
"$rootfolder"/safe_sleep.sh 1

# the folder structure under runner root will be
# ./bin -> bin.2.100.0 (junction folder)
@@ -125,7 +125,7 @@ attemptedtargetedfix=0
currentplatform=$(uname | awk '{print tolower($0)}')
if [[ "$currentplatform" == 'darwin' && restartinteractiverunner -eq 0 ]]; then
# We needed a fix for https://github.com/actions/runner/issues/743
# We will recreate the ./externals/node12/bin/node of the past runner version that launched the runnerlistener service
# We will recreate the ./externals/nodeXY/bin/node of the past runner version that launched the runnerlistener service
# Otherwise mac gatekeeper kills the processes we spawn on creation as we are running a process with no backing file

# We need the pid for the nodejs loop, get that here, its the parent of the runner C# pid
@@ -135,7 +135,13 @@ if [[ "$currentplatform" == 'darwin' && restartinteractiverunner -eq 0 ]]; then
then
# inspect the open file handles to find the node process
# we can't actually inspect the process using ps because it uses relative paths and doesn't follow symlinks
path=$(lsof -a -g "$procgroup" -F n | grep node12/bin/node | grep externals | tail -1 | cut -c2-)
nodever="node16"
path=$(lsof -a -g "$procgroup" -F n | grep $nodever/bin/node | grep externals | tail -1 | cut -c2-)
if [[ $? -ne 0 || -z "$path" ]] # Fallback if RunnerService.js was started with node12
then
nodever="node12"
path=$(lsof -a -g "$procgroup" -F n | grep $nodever/bin/node | grep externals | tail -1 | cut -c2-)
fi
if [[ $? -eq 0 && -n "$path" ]]
then
# trim the last 5 characters of the path '/node'
@@ -148,7 +154,7 @@ if [[ "$currentplatform" == 'darwin' && restartinteractiverunner -eq 0 ]]; then
then
date "+[%F %T-%4N] Creating fallback node at path $path" >> "$logfile" 2>&1
mkdir -p "$trimmedpath"
cp "$rootfolder/externals/node12/bin/node" "$path"
cp "$rootfolder/externals/$nodever/bin/node" "$path"
else
date "+[%F %T-%4N] Path for fallback node exists, skipping creating $path" >> "$logfile" 2>&1
fi

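The macOS fallback above has to find which bundled node binary the existing service process is holding open, because ps only shows relative paths. A standalone sketch of that lsof probe, assuming the wrapper's process group id is already known; the id value is illustrative:

procgroup=12345   # illustrative process group id of the RunnerService.js wrapper
nodever="node16"
path=$(lsof -a -g "$procgroup" -F n | grep "$nodever/bin/node" | grep externals | tail -1 | cut -c2-)
if [[ -z "$path" ]]; then
  nodever="node12"   # fall back if the service was started with the older runtime
  path=$(lsof -a -g "$procgroup" -F n | grep "$nodever/bin/node" | grep externals | tail -1 | cut -c2-)
fi
echo "listener node binary: ${path:-not found}"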
@@ -8,7 +8,7 @@ if [ $user_id -eq 0 -a -z "$RUNNER_ALLOW_RUNASROOT" ]; then
exit 1
fi

# Check dotnet core 3.0 dependencies for Linux
# Check dotnet Core 6.0 dependencies for Linux
if [[ (`uname` == "Linux") ]]
then
command -v ldd > /dev/null
@@ -18,25 +18,25 @@ then
exit 1
fi

message="Execute sudo ./bin/installdependencies.sh to install any missing Dotnet Core 3.0 dependencies."
message="Execute sudo ./bin/installdependencies.sh to install any missing Dotnet Core 6.0 dependencies."

ldd ./bin/libcoreclr.so | grep 'not found'
if [ $? -eq 0 ]; then
echo "Dependencies is missing for Dotnet Core 3.0"
echo "Dependencies is missing for Dotnet Core 6.0"
echo $message
exit 1
fi

ldd ./bin/System.Security.Cryptography.Native.OpenSsl.so | grep 'not found'
ldd ./bin/libSystem.Security.Cryptography.Native.OpenSsl.so | grep 'not found'
if [ $? -eq 0 ]; then
echo "Dependencies is missing for Dotnet Core 3.0"
echo "Dependencies is missing for Dotnet Core 6.0"
echo $message
exit 1
fi

ldd ./bin/System.IO.Compression.Native.so | grep 'not found'
ldd ./bin/libSystem.IO.Compression.Native.so | grep 'not found'
if [ $? -eq 0 ]; then
echo "Dependencies is missing for Dotnet Core 3.0"
echo "Dependencies is missing for Dotnet Core 6.0"
echo $message
exit 1
fi
@@ -54,7 +54,7 @@ then
libpath=${LD_LIBRARY_PATH:-}
$LDCONFIG_COMMAND -NXv ${libpath//:/ } 2>&1 | grep libicu >/dev/null 2>&1
if [ $? -ne 0 ]; then
echo "Libicu's dependencies is missing for Dotnet Core 3.0"
echo "Libicu's dependencies is missing for Dotnet Core 6.0"
echo $message
exit 1
fi

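The dependency probe simply asks ldd whether any shared-library references in the .NET 6 native binaries are unresolved. The same check can be run by hand from the runner root; a minimal sketch over the libraries named above:

# Any "not found" line means a native prerequisite for .NET 6 is missing on this host.
for lib in ./bin/libcoreclr.so ./bin/libSystem.Security.Cryptography.Native.OpenSsl.so ./bin/libSystem.IO.Compression.Native.so; do
  if ldd "$lib" | grep -q 'not found'; then
    echo "missing dependencies reported for $lib"
  fi
done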
39 src/Misc/layoutroot/run-helper.cmd.template Normal file
@@ -0,0 +1,39 @@
|
||||
@echo off
|
||||
|
||||
"%~dp0\bin\Runner.Listener.exe" run %*
|
||||
|
||||
rem using `if %ERRORLEVEL% EQU N` insterad of `if ERRORLEVEL N`
|
||||
rem `if ERRORLEVEL N` means: error level is N or MORE
|
||||
|
||||
if %ERRORLEVEL% EQU 0 (
|
||||
echo "Runner listener exit with 0 return code, stop the service, no retry needed."
|
||||
exit /b 0
|
||||
)
|
||||
|
||||
if %ERRORLEVEL% EQU 1 (
|
||||
echo "Runner listener exit with terminated error, stop the service, no retry needed."
|
||||
exit /b 0
|
||||
)
|
||||
|
||||
if %ERRORLEVEL% EQU 2 (
|
||||
echo "Runner listener exit with retryable error, re-launch runner in 5 seconds."
|
||||
ping 127.0.0.1 -n 6 -w 1000 >NUL
|
||||
exit /b 1
|
||||
)
|
||||
|
||||
if %ERRORLEVEL% EQU 3 (
|
||||
rem Sleep 5 seconds to wait for the runner update process finish
|
||||
echo "Runner listener exit because of updating, re-launch runner in 5 seconds"
|
||||
ping 127.0.0.1 -n 6 -w 1000 >NUL
|
||||
exit /b 1
|
||||
)
|
||||
|
||||
if %ERRORLEVEL% EQU 4 (
|
||||
rem Sleep 5 seconds to wait for the ephemeral runner update process finish
|
||||
echo "Runner listener exit because of updating, re-launch ephemeral runner in 5 seconds"
|
||||
ping 127.0.0.1 -n 6 -w 1000 >NUL
|
||||
exit /b 1
|
||||
)
|
||||
|
||||
echo "Exiting after unknown error code: %ERRORLEVEL%"
|
||||
exit /b 0
|
||||
46 src/Misc/layoutroot/run-helper.sh.template Executable file
@@ -0,0 +1,46 @@
#!/bin/bash

# Validate not sudo
user_id=`id -u`
if [ $user_id -eq 0 -a -z "$RUNNER_ALLOW_RUNASROOT" ]; then
echo "Must not run interactively with sudo"
exit 1
fi

# Run
shopt -s nocasematch

SOURCE="${BASH_SOURCE[0]}"
while [ -h "$SOURCE" ]; do # resolve $SOURCE until the file is no longer a symlink
DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
SOURCE="$(readlink "$SOURCE")"
[[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
done
DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
"$DIR"/bin/Runner.Listener run $*

returnCode=$?
if [[ $returnCode == 0 ]]; then
echo "Runner listener exit with 0 return code, stop the service, no retry needed."
exit 0
elif [[ $returnCode == 1 ]]; then
echo "Runner listener exit with terminated error, stop the service, no retry needed."
exit 0
elif [[ $returnCode == 2 ]]; then
echo "Runner listener exit with retryable error, re-launch runner in 5 seconds."
"$DIR"/safe_sleep.sh 5
exit 2
elif [[ $returnCode == 3 ]]; then
# Sleep 5 seconds to wait for the runner update process finish
echo "Runner listener exit because of updating, re-launch runner in 5 seconds"
"$DIR"/safe_sleep.sh 5
exit 2
elif [[ $returnCode == 4 ]]; then
# Sleep 5 seconds to wait for the ephemeral runner update process finish
echo "Runner listener exit because of updating, re-launch ephemeral runner in 5 seconds"
"$DIR"/safe_sleep.sh 5
exit 2
else
echo "Exiting with unknown error code: ${returnCode}"
exit 0
fi

@@ -13,21 +13,19 @@ if defined VERBOSE_ARG (
|
||||
rem Unblock files in the root of the layout folder. E.g. .cmd files.
|
||||
powershell.exe -NoLogo -Sta -NoProfile -NonInteractive -ExecutionPolicy Unrestricted -Command "$VerbosePreference = %VERBOSE_ARG% ; Get-ChildItem -LiteralPath '%~dp0' | ForEach-Object { Write-Verbose ('Unblock: {0}' -f $_.FullName) ; $_ } | Unblock-File | Out-Null"
|
||||
|
||||
if /i "%~1" equ "localRun" (
|
||||
rem ********************************************************************************
|
||||
rem Local run.
|
||||
rem ********************************************************************************
|
||||
"%~dp0bin\Runner.Listener.exe" %*
|
||||
) else (
|
||||
rem ********************************************************************************
|
||||
rem Run.
|
||||
rem ********************************************************************************
|
||||
"%~dp0bin\Runner.Listener.exe" run %*
|
||||
|
||||
rem Return code 4 means the run once runner received an update message.
|
||||
rem Sleep 5 seconds to wait for the update process finish and run the runner again.
|
||||
if ERRORLEVEL 4 (
|
||||
timeout /t 5 /nobreak > NUL
|
||||
"%~dp0bin\Runner.Listener.exe" run %*
|
||||
)
|
||||
rem ********************************************************************************
|
||||
rem Run.
|
||||
rem ********************************************************************************
|
||||
|
||||
:launch_helper
|
||||
copy "%~dp0run-helper.cmd.template" "%~dp0run-helper.cmd" /Y
|
||||
call "%~dp0run-helper.cmd" %*
|
||||
|
||||
if %ERRORLEVEL% EQU 1 (
|
||||
echo "Restarting runner..."
|
||||
goto :launch_helper
|
||||
) else (
|
||||
echo "Exiting runner..."
|
||||
exit /b 0
|
||||
)
|
||||
|
||||
@@ -1,64 +1,24 @@
#!/bin/bash

# Validate not sudo
user_id=`id -u`
if [ $user_id -eq 0 -a -z "$RUNNER_ALLOW_RUNASROOT" ]; then
echo "Must not run interactively with sudo"
exit 1
fi

# Change directory to the script root directory
# https://stackoverflow.com/questions/59895/getting-the-source-directory-of-a-bash-script-from-within
SOURCE="${BASH_SOURCE[0]}"
while [ -h "$SOURCE" ]; do # resolve $SOURCE until the file is no longer a symlink
DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
SOURCE="$(readlink "$SOURCE")"
[[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
SOURCE="$(readlink "$SOURCE")"
[[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
done
DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"

# Do not "cd $DIR". For localRun, the current directory is expected to be the repo location on disk.

# Run
shopt -s nocasematch
if [[ "$1" == "localRun" ]]; then
"$DIR"/bin/Runner.Listener $*
else
"$DIR"/bin/Runner.Listener run $*

# Return code 3 means the run once runner received an update message.
# Sleep 5 seconds to wait for the update process finish
cp -f "$DIR"/run-helper.sh.template "$DIR"/run-helper.sh
# run the helper process which keep the listener alive
while :;
do
"$DIR"/run-helper.sh $*
returnCode=$?
if [[ $returnCode == 3 ]]; then
if [ ! -x "$(command -v sleep)" ]; then
if [ ! -x "$(command -v ping)" ]; then
COUNT="0"
while [[ $COUNT != 5000 ]]; do
echo "SLEEP" > /dev/null
COUNT=$[$COUNT+1]
done
else
ping -c 5 127.0.0.1 > /dev/null
fi
else
sleep 5
fi
elif [[ $returnCode == 4 ]]; then
if [ ! -x "$(command -v sleep)" ]; then
if [ ! -x "$(command -v ping)" ]; then
COUNT="0"
while [[ $COUNT != 5000 ]]; do
echo "SLEEP" > /dev/null
COUNT=$[$COUNT+1]
done
else
ping -c 5 127.0.0.1 > /dev/null
fi
else
sleep 5
fi
"$DIR"/bin/Runner.Listener run $*
if [[ $returnCode -eq 2 ]]; then
echo "Restarting runner..."
else
exit $returnCode
echo "Exiting runner..."
exit 0
fi
fi
done

6
src/Misc/layoutroot/safe_sleep.sh
Normal file
@@ -0,0 +1,6 @@
#!/bin/bash

SECONDS=0
while [[ $SECONDS != $1 ]]; do
:
done
57
src/Misc/runnercoreassets
Normal file
@@ -0,0 +1,57 @@
|
||||
actions.runner.plist.template
|
||||
actions.runner.service.template
|
||||
checkScripts/downloadCert.js
|
||||
checkScripts/makeWebRequest.js
|
||||
darwin.svc.sh.template
|
||||
hashFiles/index.js
|
||||
installdependencies.sh
|
||||
macos-run-invoker.js
|
||||
Microsoft.IdentityModel.Logging.dll
|
||||
Microsoft.IdentityModel.Tokens.dll
|
||||
Minimatch.dll
|
||||
Newtonsoft.Json.Bson.dll
|
||||
Newtonsoft.Json.dll
|
||||
Runner.Common.deps.json
|
||||
Runner.Common.dll
|
||||
Runner.Common.pdb
|
||||
Runner.Listener
|
||||
Runner.Listener.deps.json
|
||||
Runner.Listener.dll
|
||||
Runner.Listener.exe
|
||||
Runner.Listener.pdb
|
||||
Runner.Listener.runtimeconfig.json
|
||||
Runner.PluginHost
|
||||
Runner.PluginHost.deps.json
|
||||
Runner.PluginHost.dll
|
||||
Runner.PluginHost.exe
|
||||
Runner.PluginHost.pdb
|
||||
Runner.PluginHost.runtimeconfig.json
|
||||
Runner.Plugins.deps.json
|
||||
Runner.Plugins.dll
|
||||
Runner.Plugins.pdb
|
||||
Runner.Sdk.deps.json
|
||||
Runner.Sdk.dll
|
||||
Runner.Sdk.pdb
|
||||
Runner.Worker
|
||||
Runner.Worker.deps.json
|
||||
Runner.Worker.dll
|
||||
Runner.Worker.exe
|
||||
Runner.Worker.pdb
|
||||
Runner.Worker.runtimeconfig.json
|
||||
RunnerService.exe
|
||||
RunnerService.exe.config
|
||||
RunnerService.js
|
||||
RunnerService.pdb
|
||||
runsvc.sh
|
||||
Sdk.deps.json
|
||||
Sdk.dll
|
||||
Sdk.pdb
|
||||
System.IdentityModel.Tokens.Jwt.dll
|
||||
System.Net.Http.Formatting.dll
|
||||
System.Security.Cryptography.Pkcs.dll
|
||||
System.Security.Cryptography.ProtectedData.dll
|
||||
System.ServiceProcess.ServiceController.dll
|
||||
systemd.svc.sh.template
|
||||
update.cmd.template
|
||||
update.sh.template
|
||||
YamlDotNet.dll
|
||||
263
src/Misc/runnerdotnetruntimeassets
Normal file
@@ -0,0 +1,263 @@
|
||||
api-ms-win-core-console-l1-1-0.dll
|
||||
api-ms-win-core-console-l1-2-0.dll
|
||||
api-ms-win-core-datetime-l1-1-0.dll
|
||||
api-ms-win-core-debug-l1-1-0.dll
|
||||
api-ms-win-core-errorhandling-l1-1-0.dll
|
||||
api-ms-win-core-file-l1-1-0.dll
|
||||
api-ms-win-core-file-l1-2-0.dll
|
||||
api-ms-win-core-file-l2-1-0.dll
|
||||
api-ms-win-core-handle-l1-1-0.dll
|
||||
api-ms-win-core-heap-l1-1-0.dll
|
||||
api-ms-win-core-interlocked-l1-1-0.dll
|
||||
api-ms-win-core-libraryloader-l1-1-0.dll
|
||||
api-ms-win-core-localization-l1-2-0.dll
|
||||
api-ms-win-core-memory-l1-1-0.dll
|
||||
api-ms-win-core-namedpipe-l1-1-0.dll
|
||||
api-ms-win-core-processenvironment-l1-1-0.dll
|
||||
api-ms-win-core-processthreads-l1-1-0.dll
|
||||
api-ms-win-core-processthreads-l1-1-1.dll
|
||||
api-ms-win-core-profile-l1-1-0.dll
|
||||
api-ms-win-core-rtlsupport-l1-1-0.dll
|
||||
api-ms-win-core-string-l1-1-0.dll
|
||||
api-ms-win-core-synch-l1-1-0.dll
|
||||
api-ms-win-core-synch-l1-2-0.dll
|
||||
api-ms-win-core-sysinfo-l1-1-0.dll
|
||||
api-ms-win-core-timezone-l1-1-0.dll
|
||||
api-ms-win-core-util-l1-1-0.dll
|
||||
api-ms-win-crt-conio-l1-1-0.dll
|
||||
api-ms-win-crt-convert-l1-1-0.dll
|
||||
api-ms-win-crt-environment-l1-1-0.dll
|
||||
api-ms-win-crt-filesystem-l1-1-0.dll
|
||||
api-ms-win-crt-heap-l1-1-0.dll
|
||||
api-ms-win-crt-locale-l1-1-0.dll
|
||||
api-ms-win-crt-math-l1-1-0.dll
|
||||
api-ms-win-crt-multibyte-l1-1-0.dll
|
||||
api-ms-win-crt-private-l1-1-0.dll
|
||||
api-ms-win-crt-process-l1-1-0.dll
|
||||
api-ms-win-crt-runtime-l1-1-0.dll
|
||||
api-ms-win-crt-stdio-l1-1-0.dll
|
||||
api-ms-win-crt-string-l1-1-0.dll
|
||||
api-ms-win-crt-time-l1-1-0.dll
|
||||
api-ms-win-crt-utility-l1-1-0.dll
|
||||
clrcompression.dll
|
||||
clretwrc.dll
|
||||
clrjit.dll
|
||||
coreclr.dll
|
||||
createdump
|
||||
createdump.exe
|
||||
dbgshim.dll
|
||||
hostfxr.dll
|
||||
hostpolicy.dll
|
||||
libclrjit.dylib
|
||||
libclrjit.so
|
||||
libcoreclr.dylib
|
||||
libcoreclr.so
|
||||
libcoreclrtraceptprovider.so
|
||||
libdbgshim.dylib
|
||||
libdbgshim.so
|
||||
libhostfxr.dylib
|
||||
libhostfxr.so
|
||||
libhostpolicy.dylib
|
||||
libhostpolicy.so
|
||||
libmscordaccore.dylib
|
||||
libmscordaccore.so
|
||||
libmscordbi.dylib
|
||||
libmscordbi.so
|
||||
Microsoft.CSharp.dll
|
||||
Microsoft.DiaSymReader.Native.amd64.dll
|
||||
Microsoft.VisualBasic.Core.dll
|
||||
Microsoft.VisualBasic.dll
|
||||
Microsoft.Win32.Primitives.dll
|
||||
Microsoft.Win32.Registry.dll
|
||||
mscordaccore.dll
|
||||
mscordaccore_amd64_amd64_6.0.21.52210.dll
|
||||
mscordbi.dll
|
||||
mscorlib.dll
|
||||
mscorrc.debug.dll
|
||||
mscorrc.dll
|
||||
msquic.dll
|
||||
netstandard.dll
|
||||
SOS_README.md
|
||||
System.AppContext.dll
|
||||
System.Buffers.dll
|
||||
System.Collections.Concurrent.dll
|
||||
System.Collections.dll
|
||||
System.Collections.Immutable.dll
|
||||
System.Collections.NonGeneric.dll
|
||||
System.Collections.Specialized.dll
|
||||
System.ComponentModel.Annotations.dll
|
||||
System.ComponentModel.DataAnnotations.dll
|
||||
System.ComponentModel.dll
|
||||
System.ComponentModel.EventBasedAsync.dll
|
||||
System.ComponentModel.Primitives.dll
|
||||
System.ComponentModel.TypeConverter.dll
|
||||
System.Configuration.dll
|
||||
System.Console.dll
|
||||
System.Core.dll
|
||||
System.Data.Common.dll
|
||||
System.Data.DataSetExtensions.dll
|
||||
System.Data.dll
|
||||
System.Diagnostics.Contracts.dll
|
||||
System.Diagnostics.Debug.dll
|
||||
System.Diagnostics.DiagnosticSource.dll
|
||||
System.Diagnostics.FileVersionInfo.dll
|
||||
System.Diagnostics.Process.dll
|
||||
System.Diagnostics.StackTrace.dll
|
||||
System.Diagnostics.TextWriterTraceListener.dll
|
||||
System.Diagnostics.Tools.dll
|
||||
System.Diagnostics.TraceSource.dll
|
||||
System.Diagnostics.Tracing.dll
|
||||
System.dll
|
||||
System.Drawing.dll
|
||||
System.Drawing.Primitives.dll
|
||||
System.Dynamic.Runtime.dll
|
||||
System.Formats.Asn1.dll
|
||||
System.Globalization.Calendars.dll
|
||||
System.Globalization.dll
|
||||
System.Globalization.Extensions.dll
|
||||
System.Globalization.Native.dylib
|
||||
System.Globalization.Native.so
|
||||
System.IO.Compression.Brotli.dll
|
||||
System.IO.Compression.dll
|
||||
System.IO.Compression.FileSystem.dll
|
||||
System.IO.Compression.Native.a
|
||||
System.IO.Compression.Native.dll
|
||||
System.IO.Compression.Native.dylib
|
||||
System.IO.Compression.Native.so
|
||||
System.IO.Compression.ZipFile.dll
|
||||
System.IO.dll
|
||||
System.IO.FileSystem.AccessControl.dll
|
||||
System.IO.FileSystem.dll
|
||||
System.IO.FileSystem.DriveInfo.dll
|
||||
System.IO.FileSystem.Primitives.dll
|
||||
System.IO.FileSystem.Watcher.dll
|
||||
System.IO.IsolatedStorage.dll
|
||||
System.IO.MemoryMappedFiles.dll
|
||||
System.IO.Pipes.AccessControl.dll
|
||||
System.IO.Pipes.dll
|
||||
System.IO.UnmanagedMemoryStream.dll
|
||||
System.Linq.dll
|
||||
System.Linq.Expressions.dll
|
||||
System.Linq.Parallel.dll
|
||||
System.Linq.Queryable.dll
|
||||
System.Memory.dll
|
||||
System.Native.a
|
||||
System.Native.dylib
|
||||
System.Native.so
|
||||
System.Net.dll
|
||||
System.Net.Http.dll
|
||||
System.Net.Http.Json.dll
|
||||
System.Net.Http.Native.a
|
||||
System.Net.Http.Native.dylib
|
||||
System.Net.Http.Native.so
|
||||
System.Net.HttpListener.dll
|
||||
System.Net.Mail.dll
|
||||
System.Net.NameResolution.dll
|
||||
System.Net.NetworkInformation.dll
|
||||
System.Net.Ping.dll
|
||||
System.Net.Primitives.dll
|
||||
System.Net.Quic.dll
|
||||
System.Net.Requests.dll
|
||||
System.Net.Security.dll
|
||||
System.Net.Security.Native.a
|
||||
System.Net.Security.Native.dylib
|
||||
System.Net.Security.Native.so
|
||||
System.Net.ServicePoint.dll
|
||||
System.Net.Sockets.dll
|
||||
System.Net.WebClient.dll
|
||||
System.Net.WebHeaderCollection.dll
|
||||
System.Net.WebProxy.dll
|
||||
System.Net.WebSockets.Client.dll
|
||||
System.Net.WebSockets.dll
|
||||
System.Numerics.dll
|
||||
System.Numerics.Vectors.dll
|
||||
System.ObjectModel.dll
|
||||
System.Private.CoreLib.dll
|
||||
System.Private.DataContractSerialization.dll
|
||||
System.Private.Uri.dll
|
||||
System.Private.Xml.dll
|
||||
System.Private.Xml.Linq.dll
|
||||
System.Reflection.DispatchProxy.dll
|
||||
System.Reflection.dll
|
||||
System.Reflection.Emit.dll
|
||||
System.Reflection.Emit.ILGeneration.dll
|
||||
System.Reflection.Emit.Lightweight.dll
|
||||
System.Reflection.Extensions.dll
|
||||
System.Reflection.Metadata.dll
|
||||
System.Reflection.Primitives.dll
|
||||
System.Reflection.TypeExtensions.dll
|
||||
System.Resources.Reader.dll
|
||||
System.Resources.ResourceManager.dll
|
||||
System.Resources.Writer.dll
|
||||
System.Runtime.CompilerServices.Unsafe.dll
|
||||
System.Runtime.CompilerServices.VisualC.dll
|
||||
System.Runtime.dll
|
||||
System.Runtime.Extensions.dll
|
||||
System.Runtime.Handles.dll
|
||||
System.Runtime.InteropServices.dll
|
||||
System.Runtime.InteropServices.RuntimeInformation.dll
|
||||
System.Runtime.InteropServices.WindowsRuntime.dll
|
||||
System.Runtime.Intrinsics.dll
|
||||
System.Runtime.Loader.dll
|
||||
System.Runtime.Numerics.dll
|
||||
System.Runtime.Serialization.dll
|
||||
System.Runtime.Serialization.Formatters.dll
|
||||
System.Runtime.Serialization.Json.dll
|
||||
System.Runtime.Serialization.Primitives.dll
|
||||
System.Runtime.Serialization.Xml.dll
|
||||
System.Runtime.WindowsRuntime.dll
|
||||
System.Runtime.WindowsRuntime.UI.Xaml.dll
|
||||
System.Security.AccessControl.dll
|
||||
System.Security.Claims.dll
|
||||
System.Security.Cryptography.Algorithms.dll
|
||||
System.Security.Cryptography.Cng.dll
|
||||
System.Security.Cryptography.Csp.dll
|
||||
System.Security.Cryptography.Encoding.dll
|
||||
System.Security.Cryptography.Native.Apple.a
|
||||
System.Security.Cryptography.Native.Apple.dylib
|
||||
System.Security.Cryptography.Native.OpenSsl.a
|
||||
System.Security.Cryptography.Native.OpenSsl.dylib
|
||||
System.Security.Cryptography.Native.OpenSsl.so
|
||||
System.Security.Cryptography.OpenSsl.dll
|
||||
System.Security.Cryptography.Primitives.dll
|
||||
System.Security.Cryptography.X509Certificates.dll
|
||||
System.Security.Cryptography.XCertificates.dll
|
||||
System.Security.dll
|
||||
System.Security.Principal.dll
|
||||
System.Security.Principal.Windows.dll
|
||||
System.Security.SecureString.dll
|
||||
System.ServiceModel.Web.dll
|
||||
System.ServiceProcess.dll
|
||||
System.Text.Encoding.CodePages.dll
|
||||
System.Text.Encoding.dll
|
||||
System.Text.Encoding.Extensions.dll
|
||||
System.Text.Encodings.Web.dll
|
||||
System.Text.Json.dll
|
||||
System.Text.RegularExpressions.dll
|
||||
System.Threading.Channels.dll
|
||||
System.Threading.dll
|
||||
System.Threading.Overlapped.dll
|
||||
System.Threading.Tasks.Dataflow.dll
|
||||
System.Threading.Tasks.dll
|
||||
System.Threading.Tasks.Extensions.dll
|
||||
System.Threading.Tasks.Parallel.dll
|
||||
System.Threading.Thread.dll
|
||||
System.Threading.ThreadPool.dll
|
||||
System.Threading.Timer.dll
|
||||
System.Transactions.dll
|
||||
System.Transactions.Local.dll
|
||||
System.ValueTuple.dll
|
||||
System.Web.dll
|
||||
System.Web.HttpUtility.dll
|
||||
System.Windows.dll
|
||||
System.Xml.dll
|
||||
System.Xml.Linq.dll
|
||||
System.Xml.ReaderWriter.dll
|
||||
System.Xml.Serialization.dll
|
||||
System.Xml.XDocument.dll
|
||||
System.Xml.XmlDocument.dll
|
||||
System.Xml.XmlSerializer.dll
|
||||
System.Xml.XPath.dll
|
||||
System.Xml.XPath.XDocument.dll
|
||||
ucrtbase.dll
|
||||
WindowsBase.dll
|
||||
24
src/Misc/trimmedpackages_targz.json
Normal file
@@ -0,0 +1,24 @@
[
{
"HashValue": "<NO_RUNTIME_EXTERNALS_HASH>",
"DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noruntime-noexternals.tar.gz",
"TrimmedContents": {
"dotnetRuntime": "<RUNTIME_HASH>",
"externals": "<EXTERNALS_HASH>"
}
},
{
"HashValue": "<NO_RUNTIME_HASH>",
"DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noruntime.tar.gz",
"TrimmedContents": {
"dotnetRuntime": "<RUNTIME_HASH>"
}
},
{
"HashValue": "<NO_EXTERNALS_HASH>",
"DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noexternals.tar.gz",
"TrimmedContents": {
"externals": "<EXTERNALS_HASH>"
}
}
]
24
src/Misc/trimmedpackages_zip.json
Normal file
@@ -0,0 +1,24 @@
[
{
"HashValue": "<NO_RUNTIME_EXTERNALS_HASH>",
"DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noruntime-noexternals.zip",
"TrimmedContents": {
"dotnetRuntime": "<RUNTIME_HASH>",
"externals": "<EXTERNALS_HASH>"
}
},
{
"HashValue": "<NO_RUNTIME_HASH>",
"DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noruntime.zip",
"TrimmedContents": {
"dotnetRuntime": "<RUNTIME_HASH>"
}
},
{
"HashValue": "<NO_EXTERNALS_HASH>",
"DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noexternals.zip",
"TrimmedContents": {
"externals": "<EXTERNALS_HASH>"
}
}
]
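Both trimmed-package manifests above share the same entry shape (a hash, a download URL, and a map of trimmed contents), so a consumer can bind them to a single model. A minimal sketch, assuming System.Text.Json; the type and helper names below are illustrative and not part of the runner code:

    using System.Collections.Generic;
    using System.IO;
    using System.Text.Json;

    // Illustrative model matching the manifest entries above.
    public sealed class TrimmedPackage
    {
        public string HashValue { get; set; }
        public string DownloadUrl { get; set; }
        public Dictionary<string, string> TrimmedContents { get; set; }
    }

    public static class TrimmedPackageReader
    {
        // Works for both trimmedpackages_targz.json and trimmedpackages_zip.json.
        public static List<TrimmedPackage> Load(string path)
        {
            var json = File.ReadAllText(path);
            // Manifest property names are PascalCase, so no naming policy is required.
            return JsonSerializer.Deserialize<List<TrimmedPackage>>(json);
        }
    }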
@@ -33,6 +33,9 @@ namespace GitHub.Runner.Common
[DataMember(EmitDefaultValue = false)]
public string PoolName { get; set; }

[DataMember(EmitDefaultValue = false)]
public bool DisableUpdate { get; set; }

[DataMember(EmitDefaultValue = false)]
public bool Ephemeral { get; set; }

@@ -129,6 +129,7 @@ namespace GitHub.Runner.Common
public static readonly string Ephemeral = "ephemeral";
public static readonly string Help = "help";
public static readonly string Replace = "replace";
public static readonly string DisableUpdate = "disableupdate";
public static readonly string Once = "once"; // Keep this around since customers still rely on it
public static readonly string RunAsService = "runasservice";
public static readonly string Unattended = "unattended";
@@ -155,7 +156,8 @@ namespace GitHub.Runner.Common
public static readonly string LowDiskSpace = "LOW_DISK_SPACE";
public static readonly string UnsupportedCommand = "UNSUPPORTED_COMMAND";
public static readonly string UnsupportedCommandMessageDisabled = "The `{0}` command is disabled. Please upgrade to using Environment Files or opt into unsecure command execution by setting the `ACTIONS_ALLOW_UNSECURE_COMMANDS` environment variable to `true`. For more information see: https://github.blog/changelog/2020-10-01-github-actions-deprecating-set-env-and-add-path-commands/";
public static readonly string UnsupportedStopCommandTokenDisabled = "You cannot use an endToken that is an empty string, the string 'pause-logging', or another workflow command. For more information see: https://docs.github.com/en/actions/learn-github-actions/workflow-commands-for-github-actions#example-stopping-and-starting-workflow-commands or opt into insecure command execution by setting the `ACTIONS_ALLOW_UNSECURE_STOPCOMMAND_TOKENS` environment variable to `true`.";
public static readonly string UnsupportedStopCommandTokenDisabled = "You cannot use an endToken that is an empty string, the string 'pause-logging', or another workflow command. For more information see: https://docs.github.com/actions/learn-github-actions/workflow-commands-for-github-actions#example-stopping-and-starting-workflow-commands or opt into insecure command execution by setting the `ACTIONS_ALLOW_UNSECURE_STOPCOMMAND_TOKENS` environment variable to `true`.";
public static readonly string UnsupportedSummarySize = "$GITHUB_STEP_SUMMARY upload aborted, supports content up to a size of {0}k, got {1}k. For more information see: https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-markdown-summary";
}

public static class RunnerEvent
@@ -187,6 +189,12 @@ namespace GitHub.Runner.Common
public static readonly string Success = "success";
}

public static class Hooks
{
public static readonly string JobStartedStepName = "Set up runner";
public static readonly string JobCompletedStepName = "Complete runner";
}

public static class Path
{
public static readonly string ActionsDirectory = "_actions";
@@ -218,11 +226,15 @@ namespace GitHub.Runner.Common
public static readonly string AllowUnsupportedStopCommandTokens = "ACTIONS_ALLOW_UNSECURE_STOPCOMMAND_TOKENS";
public static readonly string RunnerDebug = "ACTIONS_RUNNER_DEBUG";
public static readonly string StepDebug = "ACTIONS_STEP_DEBUG";
public static readonly string AllowActionsUseUnsecureNodeVersion = "ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION";
}

public static class Agent
{
public static readonly string ToolsDirectory = "agent.ToolsDirectory";

// Set this env var to "node12" to downgrade the node version for internal functions (e.g. hashfiles). This does NOT affect the version of node actions.
public static readonly string ForcedInternalNodeVersion = "ACTIONS_RUNNER_FORCED_INTERNAL_NODE_VERSION";
}

public static class System

@@ -60,6 +60,7 @@ namespace GitHub.Runner.Common
case "GitHub.Runner.Worker.IFileCommandExtension":
Add<T>(extensions, "GitHub.Runner.Worker.AddPathFileCommand, Runner.Worker");
Add<T>(extensions, "GitHub.Runner.Worker.SetEnvFileCommand, Runner.Worker");
Add<T>(extensions, "GitHub.Runner.Worker.CreateStepSummaryCommand, Runner.Worker");
break;
case "GitHub.Runner.Listener.Check.ICheckExtension":
Add<T>(extensions, "GitHub.Runner.Listener.Check.InternetCheck, Runner.Listener");

@@ -98,14 +98,14 @@ namespace GitHub.Runner.Common
{
int logPageSize;
string logSizeEnv = Environment.GetEnvironmentVariable($"{hostType.ToUpperInvariant()}_LOGSIZE");
if (!string.IsNullOrEmpty(logSizeEnv) || !int.TryParse(logSizeEnv, out logPageSize))
if (string.IsNullOrEmpty(logSizeEnv) || !int.TryParse(logSizeEnv, out logPageSize))
{
logPageSize = _defaultLogPageSize;
}

int logRetentionDays;
string logRetentionDaysEnv = Environment.GetEnvironmentVariable($"{hostType.ToUpperInvariant()}_LOGRETENTION");
if (!string.IsNullOrEmpty(logRetentionDaysEnv) || !int.TryParse(logRetentionDaysEnv, out logRetentionDays))
if (string.IsNullOrEmpty(logRetentionDaysEnv) || !int.TryParse(logRetentionDaysEnv, out logRetentionDays))
{
logRetentionDays = _defaultLogRetentionDays;
}
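The change above is a behavior fix: the old condition (!string.IsNullOrEmpty(...) || !int.TryParse(...)) evaluated to true whenever the variable was set, so the defaults were always used; with the corrected check the defaults apply only when the variable is missing or not a valid integer. A minimal sketch of the same pattern in isolation; the helper name and example values are illustrative:

    using System;

    static class EnvOverride
    {
        // Returns the integer value of an environment variable, or the supplied
        // default when the variable is unset or cannot be parsed.
        public static int GetInt(string name, int defaultValue)
        {
            var raw = Environment.GetEnvironmentVariable(name);
            if (string.IsNullOrEmpty(raw) || !int.TryParse(raw, out var value))
            {
                return defaultValue;
            }
            return value;
        }
    }

    // e.g. var logPageSize = EnvOverride.GetInt("WORKER_LOGSIZE", defaultValue: 8);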
@@ -193,6 +193,11 @@ namespace GitHub.Runner.Common
_trace.Info($"No proxy settings were found based on environment variables (http_proxy/https_proxy/HTTP_PROXY/HTTPS_PROXY)");
}

if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_TLS_NO_VERIFY")))
{
_trace.Warning($"Runner is running under insecure mode: HTTPS server certificate validation has been turned off by the GITHUB_ACTIONS_RUNNER_TLS_NO_VERIFY environment variable.");
}

var credFile = GetConfigFile(WellKnownConfigFile.Credentials);
if (File.Exists(credFile))
{
@@ -200,9 +205,19 @@
if (credData != null &&
credData.Data.TryGetValue("clientId", out var clientId))
{
_userAgents.Add(new ProductInfoHeaderValue($"RunnerId", clientId));
_userAgents.Add(new ProductInfoHeaderValue("ClientId", clientId));
}
}

var runnerFile = GetConfigFile(WellKnownConfigFile.Runner);
if (File.Exists(runnerFile))
{
var runnerSettings = IOUtil.LoadObject<RunnerSettings>(runnerFile);
_userAgents.Add(new ProductInfoHeaderValue("RunnerId", runnerSettings.AgentId.ToString(CultureInfo.InvariantCulture)));
_userAgents.Add(new ProductInfoHeaderValue("GroupId", runnerSettings.PoolId.ToString(CultureInfo.InvariantCulture)));
}

_userAgents.Add(new ProductInfoHeaderValue("CommitSHA", BuildConstants.Source.CommitHash));
}

public string GetDirectory(WellKnownDirectory directory)
@@ -342,7 +357,7 @@
GetDirectory(WellKnownDirectory.Root),
".setup_info");
break;

case WellKnownConfigFile.Telemetry:
path = Path.Combine(
GetDirectory(WellKnownDirectory.Diag),

@@ -1,3 +1,4 @@
using System;
using System.Net.Http;
using GitHub.Runner.Sdk;

@@ -13,7 +14,14 @@ namespace GitHub.Runner.Common
{
public HttpClientHandler CreateClientHandler(RunnerWebProxy webProxy)
{
return new HttpClientHandler() { Proxy = webProxy };
var client = new HttpClientHandler() { Proxy = webProxy };

if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_TLS_NO_VERIFY")))
{
client.ServerCertificateCustomValidationCallback = HttpClientHandler.DangerousAcceptAnyServerCertificateValidator;
}

return client;
}
}
}
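For callers, the effect of the new handler is that HTTPS certificate errors are ignored only when the opt-in variable is set. A usage sketch; the runner uses StringUtil.ConvertToBoolean, which accepts several truthy spellings, so the plain string comparison below is a simplification:

    using System;
    using System.Net.Http;

    // Build a client the same way the factory above does.
    var handler = new HttpClientHandler();
    if (string.Equals(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_TLS_NO_VERIFY"),
                      "true", StringComparison.OrdinalIgnoreCase))
    {
        // Opt-in only: certificate validation stays on by default.
        handler.ServerCertificateCustomValidationCallback =
            HttpClientHandler.DangerousAcceptAnyServerCertificateValidator;
    }
    using var client = new HttpClient(handler);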
@@ -1,32 +1,36 @@
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Net.Http;
|
||||
using System.Net.WebSockets;
|
||||
using System.Text;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Services.Common;
|
||||
using GitHub.Services.WebApi;
|
||||
using Newtonsoft.Json;
|
||||
|
||||
namespace GitHub.Runner.Common
|
||||
{
|
||||
[ServiceLocator(Default = typeof(JobServer))]
|
||||
public interface IJobServer : IRunnerService
|
||||
public interface IJobServer : IRunnerService, IAsyncDisposable
|
||||
{
|
||||
Task ConnectAsync(VssConnection jobConnection);
|
||||
|
||||
void InitializeWebsocketClient(ServiceEndpoint serviceEndpoint);
|
||||
|
||||
// logging and console
|
||||
Task<TaskLog> AppendLogContentAsync(Guid scopeIdentifier, string hubName, Guid planId, int logId, Stream uploadStream, CancellationToken cancellationToken);
|
||||
Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, CancellationToken cancellationToken);
|
||||
Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long startLine, CancellationToken cancellationToken);
|
||||
Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long? startLine, CancellationToken cancellationToken);
|
||||
Task<TaskAttachment> CreateAttachmentAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, String type, String name, Stream uploadStream, CancellationToken cancellationToken);
|
||||
Task<TaskLog> CreateLogAsync(Guid scopeIdentifier, string hubName, Guid planId, TaskLog log, CancellationToken cancellationToken);
|
||||
Task<Timeline> CreateTimelineAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, CancellationToken cancellationToken);
|
||||
Task<List<TimelineRecord>> UpdateTimelineRecordsAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, IEnumerable<TimelineRecord> records, CancellationToken cancellationToken);
|
||||
Task RaisePlanEventAsync<T>(Guid scopeIdentifier, string hubName, Guid planId, T eventData, CancellationToken cancellationToken) where T : JobEvent;
|
||||
Task<Timeline> GetTimelineAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, CancellationToken cancellationToken);
|
||||
Task<ActionDownloadInfoCollection> ResolveActionDownloadInfoAsync(Guid scopeIdentifier, string hubName, Guid planId, ActionReferenceList actions, CancellationToken cancellationToken);
|
||||
Task<ActionDownloadInfoCollection> ResolveActionDownloadInfoAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid jobId, ActionReferenceList actions, CancellationToken cancellationToken);
|
||||
}
|
||||
|
||||
public sealed class JobServer : RunnerService, IJobServer
|
||||
@@ -34,6 +38,20 @@ namespace GitHub.Runner.Common
|
||||
private bool _hasConnection;
|
||||
private VssConnection _connection;
|
||||
private TaskHttpClient _taskClient;
|
||||
private ClientWebSocket _websocketClient;
|
||||
|
||||
private ServiceEndpoint _serviceEndpoint;
|
||||
|
||||
private int totalBatchedLinesAttemptedByWebsocket = 0;
|
||||
private int failedAttemptsToPostBatchedLinesByWebsocket = 0;
|
||||
|
||||
|
||||
private static readonly TimeSpan _minDelayForWebsocketReconnect = TimeSpan.FromMilliseconds(100);
|
||||
private static readonly TimeSpan _maxDelayForWebsocketReconnect = TimeSpan.FromMilliseconds(500);
|
||||
private static readonly int _minWebsocketFailurePercentageAllowed = 50;
|
||||
private static readonly int _minWebsocketBatchedLinesCountToConsider = 5;
|
||||
|
||||
private Task _websocketConnectTask;
|
||||
|
||||
public async Task ConnectAsync(VssConnection jobConnection)
|
||||
{
|
||||
@@ -42,7 +60,7 @@ namespace GitHub.Runner.Common
|
||||
int attemptCount = totalAttempts;
|
||||
var configurationStore = HostContext.GetService<IConfigurationStore>();
|
||||
var runnerSettings = configurationStore.GetSettings();
|
||||
|
||||
|
||||
while (!_connection.HasAuthenticated && attemptCount-- > 0)
|
||||
{
|
||||
try
|
||||
@@ -117,6 +135,19 @@ namespace GitHub.Runner.Common
|
||||
}
|
||||
}
|
||||
|
||||
public void InitializeWebsocketClient(ServiceEndpoint serviceEndpoint)
|
||||
{
|
||||
this._serviceEndpoint = serviceEndpoint;
|
||||
InitializeWebsocketClient(TimeSpan.Zero);
|
||||
}
|
||||
|
||||
public ValueTask DisposeAsync()
|
||||
{
|
||||
_websocketClient?.CloseOutputAsync(WebSocketCloseStatus.NormalClosure, "Shutdown", CancellationToken.None);
|
||||
GC.SuppressFinalize(this);
|
||||
return ValueTask.CompletedTask;
|
||||
}
|
||||
|
||||
private void CheckConnection()
|
||||
{
|
||||
if (!_hasConnection)
|
||||
@@ -125,6 +156,48 @@ namespace GitHub.Runner.Common
|
||||
}
|
||||
}
|
||||
|
||||
private void InitializeWebsocketClient(TimeSpan delay)
|
||||
{
|
||||
if (_serviceEndpoint.Authorization != null &&
|
||||
_serviceEndpoint.Authorization.Parameters.TryGetValue(EndpointAuthorizationParameters.AccessToken, out var accessToken) &&
|
||||
!string.IsNullOrEmpty(accessToken))
|
||||
{
|
||||
if (_serviceEndpoint.Data.TryGetValue("FeedStreamUrl", out var feedStreamUrl) && !string.IsNullOrEmpty(feedStreamUrl))
|
||||
{
|
||||
// let's ensure we use the right scheme
|
||||
feedStreamUrl = feedStreamUrl.Replace("https://", "wss://").Replace("http://", "ws://");
|
||||
Trace.Info($"Creating websocket client ..." + feedStreamUrl);
|
||||
this._websocketClient = new ClientWebSocket();
|
||||
this._websocketClient.Options.SetRequestHeader("Authorization", $"Bearer {accessToken}");
|
||||
this._websocketConnectTask = ConnectWebSocketClient(feedStreamUrl, delay);
|
||||
}
|
||||
else
|
||||
{
|
||||
Trace.Info($"No FeedStreamUrl found, so we will use Rest API calls for sending feed data");
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
Trace.Info($"No access token from the service endpoint");
|
||||
}
|
||||
}
|
||||
|
||||
private async Task ConnectWebSocketClient(string feedStreamUrl, TimeSpan delay)
|
||||
{
|
||||
try
|
||||
{
|
||||
Trace.Info($"Attempting to start websocket client with delay {delay}.");
|
||||
await Task.Delay(delay);
|
||||
await this._websocketClient.ConnectAsync(new Uri(feedStreamUrl), default(CancellationToken));
|
||||
Trace.Info($"Successfully started websocket client.");
|
||||
}
|
||||
catch(Exception ex)
|
||||
{
|
||||
Trace.Info("Exception caught during websocket client connect, fallback of HTTP would be used now instead of websocket.");
|
||||
Trace.Error(ex);
|
||||
}
|
||||
}
|
||||
|
||||
//-----------------------------------------------------------------
|
||||
// Feedback: WebConsole, TimelineRecords and Logs
|
||||
//-----------------------------------------------------------------
|
||||
@@ -135,16 +208,72 @@ namespace GitHub.Runner.Common
|
||||
return _taskClient.AppendLogContentAsync(scopeIdentifier, hubName, planId, logId, uploadStream, cancellationToken: cancellationToken);
|
||||
}
|
||||
|
||||
public Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, CancellationToken cancellationToken)
|
||||
public async Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long? startLine, CancellationToken cancellationToken)
|
||||
{
|
||||
CheckConnection();
|
||||
return _taskClient.AppendTimelineRecordFeedAsync(scopeIdentifier, hubName, planId, timelineId, timelineRecordId, stepId, lines, cancellationToken: cancellationToken);
|
||||
}
|
||||
var pushedLinesViaWebsocket = false;
|
||||
if (_websocketConnectTask != null)
|
||||
{
|
||||
await _websocketConnectTask;
|
||||
}
|
||||
|
||||
public Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long startLine, CancellationToken cancellationToken)
|
||||
{
|
||||
CheckConnection();
|
||||
return _taskClient.AppendTimelineRecordFeedAsync(scopeIdentifier, hubName, planId, timelineId, timelineRecordId, stepId, lines, startLine, cancellationToken: cancellationToken);
|
||||
// "_websocketClient != null" implies either: We have a successful connection OR we have to attempt sending again and then reconnect
|
||||
// ...in other words, if websocket client is null, we will skip sending to websocket and just use rest api calls to send data
|
||||
if (_websocketClient != null)
|
||||
{
|
||||
var linesWrapper = startLine.HasValue? new TimelineRecordFeedLinesWrapper(stepId, lines, startLine.Value): new TimelineRecordFeedLinesWrapper(stepId, lines);
|
||||
var jsonData = StringUtil.ConvertToJson(linesWrapper);
|
||||
try
|
||||
{
|
||||
totalBatchedLinesAttemptedByWebsocket++;
|
||||
var jsonDataBytes = Encoding.UTF8.GetBytes(jsonData);
|
||||
// break the message into chunks of 1024 bytes
|
||||
for (var i = 0; i < jsonDataBytes.Length; i += 1 * 1024)
|
||||
{
|
||||
var lastChunk = i + (1 * 1024) >= jsonDataBytes.Length;
|
||||
var chunk = new ArraySegment<byte>(jsonDataBytes, i, Math.Min(1 * 1024, jsonDataBytes.Length - i));
|
||||
await _websocketClient.SendAsync(chunk, WebSocketMessageType.Text, endOfMessage:lastChunk, cancellationToken);
|
||||
}
|
||||
|
||||
pushedLinesViaWebsocket = true;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
failedAttemptsToPostBatchedLinesByWebsocket++;
|
||||
Trace.Info($"Caught exception during append web console line to websocket, let's fallback to sending via non-websocket call (total calls: {totalBatchedLinesAttemptedByWebsocket}, failed calls: {failedAttemptsToPostBatchedLinesByWebsocket}, websocket state: {this._websocketClient?.State}).");
|
||||
Trace.Error(ex);
|
||||
if (totalBatchedLinesAttemptedByWebsocket > _minWebsocketBatchedLinesCountToConsider)
|
||||
{
|
||||
// let's consider failure percentage
|
||||
if (failedAttemptsToPostBatchedLinesByWebsocket * 100 / totalBatchedLinesAttemptedByWebsocket > _minWebsocketFailurePercentageAllowed)
|
||||
{
|
||||
Trace.Info($"Exhausted websocket allowed retries, we will not attempt websocket connection for this job to post lines again.");
|
||||
_websocketClient?.CloseOutputAsync(WebSocketCloseStatus.InternalServerError, "Shutdown due to failures", cancellationToken);
|
||||
// By setting it to null, we will ensure that we never try websocket path again for this job
|
||||
_websocketClient = null;
|
||||
}
|
||||
}
|
||||
|
||||
if (_websocketClient != null)
|
||||
{
|
||||
var delay = BackoffTimerHelper.GetRandomBackoff(_minDelayForWebsocketReconnect, _maxDelayForWebsocketReconnect);
|
||||
Trace.Info($"Websocket is not open, let's attempt to connect back again with random backoff {delay} ms (total calls: {totalBatchedLinesAttemptedByWebsocket}, failed calls: {failedAttemptsToPostBatchedLinesByWebsocket}).");
|
||||
InitializeWebsocketClient(delay);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (!pushedLinesViaWebsocket)
|
||||
{
|
||||
if (startLine.HasValue)
|
||||
{
|
||||
await _taskClient.AppendTimelineRecordFeedAsync(scopeIdentifier, hubName, planId, timelineId, timelineRecordId, stepId, lines, startLine.Value, cancellationToken: cancellationToken);
|
||||
}
|
||||
else
|
||||
{
|
||||
await _taskClient.AppendTimelineRecordFeedAsync(scopeIdentifier, hubName, planId, timelineId, timelineRecordId, stepId, lines, cancellationToken: cancellationToken);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public Task<TaskAttachment> CreateAttachmentAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, string type, string name, Stream uploadStream, CancellationToken cancellationToken)
|
||||
@@ -186,10 +315,10 @@ namespace GitHub.Runner.Common
|
||||
//-----------------------------------------------------------------
|
||||
// Action download info
|
||||
//-----------------------------------------------------------------
|
||||
public Task<ActionDownloadInfoCollection> ResolveActionDownloadInfoAsync(Guid scopeIdentifier, string hubName, Guid planId, ActionReferenceList actions, CancellationToken cancellationToken)
|
||||
public Task<ActionDownloadInfoCollection> ResolveActionDownloadInfoAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid jobId, ActionReferenceList actions, CancellationToken cancellationToken)
|
||||
{
|
||||
CheckConnection();
|
||||
return _taskClient.ResolveActionDownloadInfoAsync(scopeIdentifier, hubName, planId, actions, cancellationToken: cancellationToken);
|
||||
return _taskClient.ResolveActionDownloadInfoAsync(scopeIdentifier, hubName, planId, jobId, actions, cancellationToken: cancellationToken);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
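The websocket feed path above serializes each batch of console lines to JSON and sends it as one logical text message split into 1024-byte frames, with endOfMessage set only on the last frame. A standalone sketch of that chunking, assuming an already-connected ClientWebSocket:

    using System;
    using System.Net.WebSockets;
    using System.Text;
    using System.Threading;
    using System.Threading.Tasks;

    static class FeedSender
    {
        // Sends the JSON payload as a single text message in 1024-byte chunks.
        public static async Task SendChunkedAsync(ClientWebSocket socket, string json, CancellationToken token)
        {
            var bytes = Encoding.UTF8.GetBytes(json);
            const int chunkSize = 1024;
            for (var i = 0; i < bytes.Length; i += chunkSize)
            {
                var count = Math.Min(chunkSize, bytes.Length - i);
                var lastChunk = i + chunkSize >= bytes.Length;
                await socket.SendAsync(new ArraySegment<byte>(bytes, i, count),
                    WebSocketMessageType.Text, endOfMessage: lastChunk, token);
            }
        }
    }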
@@ -1,14 +1,13 @@
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Common.Util;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Collections.Concurrent;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using Pipelines = GitHub.DistributedTask.Pipelines;
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Sdk;
|
||||
using Pipelines = GitHub.DistributedTask.Pipelines;
|
||||
|
||||
namespace GitHub.Runner.Common
|
||||
{
|
||||
@@ -72,10 +71,11 @@ namespace GitHub.Runner.Common
|
||||
|
||||
// Web console dequeue will start with process queue every 250ms for the first 60*4 times (~60 seconds).
|
||||
// Then the dequeue will happen every 500ms.
|
||||
// In this way, customer still can get instance live console output on job start,
|
||||
// In this way, customer still can get instance live console output on job start,
|
||||
// at the same time we can cut the load to server after the build run for more than 60s
|
||||
private int _webConsoleLineAggressiveDequeueCount = 0;
|
||||
private const int _webConsoleLineAggressiveDequeueLimit = 4 * 60;
|
||||
private const int _webConsoleLineQueueSizeLimit = 1024;
|
||||
private bool _webConsoleLineAggressiveDequeue = true;
|
||||
private bool _firstConsoleOutputs = true;
|
||||
|
||||
@@ -89,6 +89,10 @@ namespace GitHub.Runner.Common
|
||||
{
|
||||
Trace.Entering();
|
||||
|
||||
var serviceEndPoint = jobRequest.Resources.Endpoints.Single(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
|
||||
|
||||
_jobServer.InitializeWebsocketClient(serviceEndPoint);
|
||||
|
||||
if (_queueInProcess)
|
||||
{
|
||||
Trace.Info("No-opt, all queue process tasks are running.");
|
||||
@@ -156,13 +160,28 @@ namespace GitHub.Runner.Common
|
||||
await ProcessTimelinesUpdateQueueAsync(runOnce: true);
|
||||
Trace.Info("Timeline update queue drained.");
|
||||
|
||||
Trace.Info($"Disposing job server ...");
|
||||
await _jobServer.DisposeAsync();
|
||||
|
||||
Trace.Info("All queue process tasks have been stopped, and all queues are drained.");
|
||||
}
|
||||
|
||||
public void QueueWebConsoleLine(Guid stepRecordId, string line, long? lineNumber)
|
||||
{
|
||||
Trace.Verbose("Enqueue web console line queue: {0}", line);
|
||||
_webConsoleLineQueue.Enqueue(new ConsoleLineInfo(stepRecordId, line, lineNumber));
|
||||
// We only process 500 lines of the queue each time.
|
||||
// If the queue is backing up due to slow Http request or flood of output from step,
|
||||
// we will drop the output to avoid extra memory consumption from the runner since the live console feed is best effort.
|
||||
if (!string.IsNullOrEmpty(line) && _webConsoleLineQueue.Count < _webConsoleLineQueueSizeLimit)
|
||||
{
|
||||
Trace.Verbose("Enqueue web console line queue: {0}", line);
|
||||
if (line.Length > 1024)
|
||||
{
|
||||
Trace.Verbose("Web console line is more than 1024 chars, truncate to first 1024 chars");
|
||||
line = $"{line.Substring(0, 1024)}...";
|
||||
}
|
||||
|
||||
_webConsoleLineQueue.Enqueue(new ConsoleLineInfo(stepRecordId, line, lineNumber));
|
||||
}
|
||||
}
|
||||
|
||||
public void QueueFileUpload(Guid timelineId, Guid timelineRecordId, string type, string name, string path, bool deleteSource)
|
||||
@@ -230,12 +249,6 @@ namespace GitHub.Runner.Common
|
||||
stepRecordIds.Add(lineInfo.StepRecordId);
|
||||
}
|
||||
|
||||
if (!string.IsNullOrEmpty(lineInfo.Line) && lineInfo.Line.Length > 1024)
|
||||
{
|
||||
Trace.Verbose("Web console line is more than 1024 chars, truncate to first 1024 chars");
|
||||
lineInfo.Line = $"{lineInfo.Line.Substring(0, 1024)}...";
|
||||
}
|
||||
|
||||
stepsConsoleLines[lineInfo.StepRecordId].Add(new TimelineRecordLogLine(lineInfo.Line, lineInfo.LineNumber));
|
||||
linesCounter++;
|
||||
|
||||
@@ -286,15 +299,7 @@ namespace GitHub.Runner.Common
|
||||
{
|
||||
try
|
||||
{
|
||||
// we will not requeue failed batch, since the web console lines are time sensitive.
|
||||
if (batch[0].LineNumber.HasValue)
|
||||
{
|
||||
await _jobServer.AppendTimelineRecordFeedAsync(_scopeIdentifier, _hubName, _planId, _jobTimelineId, _jobTimelineRecordId, stepRecordId, batch.Select(logLine => logLine.Line).ToList(), batch[0].LineNumber.Value, default(CancellationToken));
|
||||
}
|
||||
else
|
||||
{
|
||||
await _jobServer.AppendTimelineRecordFeedAsync(_scopeIdentifier, _hubName, _planId, _jobTimelineId, _jobTimelineRecordId, stepRecordId, batch.Select(logLine => logLine.Line).ToList(), default(CancellationToken));
|
||||
}
|
||||
await _jobServer.AppendTimelineRecordFeedAsync(_scopeIdentifier, _hubName, _planId, _jobTimelineId, _jobTimelineRecordId, stepRecordId, batch.Select(logLine => logLine.Line).ToList(), batch[0].LineNumber, default(CancellationToken));
|
||||
|
||||
if (_firstConsoleOutputs)
|
||||
{
|
||||
@@ -483,8 +488,8 @@ namespace GitHub.Runner.Common
|
||||
|
||||
if (runOnce)
|
||||
{
|
||||
// continue process timeline records update,
|
||||
// we might have more records need update,
|
||||
// continue process timeline records update,
|
||||
// we might have more records need update,
|
||||
// since we just create a new sub-timeline
|
||||
if (pendingSubtimelineUpdate)
|
||||
{
|
||||
|
||||
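The queueing change above makes the live console feed explicitly best effort: new lines are dropped once the queue already holds 1024 entries, and any single line longer than 1024 characters is truncated before it is enqueued. A minimal sketch of that policy, with illustrative names:

    using System.Collections.Concurrent;

    sealed class BoundedConsoleQueue
    {
        private const int QueueSizeLimit = 1024;   // matches _webConsoleLineQueueSizeLimit
        private const int MaxLineLength = 1024;
        private readonly ConcurrentQueue<string> _lines = new();

        public void Enqueue(string line)
        {
            // Drop output when the queue is backed up; the feed is best effort.
            if (string.IsNullOrEmpty(line) || _lines.Count >= QueueSizeLimit)
            {
                return;
            }
            if (line.Length > MaxLineLength)
            {
                line = line.Substring(0, MaxLineLength) + "...";
            }
            _lines.Enqueue(line);
        }
    }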
@@ -1,14 +1,12 @@
<Project Sdk="Microsoft.NET.Sdk">

<PropertyGroup>
<TargetFramework>netcoreapp3.1</TargetFramework>
<TargetFramework>net6.0</TargetFramework>
<OutputType>Library</OutputType>
<RuntimeIdentifiers>win-x64;win-x86;linux-x64;linux-arm64;linux-arm;linux-musl-x64;osx-x64</RuntimeIdentifiers>
<RuntimeIdentifiers>win-x64;win-x86;linux-x64;linux-arm64;linux-arm;osx-x64</RuntimeIdentifiers>
<TargetLatestRuntimePatch>true</TargetLatestRuntimePatch>
<AssetTargetFallback>portable-net45+win8</AssetTargetFallback>
<NoWarn>NU1701;NU1603</NoWarn>
<Version>$(Version)</Version>
<TieredCompilationQuickJit>true</TieredCompilationQuickJit>
</PropertyGroup>

<ItemGroup>

@@ -51,7 +51,7 @@ namespace GitHub.Runner.Common
|
||||
Task<PackageMetadata> GetPackageAsync(string packageType, string platform, string version, bool includeToken, CancellationToken cancellationToken);
|
||||
|
||||
// agent update
|
||||
Task<TaskAgent> UpdateAgentUpdateStateAsync(int agentPoolId, int agentId, string currentState);
|
||||
Task<TaskAgent> UpdateAgentUpdateStateAsync(int agentPoolId, int agentId, string currentState, string trace);
|
||||
}
|
||||
|
||||
public sealed class RunnerServer : RunnerService, IRunnerServer
|
||||
@@ -341,25 +341,10 @@ namespace GitHub.Runner.Common
|
||||
return _genericTaskAgentClient.GetPackageAsync(packageType, platform, version, includeToken, cancellationToken: cancellationToken);
|
||||
}
|
||||
|
||||
public Task<TaskAgent> UpdateAgentUpdateStateAsync(int agentPoolId, int agentId, string currentState)
|
||||
public Task<TaskAgent> UpdateAgentUpdateStateAsync(int agentPoolId, int agentId, string currentState, string trace)
|
||||
{
|
||||
CheckConnection(RunnerConnectionType.Generic);
|
||||
return _genericTaskAgentClient.UpdateAgentUpdateStateAsync(agentPoolId, agentId, currentState);
|
||||
}
|
||||
|
||||
//-----------------------------------------------------------------
|
||||
// Runner Auth Url
|
||||
//-----------------------------------------------------------------
|
||||
public Task<string> GetRunnerAuthUrlAsync(int runnerPoolId, int runnerId)
|
||||
{
|
||||
CheckConnection(RunnerConnectionType.MessageQueue);
|
||||
return _messageTaskAgentClient.GetAgentAuthUrlAsync(runnerPoolId, runnerId);
|
||||
}
|
||||
|
||||
public Task ReportRunnerAuthUrlErrorAsync(int runnerPoolId, int runnerId, string error)
|
||||
{
|
||||
CheckConnection(RunnerConnectionType.MessageQueue);
|
||||
return _messageTaskAgentClient.ReportAgentAuthUrlMigrationErrorAsync(runnerPoolId, runnerId, error);
|
||||
return _genericTaskAgentClient.UpdateAgentUpdateStateAsync(agentPoolId, agentId, currentState, trace);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
16
src/Runner.Common/Util/NodeUtil.cs
Normal file
@@ -0,0 +1,16 @@
using System;
using System.Collections.ObjectModel;

namespace GitHub.Runner.Common.Util
{
public static class NodeUtil
{
private const string _defaultNodeVersion = "node16";
public static readonly ReadOnlyCollection<string> BuiltInNodeVersions = new(new[] {"node12", "node16"});
public static string GetInternalNodeVersion()
{
var forcedNodeVersion = Environment.GetEnvironmentVariable(Constants.Variables.Agent.ForcedInternalNodeVersion);
return !string.IsNullOrEmpty(forcedNodeVersion) && BuiltInNodeVersions.Contains(forcedNodeVersion) ? forcedNodeVersion : _defaultNodeVersion;
}
}
}
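A small usage sketch for the new helper, assuming a project that references Runner.Common: the forced version only takes effect when it names one of the built-in versions, otherwise the default of "node16" wins.

    using System;
    using GitHub.Runner.Common.Util;

    // Nothing set: the default internal node version is used.
    Console.WriteLine(NodeUtil.GetInternalNodeVersion());              // "node16"

    // Forcing a known built-in version downgrades internal tooling only.
    Environment.SetEnvironmentVariable("ACTIONS_RUNNER_FORCED_INTERNAL_NODE_VERSION", "node12");
    Console.WriteLine(NodeUtil.GetInternalNodeVersion());              // "node12"

    // Unrecognized values are ignored.
    Environment.SetEnvironmentVariable("ACTIONS_RUNNER_FORCED_INTERNAL_NODE_VERSION", "node99");
    Console.WriteLine(NodeUtil.GetInternalNodeVersion());              // "node16"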
@@ -10,6 +10,7 @@ using System.Net.NetworkInformation;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Common.Util;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Services.Common;
|
||||
|
||||
@@ -314,12 +315,12 @@ namespace GitHub.Runner.Listener.Check
|
||||
});
|
||||
|
||||
var downloadCertScript = Path.Combine(hostContext.GetDirectory(WellKnownDirectory.Bin), "checkScripts", "downloadCert");
|
||||
var node12 = Path.Combine(hostContext.GetDirectory(WellKnownDirectory.Externals), "node12", "bin", $"node{IOUtil.ExeExtension}");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Run '{node12} \"{downloadCertScript}\"' ");
|
||||
var node = Path.Combine(hostContext.GetDirectory(WellKnownDirectory.Externals), NodeUtil.GetInternalNodeVersion(), "bin", $"node{IOUtil.ExeExtension}");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Run '{node} \"{downloadCertScript}\"' ");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} {StringUtil.ConvertToJson(env)}");
|
||||
await processInvoker.ExecuteAsync(
|
||||
hostContext.GetDirectory(WellKnownDirectory.Root),
|
||||
node12,
|
||||
node,
|
||||
$"\"{downloadCertScript}\"",
|
||||
env,
|
||||
true,
|
||||
|
||||
@@ -6,6 +6,7 @@ using System.Net;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Common.Util;
|
||||
using GitHub.Runner.Sdk;
|
||||
|
||||
namespace GitHub.Runner.Listener.Check
|
||||
@@ -144,12 +145,12 @@ namespace GitHub.Runner.Listener.Check
|
||||
});
|
||||
|
||||
var makeWebRequestScript = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Bin), "checkScripts", "makeWebRequest.js");
|
||||
var node12 = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Externals), "node12", "bin", $"node{IOUtil.ExeExtension}");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Run '{node12} \"{makeWebRequestScript}\"' ");
|
||||
var node = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Externals), NodeUtil.GetInternalNodeVersion(), "bin", $"node{IOUtil.ExeExtension}");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Run '{node} \"{makeWebRequestScript}\"' ");
|
||||
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} {StringUtil.ConvertToJson(env)}");
|
||||
await processInvoker.ExecuteAsync(
|
||||
HostContext.GetDirectory(WellKnownDirectory.Root),
|
||||
node12,
|
||||
node,
|
||||
$"\"{makeWebRequestScript}\"",
|
||||
env,
|
||||
true,
|
||||
|
||||
@@ -29,6 +29,7 @@ namespace GitHub.Runner.Listener
|
||||
{
|
||||
Constants.Runner.CommandLine.Flags.Check,
|
||||
Constants.Runner.CommandLine.Flags.Commit,
|
||||
Constants.Runner.CommandLine.Flags.DisableUpdate,
|
||||
Constants.Runner.CommandLine.Flags.Ephemeral,
|
||||
Constants.Runner.CommandLine.Flags.Help,
|
||||
Constants.Runner.CommandLine.Flags.Once,
|
||||
@@ -68,6 +69,7 @@ namespace GitHub.Runner.Listener
|
||||
public bool Unattended => TestFlag(Constants.Runner.CommandLine.Flags.Unattended);
|
||||
public bool Version => TestFlag(Constants.Runner.CommandLine.Flags.Version);
|
||||
public bool Ephemeral => TestFlag(Constants.Runner.CommandLine.Flags.Ephemeral);
|
||||
public bool DisableUpdate => TestFlag(Constants.Runner.CommandLine.Flags.DisableUpdate);
|
||||
|
||||
// Keep this around since customers still relies on it
|
||||
public bool RunOnce => TestFlag(Constants.Runner.CommandLine.Flags.Once);
|
||||
@@ -243,6 +245,7 @@ namespace GitHub.Runner.Listener
|
||||
validator: Validators.ServerUrlValidator);
|
||||
}
|
||||
|
||||
#if OS_WINDOWS
|
||||
public string GetWindowsLogonAccount(string defaultValue, string descriptionMsg)
|
||||
{
|
||||
return GetArgOrPrompt(
|
||||
@@ -260,7 +263,7 @@ namespace GitHub.Runner.Listener
|
||||
defaultValue: string.Empty,
|
||||
validator: Validators.NonEmptyValidator);
|
||||
}
|
||||
|
||||
#endif
|
||||
public string GetWork()
|
||||
{
|
||||
return GetArgOrPrompt(
|
||||
|
||||
@@ -54,7 +54,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
Trace.Info(nameof(LoadSettings));
|
||||
if (!IsConfigured())
|
||||
{
|
||||
throw new InvalidOperationException("Not configured. Run config.(sh/cmd) to configure the runner.");
|
||||
throw new NonRetryableException("Not configured. Run config.(sh/cmd) to configure the runner.");
|
||||
}
|
||||
|
||||
RunnerSettings settings = _store.GetSettings();
|
||||
@@ -196,6 +196,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
TaskAgent agent;
|
||||
while (true)
|
||||
{
|
||||
runnerSettings.DisableUpdate = command.DisableUpdate;
|
||||
runnerSettings.Ephemeral = command.Ephemeral;
|
||||
runnerSettings.AgentName = command.GetRunnerName();
|
||||
|
||||
@@ -213,11 +214,22 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
if (command.GetReplace())
|
||||
{
|
||||
// Update existing agent with new PublicKey, agent version.
|
||||
agent = UpdateExistingAgent(agent, publicKey, userLabels, runnerSettings.Ephemeral);
|
||||
agent = UpdateExistingAgent(agent, publicKey, userLabels, runnerSettings.Ephemeral, command.DisableUpdate);
|
||||
|
||||
try
|
||||
{
|
||||
agent = await _runnerServer.ReplaceAgentAsync(runnerSettings.PoolId, agent);
|
||||
if (command.DisableUpdate &&
|
||||
command.DisableUpdate != agent.DisableUpdate)
|
||||
{
|
||||
throw new NotSupportedException("The GitHub server does not support configuring a self-hosted runner with 'DisableUpdate' flag.");
|
||||
}
|
||||
if (command.Ephemeral &&
|
||||
command.Ephemeral != agent.Ephemeral)
|
||||
{
|
||||
throw new NotSupportedException("The GitHub server does not support configuring a self-hosted runner with 'Ephemeral' flag.");
|
||||
}
|
||||
|
||||
_term.WriteSuccessMessage("Successfully replaced the runner");
|
||||
break;
|
||||
}
|
||||
@@ -236,11 +248,22 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
else
|
||||
{
|
||||
// Create a new agent.
|
||||
agent = CreateNewAgent(runnerSettings.AgentName, publicKey, userLabels, runnerSettings.Ephemeral);
|
||||
agent = CreateNewAgent(runnerSettings.AgentName, publicKey, userLabels, runnerSettings.Ephemeral, command.DisableUpdate);
|
||||
|
||||
try
|
||||
{
|
||||
agent = await _runnerServer.AddAgentAsync(runnerSettings.PoolId, agent);
|
||||
if (command.DisableUpdate &&
|
||||
command.DisableUpdate != agent.DisableUpdate)
|
||||
{
|
||||
throw new NotSupportedException("The GitHub server does not support configuring a self-hosted runner with 'DisableUpdate' flag.");
|
||||
}
|
||||
if (command.Ephemeral &&
|
||||
command.Ephemeral != agent.Ephemeral)
|
||||
{
|
||||
throw new NotSupportedException("The GitHub server does not support configuring a self-hosted runner with 'Ephemeral' flag.");
|
||||
}
|
||||
|
||||
_term.WriteSuccessMessage("Runner successfully added");
|
||||
break;
|
||||
}
|
||||
@@ -466,7 +489,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
}
|
||||
|
||||
|
||||
private TaskAgent UpdateExistingAgent(TaskAgent agent, RSAParameters publicKey, ISet<string> userLabels, bool ephemeral)
|
||||
private TaskAgent UpdateExistingAgent(TaskAgent agent, RSAParameters publicKey, ISet<string> userLabels, bool ephemeral, bool disableUpdate)
|
||||
{
|
||||
ArgUtil.NotNull(agent, nameof(agent));
|
||||
agent.Authorization = new TaskAgentAuthorization
|
||||
@@ -478,6 +501,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
agent.Version = BuildConstants.RunnerPackage.Version;
|
||||
agent.OSDescription = RuntimeInformation.OSDescription;
|
||||
agent.Ephemeral = ephemeral;
|
||||
agent.DisableUpdate = disableUpdate;
|
||||
agent.MaxParallelism = 1;
|
||||
|
||||
agent.Labels.Clear();
|
||||
@@ -494,7 +518,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
return agent;
|
||||
}
|
||||
|
||||
private TaskAgent CreateNewAgent(string agentName, RSAParameters publicKey, ISet<string> userLabels, bool ephemeral)
|
||||
private TaskAgent CreateNewAgent(string agentName, RSAParameters publicKey, ISet<string> userLabels, bool ephemeral, bool disableUpdate)
|
||||
{
|
||||
TaskAgent agent = new TaskAgent(agentName)
|
||||
{
|
||||
@@ -506,6 +530,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
Version = BuildConstants.RunnerPackage.Version,
|
||||
OSDescription = RuntimeInformation.OSDescription,
|
||||
Ephemeral = ephemeral,
|
||||
DisableUpdate = disableUpdate
|
||||
};
|
||||
|
||||
agent.Labels.Add(new AgentLabel("self-hosted", LabelType.System));
|
||||
@@ -588,32 +613,52 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
throw new ArgumentException($"'{githubUrl}' should point to an org or repository.");
|
||||
}
|
||||
|
||||
using (var httpClientHandler = HostContext.CreateHttpClientHandler())
|
||||
using (var httpClient = new HttpClient(httpClientHandler))
|
||||
int retryCount = 0;
|
||||
while(retryCount < 3)
|
||||
{
|
||||
var base64EncodingToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"github:{githubToken}"));
|
||||
HostContext.SecretMasker.AddValue(base64EncodingToken);
|
||||
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("basic", base64EncodingToken);
|
||||
httpClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents);
|
||||
httpClient.DefaultRequestHeaders.Accept.ParseAdd("application/vnd.github.v3+json");
|
||||
|
||||
var response = await httpClient.PostAsync(githubApiUrl, new StringContent(string.Empty));
|
||||
|
||||
if (response.IsSuccessStatusCode)
|
||||
using (var httpClientHandler = HostContext.CreateHttpClientHandler())
|
||||
using (var httpClient = new HttpClient(httpClientHandler))
|
||||
{
|
||||
Trace.Info($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}'");
|
||||
var jsonResponse = await response.Content.ReadAsStringAsync();
|
||||
return StringUtil.ConvertFromJson<GitHubRunnerRegisterToken>(jsonResponse);
|
||||
}
|
||||
else
|
||||
{
|
||||
_term.WriteError($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}'");
|
||||
var errorResponse = await response.Content.ReadAsStringAsync();
|
||||
_term.WriteError(errorResponse);
|
||||
response.EnsureSuccessStatusCode();
|
||||
return null;
|
||||
var base64EncodingToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"github:{githubToken}"));
|
||||
HostContext.SecretMasker.AddValue(base64EncodingToken);
|
||||
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("basic", base64EncodingToken);
|
||||
httpClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents);
|
||||
httpClient.DefaultRequestHeaders.Accept.ParseAdd("application/vnd.github.v3+json");
|
||||
try
|
||||
{
|
||||
var response = await httpClient.PostAsync(githubApiUrl, new StringContent(string.Empty));
|
||||
|
||||
if (response.IsSuccessStatusCode)
|
||||
{
|
||||
Trace.Info($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}'");
|
||||
var jsonResponse = await response.Content.ReadAsStringAsync();
|
||||
return StringUtil.ConvertFromJson<GitHubRunnerRegisterToken>(jsonResponse);
|
||||
}
|
||||
else if(response.StatusCode == System.Net.HttpStatusCode.NotFound)
|
||||
{
|
||||
// It doesn't make sense to retry in this case, so just stop
|
||||
break;
|
||||
}
|
||||
else
|
||||
{
|
||||
_term.WriteError($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}'");
|
||||
var errorResponse = await response.Content.ReadAsStringAsync();
|
||||
_term.WriteError(errorResponse);
|
||||
response.EnsureSuccessStatusCode();
|
||||
}
|
||||
}
|
||||
catch(Exception ex) when (retryCount < 2)
|
||||
{
|
||||
retryCount++;
|
||||
Trace.Error($"Failed to get JIT runner token -- Atempt: {retryCount}");
|
||||
Trace.Error(ex);
|
||||
Trace.Info("Retrying in 5 seconds");
|
||||
}
|
||||
}
|
||||
var backOff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(1), TimeSpan.FromSeconds(5));
|
||||
await Task.Delay(backOff);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
private async Task<GitHubAuthResult> GetTenantCredential(string githubUrl, string githubToken, string runnerEvent)
|
||||
@@ -629,35 +674,57 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/actions/runner-registration";
|
||||
}
|
||||
|
||||
using (var httpClientHandler = HostContext.CreateHttpClientHandler())
|
||||
using (var httpClient = new HttpClient(httpClientHandler))
|
||||
int retryCount = 0;
|
||||
while (retryCount < 3)
|
||||
{
|
||||
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("RemoteAuth", githubToken);
|
||||
httpClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents);
|
||||
|
||||
var bodyObject = new Dictionary<string, string>()
|
||||
using (var httpClientHandler = HostContext.CreateHttpClientHandler())
|
||||
using (var httpClient = new HttpClient(httpClientHandler))
|
||||
{
|
||||
{"url", githubUrl},
|
||||
{"runner_event", runnerEvent}
|
||||
};
|
||||
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("RemoteAuth", githubToken);
|
||||
httpClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents);
|
||||
|
||||
var response = await httpClient.PostAsync(githubApiUrl, new StringContent(StringUtil.ConvertToJson(bodyObject), null, "application/json"));
|
||||
var bodyObject = new Dictionary<string, string>()
|
||||
{
|
||||
{"url", githubUrl},
|
||||
{"runner_event", runnerEvent}
|
||||
};
|
||||
|
||||
if (response.IsSuccessStatusCode)
|
||||
{
|
||||
Trace.Info($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}'");
|
||||
var jsonResponse = await response.Content.ReadAsStringAsync();
|
||||
return StringUtil.ConvertFromJson<GitHubAuthResult>(jsonResponse);
|
||||
}
|
||||
else
|
||||
{
|
||||
_term.WriteError($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}'");
|
||||
var errorResponse = await response.Content.ReadAsStringAsync();
|
||||
_term.WriteError(errorResponse);
|
||||
response.EnsureSuccessStatusCode();
|
||||
return null;
|
||||
try
|
||||
{
|
||||
var response = await httpClient.PostAsync(githubApiUrl, new StringContent(StringUtil.ConvertToJson(bodyObject), null, "application/json"));
|
||||
|
||||
if(response.IsSuccessStatusCode)
|
||||
{
|
||||
Trace.Info($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}'");
|
||||
var jsonResponse = await response.Content.ReadAsStringAsync();
|
||||
return StringUtil.ConvertFromJson<GitHubAuthResult>(jsonResponse);
|
||||
}
|
||||
else if(response.StatusCode == System.Net.HttpStatusCode.NotFound)
|
||||
{
|
||||
// It doesn't make sense to retry in this case, so just stop
|
||||
break;
|
||||
}
|
||||
else
|
||||
{
|
||||
_term.WriteError($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}'");
|
||||
var errorResponse = await response.Content.ReadAsStringAsync();
|
||||
_term.WriteError(errorResponse);
|
||||
// Something else bad happened, let's go to our retry logic
|
||||
response.EnsureSuccessStatusCode();
|
||||
}
|
||||
}
|
||||
catch(Exception ex) when (retryCount < 2)
|
||||
{
|
||||
retryCount++;
|
||||
Trace.Error($"Failed to get tenant credentials -- Atempt: {retryCount}");
|
||||
Trace.Error(ex);
|
||||
Trace.Info("Retrying in 5 seconds");
|
||||
}
|
||||
}
|
||||
var backOff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(1), TimeSpan.FromSeconds(5));
|
||||
await Task.Delay(backOff);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
#if OS_WINDOWS
|
||||
#pragma warning disable CA1416
|
||||
using System;
|
||||
using System.Collections;
|
||||
using System.Collections.Generic;
|
||||
@@ -141,7 +142,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
Trace.Entering();
|
||||
LocalGroupInfo groupInfo = new LocalGroupInfo();
|
||||
groupInfo.Name = groupName;
|
||||
groupInfo.Comment = StringUtil.Format("Built-in group used by Team Foundation Server.");
|
||||
groupInfo.Comment = StringUtil.Format("Built-in group used by GitHub Actions Runner.");
|
||||
|
||||
int returnCode = NetLocalGroupAdd(null, // computer name
|
||||
1, // 1 means include comment
|
||||
@@ -1327,4 +1328,5 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
public IntPtr hProfile;
|
||||
}
|
||||
}
|
||||
#pragma warning restore CA1416
|
||||
#endif
|
||||
|
||||
@@ -48,13 +48,12 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
string repoOrOrgName = regex.Replace(settings.RepoOrOrgName, "-");
|
||||
|
||||
serviceName = StringUtil.Format(serviceNamePattern, repoOrOrgName, settings.AgentName);
|
||||
|
||||
if (serviceName.Length > 80)
|
||||
if (serviceName.Length > MaxServiceNameLength)
|
||||
{
|
||||
Trace.Verbose($"Calculated service name is too long (> 80 chars). Trying again by calculating a shorter name.");
|
||||
|
||||
int exceededCharLength = serviceName.Length - 80;
|
||||
string repoOrOrgNameSubstring = StringUtil.SubstringPrefix(repoOrOrgName, 45);
|
||||
Trace.Verbose($"Calculated service name is too long (> {MaxServiceNameLength} chars). Trying again by calculating a shorter name.");
|
||||
// Add 5 to account for the "-xxxx" random suffix appended at the end
|
||||
int exceededCharLength = serviceName.Length - MaxServiceNameLength + 5;
|
||||
string repoOrOrgNameSubstring = StringUtil.SubstringPrefix(repoOrOrgName, MaxRepoOrgCharacters);
|
||||
|
||||
exceededCharLength -= repoOrOrgName.Length - repoOrOrgNameSubstring.Length;
|
||||
|
||||
@@ -66,6 +65,10 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
runnerNameSubstring = StringUtil.SubstringPrefix(settings.AgentName, settings.AgentName.Length - exceededCharLength);
|
||||
}
|
||||
|
||||
// Let's add a suffix with a random number to reduce the chance of collisions between runner names once we truncate
|
||||
var random = new Random();
|
||||
var num = random.Next(1000, 9999).ToString();
|
||||
runnerNameSubstring += $"-{num}";
|
||||
serviceName = StringUtil.Format(serviceNamePattern, repoOrOrgNameSubstring, runnerNameSubstring);
|
||||
}
|
||||
|
||||
@@ -73,5 +76,12 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
|
||||
Trace.Info($"Service name '{serviceName}' display name '{serviceDisplayName}' will be used for service configuration.");
|
||||
}
|
||||
#if (OS_LINUX || OS_OSX)
|
||||
const int MaxServiceNameLength = 150;
|
||||
const int MaxRepoOrgCharacters = 70;
|
||||
#elif OS_WINDOWS
|
||||
const int MaxServiceNameLength = 80;
|
||||
const int MaxRepoOrgCharacters = 45;
|
||||
#endif
|
||||
}
|
||||
}
|
||||
|
||||
@@ -67,6 +67,8 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
return !string.IsNullOrEmpty(value);
|
||||
}
|
||||
|
||||
#if OS_WINDOWS
|
||||
#pragma warning disable CA1416
|
||||
public static bool NTAccountValidator(string arg)
|
||||
{
|
||||
if (string.IsNullOrEmpty(arg) || String.IsNullOrEmpty(arg.TrimStart('.', '\\')))
|
||||
@@ -87,5 +89,7 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
|
||||
return true;
|
||||
}
|
||||
#pragma warning restore CA1416
|
||||
#endif
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
#if OS_WINDOWS
|
||||
#pragma warning disable CA1416
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
@@ -169,4 +170,5 @@ namespace GitHub.Runner.Listener.Configuration
|
||||
}
|
||||
}
|
||||
}
|
||||
#pragma warning restore CA1416
|
||||
#endif
|
||||
|
||||
@@ -2,17 +2,19 @@ using System;
|
||||
using System.Collections.Concurrent;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Text.RegularExpressions;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Common.Util;
|
||||
using GitHub.Services.WebApi;
|
||||
using Pipelines = GitHub.DistributedTask.Pipelines;
|
||||
using System.Linq;
|
||||
using GitHub.Services.Common;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Common.Util;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Services.Common;
|
||||
using GitHub.Services.WebApi;
|
||||
using GitHub.Services.WebApi.Jwt;
|
||||
using Pipelines = GitHub.DistributedTask.Pipelines;
|
||||
|
||||
namespace GitHub.Runner.Listener
|
||||
{
|
||||
@@ -34,6 +36,7 @@ namespace GitHub.Runner.Listener
|
||||
// and the server will not send another job while this one is still running.
|
||||
public sealed class JobDispatcher : RunnerService, IJobDispatcher
|
||||
{
|
||||
private static Regex _invalidJsonRegex = new Regex(@"invalid\ Json\ at\ position\ '(\d+)':", RegexOptions.Compiled | RegexOptions.IgnoreCase);
|
||||
private readonly Lazy<Dictionary<long, TaskResult>> _localRunJobResult = new Lazy<Dictionary<long, TaskResult>>();
|
||||
private int _poolId;
|
||||
|
||||
@@ -282,7 +285,7 @@ namespace GitHub.Runner.Listener
|
||||
{
|
||||
// at this point, the job execution might have hit a deadlock and may not even be cancellable.
|
||||
// no need to localize; this exception string should never happen.
|
||||
throw new InvalidOperationException($"Job dispatch process for {jobDispatch.JobId} has encountered unexpected error, the dispatch task is not able to be canceled within 45 seconds.");
|
||||
throw new InvalidOperationException($"Job dispatch process for {jobDispatch.JobId} has encountered unexpected error, the dispatch task is not able to be cancelled within 45 seconds.");
|
||||
}
|
||||
}
|
||||
else
|
||||
@@ -360,7 +363,7 @@ namespace GitHub.Runner.Listener
|
||||
Trace.Info($"Start renew job request {requestId} for job {message.JobId}.");
|
||||
Task renewJobRequest = RenewJobRequestAsync(_poolId, requestId, lockToken, orchestrationId, firstJobRequestRenewed, lockRenewalTokenSource.Token);
|
||||
|
||||
// wait till first renew succeed or job request is canceled
|
||||
// wait till first renew succeed or job request is cancelled
|
||||
// do not even start the worker if the first renew fails
|
||||
await Task.WhenAny(firstJobRequestRenewed.Task, renewJobRequest, Task.Delay(-1, jobRequestCancellationToken));
|
||||
|
||||
@@ -701,7 +704,7 @@ namespace GitHub.Runner.Listener
|
||||
{
|
||||
// OperationCanceledException may be caused by an HTTP timeout or by _lockRenewalTokenSource.Cancel();
|
||||
// Stop renewing only when the cancellation token has fired.
|
||||
Trace.Info($"job renew has been canceled, stop renew job request {requestId}.");
|
||||
Trace.Info($"job renew has been cancelled, stop renew job request {requestId}.");
|
||||
return;
|
||||
}
|
||||
catch (Exception ex)
|
||||
@@ -759,7 +762,7 @@ namespace GitHub.Runner.Listener
|
||||
}
|
||||
catch (OperationCanceledException) when (token.IsCancellationRequested)
|
||||
{
|
||||
Trace.Info($"job renew has been canceled, stop renew job request {requestId}.");
|
||||
Trace.Info($"job renew has been cancelled, stop renew job request {requestId}.");
|
||||
}
|
||||
}
|
||||
else
|
||||
@@ -964,6 +967,30 @@ namespace GitHub.Runner.Listener
|
||||
TimelineRecord jobRecord = timeline.Records.FirstOrDefault(x => x.Id == message.JobId && x.RecordType == "Job");
|
||||
ArgUtil.NotNull(jobRecord, nameof(jobRecord));
|
||||
|
||||
try
|
||||
{
|
||||
if (!string.IsNullOrEmpty(errorMessage) &&
|
||||
message.Variables.TryGetValue("DistributedTask.EnableRunnerIPCDebug", out var enableRunnerIPCDebug) &&
|
||||
StringUtil.ConvertToBoolean(enableRunnerIPCDebug.Value))
|
||||
{
|
||||
// the trace should be best effort and not affect any job result
|
||||
var match = _invalidJsonRegex.Match(errorMessage);
|
||||
if (match.Success &&
|
||||
match.Groups.Count == 2)
|
||||
{
|
||||
var jsonPosition = int.Parse(match.Groups[1].Value);
|
||||
var serializedJobMessage = JsonUtility.ToString(message);
|
||||
var originalJson = serializedJobMessage.Substring(jsonPosition - 10, 20);
|
||||
errorMessage = $"Runner sent Json at position '{jsonPosition}': {originalJson} ({Convert.ToBase64String(Encoding.UTF8.GetBytes(originalJson))})\n{errorMessage}";
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Error(ex);
|
||||
errorMessage = $"Fail to check json IPC error: {ex.Message}\n{errorMessage}";
|
||||
}
|
||||
|
||||
var unhandledExceptionIssue = new Issue() { Type = IssueType.Error, Message = errorMessage };
|
||||
unhandledExceptionIssue.Data[Constants.Runner.InternalTelemetryIssueDataKey] = Constants.Runner.WorkerCrash;
|
||||
jobRecord.ErrorCount++;
|
||||
|
||||
@@ -1,18 +1,18 @@
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Listener.Configuration;
|
||||
using GitHub.Services.Common;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Diagnostics;
|
||||
using System.IO;
|
||||
using System.Runtime.InteropServices;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using System.Security.Cryptography;
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
using GitHub.Services.OAuth;
|
||||
using System.Diagnostics;
|
||||
using System.Runtime.InteropServices;
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Listener.Configuration;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Services.Common;
|
||||
using GitHub.Services.OAuth;
|
||||
|
||||
namespace GitHub.Runner.Listener
|
||||
{
|
||||
@@ -33,6 +33,7 @@ namespace GitHub.Runner.Listener
|
||||
private IRunnerServer _runnerServer;
|
||||
private TaskAgentSession _session;
|
||||
private TimeSpan _getNextMessageRetryInterval;
|
||||
private bool _accessTokenRevoked = false;
|
||||
private readonly TimeSpan _sessionCreationRetryInterval = TimeSpan.FromSeconds(30);
|
||||
private readonly TimeSpan _sessionConflictRetryLimit = TimeSpan.FromMinutes(4);
|
||||
private readonly TimeSpan _clockSkewRetryLimit = TimeSpan.FromMinutes(30);
|
||||
@@ -111,6 +112,7 @@ namespace GitHub.Runner.Listener
|
||||
catch (TaskAgentAccessTokenExpiredException)
|
||||
{
|
||||
Trace.Info("Runner OAuth token has been revoked. Session creation failed.");
|
||||
_accessTokenRevoked = true;
|
||||
throw;
|
||||
}
|
||||
catch (Exception ex)
|
||||
@@ -154,9 +156,16 @@ namespace GitHub.Runner.Listener
|
||||
{
|
||||
if (_session != null && _session.SessionId != Guid.Empty)
|
||||
{
|
||||
using (var ts = new CancellationTokenSource(TimeSpan.FromSeconds(30)))
|
||||
if (!_accessTokenRevoked)
|
||||
{
|
||||
await _runnerServer.DeleteAgentSessionAsync(_settings.PoolId, _session.SessionId, ts.Token);
|
||||
using (var ts = new CancellationTokenSource(TimeSpan.FromSeconds(30)))
|
||||
{
|
||||
await _runnerServer.DeleteAgentSessionAsync(_settings.PoolId, _session.SessionId, ts.Token);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
Trace.Warning("Runner OAuth token has been revoked. Skip deleting session.");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -205,6 +214,7 @@ namespace GitHub.Runner.Listener
|
||||
catch (TaskAgentAccessTokenExpiredException)
|
||||
{
|
||||
Trace.Info("Runner OAuth token has been revoked. Unable to pull message.");
|
||||
_accessTokenRevoked = true;
|
||||
throw;
|
||||
}
|
||||
catch (Exception ex)
|
||||
|
||||
@@ -1,14 +1,14 @@
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
|
||||
<PropertyGroup>
|
||||
<TargetFramework>netcoreapp3.1</TargetFramework>
|
||||
<TargetFramework>net6.0</TargetFramework>
|
||||
<OutputType>Exe</OutputType>
|
||||
<RuntimeIdentifiers>win-x64;win-x86;linux-x64;linux-arm64;linux-arm;linux-musl-x64;osx-x64</RuntimeIdentifiers>
|
||||
<RuntimeIdentifiers>win-x64;win-x86;linux-x64;linux-arm64;linux-arm;osx-x64</RuntimeIdentifiers>
|
||||
<TargetLatestRuntimePatch>true</TargetLatestRuntimePatch>
|
||||
<AssetTargetFallback>portable-net45+win8</AssetTargetFallback>
|
||||
<NoWarn>NU1701;NU1603</NoWarn>
|
||||
<Version>$(Version)</Version>
|
||||
<TieredCompilationQuickJit>true</TieredCompilationQuickJit>
|
||||
<PredefinedCulturesOnly>false</PredefinedCulturesOnly>
|
||||
<PublishReadyToRunComposite>true</PublishReadyToRunComposite>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
@@ -25,6 +25,12 @@
|
||||
<PackageReference Include="System.ServiceProcess.ServiceController" Version="4.4.0" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<EmbeddedResource Include="..\Misc\runnercoreassets">
|
||||
<LogicalName>GitHub.Runner.Listener.runnercoreassets</LogicalName>
|
||||
</EmbeddedResource>
|
||||
</ItemGroup>
|
||||
|
||||
<PropertyGroup Condition=" '$(Configuration)' == 'Debug' ">
|
||||
<DebugType>portable</DebugType>
|
||||
</PropertyGroup>
|
||||
|
||||
@@ -12,6 +12,7 @@ using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Sdk;
|
||||
using System.Linq;
|
||||
using GitHub.Runner.Listener.Check;
|
||||
using System.Collections.Generic;
|
||||
|
||||
namespace GitHub.Runner.Listener
|
||||
{
|
||||
@@ -318,7 +319,7 @@ namespace GitHub.Runner.Listener
|
||||
|
||||
IJobDispatcher jobDispatcher = null;
|
||||
CancellationTokenSource messageQueueLoopTokenSource = CancellationTokenSource.CreateLinkedTokenSource(HostContext.RunnerShutdownToken);
|
||||
|
||||
|
||||
// Should we try to cleanup ephemeral runners
|
||||
bool runOnceJobCompleted = false;
|
||||
try
|
||||
@@ -407,8 +408,29 @@ namespace GitHub.Runner.Listener
|
||||
{
|
||||
autoUpdateInProgress = true;
|
||||
var runnerUpdateMessage = JsonUtility.FromString<AgentRefreshMessage>(message.Body);
|
||||
#if DEBUG
|
||||
// Can mock the update for testing
|
||||
if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_IS_MOCK_UPDATE")))
|
||||
{
|
||||
|
||||
// The mock_update_messages.json file should be an object with keys being the current version and values being the targeted mock version object
|
||||
// Example: { "2.283.2": {"targetVersion":"2.284.1"}, "2.284.1": {"targetVersion":"2.285.0"}}
|
||||
var mockUpdatesPath = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), "mock_update_messages.json");
|
||||
if (File.Exists(mockUpdatesPath))
|
||||
{
|
||||
var mockUpdateMessages = JsonUtility.FromString<Dictionary<string, AgentRefreshMessage>>(File.ReadAllText(mockUpdatesPath));
|
||||
if (mockUpdateMessages.ContainsKey(BuildConstants.RunnerPackage.Version))
|
||||
{
|
||||
var mockTargetVersion = mockUpdateMessages[BuildConstants.RunnerPackage.Version].TargetVersion;
|
||||
_term.WriteLine($"Mocking update, using version {mockTargetVersion} instead of {runnerUpdateMessage.TargetVersion}");
|
||||
Trace.Info($"Mocking update, using version {mockTargetVersion} instead of {runnerUpdateMessage.TargetVersion}");
|
||||
runnerUpdateMessage = new AgentRefreshMessage(runnerUpdateMessage.AgentId, mockTargetVersion, runnerUpdateMessage.Timeout);
|
||||
}
|
||||
}
|
||||
}
|
||||
#endif
|
||||
var selfUpdater = HostContext.GetService<ISelfUpdater>();
|
||||
selfUpdateTask = selfUpdater.SelfUpdate(runnerUpdateMessage, jobDispatcher, !runOnce && HostContext.StartupType != StartupType.Service, HostContext.RunnerShutdownToken);
|
||||
selfUpdateTask = selfUpdater.SelfUpdate(runnerUpdateMessage, jobDispatcher, false, HostContext.RunnerShutdownToken);
|
||||
Trace.Info("Refresh message received, kick-off selfupdate background process.");
|
||||
}
|
||||
else
|
||||
@@ -425,6 +447,7 @@ namespace GitHub.Runner.Listener
|
||||
}
|
||||
else
|
||||
{
|
||||
Trace.Info($"Received job message of length {message.Body.Length} from service, with hash '{IOUtil.GetSha256Hash(message.Body)}'");
|
||||
var jobMessage = StringUtil.ConvertFromJson<Pipelines.AgentJobRequestMessage>(message.Body);
|
||||
jobDispatcher.Run(jobMessage, runOnce);
|
||||
if (runOnce)
|
||||
@@ -539,6 +562,7 @@ Config Options:
|
||||
--work string Relative runner work directory (default {Constants.Path.WorkDirectory})
|
||||
--replace Replace any existing runner with the same name (default false)
|
||||
--pat GitHub personal access token used for checking network connectivity when executing `.{separator}run.{ext} --check`
|
||||
--disableupdate Disable self-hosted runner automatic update to the latest released version`
|
||||
--ephemeral Configure the runner to only take one job and then let the service un-configure the runner after the job finishes (default false)");
|
||||
|
||||
#if OS_WINDOWS
|
||||
@@ -546,7 +570,7 @@ Config Options:
|
||||
_term.WriteLine($@" --windowslogonaccount string Account to run the service as. Requires runasservice");
|
||||
_term.WriteLine($@" --windowslogonpassword string Password for the service account. Requires runasservice");
|
||||
#endif
|
||||
_term.WriteLine($@"
|
||||
_term.WriteLine($@"
|
||||
Examples:
|
||||
Check GitHub server network connectivity:
|
||||
.{separator}run.{ext} --check --url <url> --pat <pat>
|
||||
|
||||
File diff suppressed because it is too large
@@ -1,14 +1,14 @@
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
|
||||
<PropertyGroup>
|
||||
<TargetFramework>netcoreapp3.1</TargetFramework>
|
||||
<TargetFramework>net6.0</TargetFramework>
|
||||
<OutputType>Exe</OutputType>
|
||||
<RuntimeIdentifiers>win-x64;win-x86;linux-x64;linux-arm64;linux-arm;linux-musl-x64;osx-x64</RuntimeIdentifiers>
|
||||
<RuntimeIdentifiers>win-x64;win-x86;linux-x64;linux-arm64;linux-arm;osx-x64</RuntimeIdentifiers>
|
||||
<TargetLatestRuntimePatch>true</TargetLatestRuntimePatch>
|
||||
<AssetTargetFallback>portable-net45+win8</AssetTargetFallback>
|
||||
<NoWarn>NU1701;NU1603</NoWarn>
|
||||
<Version>$(Version)</Version>
|
||||
<TieredCompilationQuickJit>true</TieredCompilationQuickJit>
|
||||
<PredefinedCulturesOnly>false</PredefinedCulturesOnly>
|
||||
<PublishReadyToRunComposite>true</PublishReadyToRunComposite>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
|
||||
@@ -444,7 +444,7 @@ namespace GitHub.Runner.Plugins.Artifact
|
||||
{
|
||||
// We should never
|
||||
context.Error($"Error '{ex.Message}' when downloading file '{fileToDownload}'. (Downloader {downloaderId})");
|
||||
throw ex;
|
||||
throw;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -469,7 +469,7 @@ namespace GitHub.Runner.Plugins.Artifact
|
||||
try
|
||||
{
|
||||
uploadTimer.Restart();
|
||||
using (HttpResponseMessage response = await _fileContainerHttpClient.UploadFileAsync(_containerId, itemPath, fs, _projectId, cancellationToken: token, chunkSize: 4 * 1024 * 1024))
|
||||
using (HttpResponseMessage response = await _fileContainerHttpClient.UploadFileAsync(_containerId, itemPath, fs, _projectId, cancellationToken: token))
|
||||
{
|
||||
if (response == null || response.StatusCode != HttpStatusCode.Created)
|
||||
{
|
||||
@@ -528,7 +528,7 @@ namespace GitHub.Runner.Plugins.Artifact
|
||||
catch (Exception ex)
|
||||
{
|
||||
context.Output($"File error '{ex.Message}' when uploading file '{fileToUpload}'.");
|
||||
throw ex;
|
||||
throw;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -166,7 +166,7 @@ namespace GitHub.Runner.Plugins.Repository.v1_0
|
||||
}
|
||||
else
|
||||
{
|
||||
// delete the index.lock file left by previous canceled build or any operation cause git.exe crash last time.
|
||||
// delete the index.lock file left by previous cancelled build or any operation cause git.exe crash last time.
|
||||
string lockFile = Path.Combine(targetPath, ".git\\index.lock");
|
||||
if (File.Exists(lockFile))
|
||||
{
|
||||
@@ -181,7 +181,7 @@ namespace GitHub.Runner.Plugins.Repository.v1_0
|
||||
}
|
||||
}
|
||||
|
||||
// delete the shallow.lock file left by previous canceled build or any operation cause git.exe crash last time.
|
||||
// delete the shallow.lock file left by previous cancelled build or any operation cause git.exe crash last time.
|
||||
string shallowLockFile = Path.Combine(targetPath, ".git\\shallow.lock");
|
||||
if (File.Exists(shallowLockFile))
|
||||
{
|
||||
|
||||
@@ -150,7 +150,7 @@ namespace GitHub.Runner.Plugins.Repository.v1_1
|
||||
}
|
||||
else
|
||||
{
|
||||
// delete the index.lock file left by previous canceled build or any operation cause git.exe crash last time.
|
||||
// delete the index.lock file left by previous cancelled build or any operation cause git.exe crash last time.
|
||||
string lockFile = Path.Combine(targetPath, ".git\\index.lock");
|
||||
if (File.Exists(lockFile))
|
||||
{
|
||||
@@ -165,7 +165,7 @@ namespace GitHub.Runner.Plugins.Repository.v1_1
|
||||
}
|
||||
}
|
||||
|
||||
// delete the shallow.lock file left by previous canceled build or any operation cause git.exe crash last time.
|
||||
// delete the shallow.lock file left by previous cancelled build or any operation cause git.exe crash last time.
|
||||
string shallowLockFile = Path.Combine(targetPath, ".git\\shallow.lock");
|
||||
if (File.Exists(shallowLockFile))
|
||||
{
|
||||
|
||||
@@ -1,14 +1,12 @@
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
|
||||
<PropertyGroup>
|
||||
<TargetFramework>netcoreapp3.1</TargetFramework>
|
||||
<TargetFramework>net6.0</TargetFramework>
|
||||
<OutputType>Library</OutputType>
|
||||
<RuntimeIdentifiers>win-x64;win-x86;linux-x64;linux-arm64;linux-arm;linux-musl-x64;osx-x64</RuntimeIdentifiers>
|
||||
<RuntimeIdentifiers>win-x64;win-x86;linux-x64;linux-arm64;linux-arm;osx-x64</RuntimeIdentifiers>
|
||||
<TargetLatestRuntimePatch>true</TargetLatestRuntimePatch>
|
||||
<AssetTargetFallback>portable-net45+win8</AssetTargetFallback>
|
||||
<NoWarn>NU1701;NU1603</NoWarn>
|
||||
<Version>$(Version)</Version>
|
||||
<TieredCompilationQuickJit>true</TieredCompilationQuickJit>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
|
||||
@@ -1,14 +1,12 @@
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
|
||||
<PropertyGroup>
|
||||
<TargetFramework>netcoreapp3.1</TargetFramework>
|
||||
<TargetFramework>net6.0</TargetFramework>
|
||||
<OutputType>Library</OutputType>
|
||||
<RuntimeIdentifiers>win-x64;win-x86;linux-x64;linux-arm64;linux-arm;linux-musl-x64;osx-x64</RuntimeIdentifiers>
|
||||
<RuntimeIdentifiers>win-x64;win-x86;linux-x64;linux-arm64;linux-arm;osx-x64</RuntimeIdentifiers>
|
||||
<TargetLatestRuntimePatch>true</TargetLatestRuntimePatch>
|
||||
<AssetTargetFallback>portable-net45+win8</AssetTargetFallback>
|
||||
<NoWarn>NU1701;NU1603</NoWarn>
|
||||
<Version>$(Version)</Version>
|
||||
<TieredCompilationQuickJit>true</TieredCompilationQuickJit>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
|
||||
@@ -108,7 +108,7 @@ namespace GitHub.Runner.Sdk
|
||||
}
|
||||
|
||||
// Create a new token source for the parallel query. The parallel query should be
|
||||
// canceled after the first error is encountered. Otherwise the number of exceptions
|
||||
// cancelled after the first error is encountered. Otherwise the number of exceptions
|
||||
// could get out of control for a large directory with access denied on every file.
|
||||
using (var tokenSource = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken))
|
||||
{
|
||||
|
||||
@@ -27,6 +27,11 @@ namespace GitHub.Runner.Sdk
|
||||
|
||||
VssClientHttpRequestSettings.Default.UserAgent = headerValues;
|
||||
VssHttpMessageHandler.DefaultWebProxy = proxy;
|
||||
|
||||
if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_TLS_NO_VERIFY")))
|
||||
{
|
||||
VssClientHttpRequestSettings.Default.ServerCertificateValidationCallback = HttpClientHandler.DangerousAcceptAnyServerCertificateValidator;
|
||||
}
|
||||
}
|
||||
|
||||
public static VssConnection CreateConnection(Uri serverUri, VssCredentials credentials, IEnumerable<DelegatingHandler> additionalDelegatingHandler = null, TimeSpan? timeout = null)
|
||||
|
||||
@@ -178,7 +178,7 @@ namespace GitHub.Runner.Worker
|
||||
Message = $"Invoked ::stopCommand:: with token: [{stopToken}]",
|
||||
Type = JobTelemetryType.ActionCommand
|
||||
};
|
||||
context.JobTelemetry.Add(telemetry);
|
||||
context.Global.JobTelemetry.Add(telemetry);
|
||||
}
|
||||
|
||||
if (isTokenInvalid && !allowUnsecureStopCommandTokens)
|
||||
@@ -381,6 +381,13 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
HostContext.SecretMasker.AddValue(command.Data);
|
||||
Trace.Info($"Add new secret mask with length of {command.Data.Length}");
|
||||
|
||||
// Also add each individual line. Typically individual lines are processed from STDOUT of child processes.
|
||||
var split = command.Data.Split(new[] { '\r', '\n' }, StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries);
|
||||
foreach (var item in split)
|
||||
{
|
||||
HostContext.SecretMasker.AddValue(item);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -278,7 +278,7 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
// Clone action so we can modify the condition without affecting the original
|
||||
var clonedAction = action.Clone() as Pipelines.ActionStep;
|
||||
_cachedEmbeddedPostSteps[parentStepId].Push(clonedAction);
|
||||
_cachedEmbeddedPostSteps[parentStepId].Push(clonedAction);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -443,7 +443,7 @@ namespace GitHub.Runner.Worker
|
||||
{
|
||||
for (var i = 0; i < compositeAction.Steps.Count; i++)
|
||||
{
|
||||
// Store Id's for later load actions
|
||||
// Load stored Ids for later load actions
|
||||
compositeAction.Steps[i].Id = _cachedEmbeddedStepIds[action.Id][i];
|
||||
if (string.IsNullOrEmpty(executionContext.Global.Variables.Get("DistributedTask.EnableCompositeActions")) && compositeAction.Steps[i].Reference.Type != Pipelines.ActionSourceType.Script)
|
||||
{
|
||||
@@ -451,6 +451,16 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
_cachedEmbeddedStepIds[action.Id] = new List<Guid>();
|
||||
foreach (var compositeStep in compositeAction.Steps)
|
||||
{
|
||||
var guid = Guid.NewGuid();
|
||||
compositeStep.Id = guid;
|
||||
_cachedEmbeddedStepIds[action.Id].Add(guid);
|
||||
}
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
@@ -641,10 +651,10 @@ namespace GitHub.Runner.Worker
|
||||
{
|
||||
try
|
||||
{
|
||||
actionDownloadInfos = await jobServer.ResolveActionDownloadInfoAsync(executionContext.Global.Plan.ScopeIdentifier, executionContext.Global.Plan.PlanType, executionContext.Global.Plan.PlanId, new WebApi.ActionReferenceList { Actions = actionReferences }, executionContext.CancellationToken);
|
||||
actionDownloadInfos = await jobServer.ResolveActionDownloadInfoAsync(executionContext.Global.Plan.ScopeIdentifier, executionContext.Global.Plan.PlanType, executionContext.Global.Plan.PlanId, executionContext.Root.Id, new WebApi.ActionReferenceList { Actions = actionReferences }, executionContext.CancellationToken);
|
||||
break;
|
||||
}
|
||||
catch (Exception ex) when (!executionContext.CancellationToken.IsCancellationRequested) // Do not retry if the run is canceled.
|
||||
catch (Exception ex) when (!executionContext.CancellationToken.IsCancellationRequested) // Do not retry if the run is cancelled.
|
||||
{
|
||||
// UnresolvableActionDownloadInfoException is a 422 client error, don't retry
|
||||
// Some possible cases are:
|
||||
|
||||
@@ -1,18 +1,17 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
using System.Collections.Generic;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.DistributedTask.ObjectTemplating;
|
||||
using GitHub.DistributedTask.ObjectTemplating.Tokens;
|
||||
using GitHub.DistributedTask.Pipelines;
|
||||
using GitHub.DistributedTask.Pipelines.ContextData;
|
||||
using GitHub.DistributedTask.Pipelines.ObjectTemplating;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Common.Util;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Runner.Worker;
|
||||
using GitHub.Runner.Worker.Handlers;
|
||||
using Pipelines = GitHub.DistributedTask.Pipelines;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Sdk;
|
||||
using System.Collections.Generic;
|
||||
|
||||
namespace GitHub.Runner.Worker
|
||||
{
|
||||
@@ -141,21 +140,7 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
IStepHost stepHost = HostContext.CreateService<IDefaultStepHost>();
|
||||
|
||||
// Makes directory for event_path data
|
||||
var tempDirectory = HostContext.GetDirectory(WellKnownDirectory.Temp);
|
||||
var workflowDirectory = Path.Combine(tempDirectory, "_github_workflow");
|
||||
Directory.CreateDirectory(workflowDirectory);
|
||||
|
||||
var gitHubEvent = ExecutionContext.GetGitHubContext("event");
|
||||
|
||||
// adds the GitHub event path/file if the event exists
|
||||
if (gitHubEvent != null)
|
||||
{
|
||||
var workflowFile = Path.Combine(workflowDirectory, "event.json");
|
||||
Trace.Info($"Write event payload to {workflowFile}");
|
||||
File.WriteAllText(workflowFile, gitHubEvent, new UTF8Encoding(false));
|
||||
ExecutionContext.SetGitHubContext("event_path", workflowFile);
|
||||
}
|
||||
ExecutionContext.WriteWebhookPayload();
|
||||
|
||||
// Set GITHUB_ACTION_REPOSITORY if this Action is from a repository
|
||||
if (Action.Reference is Pipelines.RepositoryPathReference repoPathReferenceAction &&
|
||||
@@ -274,8 +259,8 @@ namespace GitHub.Runner.Worker
|
||||
actionDirectory: definition.Directory,
|
||||
localActionContainerSetupSteps: localActionContainerSetupSteps);
|
||||
|
||||
// Print out action details
|
||||
handler.PrintActionDetails(Stage);
|
||||
// Print out action details and log telemetry
|
||||
handler.PrepareExecution(Stage);
|
||||
|
||||
// Run the task.
|
||||
try
|
||||
|
||||
@@ -11,7 +11,7 @@ using GitHub.Runner.Sdk;
|
||||
|
||||
namespace GitHub.Runner.Worker.Container
|
||||
{
|
||||
[ServiceLocator(Default = typeof(DockerCommandManager))]
|
||||
[ServiceLocator(Default = typeof(DockerHookCommandManager))]
|
||||
public interface IDockerCommandManager : IRunnerService
|
||||
{
|
||||
string DockerPath { get; }
|
||||
@@ -188,7 +188,7 @@ namespace GitHub.Runner.Worker.Container
|
||||
return outputStrings.FirstOrDefault();
|
||||
}
|
||||
|
||||
public async Task<int> DockerRun(IExecutionContext context, ContainerInfo container, EventHandler<ProcessDataReceivedEventArgs> stdoutDataReceived, EventHandler<ProcessDataReceivedEventArgs> stderrDataReceived)
|
||||
public virtual async Task<int> DockerRun(IExecutionContext context, ContainerInfo container, EventHandler<ProcessDataReceivedEventArgs> stdoutDataReceived, EventHandler<ProcessDataReceivedEventArgs> stderrDataReceived)
|
||||
{
|
||||
IList<string> dockerOptions = new List<string>();
|
||||
// OPTIONS
|
||||
@@ -258,7 +258,7 @@ namespace GitHub.Runner.Worker.Container
|
||||
return await ExecuteDockerCommandAsync(context, "run", optionsString, container.ContainerEnvironmentVariables, stdoutDataReceived, stderrDataReceived, context.CancellationToken);
|
||||
}
|
||||
|
||||
public async Task<int> DockerStart(IExecutionContext context, string containerId)
|
||||
public virtual async Task<int> DockerStart(IExecutionContext context, string containerId)
|
||||
{
|
||||
return await ExecuteDockerCommandAsync(context, "start", containerId, context.CancellationToken);
|
||||
}
|
||||
|
||||
src/Runner.Worker/Container/DockerHookCommandManager.cs (new file, 50 lines)
@@ -0,0 +1,50 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using GitHub.DistributedTask.Pipelines;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Worker.Handlers;

namespace GitHub.Runner.Worker.Container
{
    public class DockerHookCommandManager : DockerCommandManager
    {
        public override async Task<int> DockerStart(IExecutionContext context, string containerId)
        {
            // check for env var
            // execute script

            // Create the handler data.
            var path = "/home/ferenc/Documents/runner/_layout/docker_run.sh";
            var scriptDirectory = Path.GetDirectoryName(path);
            var stepHost = HostContext.CreateService<IDefaultStepHost>();
            var prependPath = string.Join(Path.PathSeparator.ToString(), context.Global.PrependPath.Reverse<string>());
            Dictionary<string, string> inputs = new()
            {
                ["script"] = $"CONT_ID={containerId} " + "/usr/bin/bash" + " " + path,
                // /bin/bash
                ["shell"] = ScriptHandlerHelpers.GetDefaultShellForScript(path, Trace, prependPath)
            };

            // Create the handler
            var handlerFactory = HostContext.GetService<IHandlerFactory>();
            var handler = handlerFactory.Create(
                            context,
                            action: new ScriptReference(),
                            stepHost,
                            new ScriptActionExecutionData(),
                            inputs,
                            environment: new Dictionary<string, string>(VarUtil.EnvironmentVariableKeyComparer),
                            context.Global.Variables,
                            actionDirectory: scriptDirectory,
                            localActionContainerSetupSteps: null);
            handler.PrepareExecution(ActionRunStage.Main); // TODO: find out stage

            await handler.RunAsync(ActionRunStage.Main);

            return ((int?)handler.ExecutionContext.CommandResult) ?? 0;
        }
    }
}
@@ -55,12 +55,14 @@ namespace GitHub.Runner.Worker
|
||||
// Our container feature requires mapping the working directory from the host into the container.
|
||||
// If we are already inside a container, we will not be able to find out the real working directory path on the host.
|
||||
#if OS_WINDOWS
|
||||
#pragma warning disable CA1416
|
||||
// service CExecSvc is Container Execution Agent.
|
||||
ServiceController[] scServices = ServiceController.GetServices();
|
||||
if (scServices.Any(x => String.Equals(x.ServiceName, "cexecsvc", StringComparison.OrdinalIgnoreCase) && x.Status == ServiceControllerStatus.Running))
|
||||
{
|
||||
throw new NotSupportedException("Container feature is not supported when runner is already running inside container.");
|
||||
}
|
||||
#pragma warning restore CA1416
|
||||
#else
|
||||
var initProcessCgroup = File.ReadLines("/proc/1/cgroup");
|
||||
if (initProcessCgroup.Any(x => x.IndexOf(":/docker/", StringComparison.OrdinalIgnoreCase) >= 0))
|
||||
@@ -70,6 +72,7 @@ namespace GitHub.Runner.Worker
|
||||
#endif
|
||||
|
||||
#if OS_WINDOWS
|
||||
#pragma warning disable CA1416
|
||||
// Check OS version (Windows server 1803 is required)
|
||||
object windowsInstallationType = Registry.GetValue(@"HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\Windows NT\CurrentVersion", "InstallationType", defaultValue: null);
|
||||
ArgUtil.NotNull(windowsInstallationType, nameof(windowsInstallationType));
|
||||
@@ -88,6 +91,7 @@ namespace GitHub.Runner.Worker
|
||||
{
|
||||
throw new ArgumentOutOfRangeException(@"HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\Windows NT\CurrentVersion\ReleaseId");
|
||||
}
|
||||
#pragma warning restore CA1416
|
||||
#endif
|
||||
|
||||
// Check docker client/server version
|
||||
@@ -334,6 +338,18 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
if (!string.IsNullOrEmpty(container.ContainerId))
|
||||
{
|
||||
if(!container.IsJobContainer)
|
||||
{
|
||||
// Print logs for service container jobs (not the "action" job itself b/c that's already logged).
|
||||
executionContext.Output($"Print service container logs: {container.ContainerDisplayName}");
|
||||
|
||||
int logsExitCode = await _dockerManager.DockerLogs(executionContext, container.ContainerId);
|
||||
if (logsExitCode != 0)
|
||||
{
|
||||
executionContext.Warning($"Docker logs fail with exit code {logsExitCode}");
|
||||
}
|
||||
}
|
||||
|
||||
executionContext.Output($"Stop and remove container: {container.ContainerDisplayName}");
|
||||
|
||||
int rmExitCode = await _dockerManager.DockerRemove(executionContext, container.ContainerId);
|
||||
|
||||
@@ -15,8 +15,8 @@ using GitHub.DistributedTask.Pipelines;
|
||||
using GitHub.DistributedTask.Pipelines.ContextData;
|
||||
using GitHub.DistributedTask.Pipelines.ObjectTemplating;
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Common.Util;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Common.Util;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Runner.Worker.Container;
|
||||
using GitHub.Services.WebApi;
|
||||
@@ -52,8 +52,7 @@ namespace GitHub.Runner.Worker
|
||||
Dictionary<string, string> IntraActionState { get; }
|
||||
Dictionary<string, VariableValue> JobOutputs { get; }
|
||||
ActionsEnvironmentReference ActionsEnvironment { get; }
|
||||
List<ActionsStepTelemetry> ActionsStepsTelemetry { get; }
|
||||
List<JobTelemetry> JobTelemetry { get; }
|
||||
ActionsStepTelemetry StepTelemetry { get; }
|
||||
DictionaryContextData ExpressionValues { get; }
|
||||
IList<IFunctionInfo> ExpressionFunctions { get; }
|
||||
JobContext JobContext { get; }
|
||||
@@ -109,12 +108,17 @@ namespace GitHub.Runner.Worker
|
||||
// others
|
||||
void ForceTaskComplete();
|
||||
void RegisterPostJobStep(IStep step);
|
||||
void PublishStepTelemetry();
|
||||
void WriteWebhookPayload();
|
||||
}
|
||||
|
||||
public sealed class ExecutionContext : RunnerService, IExecutionContext
|
||||
{
|
||||
private const int _maxIssueCount = 10;
|
||||
private const int _throttlingDelayReportThreshold = 10 * 1000; // Don't report throttling with less than 10 seconds delay
|
||||
private const int _maxIssueMessageLength = 4096; // Don't send issue with huge message since we can't forward them from actions to check annotation.
|
||||
private const int _maxIssueCountInTelemetry = 3; // Only send the first 3 issues to telemetry
|
||||
private const int _maxIssueMessageLengthInTelemetry = 256; // Only send the first 256 characters of issue message to telemetry
|
||||
|
||||
private readonly TimelineRecord _record = new TimelineRecord();
|
||||
private readonly Dictionary<Guid, TimelineRecord> _detailRecords = new Dictionary<Guid, TimelineRecord>();
|
||||
@@ -139,6 +143,7 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
// only job level ExecutionContext will track throttling delay.
|
||||
private long _totalThrottlingDelayInMilliseconds = 0;
|
||||
private bool _stepTelemetryPublished = false;
|
||||
|
||||
public Guid Id => _record.Id;
|
||||
public Guid EmbeddedId { get; private set; }
|
||||
@@ -152,8 +157,7 @@ namespace GitHub.Runner.Worker
|
||||
public Dictionary<string, VariableValue> JobOutputs { get; private set; }
|
||||
|
||||
public ActionsEnvironmentReference ActionsEnvironment { get; private set; }
|
||||
public List<ActionsStepTelemetry> ActionsStepsTelemetry { get; private set; }
|
||||
public List<JobTelemetry> JobTelemetry { get; private set; }
|
||||
public ActionsStepTelemetry StepTelemetry { get; } = new ActionsStepTelemetry();
|
||||
public DictionaryContextData ExpressionValues { get; } = new DictionaryContextData();
|
||||
public IList<IFunctionInfo> ExpressionFunctions { get; } = new List<IFunctionInfo>();
|
||||
|
||||
@@ -273,9 +277,9 @@ namespace GitHub.Runner.Worker
|
||||
{
|
||||
Trace.Info($"'post' of '{actionRunner.DisplayName}' already push to child post step stack.");
|
||||
}
|
||||
else
|
||||
else
|
||||
{
|
||||
Root.EmbeddedStepsWithPostRegistered[actionRunner.Action.Id] = actionRunner.Condition;
|
||||
Root.EmbeddedStepsWithPostRegistered[actionRunner.Action.Id] = actionRunner.Condition;
|
||||
}
|
||||
return;
|
||||
}
|
||||
@@ -294,7 +298,20 @@ namespace GitHub.Runner.Worker
|
||||
Root.PostJobSteps.Push(step);
|
||||
}
|
||||
|
||||
public IExecutionContext CreateChild(Guid recordId, string displayName, string refName, string scopeName, string contextName, ActionRunStage stage, Dictionary<string, string> intraActionState = null, int? recordOrder = null, IPagingLogger logger = null, bool isEmbedded = false, CancellationTokenSource cancellationTokenSource = null, Guid embeddedId = default(Guid), string siblingScopeName = null)
|
||||
public IExecutionContext CreateChild(
|
||||
Guid recordId,
|
||||
string displayName,
|
||||
string refName,
|
||||
string scopeName,
|
||||
string contextName,
|
||||
ActionRunStage stage,
|
||||
Dictionary<string, string> intraActionState = null,
|
||||
int? recordOrder = null,
|
||||
IPagingLogger logger = null,
|
||||
bool isEmbedded = false,
|
||||
CancellationTokenSource cancellationTokenSource = null,
|
||||
Guid embeddedId = default(Guid),
|
||||
string siblingScopeName = null)
|
||||
{
|
||||
Trace.Entering();
|
||||
|
||||
@@ -306,7 +323,6 @@ namespace GitHub.Runner.Worker
|
||||
child.Stage = stage;
|
||||
child.EmbeddedId = embeddedId;
|
||||
child.SiblingScopeName = siblingScopeName;
|
||||
child.JobTelemetry = JobTelemetry;
|
||||
if (intraActionState == null)
|
||||
{
|
||||
child.IntraActionState = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
|
||||
@@ -346,6 +362,9 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
|
||||
child.IsEmbedded = isEmbedded;
|
||||
child.StepTelemetry.StepId = recordId;
|
||||
child.StepTelemetry.Stage = stage.ToString();
|
||||
child.StepTelemetry.IsEmbedded = isEmbedded;
|
||||
|
||||
return child;
|
||||
}
|
||||
@@ -354,7 +373,13 @@ namespace GitHub.Runner.Worker
|
||||
/// An embedded execution context shares the same record ID, record name, logger,
|
||||
/// and a linked cancellation token.
|
||||
/// </summary>
|
||||
public IExecutionContext CreateEmbeddedChild(string scopeName, string contextName, Guid embeddedId, ActionRunStage stage, Dictionary<string, string> intraActionState = null, string siblingScopeName = null)
|
||||
public IExecutionContext CreateEmbeddedChild(
|
||||
string scopeName,
|
||||
string contextName,
|
||||
Guid embeddedId,
|
||||
ActionRunStage stage,
|
||||
Dictionary<string, string> intraActionState = null,
|
||||
string siblingScopeName = null)
|
||||
{
|
||||
return Root.CreateChild(_record.Id, _record.Name, _record.Id.ToString("N"), scopeName, contextName, stage, logger: _logger, isEmbedded: true, cancellationTokenSource: CancellationTokenSource.CreateLinkedTokenSource(_cancellationTokenSource.Token), intraActionState: intraActionState, embeddedId: embeddedId, siblingScopeName: siblingScopeName);
|
||||
}
|
||||
@@ -404,6 +429,8 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
}
|
||||
|
||||
PublishStepTelemetry();
|
||||
|
||||
if (Root != this)
|
||||
{
|
||||
// only dispose TokenSource for step level ExecutionContext
|
||||
@@ -519,11 +546,20 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
|
||||
issue.Message = HostContext.SecretMasker.MaskSecrets(issue.Message);
|
||||
if (issue.Message.Length > _maxIssueMessageLength)
|
||||
{
|
||||
issue.Message = issue.Message[.._maxIssueMessageLength];
|
||||
}
|
||||
|
||||
// Tracking the line number (logFileLineNumber) and step number (stepNumber) for each issue that gets created
|
||||
// The Actions UI on the run summary page uses both values to link to the exact location in the logs where an annotation originates from
|
||||
if (_record.Order != null)
|
||||
{
|
||||
issue.Data["stepNumber"] = _record.Order.ToString();
|
||||
}
|
||||
|
||||
if (issue.Type == IssueType.Error)
|
||||
{
|
||||
// tracking line number for each issue in log file
|
||||
// log UI use this to navigate from issue to log
|
||||
if (!string.IsNullOrEmpty(logMessage))
|
||||
{
|
||||
long logLineNumber = Write(WellKnownTags.Error, logMessage);
|
||||
@@ -539,8 +575,6 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
else if (issue.Type == IssueType.Warning)
|
||||
{
|
||||
// tracking line number for each issue in log file
|
||||
// log UI use this to navigate from issue to log
|
||||
if (!string.IsNullOrEmpty(logMessage))
|
||||
{
|
||||
long logLineNumber = Write(WellKnownTags.Warning, logMessage);
|
||||
@@ -556,9 +590,6 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
else if (issue.Type == IssueType.Notice)
|
||||
{
|
||||
|
||||
// tracking line number for each issue in log file
|
||||
// log UI use this to navigate from issue to log
|
||||
if (!string.IsNullOrEmpty(logMessage))
|
||||
{
|
||||
long logLineNumber = Write(WellKnownTags.Notice, logMessage);
|
||||
@@ -648,22 +679,29 @@ namespace GitHub.Runner.Worker
|
||||
// Variables
|
||||
Global.Variables = new Variables(HostContext, message.Variables);
|
||||
|
||||
if (Global.Variables.GetBoolean("DistributedTask.ForceInternalNodeVersionOnRunnerTo12") ?? false)
|
||||
{
|
||||
Environment.SetEnvironmentVariable(Constants.Variables.Agent.ForcedInternalNodeVersion, "node12");
|
||||
}
|
||||
|
||||
// Environment variables shared across all actions
|
||||
Global.EnvironmentVariables = new Dictionary<string, string>(VarUtil.EnvironmentVariableKeyComparer);
|
||||
|
||||
// Job defaults shared across all actions
|
||||
Global.JobDefaults = new Dictionary<string, IDictionary<string, string>>(StringComparer.OrdinalIgnoreCase);
|
||||
|
||||
// Job Telemetry
|
||||
Global.JobTelemetry = new List<JobTelemetry>();
|
||||
|
||||
// ActionsStepTelemetry for entire job
|
||||
Global.StepsTelemetry = new List<ActionsStepTelemetry>();
|
||||
|
||||
// Job Outputs
|
||||
JobOutputs = new Dictionary<string, VariableValue>(StringComparer.OrdinalIgnoreCase);
|
||||
|
||||
// Actions environment
|
||||
ActionsEnvironment = message.ActionsEnvironment;
|
||||
|
||||
// ActionsStepTelemetry
|
||||
ActionsStepsTelemetry = new List<ActionsStepTelemetry>();
|
||||
|
||||
JobTelemetry = new List<JobTelemetry>();
|
||||
|
||||
// Service container info
|
||||
Global.ServiceContainers = new List<ContainerInfo>();
|
||||
@@ -895,6 +933,83 @@ namespace GitHub.Runner.Worker
|
||||
return Root._matchers ?? Array.Empty<IssueMatcherConfig>();
|
||||
}
|
||||
|
||||
public void PublishStepTelemetry()
|
||||
{
|
||||
if (!_stepTelemetryPublished)
|
||||
{
|
||||
// Add to the global steps telemetry only if we have something to log.
|
||||
if (!string.IsNullOrEmpty(StepTelemetry?.Type))
|
||||
{
|
||||
if (!IsEmbedded)
|
||||
{
|
||||
StepTelemetry.Result = _record.Result;
|
||||
}
|
||||
|
||||
if (!IsEmbedded &&
|
||||
_record.FinishTime != null &&
|
||||
_record.StartTime != null)
|
||||
{
|
||||
StepTelemetry.ExecutionTimeInSeconds = (int)Math.Ceiling((_record.FinishTime - _record.StartTime)?.TotalSeconds ?? 0);
|
||||
}
|
||||
|
||||
if (!IsEmbedded &&
|
||||
_record.Issues.Count > 0)
|
||||
{
|
||||
foreach (var issue in _record.Issues)
|
||||
{
|
||||
if ((issue.Type == IssueType.Error || issue.Type == IssueType.Warning) &&
|
||||
!string.IsNullOrEmpty(issue.Message))
|
||||
{
|
||||
string issueTelemetry;
|
||||
if (issue.Message.Length > _maxIssueMessageLengthInTelemetry)
|
||||
{
|
||||
issueTelemetry = $"{issue.Message[.._maxIssueMessageLengthInTelemetry]}";
|
||||
}
|
||||
else
|
||||
{
|
||||
issueTelemetry = issue.Message;
|
||||
}
|
||||
|
||||
StepTelemetry.ErrorMessages.Add(issueTelemetry);
|
||||
|
||||
// Only send over the first 3 issues to avoid sending too much data.
|
||||
if (StepTelemetry.ErrorMessages.Count >= _maxIssueCountInTelemetry)
|
||||
{
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Trace.Info($"Publish step telemetry for current step {StringUtil.ConvertToJson(StepTelemetry)}.");
|
||||
Global.StepsTelemetry.Add(StepTelemetry);
|
||||
_stepTelemetryPublished = true;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
Trace.Info($"Step telemetry has already been published.");
|
||||
}
|
||||
}
|
||||
|
||||
public void WriteWebhookPayload()
|
||||
{
|
||||
// Makes directory for event_path data
|
||||
var tempDirectory = HostContext.GetDirectory(WellKnownDirectory.Temp);
|
||||
var workflowDirectory = Path.Combine(tempDirectory, "_github_workflow");
|
||||
Directory.CreateDirectory(workflowDirectory);
|
||||
var gitHubEvent = GetGitHubContext("event");
|
||||
|
||||
// adds the GitHub event path/file if the event exists
|
||||
if (gitHubEvent != null)
|
||||
{
|
||||
var workflowFile = Path.Combine(workflowDirectory, "event.json");
|
||||
Trace.Info($"Write event payload to {workflowFile}");
|
||||
File.WriteAllText(workflowFile, gitHubEvent, new UTF8Encoding(false));
|
||||
SetGitHubContext("event_path", workflowFile);
|
||||
}
|
||||
}
|
||||
|
||||
private void InitializeTimelineRecord(Guid timelineId, Guid timelineRecordId, Guid? parentTimelineRecordId, string recordType, string displayName, string refName, int? order)
|
||||
{
|
||||
_mainTimelineId = timelineId;
|
||||
|
||||
@@ -7,6 +7,7 @@ using GitHub.Runner.Sdk;
|
||||
using System.Reflection;
|
||||
using System.Threading;
|
||||
using System.Collections.Generic;
|
||||
using GitHub.Runner.Common.Util;
|
||||
|
||||
namespace GitHub.Runner.Worker.Expressions
|
||||
{
|
||||
@@ -62,7 +63,7 @@ namespace GitHub.Runner.Worker.Expressions
|
||||
string binDir = Path.GetDirectoryName(Assembly.GetEntryAssembly().Location);
|
||||
string runnerRoot = new DirectoryInfo(binDir).Parent.FullName;
|
||||
|
||||
string node = Path.Combine(runnerRoot, "externals", "node12", "bin", $"node{IOUtil.ExeExtension}");
|
||||
string node = Path.Combine(runnerRoot, "externals", NodeUtil.GetInternalNodeVersion(), "bin", $"node{IOUtil.ExeExtension}");
|
||||
string hashFilesScript = Path.Combine(binDir, "hashFiles");
|
||||
var hashResult = string.Empty;
|
||||
var p = new ProcessInvoker(new HashFilesTrace(context.Trace));
|
||||
|
||||
@@ -181,6 +181,10 @@ namespace GitHub.Runner.Worker
|
||||
{
|
||||
throw new Exception($"Invalid environment variable value. Matching delimiter not found '{delimiter}'");
|
||||
}
|
||||
if (newline == null)
|
||||
{
|
||||
throw new Exception($"Invalid environment variable value. EOF marker missing new line.");
|
||||
}
|
||||
endIndex = index - newline.Length;
|
||||
tempLine = ReadLine(text, ref index, out newline);
|
||||
}
|
||||
@@ -259,4 +263,72 @@ namespace GitHub.Runner.Worker
|
||||
return text.Substring(originalIndex, lfIndex - originalIndex);
|
||||
}
|
||||
}
|
||||
|
||||
public sealed class CreateStepSummaryCommand : RunnerService, IFileCommandExtension
|
||||
{
|
||||
private const int _attachmentSizeLimit = 128 * 1024;
|
||||
|
||||
public string ContextName => "step_summary";
|
||||
public string FilePrefix => "step_summary_";
|
||||
|
||||
public Type ExtensionType => typeof(IFileCommandExtension);
|
||||
|
||||
public void ProcessCommand(IExecutionContext context, string filePath, ContainerInfo container)
|
||||
{
|
||||
if (!context.Global.Variables.GetBoolean("DistributedTask.UploadStepSummary") ?? true)
|
||||
{
|
||||
Trace.Info("Step Summary is disabled; skipping attachment upload");
|
||||
return;
|
||||
}
|
||||
|
||||
if (String.IsNullOrEmpty(filePath) || !File.Exists(filePath))
|
||||
{
|
||||
Trace.Info($"Step Summary file ({filePath}) does not exist; skipping attachment upload");
|
||||
return;
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
var fileSize = new FileInfo(filePath).Length;
|
||||
if (fileSize == 0)
|
||||
{
|
||||
Trace.Info($"Step Summary file ({filePath}) is empty; skipping attachment upload");
|
||||
return;
|
||||
}
|
||||
|
||||
if (fileSize > _attachmentSizeLimit)
|
||||
{
|
||||
context.Error(String.Format(Constants.Runner.UnsupportedSummarySize, _attachmentSizeLimit / 1024, fileSize / 1024));
|
||||
Trace.Info($"Step Summary file ({filePath}) is too large ({fileSize} bytes); skipping attachment upload");
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
Trace.Verbose($"Step Summary file exists: {filePath} and has a file size of {fileSize} bytes");
|
||||
var scrubbedFilePath = filePath + "-scrubbed";
|
||||
|
||||
using (var streamReader = new StreamReader(filePath))
|
||||
using (var streamWriter = new StreamWriter(scrubbedFilePath))
|
||||
{
|
||||
string line;
|
||||
while ((line = streamReader.ReadLine()) != null)
|
||||
{
|
||||
var maskedLine = HostContext.SecretMasker.MaskSecrets(line);
|
||||
streamWriter.WriteLine(maskedLine);
|
||||
}
|
||||
}
|
||||
|
||||
var attachmentName = context.Id.ToString();
|
||||
|
||||
Trace.Info($"Queueing file ({filePath}) for attachment upload ({attachmentName})");
|
||||
// Attachments must be added to the parent context (job), not the current context (step)
|
||||
context.Root.QueueAttachFile(ChecksAttachmentType.StepSummary, attachmentName, scrubbedFilePath);
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
Trace.Error($"Error while processing file ({filePath}): {e}");
|
||||
context.Error($"Failed to create step summary using 'GITHUB_STEP_SUMMARY': {e.Message}");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -8,10 +8,10 @@ namespace GitHub.Runner.Worker
|
||||
{
|
||||
private readonly HashSet<string> _contextEnvAllowlist = new HashSet<string>(StringComparer.OrdinalIgnoreCase)
|
||||
{
|
||||
"action",
|
||||
"action_path",
|
||||
"action_ref",
|
||||
"action_repository",
|
||||
"action",
|
||||
"actor",
|
||||
"api_url",
|
||||
"base_ref",
|
||||
@@ -22,18 +22,20 @@ namespace GitHub.Runner.Worker
|
||||
"head_ref",
|
||||
"job",
|
||||
"path",
|
||||
"ref",
|
||||
"ref_name",
|
||||
"ref_protected",
|
||||
"ref_type",
|
||||
"repository",
|
||||
"ref",
|
||||
"repository_owner",
|
||||
"repository",
|
||||
"retention_days",
|
||||
"run_attempt",
|
||||
"run_id",
|
||||
"run_number",
|
||||
"server_url",
|
||||
"sha",
|
||||
"step_summary",
|
||||
"triggering_actor",
|
||||
"workflow",
|
||||
"workspace",
|
||||
};

@@ -14,6 +14,8 @@ namespace GitHub.Runner.Worker
public PlanFeatures Features { get; set; }
public IList<String> FileTable { get; set; }
public IDictionary<String, IDictionary<String, String>> JobDefaults { get; set; }
public List<ActionsStepTelemetry> StepsTelemetry { get; set; }
public List<JobTelemetry> JobTelemetry { get; set; }
public TaskOrchestrationPlanReference Plan { get; set; }
public List<string> PrependPath { get; set; }
public List<ContainerInfo> ServiceContainers { get; set; }

@@ -42,7 +42,7 @@ namespace GitHub.Runner.Worker.Handlers
{
ArgUtil.NotNull(Data.PreSteps, nameof(Data.PreSteps));
steps = Data.PreSteps;
}
}
else if (stage == ActionRunStage.Post)
{
ArgUtil.NotNull(Data.PostSteps, nameof(Data.PostSteps));
@@ -60,14 +60,14 @@ namespace GitHub.Runner.Worker.Handlers
Trace.Info($"Skipping executing post step id: {step.Id}, name: {step.DisplayName}");
}
}
}
}
else
{
ArgUtil.NotNull(Data.Steps, nameof(Data.Steps));
steps = Data.Steps;
}

// Add Telemetry to JobContext to send with JobCompleteMessage
// Set extra telemetry based on the current context.
if (stage == ActionRunStage.Main)
{
var hasRunsStep = false;
@@ -83,20 +83,16 @@ namespace GitHub.Runner.Worker.Handlers
hasUsesStep = true;
}
}
var pathReference = Action as Pipelines.RepositoryPathReference;
var telemetry = new ActionsStepTelemetry {
Ref = GetActionRef(),
HasPreStep = Data.HasPre,
HasPostStep = Data.HasPost,
IsEmbedded = ExecutionContext.IsEmbedded,
Type = "composite",
HasRunsStep = hasRunsStep,
HasUsesStep = hasUsesStep,
StepCount = steps.Count
};
ExecutionContext.Root.ActionsStepsTelemetry.Add(telemetry);

ExecutionContext.StepTelemetry.HasPreStep = Data.HasPre;
ExecutionContext.StepTelemetry.HasPostStep = Data.HasPost;

ExecutionContext.StepTelemetry.HasRunsStep = hasRunsStep;
ExecutionContext.StepTelemetry.HasUsesStep = hasUsesStep;
ExecutionContext.StepTelemetry.StepCount = steps.Count;
}

ExecutionContext.StepTelemetry.Type = "composite";

try
{
// Inputs of the composite step
@@ -117,7 +113,7 @@ namespace GitHub.Runner.Worker.Handlers
// Create embedded steps
var embeddedSteps = new List<IStep>();

// If we need to setup containers beforehand, do it
// If we need to setup containers beforehand, do it
// only relevant for local composite actions that need to JIT download/setup containers
if (LocalActionContainerSetupSteps != null && LocalActionContainerSetupSteps.Count > 0)
{
@@ -152,7 +148,7 @@ namespace GitHub.Runner.Worker.Handlers
}
else
{
step.ExecutionContext.ExpressionValues["steps"] = ExecutionContext.Global.StepsContext.GetScope(childScopeName);
step.ExecutionContext.ExpressionValues["steps"] = ExecutionContext.Global.StepsContext.GetScope(childScopeName);
}

// Shallow copy github context
@@ -303,13 +299,13 @@ namespace GitHub.Runner.Worker.Handlers
// Register the job cancellation callback only if the job cancellation token has not been fired before each step run
if (!ExecutionContext.Root.CancellationToken.IsCancellationRequested)
{
// Test the condition again. The job was canceled after the condition was originally evaluated.
// Test the condition again. The job was cancelled after the condition was originally evaluated.
jobCancelRegister = ExecutionContext.Root.CancellationToken.Register(() =>
{
// Mark job as cancelled
ExecutionContext.Root.Result = TaskResult.Canceled;
ExecutionContext.Root.JobContext.Status = ExecutionContext.Root.Result?.ToActionResult();

step.ExecutionContext.Debug($"Re-evaluate condition on job cancellation for step: '{step.DisplayName}'.");
var conditionReTestTraceWriter = new ConditionTraceWriter(Trace, null); // host tracing only
var conditionReTestResult = false;
@@ -378,14 +374,14 @@ namespace GitHub.Runner.Worker.Handlers
{
// Condition is false
Trace.Info("Skipping step due to condition evaluation.");
step.ExecutionContext.Result = TaskResult.Skipped;
SetStepConclusion(step, TaskResult.Skipped);
continue;
}
else if (conditionEvaluateError != null)
{
// Condition error
step.ExecutionContext.Error(conditionEvaluateError);
step.ExecutionContext.Result = TaskResult.Failed;
SetStepConclusion(step, TaskResult.Failed);
ExecutionContext.Result = TaskResult.Failed;
break;
}
@@ -393,7 +389,7 @@ namespace GitHub.Runner.Worker.Handlers
{
await RunStepAsync(step);
}

}
finally
{
@@ -403,13 +399,15 @@ namespace GitHub.Runner.Worker.Handlers
jobCancelRegister = null;
}
}

// Check failed or canceled
// Check failed or cancelled
if (step.ExecutionContext.Result == TaskResult.Failed || step.ExecutionContext.Result == TaskResult.Canceled)
{
Trace.Info($"Update job result with current composite step result '{step.ExecutionContext.Result}'.");
ExecutionContext.Result = TaskResultUtil.MergeTaskResults(ExecutionContext.Result, step.ExecutionContext.Result.Value);
}

// Update context
SetStepsContext(step);
}
}

@@ -431,13 +429,13 @@ namespace GitHub.Runner.Worker.Handlers
{
Trace.Error($"Caught timeout exception from step: {ex.Message}");
step.ExecutionContext.Error("The action has timed out.");
step.ExecutionContext.Result = TaskResult.Failed;
SetStepConclusion(step, TaskResult.Failed);
}
else
{
Trace.Error($"Caught cancellation exception from step: {ex}");
step.ExecutionContext.Error(ex);
step.ExecutionContext.Result = TaskResult.Canceled;
SetStepConclusion(step, TaskResult.Canceled);
}
}
catch (Exception ex)
@@ -445,17 +443,33 @@ namespace GitHub.Runner.Worker.Handlers
// Log the error and fail the step
Trace.Error($"Caught exception from step: {ex}");
step.ExecutionContext.Error(ex);
step.ExecutionContext.Result = TaskResult.Failed;
SetStepConclusion(step, TaskResult.Failed);
}

// Merge execution context result with command result
if (step.ExecutionContext.CommandResult != null)
{
step.ExecutionContext.Result = Common.Util.TaskResultUtil.MergeTaskResults(step.ExecutionContext.Result, step.ExecutionContext.CommandResult.Value);
SetStepConclusion(step, Common.Util.TaskResultUtil.MergeTaskResults(step.ExecutionContext.Result, step.ExecutionContext.CommandResult.Value));
}

Trace.Info($"Step result: {step.ExecutionContext.Result}");
step.ExecutionContext.Debug($"Finished: {step.DisplayName}");
step.ExecutionContext.PublishStepTelemetry();
}

private void SetStepConclusion(IStep step, TaskResult result)
{
step.ExecutionContext.Result = result;
SetStepsContext(step);
}
private void SetStepsContext(IStep step)
{
if (!string.IsNullOrEmpty(step.ExecutionContext.ContextName) && !step.ExecutionContext.ContextName.StartsWith("__", StringComparison.Ordinal))
{
// TODO: when we support continue on error, we may need to do logic here to change conclusion based on the continue on error result
step.ExecutionContext.Global.StepsContext.SetOutcome(step.ExecutionContext.ScopeName, step.ExecutionContext.ContextName, (step.ExecutionContext.Result ?? TaskResult.Succeeded).ToActionResult());
step.ExecutionContext.Global.StepsContext.SetConclusion(step.ExecutionContext.ScopeName, step.ExecutionContext.ContextName, (step.ExecutionContext.Result ?? TaskResult.Succeeded).ToActionResult());
}
}
}
}
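The SetStepConclusion helper introduced above writes the step result into both the outcome and the conclusion slots of the steps context. As a hedged aside (my own illustration, not part of this diff), the TODO in SetStepsContext points at the case where the two values would diverge once continue-on-error is honored:

// Illustrative only: outcome reflects the raw step result, while conclusion is
// what downstream steps would observe after continue-on-error is applied.
using System;

class OutcomeVsConclusionDemo
{
    static (string Outcome, string Conclusion) Summarize(bool failed, bool continueOnError)
    {
        var outcome = failed ? "failure" : "success";
        var conclusion = (failed && continueOnError) ? "success" : outcome;
        return (outcome, conclusion);
    }

    static void Main()
    {
        Console.WriteLine(Summarize(failed: true, continueOnError: false)); // (failure, failure)
        Console.WriteLine(Summarize(failed: true, continueOnError: true));  // (failure, success)
    }
}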

@@ -1,14 +1,14 @@
using System.Collections.Generic;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using System;
using GitHub.Runner.Worker.Container;
using Pipelines = GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using GitHub.DistributedTask.WebApi;
using GitHub.DistributedTask.Pipelines.ContextData;
using System.Linq;
using GitHub.Runner.Worker.Container;
using Pipelines = GitHub.DistributedTask.Pipelines;

namespace GitHub.Runner.Worker.Handlers
{
@@ -70,18 +70,13 @@ namespace GitHub.Runner.Worker.Handlers
}

string type = Action.Type == Pipelines.ActionSourceType.Repository ? "Dockerfile" : "DockerHub";
// Add Telemetry to JobContext to send with JobCompleteMessage
// Set extra telemetry based on the current context.
if (stage == ActionRunStage.Main)
{
var telemetry = new ActionsStepTelemetry {
Ref = GetActionRef(),
HasPreStep = Data.HasPre,
HasPostStep = Data.HasPost,
IsEmbedded = ExecutionContext.IsEmbedded,
Type = type
};
ExecutionContext.Root.ActionsStepsTelemetry.Add(telemetry);
ExecutionContext.StepTelemetry.HasPreStep = Data.HasPre;
ExecutionContext.StepTelemetry.HasPostStep = Data.HasPost;
}
ExecutionContext.StepTelemetry.Type = type;

// run container
var container = new ContainerInfo(HostContext)

@@ -1,13 +1,13 @@
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common.Util;
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using System.Linq;
using System.IO;
using Pipelines = GitHub.DistributedTask.Pipelines;
using System.Linq;
using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using Pipelines = GitHub.DistributedTask.Pipelines;

namespace GitHub.Runner.Worker.Handlers
{
@@ -22,7 +22,7 @@ namespace GitHub.Runner.Worker.Handlers
string ActionDirectory { get; set; }
List<JobExtensionRunner> LocalActionContainerSetupSteps { get; set; }
Task RunAsync(ActionRunStage stage);
void PrintActionDetails(ActionRunStage stage);
void PrepareExecution(ActionRunStage stage);
}

public abstract class Handler : RunnerService
@@ -36,6 +36,7 @@ namespace GitHub.Runner.Worker.Handlers
protected IActionCommandManager ActionCommandManager { get; private set; }

public Pipelines.ActionStepDefinitionReference Action { get; set; }
public bool IsActionStep => Action != null;
public Dictionary<string, string> Environment { get; set; }
public Variables RuntimeVariables { get; set; }
public IExecutionContext ExecutionContext { get; set; }
@@ -44,29 +45,50 @@ namespace GitHub.Runner.Worker.Handlers
public string ActionDirectory { get; set; }
public List<JobExtensionRunner> LocalActionContainerSetupSteps { get; set; }

public virtual string GetActionRef()
public void PrepareExecution(ActionRunStage stage)
{
if (Action.Type == Pipelines.ActionSourceType.ContainerRegistry)
// Print out action details
PrintActionDetails(stage);

// Get telemetry for the action
PopulateActionTelemetry(stage);
}

protected void PopulateActionTelemetry(ActionRunStage stage)
{
if (!IsActionStep)
{
ExecutionContext.StepTelemetry.Type = "runner";
ExecutionContext.StepTelemetry.Action = $"{stage} Job Hook";
}
else if (Action.Type == Pipelines.ActionSourceType.ContainerRegistry)
{
ExecutionContext.StepTelemetry.Type = "docker";
var registryAction = Action as Pipelines.ContainerRegistryReference;
return registryAction.Image;
ExecutionContext.StepTelemetry.Action = registryAction.Image;
}
else if (Action.Type == Pipelines.ActionSourceType.Script)
{
ExecutionContext.StepTelemetry.Type = "run";
}
else if (Action.Type == Pipelines.ActionSourceType.Repository)
{
ExecutionContext.StepTelemetry.Type = "repository";
var repoAction = Action as Pipelines.RepositoryPathReference;
if (string.Equals(repoAction.RepositoryType, Pipelines.PipelineConstants.SelfAlias, StringComparison.OrdinalIgnoreCase))
{
return repoAction.Path;
ExecutionContext.StepTelemetry.Action = repoAction.Path;
}
else
{
ExecutionContext.StepTelemetry.Ref = repoAction.Ref;
if (string.IsNullOrEmpty(repoAction.Path))
{
return $"{repoAction.Name}@{repoAction.Ref}";
ExecutionContext.StepTelemetry.Action = repoAction.Name;
}
else
{
return $"{repoAction.Name}/{repoAction.Path}@{repoAction.Ref}";
ExecutionContext.StepTelemetry.Action = $"{repoAction.Name}/{repoAction.Path}";
}
}
}
@@ -75,11 +97,11 @@ namespace GitHub.Runner.Worker.Handlers
// this should never happen
Trace.Error($"Can't generate ref for {Action.Type.ToString()}");
}
return "";
}
public virtual void PrintActionDetails(ActionRunStage stage)

protected virtual void PrintActionDetails(ActionRunStage stage)
{

if (stage == ActionRunStage.Post)
{
ExecutionContext.Output($"Post job cleanup.");

@@ -55,7 +55,23 @@ namespace GitHub.Runner.Worker.Handlers
else if (data.ExecutionType == ActionExecutionType.NodeJS)
{
handler = HostContext.CreateService<INodeScriptActionHandler>();
(handler as INodeScriptActionHandler).Data = data as NodeJSActionExecutionData;
var nodeData = data as NodeJSActionExecutionData;

// With node12 EoL in 04/2022, we want to be able to uniformly upgrade all JS actions to node16 from the server
if (string.Equals(nodeData.NodeVersion, "node12", StringComparison.InvariantCultureIgnoreCase) &&
(executionContext.Global.Variables.GetBoolean("DistributedTask.ForceGithubJavascriptActionsToNode16") ?? false))
{
// The user can opt out of this behaviour by setting this variable to true, either setting 'env' in their workflow or as an environment variable on their machine
executionContext.Global.EnvironmentVariables.TryGetValue(Constants.Variables.Actions.AllowActionsUseUnsecureNodeVersion, out var workflowOptOut);
var isWorkflowOptOutSet = !string.IsNullOrEmpty(workflowOptOut);
var isLocalOptOut = StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable(Constants.Variables.Actions.AllowActionsUseUnsecureNodeVersion));
bool isOptOut = isWorkflowOptOutSet ? StringUtil.ConvertToBoolean(workflowOptOut) : isLocalOptOut;
if (!isOptOut)
{
nodeData.NodeVersion = "node16";
}
}
(handler as INodeScriptActionHandler).Data = nodeData;
}
else if (data.ExecutionType == ActionExecutionType.Script)
{
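To make the opt-out precedence in the hunk above easier to follow, here is a simplified standalone sketch (an illustration only: it parses plain booleans rather than using the runner's StringUtil.ConvertToBoolean, and the key name is a placeholder standing in for Constants.Variables.Actions.AllowActionsUseUnsecureNodeVersion). A workflow-level value, when present, takes priority over the machine-level environment variable:

// Simplified illustration of the opt-out resolution above; not runner code.
using System;
using System.Collections.Generic;

class NodeVersionOptOutDemo
{
    static bool IsOptOut(IDictionary<string, string> workflowEnv, string machineValue, string key)
    {
        workflowEnv.TryGetValue(key, out var workflowValue);
        if (!string.IsNullOrEmpty(workflowValue))
        {
            // A workflow-level setting wins whenever it is present at all.
            return bool.TryParse(workflowValue, out var w) && w;
        }
        return bool.TryParse(machineValue, out var m) && m;
    }

    static void Main()
    {
        var workflowEnv = new Dictionary<string, string> { ["ALLOW_UNSECURE_NODE_VERSION"] = "true" }; // placeholder key
        // Opted out via the workflow env block, so node12 would be left as-is.
        Console.WriteLine(IsOptOut(workflowEnv, machineValue: null, key: "ALLOW_UNSECURE_NODE_VERSION"));
    }
}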

@@ -1,12 +1,11 @@
using System.IO;
using System;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using GitHub.DistributedTask.WebApi;
using Pipelines = GitHub.DistributedTask.Pipelines;
using System;
using System.Linq;

namespace GitHub.Runner.Worker.Handlers
{
@@ -74,19 +73,13 @@ namespace GitHub.Runner.Worker.Handlers
target = Data.Post;
}

// Add Telemetry to JobContext to send with JobCompleteMessage
// Set extra telemetry based on the current context.
if (stage == ActionRunStage.Main)
{
var telemetry = new ActionsStepTelemetry
{
Ref = GetActionRef(),
HasPreStep = Data.HasPre,
HasPostStep = Data.HasPost,
IsEmbedded = ExecutionContext.IsEmbedded,
Type = Data.NodeVersion
};
ExecutionContext.Root.ActionsStepsTelemetry.Add(telemetry);
ExecutionContext.StepTelemetry.HasPreStep = Data.HasPre;
ExecutionContext.StepTelemetry.HasPostStep = Data.HasPost;
}
ExecutionContext.StepTelemetry.Type = Data.NodeVersion;

ArgUtil.NotNullOrEmpty(target, nameof(target));
target = Path.Combine(ActionDirectory, target);

@@ -1,7 +1,7 @@
using System.Threading.Tasks;
using System;
using GitHub.Runner.Sdk;
using System;
using System.Threading.Tasks;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using Pipelines = GitHub.DistributedTask.Pipelines;

namespace GitHub.Runner.Worker.Handlers
@@ -35,6 +35,8 @@ namespace GitHub.Runner.Worker.Handlers
}

ArgUtil.NotNullOrEmpty(plugin, nameof(plugin));
// Set extra telemetry based on the current context.
ExecutionContext.StepTelemetry.Type = plugin;

// Update the env dictionary.
AddPrependPathToEnvironment();
Some files were not shown because too many files have changed in this diff.