Compare commits


33 Commits

Author  SHA1  Message  Date
Tingluo Huang  c645de9aee  Delete docker-image.yml  2021-12-13 23:25:57 -05:00
TingluoHuang  0e4f76ec4e  .  2021-10-28 22:59:19 -04:00
TingluoHuang  af18df4621  .  2021-10-28 21:39:55 -04:00
TingluoHuang  5215d95637  .  2021-10-28 21:31:52 -04:00
TingluoHuang  e750eb7e38  .  2021-10-28 21:20:48 -04:00
TingluoHuang  ca1f621077  .  2021-10-28 21:07:52 -04:00
TingluoHuang  80d0b58f3c  .  2021-10-28 19:57:37 -04:00
TingluoHuang  11ff2be7e9  .  2021-10-28 19:48:42 -04:00
TingluoHuang  3ce763338d  .  2021-10-28 19:40:25 -04:00
TingluoHuang  a45c0278e6  .  2021-10-28 19:18:21 -04:00
TingluoHuang  658d36c1bc  .  2021-10-28 19:01:29 -04:00
TingluoHuang  ca3b803237  .  2021-10-28 18:50:44 -04:00
TingluoHuang  4fa691f73e  .  2021-10-28 18:40:09 -04:00
TingluoHuang  dfcfae49e5  .  2021-10-14 16:57:24 -04:00
TingluoHuang  1235dc1cea  .  2021-10-14 16:53:58 -04:00
TingluoHuang  cc0d0bed90  .  2021-10-14 16:49:54 -04:00
TingluoHuang  0fac863568  .  2021-10-14 16:46:45 -04:00
TingluoHuang  42e7359f5c  .  2021-10-14 16:40:42 -04:00
TingluoHuang  5639175ecb  .  2021-10-14 16:33:36 -04:00
TingluoHuang  7128998d77  .  2021-10-14 16:29:59 -04:00
TingluoHuang  f37e9f80a6  .  2021-10-14 15:33:18 -04:00
TingluoHuang  0fa08423d2  .  2021-10-14 15:28:59 -04:00
TingluoHuang  029106a1dc  .  2021-10-14 15:11:17 -04:00
TingluoHuang  493a2a0bf7  .  2021-10-14 15:02:29 -04:00
TingluoHuang  43f983486e  .  2021-10-14 14:59:34 -04:00
TingluoHuang  f6053b616c  .  2021-10-14 14:55:12 -04:00
TingluoHuang  4f4608b710  .  2021-10-14 14:52:45 -04:00
TingluoHuang  28686c40d2  .  2021-10-14 14:50:28 -04:00
TingluoHuang  ce1679bb6f  .  2021-10-14 14:48:42 -04:00
TingluoHuang  0a7611b0b5  podman  2021-10-14 14:45:09 -04:00
TingluoHuang  b3fee33a92  ref_* context.  2021-10-13 09:58:07 -04:00
Ferenc Hammerl  d83ef5549e  Keep env vars alphabetical  2021-10-13 09:57:37 -04:00
TingluoHuang  fe6719d120  c  2021-10-13 09:57:37 -04:00
128 changed files with 29366 additions and 2478 deletions


@@ -1,18 +1,12 @@
---
name: 🛑 Report a bug in the runner application
about: If you have issues with GitHub Actions, please follow the "support for GitHub Actions" link, below.
name: Bug report
about: Create a report to help us improve
title: ''
labels: bug
assignees: ''
---
<!--
👋 You're opening a bug report against the GitHub Actions **runner application**.
🛑 Please stop if you're not certain that the bug you're seeing is in the runner application - if you have general problems with actions, workflows, or runners, please see the [GitHub Community Support Forum](https://github.community/c/code-to-cloud/52) which is actively monitored. Using the forum ensures that we route your problem to the correct team. 😃
-->
**Describe the bug**
A clear and concise description of what the bug is.


@@ -1,11 +0,0 @@
blank_issues_enabled: false
contact_links:
- name: ✅ Support for GitHub Actions
url: https://github.community/c/code-to-cloud/52
about: If you have questions about GitHub Actions or need support writing workflows, please ask in the GitHub Community Support forum.
- name: ✅ Feedback and suggestions for GitHub Actions
url: https://github.com/github/feedback/discussions/categories/actions-and-packages-feedback
about: If you have feedback or suggestions about GitHub Actions, please open a discussion (or add to an existing one) in the GitHub Actions Feedback. GitHub Actions Product Managers and Engineers monitor the feedback forum.
- name: ‼️ GitHub Security Bug Bounty
url: https://bounty.github.com/
about: Please report security vulnerabilities here.


@@ -1,24 +1,19 @@
---
name: 🛑 Request a feature in the runner application
about: If you have feature requests for GitHub Actions, please use the "feedback and suggestions for GitHub Actions" link below.
name: Feature Request
about: Create a request to help us improve
title: ''
labels: enhancement
assignees: ''
---
<!--
👋 You're opening a request for an enhancement in the GitHub Actions **runner application**.
Thank you 🙇‍♀ for wanting to create a feature in this repository. Before you do, please ensure you are filing the issue in the right place. Issues should only be opened here if the issue **relates to code in this repository**.
🛑 Please stop if you're not certain that the feature you want is in the runner application - if you have a suggestion for improving GitHub Actions, please see the [GitHub Actions Feedback](https://github.com/github/feedback/discussions/categories/actions-and-packages-feedback) discussion forum which is actively monitored. Using the forum ensures that we route your problem to the correct team. 😃
Some additional useful links:
* If you have found a security issue [please submit it here](https://hackerone.com/github)
* If you have questions or issues with the service, writing workflows or actions, then please [visit the GitHub Community Forum's Actions Board](https://github.community/t5/GitHub-Actions/bd-p/actions)
* If you are having an issue or have a question about GitHub Actions then please [contact customer support](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/about-github-actions#contacting-support)
* If you are having an issue or question about GitHub Actions then please [contact customer support](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/about-github-actions#contacting-support)
If you have a feature request that is relevant to this repository, the runner, then please include the information below:
-->
**Describe the enhancement**
A clear and concise description of the feature or enhancement you need.


@@ -37,7 +37,7 @@ jobs:
devScript: ./dev.sh
- runtime: win-x64
os: windows-2019
os: windows-latest
devScript: ./dev
runs-on: ${{ matrix.os }}
@@ -57,29 +57,6 @@ jobs:
working-directory: src
if: matrix.runtime != 'linux-arm64' && matrix.runtime != 'linux-arm'
# Check runtime/externals hash
- name: Compute/Compare runtime and externals Hash
shell: bash
run: |
echo "Current dotnet runtime hash result: $DOTNET_RUNTIME_HASH"
echo "Current Externals hash result: $EXTERNALS_HASH"
NeedUpdate=0
if [ "$EXTERNALS_HASH" != "$(cat ./src/Misc/contentHash/externals/${{ matrix.runtime }})" ] ;then
echo Hash mismatch, Update ./src/Misc/contentHash/externals/${{ matrix.runtime }} to $EXTERNALS_HASH
NeedUpdate=1
fi
if [ "$DOTNET_RUNTIME_HASH" != "$(cat ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }})" ] ;then
echo Hash mismatch, Update ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }} to $DOTNET_RUNTIME_HASH
NeedUpdate=1
fi
exit $NeedUpdate
env:
DOTNET_RUNTIME_HASH: ${{hashFiles('**/_layout_trims/runtime/**/*')}}
EXTERNALS_HASH: ${{hashFiles('**/_layout_trims/externals/**/*')}}
# Create runner package tar.gz/zip
- name: Package Release
if: github.event_name != 'pull_request'
@@ -90,11 +67,7 @@ jobs:
# Upload runner package tar.gz/zip as artifact
- name: Publish Artifact
if: github.event_name != 'pull_request'
uses: actions/upload-artifact@v2
uses: actions/upload-artifact@v1
with:
name: runner-package-${{ matrix.runtime }}
path: |
_package
_package_trims/trim_externals
_package_trims/trim_runtime
_package_trims/trim_runtime_externals
path: _package


@@ -1,12 +1,7 @@
name: "Code Scanning - Action"
permissions:
security-events: write
on:
push:
branches:
- main
pull_request:
schedule:
- cron: '0 0 * * 0'


@@ -51,21 +51,6 @@ jobs:
linux-arm-sha: ${{ steps.sha.outputs.linux-arm-sha256 }}
win-x64-sha: ${{ steps.sha.outputs.win-x64-sha256 }}
osx-x64-sha: ${{ steps.sha.outputs.osx-x64-sha256 }}
linux-x64-sha-noexternals: ${{ steps.sha_noexternals.outputs.linux-x64-sha256 }}
linux-arm64-sha-noexternals: ${{ steps.sha_noexternals.outputs.linux-arm64-sha256 }}
linux-arm-sha-noexternals: ${{ steps.sha_noexternals.outputs.linux-arm-sha256 }}
win-x64-sha-noexternals: ${{ steps.sha_noexternals.outputs.win-x64-sha256 }}
osx-x64-sha-noexternals: ${{ steps.sha_noexternals.outputs.osx-x64-sha256 }}
linux-x64-sha-noruntime: ${{ steps.sha_noruntime.outputs.linux-x64-sha256 }}
linux-arm64-sha-noruntime: ${{ steps.sha_noruntime.outputs.linux-arm64-sha256 }}
linux-arm-sha-noruntime: ${{ steps.sha_noruntime.outputs.linux-arm-sha256 }}
win-x64-sha-noruntime: ${{ steps.sha_noruntime.outputs.win-x64-sha256 }}
osx-x64-sha-noruntime: ${{ steps.sha_noruntime.outputs.osx-x64-sha256 }}
linux-x64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.linux-x64-sha256 }}
linux-arm64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.linux-arm64-sha256 }}
linux-arm-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.linux-arm-sha256 }}
win-x64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.win-x64-sha256 }}
osx-x64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.osx-x64-sha256 }}
strategy:
matrix:
runtime: [ linux-x64, linux-arm64, linux-arm, win-x64, osx-x64 ]
@@ -87,7 +72,7 @@ jobs:
devScript: ./dev.sh
- runtime: win-x64
os: windows-2019
os: windows-latest
devScript: ./dev
runs-on: ${{ matrix.os }}
@@ -114,6 +99,14 @@ jobs:
${{ matrix.devScript }} package Release ${{ matrix.runtime }}
working-directory: src
# Upload runner package tar.gz/zip as artifact.
# Since each package name is unique, we don't need to put ${{matrix}} info into the artifact name
- name: Publish Artifact
if: github.event_name != 'pull_request'
uses: actions/upload-artifact@v1
with:
name: runner-packages
path: _package
# compute shas and set as job outputs to use in release notes
- run: brew install coreutils #needed for shasum util
if: ${{ matrix.os == 'macOS-latest' }}
@@ -127,91 +120,6 @@ jobs:
id: sha
name: Compute SHA256
working-directory: _package
- run: |
file=$(ls)
sha=$(sha256sum $file | awk '{ print $1 }')
echo "Computed sha256: $sha for $file"
echo "::set-output name=${{matrix.runtime}}-sha256::$sha"
echo "::set-output name=sha256::$sha"
shell: bash
id: sha_noexternals
name: Compute SHA256
working-directory: _package_trims/trim_externals
- run: |
file=$(ls)
sha=$(sha256sum $file | awk '{ print $1 }')
echo "Computed sha256: $sha for $file"
echo "::set-output name=${{matrix.runtime}}-sha256::$sha"
echo "::set-output name=sha256::$sha"
shell: bash
id: sha_noruntime
name: Compute SHA256
working-directory: _package_trims/trim_runtime
- run: |
file=$(ls)
sha=$(sha256sum $file | awk '{ print $1 }')
echo "Computed sha256: $sha for $file"
echo "::set-output name=${{matrix.runtime}}-sha256::$sha"
echo "::set-output name=sha256::$sha"
shell: bash
id: sha_noruntime_noexternals
name: Compute SHA256
working-directory: _package_trims/trim_runtime_externals
- name: Create trimmedpackages.json for ${{ matrix.runtime }}
if: matrix.runtime == 'win-x64'
uses: actions/github-script@0.3.0
with:
github-token: ${{secrets.GITHUB_TOKEN}}
script: |
const core = require('@actions/core')
const fs = require('fs');
const runnerVersion = fs.readFileSync('src/runnerversion', 'utf8').replace(/\n$/g, '')
var trimmedPackages = fs.readFileSync('src/Misc/trimmedpackages_zip.json', 'utf8').replace(/<RUNNER_VERSION>/g, runnerVersion).replace(/<RUNNER_PLATFORM>/g, '${{ matrix.runtime }}')
trimmedPackages = trimmedPackages.replace(/<RUNTIME_HASH>/g, '${{hashFiles('**/_layout_trims/runtime/**/*')}}')
trimmedPackages = trimmedPackages.replace(/<EXTERNALS_HASH>/g, '${{hashFiles('**/_layout_trims/externals/**/*')}}')
trimmedPackages = trimmedPackages.replace(/<NO_RUNTIME_EXTERNALS_HASH>/g, '${{steps.sha_noruntime_noexternals.outputs.sha256}}')
trimmedPackages = trimmedPackages.replace(/<NO_RUNTIME_HASH>/g, '${{steps.sha_noruntime.outputs.sha256}}')
trimmedPackages = trimmedPackages.replace(/<NO_EXTERNALS_HASH>/g, '${{steps.sha_noexternals.outputs.sha256}}')
console.log(trimmedPackages)
fs.writeFileSync('${{ matrix.runtime }}-trimmedpackages.json', trimmedPackages)
- name: Create trimmedpackages.json for ${{ matrix.runtime }}
if: matrix.runtime != 'win-x64'
uses: actions/github-script@0.3.0
with:
github-token: ${{secrets.GITHUB_TOKEN}}
script: |
const core = require('@actions/core')
const fs = require('fs');
const runnerVersion = fs.readFileSync('src/runnerversion', 'utf8').replace(/\n$/g, '')
var trimmedPackages = fs.readFileSync('src/Misc/trimmedpackages_targz.json', 'utf8').replace(/<RUNNER_VERSION>/g, runnerVersion).replace(/<RUNNER_PLATFORM>/g, '${{ matrix.runtime }}')
trimmedPackages = trimmedPackages.replace(/<RUNTIME_HASH>/g, '${{hashFiles('**/_layout_trims/runtime/**/*')}}')
trimmedPackages = trimmedPackages.replace(/<EXTERNALS_HASH>/g, '${{hashFiles('**/_layout_trims/externals/**/*')}}')
trimmedPackages = trimmedPackages.replace(/<NO_RUNTIME_EXTERNALS_HASH>/g, '${{steps.sha_noruntime_noexternals.outputs.sha256}}')
trimmedPackages = trimmedPackages.replace(/<NO_RUNTIME_HASH>/g, '${{steps.sha_noruntime.outputs.sha256}}')
trimmedPackages = trimmedPackages.replace(/<NO_EXTERNALS_HASH>/g, '${{steps.sha_noexternals.outputs.sha256}}')
console.log(trimmedPackages)
fs.writeFileSync('${{ matrix.runtime }}-trimmedpackages.json', trimmedPackages)
# Upload runner package tar.gz/zip as artifact.
# Since each package name is unique, we don't need to put ${{matrix}} info into the artifact name
- name: Publish Artifact
if: github.event_name != 'pull_request'
uses: actions/upload-artifact@v2
with:
name: runner-packages
path: |
_package
_package_trims/trim_externals
_package_trims/trim_runtime
_package_trims/trim_runtime_externals
${{ matrix.runtime }}-trimmedpackages.json
release:
needs: build
runs-on: ubuntu-latest
@@ -242,21 +150,6 @@ jobs:
releaseNote = releaseNote.replace(/<LINUX_X64_SHA>/g, '${{needs.build.outputs.linux-x64-sha}}')
releaseNote = releaseNote.replace(/<LINUX_ARM_SHA>/g, '${{needs.build.outputs.linux-arm-sha}}')
releaseNote = releaseNote.replace(/<LINUX_ARM64_SHA>/g, '${{needs.build.outputs.linux-arm64-sha}}')
releaseNote = releaseNote.replace(/<WIN_X64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.win-x64-sha-noexternals}}')
releaseNote = releaseNote.replace(/<OSX_X64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.osx-x64-sha-noexternals}}')
releaseNote = releaseNote.replace(/<LINUX_X64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.linux-x64-sha-noexternals}}')
releaseNote = releaseNote.replace(/<LINUX_ARM_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.linux-arm-sha-noexternals}}')
releaseNote = releaseNote.replace(/<LINUX_ARM64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.linux-arm64-sha-noexternals}}')
releaseNote = releaseNote.replace(/<WIN_X64_SHA_NORUNTIME>/g, '${{needs.build.outputs.win-x64-sha-noruntime}}')
releaseNote = releaseNote.replace(/<OSX_X64_SHA_NORUNTIME>/g, '${{needs.build.outputs.osx-x64-sha-noruntime}}')
releaseNote = releaseNote.replace(/<LINUX_X64_SHA_NORUNTIME>/g, '${{needs.build.outputs.linux-x64-sha-noruntime}}')
releaseNote = releaseNote.replace(/<LINUX_ARM_SHA_NORUNTIME>/g, '${{needs.build.outputs.linux-arm-sha-noruntime}}')
releaseNote = releaseNote.replace(/<LINUX_ARM64_SHA_NORUNTIME>/g, '${{needs.build.outputs.linux-arm64-sha-noruntime}}')
releaseNote = releaseNote.replace(/<WIN_X64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.win-x64-sha-noruntime-noexternals}}')
releaseNote = releaseNote.replace(/<OSX_X64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.osx-x64-sha-noruntime-noexternals}}')
releaseNote = releaseNote.replace(/<LINUX_X64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.linux-x64-sha-noruntime-noexternals}}')
releaseNote = releaseNote.replace(/<LINUX_ARM_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.linux-arm-sha-noruntime-noexternals}}')
releaseNote = releaseNote.replace(/<LINUX_ARM64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.linux-arm64-sha-noruntime-noexternals}}')
console.log(releaseNote)
core.setOutput('version', runnerVersion);
core.setOutput('note', releaseNote);
@@ -272,14 +165,14 @@ jobs:
body: |
${{ steps.releaseNote.outputs.note }}
# Upload release assets (full runner packages)
# Upload release assets
- name: Upload Release Asset (win-x64)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}.zip
asset_path: ${{ github.workspace }}/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}.zip
asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}.zip
asset_content_type: application/octet-stream
@@ -289,7 +182,7 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
asset_path: ${{ github.workspace }}/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
asset_content_type: application/octet-stream
@@ -299,7 +192,7 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
asset_path: ${{ github.workspace }}/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
asset_content_type: application/octet-stream
@@ -309,7 +202,7 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}.tar.gz
asset_path: ${{ github.workspace }}/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}.tar.gz
asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}.tar.gz
asset_content_type: application/octet-stream
@@ -319,210 +212,6 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
asset_path: ${{ github.workspace }}/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
asset_content_type: application/octet-stream
# Upload release assets (trim externals)
- name: Upload Release Asset (win-x64-noexternals)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noexternals.zip
asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noexternals.zip
asset_content_type: application/octet-stream
- name: Upload Release Asset (linux-x64-noexternals)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
asset_content_type: application/octet-stream
- name: Upload Release Asset (osx-x64-noexternals)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
asset_content_type: application/octet-stream
- name: Upload Release Asset (linux-arm-noexternals)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
asset_content_type: application/octet-stream
- name: Upload Release Asset (linux-arm64-noexternals)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
asset_content_type: application/octet-stream
# Upload release assets (trim runtime)
- name: Upload Release Asset (win-x64-noruntime)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noruntime.zip
asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noruntime.zip
asset_content_type: application/octet-stream
- name: Upload Release Asset (linux-x64-noruntime)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
asset_content_type: application/octet-stream
- name: Upload Release Asset (osx-x64-noruntime)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
asset_content_type: application/octet-stream
- name: Upload Release Asset (linux-arm-noruntime)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
asset_content_type: application/octet-stream
- name: Upload Release Asset (linux-arm64-noruntime)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
asset_content_type: application/octet-stream
# Upload release assets (trim runtime and externals)
- name: Upload Release Asset (win-x64-noruntime-noexternals)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.zip
asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.zip
asset_content_type: application/octet-stream
- name: Upload Release Asset (linux-x64-noruntime-noexternals)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
asset_content_type: application/octet-stream
- name: Upload Release Asset (osx-x64-noruntime-noexternals)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
asset_content_type: application/octet-stream
- name: Upload Release Asset (linux-arm-noruntime-noexternals)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
asset_content_type: application/octet-stream
- name: Upload Release Asset (linux-arm64-noruntime-noexternals)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
asset_content_type: application/octet-stream
# Upload release assets (trimmedpackages.json)
- name: Upload Release Asset (win-x64-trimmedpackages.json)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/win-x64-trimmedpackages.json
asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
asset_content_type: application/octet-stream
- name: Upload Release Asset (linux-x64-trimmedpackages.json)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/linux-x64-trimmedpackages.json
asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
asset_content_type: application/octet-stream
- name: Upload Release Asset (osx-x64-trimmedpackages.json)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/osx-x64-trimmedpackages.json
asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
asset_content_type: application/octet-stream
- name: Upload Release Asset (linux-arm-trimmedpackages.json)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/linux-arm-trimmedpackages.json
asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
asset_content_type: application/octet-stream
- name: Upload Release Asset (linux-arm64-trimmedpackages.json)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/linux-arm64-trimmedpackages.json
asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
asset_content_type: application/octet-stream

.gitignore vendored

@@ -19,9 +19,7 @@
node_modules
_downloads
_layout
_layout_trims
_package
_package_trims
_dotnetsdk
TestResults
TestLogs


@@ -5,7 +5,7 @@
# GitHub Actions Runner
[![Actions Status](https://github.com/actions/runner/workflows/Runner%20CI/badge.svg)](https://github.com/actions/runner/actions)
[![Runner E2E Test](https://github.com/actions-canary/actions-runner-e2e/actions/workflows/runner_e2etest.yml/badge.svg)](https://github.com/actions-canary/actions-runner-e2e/actions/workflows/runner_e2etest.yml)
[![Runner E2E Test](https://github.com/actions/runner/workflows/Runner%20E2E%20Test/badge.svg)](https://github.com/actions/runner/actions)
The runner is the application that runs a job from a GitHub Actions workflow. It is used by GitHub Actions in the [hosted virtual environments](https://github.com/actions/virtual-environments), or you can [self-host the runner](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/about-self-hosted-runners) in your own environment.


@@ -1,71 +0,0 @@
# ADR 1438: Support Conditionals In Composite Actions
**Date**: 2021-10-13
**Status**: Accepted
## Context
We recently shipped composite actions, which allow you to reuse individual steps inside an action.
However, one of the [most requested features](https://github.com/actions/runner/issues/834) has been a way to support the `if` keyword.
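For illustration, here is a minimal composite action with the kind of step-level `if` this ADR is about (the file and step contents below are hypothetical):
```yaml
# action.yml of a hypothetical composite action
name: 'Build then upload'
description: 'Two reusable steps; the `if` on the second step is the requested feature'
runs:
  using: 'composite'
  steps:
    - run: ./build.sh
      shell: bash
    - if: success()   # requested: only run while this composite action is still successful
      run: ./upload.sh
      shell: bash
```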
### Goals
- We want to stay consistent with current behavior
- We want to support conditionals via the `if` keyword
- Our built-in functions like `success()` should be expressible without calling them; for example, you can currently write `job.status == success` rather than `success()`.
### How does composite currently work?
Currently, we have limited conditional support in composite actions for `pre` and `post` steps.
These are based on the `job status` and support functions like `always()`, `failure()`, `success()`, and `cancelled()`.
However, generic or main steps do **not** support conditionals.
By default, in a regular workflow, a step runs on the `success()` condition, which looks at the **job status** and runs the step only if the job is successful.
By default, in a composite action, main steps run until a single step fails in that composite action, at which point the composite action halts early. It does **not** care about the job status.
Pre and post steps in composite actions use the job status to determine whether they should run.
### How do we go forward?
If we think about what composite actions currently do when invoking main steps, they are effectively checking whether the current composite action is successful.
Let's formalize that concept:
- We will add an `action_status` field to the github context to mimic the [job context's status](https://docs.github.com/en/actions/learn-github-actions/contexts#job-context).
- There is an existing precedent for this: `action_path`, which is only set on the github context for composite actions.
- In a composite action, the `success()` function in a main step will check whether `action_status == success` rather than `job_status == success`; `failure()` will work the same way (see the sketch below).
- Pre and post steps in composite actions will not change; they will continue to check the job status.
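As a sketch of the proposed behavior (step contents are hypothetical; `github.action_status` is the new field described above):
```yaml
# Main steps inside a hypothetical composite action
steps:
  - run: exit 1
    shell: bash
  - if: success()   # proposed: evaluated against this composite action's own status, so this step is skipped
    run: echo "skipped - the composite action has already failed"
    shell: bash
  - if: failure()   # proposed: runs because a previous step in this composite action failed
    run: echo "cleaning up after the failed step"
    shell: bash
```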
### Nested Scenario
For nested composite actions, we will follow the existing behavior: you only care about your current composite action, not any parents.
For example, let's imagine a scenario with a simple nested composite action:
```
- Job
  - Regular Step
  - Composite Action
    - runs: exit 1
    - if: always()
      uses: A child composite action
        - if: success()
          runs: echo "this should print"
        - runs: echo "this should also print"
    - if: success()
      runs: echo "this will not print as the current composite action has failed already"
```
In this example, the child composite action's steps should run: the child composite action has not yet failed, so it runs all of its steps until one fails. This is consistent with how a composite action currently works in production when the main job fails but the composite action is invoked with `if: always()` or `if: failure()`.
### Other options explored
We could add `current_step_status` to the job context rather than `__status` to the steps context; however, this comes with a few major downsides:
- We would need to support the field for every type of step, because it is non-trivial to remove a field from the job context once it has been added (it is read-only).
- For all actions besides composite, it would only ever be `success`.
- It is odd to have a `current_step` value on the job context.

We also explored a `__status` field on the steps context:
- The `__` prefix is required to prevent colliding with a step whose `id` is `status`.
- This felt wrong because the naming was not smooth and did not fit current conventions.
### Consequences
- The github context has a new field for the status of the current composite action.
- We support conditionals in composite actions.
- We keep the existing behavior for all users, but allow them to expand that functionality.

Binary file not shown (image size: 138 KiB before, 158 KiB after).


@@ -23,8 +23,8 @@ You might see something like this which indicate a dependency's missing.
./config.sh
libunwind.so.8 => not found
libunwind-x86_64.so.8 => not found
Dependencies is missing for Dotnet Core 6.0
Execute ./bin/installdependencies.sh to install any missing Dotnet Core 6.0 dependencies.
Dependencies is missing for Dotnet Core 3.0
Execute ./bin/installdependencies.sh to install any missing Dotnet Core 3.0 dependencies.
```
You can easily correct the problem by executing `./bin/installdependencies.sh`.
The `installdependencies.sh` script should install all required dependencies on all supported Linux versions

job.yml Normal file

@@ -0,0 +1,74 @@
apiVersion: rbac.authorization.k8s.io/v1
kind: Role
metadata:
name: pod-admin
namespace: default
rules:
- apiGroups: [""]
resources: ["pods", "pods/log", "pods/attach", "pods/exec"]
verbs: ["get", "list", "watch", "create", "update", "patch", "delete"]
---
apiVersion: rbac.authorization.k8s.io/v1
kind: RoleBinding
metadata:
name: default-pod-admin
namespace: default
roleRef:
apiGroup: rbac.authorization.k8s.io
kind: Role
name: pod-admin
subjects:
- kind: ServiceAccount
name: default
namespace: default
---
apiVersion: batch/v1
kind: Job
metadata:
namespace: default
name: actions-runners
spec:
template:
spec:
# hostNetwork: true
volumes:
- name: runner-working
emptyDir: {}
containers:
- name: k8srunner
image: huangtingluo/kube-runner:v0
imagePullPolicy: Always
volumeMounts:
- mountPath: /actions-runner/_work
name: runner-working
env:
- name: GITHUB_PAT
value: ghp_
- name: RUNNER_CONFIG_URL
value: https://github.com/bbq-beets/ting-test
- name: K8S_NODE_NAME
valueFrom:
fieldRef:
fieldPath: spec.nodeName
- name: K8S_POD_NAME
valueFrom:
fieldRef:
fieldPath: metadata.name
- name: K8S_POD_NAMESPACE
valueFrom:
fieldRef:
fieldPath: metadata.namespace
- name: K8S_POD_IP
valueFrom:
fieldRef:
fieldPath: status.podIP
- name: K8S_POD_SERVICE_ACCOUNT
valueFrom:
fieldRef:
fieldPath: spec.serviceAccountName
restartPolicy: Never
backoffLimit: 1
completions: 1
parallelism: 1


@@ -1,15 +1,11 @@
## Features
- N/A
## Bugs
- Fix breaking change in dotnet 6 around globalization-invariant. (#1609)
- Fixed an issue where ephemeral runners did not restart after upgrading (#1396)
## Misc
- N/A
## Windows x64
We recommend configuring the runner in a root folder of the Windows drive (e.g. "C:\actions-runner"). This will help avoid issues related to service identity folder permissions and long file path restrictions on Windows.
@@ -80,21 +76,3 @@ The SHA-256 checksums for the packages included in this build are shown below:
- actions-runner-linux-x64-<RUNNER_VERSION>.tar.gz <!-- BEGIN SHA linux-x64 --><LINUX_X64_SHA><!-- END SHA linux-x64 -->
- actions-runner-linux-arm64-<RUNNER_VERSION>.tar.gz <!-- BEGIN SHA linux-arm64 --><LINUX_ARM64_SHA><!-- END SHA linux-arm64 -->
- actions-runner-linux-arm-<RUNNER_VERSION>.tar.gz <!-- BEGIN SHA linux-arm --><LINUX_ARM_SHA><!-- END SHA linux-arm -->
- actions-runner-win-x64-<RUNNER_VERSION>-noexternals.zip <!-- BEGIN SHA win-x64_noexternals --><WIN_X64_SHA_NOEXTERNALS><!-- END SHA win-x64_noexternals -->
- actions-runner-osx-x64-<RUNNER_VERSION>-noexternals.tar.gz <!-- BEGIN SHA osx-x64_noexternals --><OSX_X64_SHA_NOEXTERNALS><!-- END SHA osx-x64_noexternals -->
- actions-runner-linux-x64-<RUNNER_VERSION>-noexternals.tar.gz <!-- BEGIN SHA linux-x64_noexternals --><LINUX_X64_SHA_NOEXTERNALS><!-- END SHA linux-x64_noexternals -->
- actions-runner-linux-arm64-<RUNNER_VERSION>-noexternals.tar.gz <!-- BEGIN SHA linux-arm64_noexternals --><LINUX_ARM64_SHA_NOEXTERNALS><!-- END SHA linux-arm64_noexternals -->
- actions-runner-linux-arm-<RUNNER_VERSION>-noexternals.tar.gz <!-- BEGIN SHA linux-arm_noexternals --><LINUX_ARM_SHA_NOEXTERNALS><!-- END SHA linux-arm_noexternals -->
- actions-runner-win-x64-<RUNNER_VERSION>-noruntime.zip <!-- BEGIN SHA win-x64_noruntime --><WIN_X64_SHA_NORUNTIME><!-- END SHA win-x64_noruntime -->
- actions-runner-osx-x64-<RUNNER_VERSION>-noruntime.tar.gz <!-- BEGIN SHA osx-x64_noruntime --><OSX_X64_SHA_NORUNTIME><!-- END SHA osx-x64_noruntime -->
- actions-runner-linux-x64-<RUNNER_VERSION>-noruntime.tar.gz <!-- BEGIN SHA linux-x64_noruntime --><LINUX_X64_SHA_NORUNTIME><!-- END SHA linux-x64_noruntime -->
- actions-runner-linux-arm64-<RUNNER_VERSION>-noruntime.tar.gz <!-- BEGIN SHA linux-arm64_noruntime --><LINUX_ARM64_SHA_NORUNTIME><!-- END SHA linux-arm64_noruntime -->
- actions-runner-linux-arm-<RUNNER_VERSION>-noruntime.tar.gz <!-- BEGIN SHA linux-arm_noruntime --><LINUX_ARM_SHA_NORUNTIME><!-- END SHA linux-arm_noruntime -->
- actions-runner-win-x64-<RUNNER_VERSION>-noruntime-noexternals.zip <!-- BEGIN SHA win-x64_noruntime_noexternals --><WIN_X64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA win-x64_noruntime_noexternals -->
- actions-runner-osx-x64-<RUNNER_VERSION>-noruntime-noexternals.tar.gz <!-- BEGIN SHA osx-x64_noruntime_noexternals --><OSX_X64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA osx-x64_noruntime_noexternals -->
- actions-runner-linux-x64-<RUNNER_VERSION>-noruntime-noexternals.tar.gz <!-- BEGIN SHA linux-x64_noruntime_noexternals --><LINUX_X64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA linux-x64_noruntime_noexternals -->
- actions-runner-linux-arm64-<RUNNER_VERSION>-noruntime-noexternals.tar.gz <!-- BEGIN SHA linux-arm64_noruntime_noexternals --><LINUX_ARM64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA linux-arm64_noruntime_noexternals -->
- actions-runner-linux-arm-<RUNNER_VERSION>-noruntime-noexternals.tar.gz <!-- BEGIN SHA linux-arm_noruntime_noexternals --><LINUX_ARM_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA linux-arm_noruntime_noexternals -->


@@ -1 +1 @@
2.286.1
<Update to ./src/runnerversion when creating release>

src/Dockerfile Normal file

@@ -0,0 +1,78 @@
FROM mcr.microsoft.com/dotnet/sdk:3.1 AS Build
# ENV RUNNER_CONFIG_URL=""
# ENV GITHUB_PAT=""
# ENV RUNNER_NAME=""
# ENV RUNNER_GROUP=""
# ENV RUNNER_LABELS=""
# ENV GITHUB_RUNNER_SCOPE=""
# ENV GITHUB_SERVER_URL=""
# ENV GITHUB_API_URL=""
# ENV K8S_HOST_IP=""
RUN apt-get update --fix-missing \
&& apt-get install -y --no-install-recommends \
curl \
# jq \
# git \
apt-utils \
apt-transport-https \
unzip \
net-tools\
gnupg2\
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*
# Install kubectl
# RUN curl -s https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key add - && \
# echo "deb https://apt.kubernetes.io/ kubernetes-xenial main" | tee -a /etc/apt/sources.list.d/kubernetes.list && \
# apt-get update && apt-get -y install --no-install-recommends kubectl
# Install docker
# RUN curl -fsSL https://get.docker.com -o get-docker.sh
# RUN sh get-docker.sh
# Allow runner to run as root
# ENV RUNNER_ALLOW_RUNASROOT=1
# Directory for runner to operate in
RUN mkdir /actions-runner
RUN mkdir /actions-runner/src
WORKDIR /actions-runner/src
COPY ./ /actions-runner/src
RUN /actions-runner/src/dev.sh l
FROM mcr.microsoft.com/dotnet/core/runtime-deps:3.1
ENV RUNNER_CONFIG_URL=""
ENV GITHUB_PAT=""
RUN apt-get update --fix-missing \
&& apt-get install -y --no-install-recommends \
curl \
# jq \
# git \
# apt-utils \
# apt-transport-https \
# unzip \
# net-tools\
gnupg2\
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*
# Install kubectl
RUN curl -s https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key add - && \
echo "deb https://apt.kubernetes.io/ kubernetes-xenial main" | tee -a /etc/apt/sources.list.d/kubernetes.list && \
apt-get update && apt-get -y install --no-install-recommends kubectl
# Allow runner to run as root
ENV RUNNER_ALLOW_RUNASROOT=1
# Directory for runner to operate in
RUN mkdir /actions-runner
WORKDIR /actions-runner
COPY --from=Build /actions-runner/_layout /actions-runner
ENTRYPOINT ["./entrypoint.sh"]


@@ -0,0 +1,3 @@
dist/
lib/
node_modules/


@@ -0,0 +1,59 @@
{
"plugins": ["jest", "@typescript-eslint"],
"extends": ["plugin:github/es6"],
"parser": "@typescript-eslint/parser",
"parserOptions": {
"ecmaVersion": 9,
"sourceType": "module",
"project": "./tsconfig.json"
},
"rules": {
"eslint-comments/no-use": "off",
"import/no-namespace": "off",
"no-console": "off",
"no-unused-vars": "off",
"@typescript-eslint/no-unused-vars": "error",
"@typescript-eslint/explicit-member-accessibility": ["error", {"accessibility": "no-public"}],
"@typescript-eslint/no-require-imports": "error",
"@typescript-eslint/array-type": "error",
"@typescript-eslint/await-thenable": "error",
"@typescript-eslint/ban-ts-ignore": "error",
"camelcase": "off",
"@typescript-eslint/camelcase": "error",
"@typescript-eslint/class-name-casing": "error",
"@typescript-eslint/explicit-function-return-type": ["error", {"allowExpressions": true}],
"@typescript-eslint/func-call-spacing": ["error", "never"],
"@typescript-eslint/generic-type-naming": ["error", "^[A-Z][A-Za-z]*$"],
"@typescript-eslint/no-array-constructor": "error",
"@typescript-eslint/no-empty-interface": "error",
"@typescript-eslint/no-explicit-any": "error",
"@typescript-eslint/no-extraneous-class": "error",
"@typescript-eslint/no-for-in-array": "error",
"@typescript-eslint/no-inferrable-types": "error",
"@typescript-eslint/no-misused-new": "error",
"@typescript-eslint/no-namespace": "error",
"@typescript-eslint/no-non-null-assertion": "warn",
"@typescript-eslint/no-object-literal-type-assertion": "error",
"@typescript-eslint/no-unnecessary-qualifier": "error",
"@typescript-eslint/no-unnecessary-type-assertion": "error",
"@typescript-eslint/no-useless-constructor": "error",
"@typescript-eslint/no-var-requires": "error",
"@typescript-eslint/prefer-for-of": "warn",
"@typescript-eslint/prefer-function-type": "warn",
"@typescript-eslint/prefer-includes": "error",
"@typescript-eslint/prefer-interface": "error",
"@typescript-eslint/prefer-string-starts-ends-with": "error",
"@typescript-eslint/promise-function-async": "error",
"@typescript-eslint/require-array-sort-compare": "error",
"@typescript-eslint/restrict-plus-operands": "error",
"semi": "off",
"@typescript-eslint/semi": ["error", "never"],
"@typescript-eslint/type-annotation-spacing": "error",
"@typescript-eslint/unbound-method": "error"
},
"env": {
"node": true,
"es6": true,
"jest/globals": true
}
}


@@ -0,0 +1,3 @@
dist/
lib/
node_modules/


@@ -0,0 +1,11 @@
{
"printWidth": 80,
"tabWidth": 2,
"useTabs": false,
"semi": false,
"singleQuote": true,
"trailingComma": "none",
"bracketSpacing": false,
"arrowParens": "avoid",
"parser": "typescript"
}


@@ -0,0 +1 @@
To update kubeInnerHandler under `Misc/layoutbin` run `npm install && npm run all`

File diff suppressed because it is too large.


@@ -0,0 +1,36 @@
{
"name": "kubeInnerHandler",
"version": "1.0.0",
"description": "GitHub Actions",
"main": "lib/kubeInnerHandler.js",
"scripts": {
"build": "tsc",
"format": "prettier --write **/*.ts",
"format-check": "prettier --check **/*.ts",
"lint": "eslint src/**/*.ts",
"pack": "ncc build -o ../../layoutbin/kubeInnerHandler",
"all": "npm run build && npm run format && npm run lint && npm run pack"
},
"repository": {
"type": "git",
"url": "git+https://github.com/actions/runner.git"
},
"keywords": [
"actions"
],
"author": "GitHub Actions",
"license": "MIT",
"dependencies": {
"@actions/exec": "^1.1.0",
"@actions/core": "^1.6.0"
},
"devDependencies": {
"@types/node": "^12.7.12",
"@typescript-eslint/parser": "^2.8.0",
"@zeit/ncc": "^0.20.5",
"eslint": "^6.8.0",
"eslint-plugin-github": "^2.0.0",
"prettier": "^1.19.1",
"typescript": "^3.6.4"
}
}


@@ -0,0 +1,49 @@
import * as exec from '@actions/exec'
import * as core from '@actions/core'
import * as events from 'events'
import * as readline from 'readline'
async function run(): Promise<void> {
let input = ''
const rl = readline.createInterface({
input: process.stdin
})
rl.on('line', line => {
core.debug(`Line from STDIN: ${line}`)
input = line
})
await events.once(rl, 'close')
core.debug(input)
const execInput = JSON.parse(input)
core.debug(JSON.stringify(execInput))
// podman exec -i --workdir /__w/canary/canary
// -e GITHUB_JOB -e GITHUB_REF -e GITHUB_SHA -e GITHUB_REPOSITORY
// -e GITHUB_REPOSITORY_OWNER -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER
// -e GITHUB_RETENTION_DAYS -e GITHUB_RUN_ATTEMPT -e GITHUB_ACTOR
// -e GITHUB_WORKFLOW -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GITHUB_EVENT_NAME
// -e GITHUB_SERVER_URL -e GITHUB_API_URL -e GITHUB_GRAPHQL_URL
// -e GITHUB_WORKSPACE -e GITHUB_ACTION -e GITHUB_EVENT_PATH -e GITHUB_ACTION_REPOSITORY
// -e GITHUB_ACTION_REF -e GITHUB_PATH -e GITHUB_ENV -e RUNNER_DEBUG
// -e RUNNER_OS -e RUNNER_NAME -e RUNNER_TOOL_CACHE
// -e RUNNER_TEMP -e RUNNER_WORKSPACE
// eccdf520697a035599d6e8c8dc801f004fdd3797cdce88f590aba3669a88d9bc sh -e /__w/_temp/d3b30383-719c-4e76-a16f-8f85443352be.sh
const execArgs = []
const args = (<string>execInput.arguments).split(' ')
core.debug(JSON.stringify(args))
execArgs.push(...args)
core.debug(JSON.stringify(execArgs))
await exec.exec(execInput.fileName, execArgs, {
env: execInput.environmentVariables
})
}
run()


@@ -0,0 +1,12 @@
{
"compilerOptions": {
"target": "es6", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019' or 'ESNEXT'. */
"module": "commonjs", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', or 'ESNext'. */
"outDir": "./lib", /* Redirect output structure to the directory. */
"rootDir": "./src", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */
"strict": true, /* Enable all strict type-checking options. */
"noImplicitAny": true, /* Raise error on expressions and declarations with an implied 'any' type. */
"esModuleInterop": true /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */
},
"exclude": ["node_modules", "**/*.test.ts"]
}


@@ -0,0 +1,3 @@
dist/
lib/
node_modules/


@@ -0,0 +1,59 @@
{
"plugins": ["jest", "@typescript-eslint"],
"extends": ["plugin:github/es6"],
"parser": "@typescript-eslint/parser",
"parserOptions": {
"ecmaVersion": 9,
"sourceType": "module",
"project": "./tsconfig.json"
},
"rules": {
"eslint-comments/no-use": "off",
"import/no-namespace": "off",
"no-console": "off",
"no-unused-vars": "off",
"@typescript-eslint/no-unused-vars": "error",
"@typescript-eslint/explicit-member-accessibility": ["error", {"accessibility": "no-public"}],
"@typescript-eslint/no-require-imports": "error",
"@typescript-eslint/array-type": "error",
"@typescript-eslint/await-thenable": "error",
"@typescript-eslint/ban-ts-ignore": "error",
"camelcase": "off",
"@typescript-eslint/camelcase": "error",
"@typescript-eslint/class-name-casing": "error",
"@typescript-eslint/explicit-function-return-type": ["error", {"allowExpressions": true}],
"@typescript-eslint/func-call-spacing": ["error", "never"],
"@typescript-eslint/generic-type-naming": ["error", "^[A-Z][A-Za-z]*$"],
"@typescript-eslint/no-array-constructor": "error",
"@typescript-eslint/no-empty-interface": "error",
"@typescript-eslint/no-explicit-any": "error",
"@typescript-eslint/no-extraneous-class": "error",
"@typescript-eslint/no-for-in-array": "error",
"@typescript-eslint/no-inferrable-types": "error",
"@typescript-eslint/no-misused-new": "error",
"@typescript-eslint/no-namespace": "error",
"@typescript-eslint/no-non-null-assertion": "warn",
"@typescript-eslint/no-object-literal-type-assertion": "error",
"@typescript-eslint/no-unnecessary-qualifier": "error",
"@typescript-eslint/no-unnecessary-type-assertion": "error",
"@typescript-eslint/no-useless-constructor": "error",
"@typescript-eslint/no-var-requires": "error",
"@typescript-eslint/prefer-for-of": "warn",
"@typescript-eslint/prefer-function-type": "warn",
"@typescript-eslint/prefer-includes": "error",
"@typescript-eslint/prefer-interface": "error",
"@typescript-eslint/prefer-string-starts-ends-with": "error",
"@typescript-eslint/promise-function-async": "error",
"@typescript-eslint/require-array-sort-compare": "error",
"@typescript-eslint/restrict-plus-operands": "error",
"semi": "off",
"@typescript-eslint/semi": ["error", "never"],
"@typescript-eslint/type-annotation-spacing": "error",
"@typescript-eslint/unbound-method": "error"
},
"env": {
"node": true,
"es6": true,
"jest/globals": true
}
}


@@ -0,0 +1,3 @@
dist/
lib/
node_modules/


@@ -0,0 +1,11 @@
{
"printWidth": 80,
"tabWidth": 2,
"useTabs": false,
"semi": false,
"singleQuote": true,
"trailingComma": "none",
"bracketSpacing": false,
"arrowParens": "avoid",
"parser": "typescript"
}


@@ -0,0 +1 @@
To update kubectlHandler under `Misc/layoutbin` run `npm install && npm run all`

File diff suppressed because it is too large.


@@ -0,0 +1,36 @@
{
"name": "kubectlHandler",
"version": "1.0.0",
"description": "GitHub Actions",
"main": "lib/kubectlHandler.js",
"scripts": {
"build": "tsc",
"format": "prettier --write **/*.ts",
"format-check": "prettier --check **/*.ts",
"lint": "eslint src/**/*.ts",
"pack": "ncc build -o ../../layoutbin/kubectlHandler",
"all": "npm run build && npm run format && npm run lint && npm run pack"
},
"repository": {
"type": "git",
"url": "git+https://github.com/actions/runner.git"
},
"keywords": [
"actions"
],
"author": "GitHub Actions",
"license": "MIT",
"dependencies": {
"@actions/exec": "^1.1.0",
"@actions/core": "^1.6.0"
},
"devDependencies": {
"@types/node": "^12.7.12",
"@typescript-eslint/parser": "^2.8.0",
"@zeit/ncc": "^0.20.5",
"eslint": "^6.8.0",
"eslint-plugin-github": "^2.0.0",
"prettier": "^1.19.1",
"typescript": "^3.6.4"
}
}


@@ -0,0 +1,156 @@
import * as exec from '@actions/exec'
import * as core from '@actions/core'
import * as events from 'events'
import * as readline from 'readline'
async function run(): Promise<void> {
let input = ''
const rl = readline.createInterface({
input: process.stdin
})
rl.on('line', line => {
core.debug(`Line from STDIN: ${line}`)
input = line
})
await events.once(rl, 'close')
core.debug(input)
const inputJson = JSON.parse(input)
core.debug(JSON.stringify(inputJson))
const command = inputJson.command
if (command === 'Create') {
const creationInput = inputJson.creationInput
core.debug(JSON.stringify(creationInput))
const containers = creationInput.containers
const jobContainer = containers[0]
// const networkName = 'actions_podman_network'
// // podman network create {network} -> track and return `network` for ${{job.container.network}}
// await exec.exec('podman', ['network', 'create', networkName])
const containerImage = `${jobContainer.containerImage}`
// podman pull docker.io/library/{image}
// await exec.exec('podman', ['pull', containerImage])
// kubectl run e088c842be1f46b394212618408aaba0_node1016jessie_6196c9
// --image=node:10.16-jessie
// -- tail -f /dev/null
const runArgs = ['run', 'job-container']
// runArgs.push(`--workdir=${jobContainer.containerWorkDirectory}`)
// runArgs.push(`--network=${networkName}`)
// for (const mountVolume of jobContainer.mountVolumes) {
// runArgs.push(
// `-v=${mountVolume.sourceVolumePath}:${mountVolume.targetVolumePath}`
// )
// }
runArgs.push(`--image=${containerImage}`)
runArgs.push(`--`)
runArgs.push(`tail`)
runArgs.push(`-f`)
runArgs.push(`/dev/null`)
core.debug(JSON.stringify(runArgs))
// const containerId = await exec.getExecOutput('podman', [
// 'create',
// // `--workdir ${jobContainer.containerWorkDirectory}`,
// `--network=${networkName}`,
// // `-v=/Users/ting/Desktop/runner/_layout/_work:/__w`,
// `--entrypoint=${jobContainer.containerEntryPoint}`,
// `${containerImage}`,
// `${jobContainer.containerEntryPointArgs}`
// ])
await exec.exec('kubectl', runArgs)
// get PATH inside the container
const waitArgs = ['wait', '--for=condition=Ready', 'pod/job-container']
await exec.exec('kubectl', waitArgs)
// output containerId for ${{job.container.id}}
// copy over node.js
const cpNodeArgs = [
'cp',
'/actions-runner/externals/node12/bin',
'job-container:/__runner_util/'
]
await exec.exec('kubectl', cpNodeArgs)
// copy over innerhandler
const cpKubeInnerArgs = [
'cp',
'/actions-runner/bin/kubeInnerHandler',
'job-container:/__runner_util/kubeInnerHandler'
]
await exec.exec('kubectl', cpKubeInnerArgs)
// copy over _work
const cpWorkArgs = ['cp', '/actions-runner/_work', 'job-container:/__w/']
await exec.exec('kubectl', cpWorkArgs)
const creationOutput = {
JobContainerId: 'job-container',
Network: 'job-container'
}
const output = JSON.stringify({CreationOutput: creationOutput})
core.debug(output)
process.stderr.write(
`___CONTAINER_ENGINE_HANDLER_OUTPUT___${output}___CONTAINER_ENGINE_HANDLER_OUTPUT___`
)
} else if (command === 'Remove') {
const removeInput = inputJson.removeInput
core.debug(JSON.stringify(removeInput))
// const jobContainerId = removeInput.jobContainerId
// await exec.exec('kubectl', ['delete', 'pod', jobContainerId, '--force'])
// await exec.exec('podman', ['network', 'rm', '-f', network])
} else if (command === 'Exec') {
const execInput = inputJson.execInput
core.debug(JSON.stringify(execInput))
// podman exec -i --workdir /__w/canary/canary
// -e GITHUB_JOB -e GITHUB_REF -e GITHUB_SHA -e GITHUB_REPOSITORY
// -e GITHUB_REPOSITORY_OWNER -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER
// -e GITHUB_RETENTION_DAYS -e GITHUB_RUN_ATTEMPT -e GITHUB_ACTOR
// -e GITHUB_WORKFLOW -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GITHUB_EVENT_NAME
// -e GITHUB_SERVER_URL -e GITHUB_API_URL -e GITHUB_GRAPHQL_URL
// -e GITHUB_WORKSPACE -e GITHUB_ACTION -e GITHUB_EVENT_PATH -e GITHUB_ACTION_REPOSITORY
// -e GITHUB_ACTION_REF -e GITHUB_PATH -e GITHUB_ENV -e RUNNER_DEBUG
// -e RUNNER_OS -e RUNNER_NAME -e RUNNER_TOOL_CACHE
// -e RUNNER_TEMP -e RUNNER_WORKSPACE
// eccdf520697a035599d6e8c8dc801f004fdd3797cdce88f590aba3669a88d9bc sh -e /__w/_temp/d3b30383-719c-4e76-a16f-8f85443352be.sh
const cpTempArgs = [
'cp',
'/actions-runner/_work/_temp',
'job-container:/__w/'
]
await exec.exec('kubectl', cpTempArgs)
const execArgs = ['exec']
execArgs.push(execInput.jobContainer.containerId)
execArgs.push('-i')
execArgs.push('-t')
execArgs.push('--')
execArgs.push('/__runner_util/node')
execArgs.push('/__runner_util/kubeInnerHandler')
core.debug(JSON.stringify(execArgs))
await exec.exec('kubectl', execArgs, {
input: Buffer.from(JSON.stringify(execInput))
})
}
}
run()


@@ -0,0 +1,12 @@
{
"compilerOptions": {
"target": "es6", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019' or 'ESNEXT'. */
"module": "commonjs", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', or 'ESNext'. */
"outDir": "./lib", /* Redirect output structure to the directory. */
"rootDir": "./src", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */
"strict": true, /* Enable all strict type-checking options. */
"noImplicitAny": true, /* Raise error on expressions and declarations with an implied 'any' type. */
"esModuleInterop": true /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */
},
"exclude": ["node_modules", "**/*.test.ts"]
}


@@ -0,0 +1,3 @@
dist/
lib/
node_modules/


@@ -0,0 +1,59 @@
{
"plugins": ["jest", "@typescript-eslint"],
"extends": ["plugin:github/es6"],
"parser": "@typescript-eslint/parser",
"parserOptions": {
"ecmaVersion": 9,
"sourceType": "module",
"project": "./tsconfig.json"
},
"rules": {
"eslint-comments/no-use": "off",
"import/no-namespace": "off",
"no-console": "off",
"no-unused-vars": "off",
"@typescript-eslint/no-unused-vars": "error",
"@typescript-eslint/explicit-member-accessibility": ["error", {"accessibility": "no-public"}],
"@typescript-eslint/no-require-imports": "error",
"@typescript-eslint/array-type": "error",
"@typescript-eslint/await-thenable": "error",
"@typescript-eslint/ban-ts-ignore": "error",
"camelcase": "off",
"@typescript-eslint/camelcase": "error",
"@typescript-eslint/class-name-casing": "error",
"@typescript-eslint/explicit-function-return-type": ["error", {"allowExpressions": true}],
"@typescript-eslint/func-call-spacing": ["error", "never"],
"@typescript-eslint/generic-type-naming": ["error", "^[A-Z][A-Za-z]*$"],
"@typescript-eslint/no-array-constructor": "error",
"@typescript-eslint/no-empty-interface": "error",
"@typescript-eslint/no-explicit-any": "error",
"@typescript-eslint/no-extraneous-class": "error",
"@typescript-eslint/no-for-in-array": "error",
"@typescript-eslint/no-inferrable-types": "error",
"@typescript-eslint/no-misused-new": "error",
"@typescript-eslint/no-namespace": "error",
"@typescript-eslint/no-non-null-assertion": "warn",
"@typescript-eslint/no-object-literal-type-assertion": "error",
"@typescript-eslint/no-unnecessary-qualifier": "error",
"@typescript-eslint/no-unnecessary-type-assertion": "error",
"@typescript-eslint/no-useless-constructor": "error",
"@typescript-eslint/no-var-requires": "error",
"@typescript-eslint/prefer-for-of": "warn",
"@typescript-eslint/prefer-function-type": "warn",
"@typescript-eslint/prefer-includes": "error",
"@typescript-eslint/prefer-interface": "error",
"@typescript-eslint/prefer-string-starts-ends-with": "error",
"@typescript-eslint/promise-function-async": "error",
"@typescript-eslint/require-array-sort-compare": "error",
"@typescript-eslint/restrict-plus-operands": "error",
"semi": "off",
"@typescript-eslint/semi": ["error", "never"],
"@typescript-eslint/type-annotation-spacing": "error",
"@typescript-eslint/unbound-method": "error"
},
"env": {
"node": true,
"es6": true,
"jest/globals": true
}
}

View File

@@ -0,0 +1,3 @@
dist/
lib/
node_modules/

View File

@@ -0,0 +1,11 @@
{
"printWidth": 80,
"tabWidth": 2,
"useTabs": false,
"semi": false,
"singleQuote": true,
"trailingComma": "none",
"bracketSpacing": false,
"arrowParens": "avoid",
"parser": "typescript"
}

View File

@@ -0,0 +1 @@
To update the bundled podmanHandler under `Misc/layoutbin`, run `npm install && npm run all`
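The `all` script chains the individual npm scripts defined in the package.json below, so they can also be run one at a time while iterating:

npm run build    # tsc: compile src/ into lib/
npm run format   # prettier --write **/*.ts
npm run lint     # eslint src/**/*.ts
npm run pack     # ncc build -o ../../layoutbin/podmanHandler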

File diff suppressed because it is too large

View File

@@ -0,0 +1,36 @@
{
"name": "podmanHandler",
"version": "1.0.0",
"description": "GitHub Actions",
"main": "lib/podmanHandler.js",
"scripts": {
"build": "tsc",
"format": "prettier --write **/*.ts",
"format-check": "prettier --check **/*.ts",
"lint": "eslint src/**/*.ts",
"pack": "ncc build -o ../../layoutbin/podmanHandler",
"all": "npm run build && npm run format && npm run lint && npm run pack"
},
"repository": {
"type": "git",
"url": "git+https://github.com/actions/runner.git"
},
"keywords": [
"actions"
],
"author": "GitHub Actions",
"license": "MIT",
"dependencies": {
"@actions/exec": "^1.1.0",
"@actions/core": "^1.6.0"
},
"devDependencies": {
"@types/node": "^12.7.12",
"@typescript-eslint/parser": "^2.8.0",
"@zeit/ncc": "^0.20.5",
"eslint": "^6.8.0",
"eslint-plugin-github": "^2.0.0",
"prettier": "^1.19.1",
"typescript": "^3.6.4"
}
}

View File

@@ -0,0 +1,150 @@
import * as exec from '@actions/exec'
import * as core from '@actions/core'
import * as events from 'events'
import * as readline from 'readline'
async function run(): Promise<void> {
let input = ''
const rl = readline.createInterface({
input: process.stdin
})
rl.on('line', line => {
core.debug(`Line from STDIN: ${line}`)
input = line
})
await events.once(rl, 'close')
core.debug(input)
const inputJson = JSON.parse(input)
core.debug(JSON.stringify(inputJson))
const command = inputJson.command
if (command === 'Create') {
const creationInput = inputJson.creationInput
core.debug(JSON.stringify(creationInput))
const containers = creationInput.containers
const jobContainer = containers[0]
const networkName = 'actions_podman_network'
// podman network create {network} -> track and return `network` for ${{job.container.network}}
await exec.exec('podman', ['network', 'create', networkName])
const containerImage = `docker.io/library/${jobContainer.containerImage}`
// podman pull docker.io/library/{image}
await exec.exec('podman', ['pull', containerImage])
// podman create --name e088c842be1f46b394212618408aaba0_node1016jessie_6196c9
// --label fa4e14
// --workdir /__w/canary/canary
// --network github_network_f98a6e1e96e74d919d814c165641cba3
// -e "HOME=/github/home" -e GITHUB_ACTIONS=true -e CI=true
// -v "/var/run/docker.sock":"/var/run/docker.sock"
// -v "/home/runner/work":"/__w"
// -v "/home/runner/runners/2.283.2/externals":"/__e":ro
// -v "/home/runner/work/_temp":"/__w/_temp"
// -v "/home/runner/work/_actions":"/__w/_actions"
// -v "/opt/hostedtoolcache":"/__t"
// -v "/home/runner/work/_temp/_github_home":"/github/home"
// -v "/home/runner/work/_temp/_github_workflow":"/github/workflow"
// --entrypoint "tail" node:10.16-jessie "-f" "/dev/null"
const createArgs = ['create']
createArgs.push(`--workdir=${jobContainer.containerWorkDirectory}`)
createArgs.push(`--network=${networkName}`)
for (const mountVolume of jobContainer.mountVolumes) {
createArgs.push(
`-v=${mountVolume.sourceVolumePath}:${mountVolume.targetVolumePath}`
)
}
createArgs.push(`--entrypoint=tail`)
createArgs.push(containerImage)
createArgs.push(`-f`)
createArgs.push(`/dev/null`)
core.debug(JSON.stringify(createArgs))
// const containerId = await exec.getExecOutput('podman', [
// 'create',
// // `--workdir ${jobContainer.containerWorkDirectory}`,
// `--network=${networkName}`,
// // `-v=/Users/ting/Desktop/runner/_layout/_work:/__w`,
// `--entrypoint=${jobContainer.containerEntryPoint}`,
// `${containerImage}`,
// `${jobContainer.containerEntryPointArgs}`
// ])
const containerId = await exec.getExecOutput('podman', createArgs)
core.debug(JSON.stringify(containerId))
// podman start {containerId}
await exec.exec('podman', ['start', containerId.stdout.trim()])
// get PATH inside the container
// output containerId for ${{job.container.id}}
const creationOutput = {
JobContainerId: containerId.stdout.trim(),
Network: networkName
}
const output = JSON.stringify({CreationOutput: creationOutput})
core.debug(output)
process.stderr.write(
`___CONTAINER_ENGINE_HANDLER_OUTPUT___${output}___CONTAINER_ENGINE_HANDLER_OUTPUT___`
)
} else if (command === 'Remove') {
const removeInput = inputJson.removeInput
core.debug(JSON.stringify(removeInput))
const jobContainerId = removeInput.jobContainerId
const network = removeInput.network
await exec.exec('podman', ['rm', '-f', jobContainerId])
await exec.exec('podman', ['network', 'rm', '-f', network])
} else if (command === 'Exec') {
const execInput = inputJson.execInput
core.debug(JSON.stringify(execInput))
// podman exec -i --workdir /__w/canary/canary
// -e GITHUB_JOB -e GITHUB_REF -e GITHUB_SHA -e GITHUB_REPOSITORY
// -e GITHUB_REPOSITORY_OWNER -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER
// -e GITHUB_RETENTION_DAYS -e GITHUB_RUN_ATTEMPT -e GITHUB_ACTOR
// -e GITHUB_WORKFLOW -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GITHUB_EVENT_NAME
// -e GITHUB_SERVER_URL -e GITHUB_API_URL -e GITHUB_GRAPHQL_URL
// -e GITHUB_WORKSPACE -e GITHUB_ACTION -e GITHUB_EVENT_PATH -e GITHUB_ACTION_REPOSITORY
// -e GITHUB_ACTION_REF -e GITHUB_PATH -e GITHUB_ENV -e RUNNER_DEBUG
// -e RUNNER_OS -e RUNNER_NAME -e RUNNER_TOOL_CACHE
// -e RUNNER_TEMP -e RUNNER_WORKSPACE
// eccdf520697a035599d6e8c8dc801f004fdd3797cdce88f590aba3669a88d9bc sh -e /__w/_temp/d3b30383-719c-4e76-a16f-8f85443352be.sh
const execArgs = ['exec']
execArgs.push('-i')
execArgs.push(`--workdir=${execInput.workingDirectory}`)
for (const envKey of execInput.environmentKeys) {
execArgs.push(`-e=${envKey}`)
}
execArgs.push(execInput.jobContainer.containerId)
execArgs.push(execInput.fileName)
const args = (<string>execInput.arguments).split(' ')
core.debug(JSON.stringify(args))
execArgs.push(...args)
core.debug(JSON.stringify(execArgs))
await exec.exec('podman', execArgs)
}
await exec.exec('podman', ['network', 'ls'])
await exec.exec('podman', ['ps', '-a'])
}
run()
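When driving this handler by hand, the runner-side result is just the JSON between the stderr markers; a rough sketch of extracting it (lib/podmanHandler.js comes from the package.json "main" entry, and create-input.json is a made-up file holding a single-line Create payload):

node lib/podmanHandler.js < create-input.json 2>&1 >/dev/null \
  | sed -n 's/.*___CONTAINER_ENGINE_HANDLER_OUTPUT___\(.*\)___CONTAINER_ENGINE_HANDLER_OUTPUT___.*/\1/p'
# Prints something like {"CreationOutput":{"JobContainerId":"<container id>","Network":"actions_podman_network"}}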

View File

@@ -0,0 +1,12 @@
{
"compilerOptions": {
"target": "es6", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019' or 'ESNEXT'. */
"module": "commonjs", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', or 'ESNext'. */
"outDir": "./lib", /* Redirect output structure to the directory. */
"rootDir": "./src", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */
"strict": true, /* Enable all strict type-checking options. */
"noImplicitAny": true, /* Raise error on expressions and declarations with an implied 'any' type. */
"esModuleInterop": true /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */
},
"exclude": ["node_modules", "**/*.test.ts"]
}

View File

@@ -1 +0,0 @@
de62d296708908cfd1236e58869aebbc2bae8a8c3d629276968542626c508e37

View File

@@ -1 +0,0 @@
44fcd0422dd98ed17d2c8e9057ff2260c50165f20674236a4ae7d2645a07df25

View File

@@ -1 +0,0 @@
e57652cf322ee16ce3af4f9e58f80858746b9e1e60279e991a3b3d9a6baf8d79

View File

@@ -1 +0,0 @@
bdd247b2ff3f51095524412e2ac588e7a87af805e114d6caf2368366ee7be1ea

View File

@@ -1 +0,0 @@
d23a0cb9f20c0aa1cddb7a39567cd097020cdeb06a1e952940601d1a405c53b8

View File

@@ -1 +0,0 @@
6ca4a0e1c50b7079ead05321dcf5835c1c25f23dc632add8c1c4667d416d103e

View File

@@ -1 +0,0 @@
b5951dc607d782d9c7571a7224e940eb0975bb23c54ff25c7afdbf959a417081

View File

@@ -1 +0,0 @@
af819e92011cc9cbca90e8299f9f7651f2cf6bf45b42920f9a4ca22795486147

View File

@@ -1 +0,0 @@
aa0e6bf4bfaabf48c962ea3b262dca042629ab332005f73d282faec908847036

View File

@@ -1 +0,0 @@
40328cff2b8229f9b578f32739183bd8f6aab481c21dadc052b09f1c7e8e4665

View File

@@ -4,7 +4,6 @@ PRECACHE=$2
NODE_URL=https://nodejs.org/dist
NODE12_VERSION="12.13.1"
NODE16_VERSION="16.13.0"
get_abs_path() {
# exploits the fact that pwd will print abs path when no args
@@ -127,8 +126,6 @@ function acquireExternalTool() {
if [[ "$PACKAGERUNTIME" == "win-x64" || "$PACKAGERUNTIME" == "win-x86" ]]; then
acquireExternalTool "$NODE_URL/v${NODE12_VERSION}/$PACKAGERUNTIME/node.exe" node12/bin
acquireExternalTool "$NODE_URL/v${NODE12_VERSION}/$PACKAGERUNTIME/node.lib" node12/bin
acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/$PACKAGERUNTIME/node.exe" node16/bin
acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/$PACKAGERUNTIME/node.lib" node16/bin
if [[ "$PRECACHE" != "" ]]; then
acquireExternalTool "https://github.com/microsoft/vswhere/releases/download/2.6.7/vswhere.exe" vswhere
fi
@@ -137,23 +134,18 @@ fi
# Download the external tools only for OSX.
if [[ "$PACKAGERUNTIME" == "osx-x64" ]]; then
acquireExternalTool "$NODE_URL/v${NODE12_VERSION}/node-v${NODE12_VERSION}-darwin-x64.tar.gz" node12 fix_nested_dir
acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-darwin-x64.tar.gz" node16 fix_nested_dir
fi
# Download the external tools for Linux PACKAGERUNTIMEs.
if [[ "$PACKAGERUNTIME" == "linux-x64" ]]; then
acquireExternalTool "$NODE_URL/v${NODE12_VERSION}/node-v${NODE12_VERSION}-linux-x64.tar.gz" node12 fix_nested_dir
acquireExternalTool "https://vstsagenttools.blob.core.windows.net/tools/nodejs/${NODE12_VERSION}/alpine/x64/node-${NODE12_VERSION}-alpine-x64.tar.gz" node12_alpine
acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-linux-x64.tar.gz" node16 fix_nested_dir
acquireExternalTool "https://vstsagenttools.blob.core.windows.net/tools/nodejs/${NODE16_VERSION}/alpine/x64/node-v${NODE16_VERSION}-alpine-x64.tar.gz" node16_alpine
fi
if [[ "$PACKAGERUNTIME" == "linux-arm64" ]]; then
acquireExternalTool "$NODE_URL/v${NODE12_VERSION}/node-v${NODE12_VERSION}-linux-arm64.tar.gz" node12 fix_nested_dir
acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-linux-arm64.tar.gz" node16 fix_nested_dir
fi
if [[ "$PACKAGERUNTIME" == "linux-arm" ]]; then
acquireExternalTool "$NODE_URL/v${NODE12_VERSION}/node-v${NODE12_VERSION}-linux-armv7l.tar.gz" node12 fix_nested_dir
acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-linux-armv7l.tar.gz" node16 fix_nested_dir
fi

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -0,0 +1,49 @@
// Job container creation
// podman network create {network} -> track and return `network` for ${{job.container.network}}
// podman pull docker.io/library/{image}
// podman create --name e088c842be1f46b394212618408aaba0_node1016jessie_6196c9
// --label fa4e14
// --workdir /__w/canary/canary
// --network github_network_f98a6e1e96e74d919d814c165641cba3
// -e "HOME=/github/home" -e GITHUB_ACTIONS=true -e CI=true
// -v "/var/run/docker.sock":"/var/run/docker.sock"
// -v "/home/runner/work":"/__w"
// -v "/home/runner/runners/2.283.2/externals":"/__e":ro
// -v "/home/runner/work/_temp":"/__w/_temp"
// -v "/home/runner/work/_actions":"/__w/_actions"
// -v "/opt/hostedtoolcache":"/__t"
// -v "/home/runner/work/_temp/_github_home":"/github/home"
// -v "/home/runner/work/_temp/_github_workflow":"/github/workflow"
// --entrypoint "tail" node:10.16-jessie "-f" "/dev/null"
// podman start {containerId}
// get PATH inside the container
// output containerId for ${{job.container.id}}
// Job container stop
// podman rm --force {containerId}
// podman network rm {network}
// Run step
// podman exec -i --workdir /__w/canary/canary
// -e GITHUB_JOB -e GITHUB_REF -e GITHUB_SHA -e GITHUB_REPOSITORY
// -e GITHUB_REPOSITORY_OWNER -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER
// -e GITHUB_RETENTION_DAYS -e GITHUB_RUN_ATTEMPT -e GITHUB_ACTOR
// -e GITHUB_WORKFLOW -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GITHUB_EVENT_NAME
// -e GITHUB_SERVER_URL -e GITHUB_API_URL -e GITHUB_GRAPHQL_URL
// -e GITHUB_WORKSPACE -e GITHUB_ACTION -e GITHUB_EVENT_PATH -e GITHUB_ACTION_REPOSITORY
// -e GITHUB_ACTION_REF -e GITHUB_PATH -e GITHUB_ENV -e RUNNER_DEBUG
// -e RUNNER_OS -e RUNNER_NAME -e RUNNER_TOOL_CACHE
// -e RUNNER_TEMP -e RUNNER_WORKSPACE
// eccdf520697a035599d6e8c8dc801f004fdd3797cdce88f590aba3669a88d9bc sh -e /__w/_temp/d3b30383-719c-4e76-a16f-8f85443352be.sh
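Mapped onto the podman handler above, those three phases correspond to the three stdin commands; a sketch of the payloads involved (all values are placeholders):

# Create: network create + pull + create + start, replies with JobContainerId and Network
echo '{"command":"Create","creationInput":{"containers":[{"containerImage":"node:16-bullseye","containerWorkDirectory":"/__w/repo/repo","mountVolumes":[]}]}}' | node lib/podmanHandler.js
# Exec: podman exec of a step script inside the job container
echo '{"command":"Exec","execInput":{"jobContainer":{"containerId":"<id from Create>"},"workingDirectory":"/__w/repo/repo","environmentKeys":["CI"],"fileName":"sh","arguments":"-e /__w/_temp/step.sh"}}' | node lib/podmanHandler.js
# Remove: rm -f the job container and the network
echo '{"command":"Remove","removeInput":{"jobContainerId":"<id from Create>","network":"actions_podman_network"}}' | node lib/podmanHandler.js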

File diff suppressed because it is too large

View File

@@ -161,13 +161,59 @@ if [[ "$currentplatform" == 'darwin' && restartinteractiverunner -eq 0 ]]; then
date "+[%F %T-%4N] DarwinRunnerUpgrade: Failed to find runner path. Path: $path, pgid: $procgroup, root: $rootfolder" >> "$telemetryfile" 2>&1
fi
else
runproc=$(ps x -o pgid,command | grep "run.sh" | grep -v grep | awk '{print $1}')
if [[ $? -eq 0 && -n "$runproc" ]]
then
date "+[%F %T-%4N] Running as ephemeral using run.sh, no need to recreate node folder" >> "$logfile" 2>&1
else
date "+[%F %T-%4N] DarwinRunnerUpgrade: Failed to find runner pgid. pgid: $procgroup, root: $rootfolder" >> "$logfile" 2>&1
date "+[%F %T-%4N] DarwinRunnerUpgrade: Failed to find runner pgid. pgid: $procgroup, root: $rootfolder" >> "$telemetryfile" 2>&1
fi
if [ $attemptedtargetedfix -eq 0 ]
then
date "+[%F %T-%4N] DarwinRunnerUpgrade: Defaulting to old macOS service fix" >> "$logfile" 2>&1
date "+[%F %T-%4N] DarwinRunnerUpgrade: Defaulting to old macOS service fix" >> "$telemetryfile" 2>&1
if [[ ! -e "$rootfolder/externals.2.280.3/node12/bin/node" ]]
then
mkdir -p "$rootfolder/externals.2.280.3/node12/bin"
cp "$rootfolder/externals/node12/bin/node" "$rootfolder/externals.2.280.3/node12/bin/node"
fi
if [[ ! -e "$rootfolder/externals.2.280.2/node12/bin/node" ]]
then
mkdir -p "$rootfolder/externals.2.280.2/node12/bin"
cp "$rootfolder/externals/node12/bin/node" "$rootfolder/externals.2.280.2/node12/bin/node"
fi
if [[ ! -e "$rootfolder/externals.2.280.1/node12/bin/node" ]]
then
mkdir -p "$rootfolder/externals.2.280.1/node12/bin"
cp "$rootfolder/externals/node12/bin/node" "$rootfolder/externals.2.280.1/node12/bin/node"
fi
# GHES 3.2
if [[ ! -e "$rootfolder/externals.2.279.0/node12/bin/node" ]]
then
mkdir -p "$rootfolder/externals.2.279.0/node12/bin"
cp "$rootfolder/externals/node12/bin/node" "$rootfolder/externals.2.279.0/node12/bin/node"
fi
# GHES 3.1.2 or later
if [[ ! -e "$rootfolder/externals.2.278.0/node12/bin/node" ]]
then
mkdir -p "$rootfolder/externals.2.278.0/node12/bin"
cp "$rootfolder/externals/node12/bin/node" "$rootfolder/externals.2.278.0/node12/bin/node"
fi
# GHES 3.1.0
if [[ ! -e "$rootfolder/externals.2.276.1/node12/bin/node" ]]
then
mkdir -p "$rootfolder/externals.2.276.1/node12/bin"
cp "$rootfolder/externals/node12/bin/node" "$rootfolder/externals.2.276.1/node12/bin/node"
fi
# GHES 3.0
if [[ ! -e "$rootfolder/externals.2.273.5/node12/bin/node" ]]
then
mkdir -p "$rootfolder/externals.2.273.5/node12/bin"
cp "$rootfolder/externals/node12/bin/node" "$rootfolder/externals.2.273.5/node12/bin/node"
fi
fi
fi

View File

@@ -8,7 +8,7 @@ if [ $user_id -eq 0 -a -z "$RUNNER_ALLOW_RUNASROOT" ]; then
exit 1
fi
# Check dotnet Core 6.0 dependencies for Linux
# Check dotnet core 3.0 dependencies for Linux
if [[ (`uname` == "Linux") ]]
then
command -v ldd > /dev/null
@@ -18,25 +18,25 @@ then
exit 1
fi
message="Execute sudo ./bin/installdependencies.sh to install any missing Dotnet Core 6.0 dependencies."
message="Execute sudo ./bin/installdependencies.sh to install any missing Dotnet Core 3.0 dependencies."
ldd ./bin/libcoreclr.so | grep 'not found'
if [ $? -eq 0 ]; then
echo "Dependencies is missing for Dotnet Core 6.0"
echo "Dependencies is missing for Dotnet Core 3.0"
echo $message
exit 1
fi
ldd ./bin/libSystem.Security.Cryptography.Native.OpenSsl.so | grep 'not found'
ldd ./bin/System.Security.Cryptography.Native.OpenSsl.so | grep 'not found'
if [ $? -eq 0 ]; then
echo "Dependencies is missing for Dotnet Core 6.0"
echo "Dependencies is missing for Dotnet Core 3.0"
echo $message
exit 1
fi
ldd ./bin/libSystem.IO.Compression.Native.so | grep 'not found'
ldd ./bin/System.IO.Compression.Native.so | grep 'not found'
if [ $? -eq 0 ]; then
echo "Dependencies is missing for Dotnet Core 6.0"
echo "Dependencies is missing for Dotnet Core 3.0"
echo $message
exit 1
fi
@@ -54,7 +54,7 @@ then
libpath=${LD_LIBRARY_PATH:-}
$LDCONFIG_COMMAND -NXv ${libpath//:/ } 2>&1 | grep libicu >/dev/null 2>&1
if [ $? -ne 0 ]; then
echo "Libicu's dependencies is missing for Dotnet Core 6.0"
echo "Libicu's dependencies is missing for Dotnet Core 3.0"
echo $message
exit 1
fi

View File

@@ -0,0 +1,68 @@
#!/bin/bash
set -euo pipefail
function fatal() {
echo "error: $1" >&2
exit 1
}
[ -n "${GITHUB_PAT:-""}" ] || fatal "GITHUB_PAT variable must be set"
[ -n "${RUNNER_CONFIG_URL:-""}" ] || fatal "RUNNER_CONFIG_URL variable must be set"
# [ -n "${RUNNER_NAME:-""}" ] || fatal "RUNNER_NAME variable must be set"
# if [ -n "${RUNNER_NAME}" ]; then
# # Use the container id to generate a unique runner name if a name is not provided
# CONTAINER_ID=$(cat /proc/self/cgroup | head -n 1 | tr '/' '\n' | tail -1 | cut -c1-12)
# RUNNER_NAME="actions-runner-${CONTAINER_ID}"
# fi
# if the scope has a slash, it's a repo runner
# orgs_or_repos="orgs"
# if [[ "$GITHUB_RUNNER_SCOPE" == *\/* ]]; then
# orgs_or_repos="repos"
# fi
# RUNNER_REG_URL="${GITHUB_SERVER_URL:=https://github.com}/${GITHUB_RUNNER_SCOPE}"
# echo "Runner Name : ${RUNNER_NAME}"
echo "Registration URL : ${RUNNER_CONFIG_URL}"
# echo "GitHub API URL : ${GITHUB_API_URL:=https://api.github.com}"
# echo "Runner Labels : ${RUNNER_LABELS:=""}"
# TODO: if api url is not default, validate it ends in /api/v3
# RUNNER_LABELS_ARG=""
# if [ -n "${RUNNER_LABELS}" ]; then
# RUNNER_LABELS_ARG="--labels ${RUNNER_LABELS}"
# fi
# RUNNER_GROUP_ARG=""
# if [ -n "${RUNNER_GROUP}" ]; then
# RUNNER_GROUP_ARG="--runnergroup ${RUNNER_GROUP}"
# fi
# if [ -n "${K8S_HOST_IP}" ]; then
# export http_proxy=http://$K8S_HOST_IP:9090
# fi
# curl -v -s -X POST ${GITHUB_API_URL}/${orgs_or_repos}/${GITHUB_RUNNER_SCOPE}/actions/runners/registration-token -H "authorization: token $GITHUB_PAT" -H "accept: application/vnd.github.everest-preview+json"
# Generate registration token
# RUNNER_REG_TOKEN=$(curl -s -X POST ${GITHUB_API_URL}/${orgs_or_repos}/${GITHUB_RUNNER_SCOPE}/actions/runners/registration-token -H "authorization: token $GITHUB_PAT" -H "accept: application/vnd.github.everest-preview+json" | jq -r '.token')
# Create the runner and configure it
./config.sh --unattended --url $RUNNER_CONFIG_URL --pat $GITHUB_PAT --replace --ephemeral
# while (! docker version ); do
# # Docker takes a few seconds to initialize
# echo "Waiting for Docker to launch..."
# sleep 1
# done
# unset env
unset RUNNER_CONFIG_URL
unset GITHUB_PAT
# Run it
./run.sh
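Assuming this script ends up as the entrypoint of a runner container image (the image name below is made up), launching it only needs the two required variables:

docker run --rm \
  -e GITHUB_PAT=<personal access token> \
  -e RUNNER_CONFIG_URL=https://github.com/my-org/my-repo \
  ghcr.io/example/actions-runner:latest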

View File

@@ -1,57 +0,0 @@
actions.runner.plist.template
actions.runner.service.template
checkScripts/downloadCert.js
checkScripts/makeWebRequest.js
darwin.svc.sh.template
hashFiles/index.js
installdependencies.sh
macos-run-invoker.js
Microsoft.IdentityModel.Logging.dll
Microsoft.IdentityModel.Tokens.dll
Minimatch.dll
Newtonsoft.Json.Bson.dll
Newtonsoft.Json.dll
Runner.Common.deps.json
Runner.Common.dll
Runner.Common.pdb
Runner.Listener
Runner.Listener.deps.json
Runner.Listener.dll
Runner.Listener.exe
Runner.Listener.pdb
Runner.Listener.runtimeconfig.json
Runner.PluginHost
Runner.PluginHost.deps.json
Runner.PluginHost.dll
Runner.PluginHost.exe
Runner.PluginHost.pdb
Runner.PluginHost.runtimeconfig.json
Runner.Plugins.deps.json
Runner.Plugins.dll
Runner.Plugins.pdb
Runner.Sdk.deps.json
Runner.Sdk.dll
Runner.Sdk.pdb
Runner.Worker
Runner.Worker.deps.json
Runner.Worker.dll
Runner.Worker.exe
Runner.Worker.pdb
Runner.Worker.runtimeconfig.json
RunnerService.exe
RunnerService.exe.config
RunnerService.js
RunnerService.pdb
runsvc.sh
Sdk.deps.json
Sdk.dll
Sdk.pdb
System.IdentityModel.Tokens.Jwt.dll
System.Net.Http.Formatting.dll
System.Security.Cryptography.Pkcs.dll
System.Security.Cryptography.ProtectedData.dll
System.ServiceProcess.ServiceController.dll
systemd.svc.sh.template
update.cmd.template
update.sh.template
YamlDotNet.dll

View File

@@ -1,263 +0,0 @@
api-ms-win-core-console-l1-1-0.dll
api-ms-win-core-console-l1-2-0.dll
api-ms-win-core-datetime-l1-1-0.dll
api-ms-win-core-debug-l1-1-0.dll
api-ms-win-core-errorhandling-l1-1-0.dll
api-ms-win-core-file-l1-1-0.dll
api-ms-win-core-file-l1-2-0.dll
api-ms-win-core-file-l2-1-0.dll
api-ms-win-core-handle-l1-1-0.dll
api-ms-win-core-heap-l1-1-0.dll
api-ms-win-core-interlocked-l1-1-0.dll
api-ms-win-core-libraryloader-l1-1-0.dll
api-ms-win-core-localization-l1-2-0.dll
api-ms-win-core-memory-l1-1-0.dll
api-ms-win-core-namedpipe-l1-1-0.dll
api-ms-win-core-processenvironment-l1-1-0.dll
api-ms-win-core-processthreads-l1-1-0.dll
api-ms-win-core-processthreads-l1-1-1.dll
api-ms-win-core-profile-l1-1-0.dll
api-ms-win-core-rtlsupport-l1-1-0.dll
api-ms-win-core-string-l1-1-0.dll
api-ms-win-core-synch-l1-1-0.dll
api-ms-win-core-synch-l1-2-0.dll
api-ms-win-core-sysinfo-l1-1-0.dll
api-ms-win-core-timezone-l1-1-0.dll
api-ms-win-core-util-l1-1-0.dll
api-ms-win-crt-conio-l1-1-0.dll
api-ms-win-crt-convert-l1-1-0.dll
api-ms-win-crt-environment-l1-1-0.dll
api-ms-win-crt-filesystem-l1-1-0.dll
api-ms-win-crt-heap-l1-1-0.dll
api-ms-win-crt-locale-l1-1-0.dll
api-ms-win-crt-math-l1-1-0.dll
api-ms-win-crt-multibyte-l1-1-0.dll
api-ms-win-crt-private-l1-1-0.dll
api-ms-win-crt-process-l1-1-0.dll
api-ms-win-crt-runtime-l1-1-0.dll
api-ms-win-crt-stdio-l1-1-0.dll
api-ms-win-crt-string-l1-1-0.dll
api-ms-win-crt-time-l1-1-0.dll
api-ms-win-crt-utility-l1-1-0.dll
clrcompression.dll
clretwrc.dll
clrjit.dll
coreclr.dll
createdump
createdump.exe
dbgshim.dll
hostfxr.dll
hostpolicy.dll
libclrjit.dylib
libclrjit.so
libcoreclr.dylib
libcoreclr.so
libcoreclrtraceptprovider.so
libdbgshim.dylib
libdbgshim.so
libhostfxr.dylib
libhostfxr.so
libhostpolicy.dylib
libhostpolicy.so
libmscordaccore.dylib
libmscordaccore.so
libmscordbi.dylib
libmscordbi.so
Microsoft.CSharp.dll
Microsoft.DiaSymReader.Native.amd64.dll
Microsoft.VisualBasic.Core.dll
Microsoft.VisualBasic.dll
Microsoft.Win32.Primitives.dll
Microsoft.Win32.Registry.dll
mscordaccore.dll
mscordaccore_amd64_amd64_6.0.21.52210.dll
mscordbi.dll
mscorlib.dll
mscorrc.debug.dll
mscorrc.dll
msquic.dll
netstandard.dll
SOS_README.md
System.AppContext.dll
System.Buffers.dll
System.Collections.Concurrent.dll
System.Collections.dll
System.Collections.Immutable.dll
System.Collections.NonGeneric.dll
System.Collections.Specialized.dll
System.ComponentModel.Annotations.dll
System.ComponentModel.DataAnnotations.dll
System.ComponentModel.dll
System.ComponentModel.EventBasedAsync.dll
System.ComponentModel.Primitives.dll
System.ComponentModel.TypeConverter.dll
System.Configuration.dll
System.Console.dll
System.Core.dll
System.Data.Common.dll
System.Data.DataSetExtensions.dll
System.Data.dll
System.Diagnostics.Contracts.dll
System.Diagnostics.Debug.dll
System.Diagnostics.DiagnosticSource.dll
System.Diagnostics.FileVersionInfo.dll
System.Diagnostics.Process.dll
System.Diagnostics.StackTrace.dll
System.Diagnostics.TextWriterTraceListener.dll
System.Diagnostics.Tools.dll
System.Diagnostics.TraceSource.dll
System.Diagnostics.Tracing.dll
System.dll
System.Drawing.dll
System.Drawing.Primitives.dll
System.Dynamic.Runtime.dll
System.Formats.Asn1.dll
System.Globalization.Calendars.dll
System.Globalization.dll
System.Globalization.Extensions.dll
System.Globalization.Native.dylib
System.Globalization.Native.so
System.IO.Compression.Brotli.dll
System.IO.Compression.dll
System.IO.Compression.FileSystem.dll
System.IO.Compression.Native.a
System.IO.Compression.Native.dll
System.IO.Compression.Native.dylib
System.IO.Compression.Native.so
System.IO.Compression.ZipFile.dll
System.IO.dll
System.IO.FileSystem.AccessControl.dll
System.IO.FileSystem.dll
System.IO.FileSystem.DriveInfo.dll
System.IO.FileSystem.Primitives.dll
System.IO.FileSystem.Watcher.dll
System.IO.IsolatedStorage.dll
System.IO.MemoryMappedFiles.dll
System.IO.Pipes.AccessControl.dll
System.IO.Pipes.dll
System.IO.UnmanagedMemoryStream.dll
System.Linq.dll
System.Linq.Expressions.dll
System.Linq.Parallel.dll
System.Linq.Queryable.dll
System.Memory.dll
System.Native.a
System.Native.dylib
System.Native.so
System.Net.dll
System.Net.Http.dll
System.Net.Http.Json.dll
System.Net.Http.Native.a
System.Net.Http.Native.dylib
System.Net.Http.Native.so
System.Net.HttpListener.dll
System.Net.Mail.dll
System.Net.NameResolution.dll
System.Net.NetworkInformation.dll
System.Net.Ping.dll
System.Net.Primitives.dll
System.Net.Quic.dll
System.Net.Requests.dll
System.Net.Security.dll
System.Net.Security.Native.a
System.Net.Security.Native.dylib
System.Net.Security.Native.so
System.Net.ServicePoint.dll
System.Net.Sockets.dll
System.Net.WebClient.dll
System.Net.WebHeaderCollection.dll
System.Net.WebProxy.dll
System.Net.WebSockets.Client.dll
System.Net.WebSockets.dll
System.Numerics.dll
System.Numerics.Vectors.dll
System.ObjectModel.dll
System.Private.CoreLib.dll
System.Private.DataContractSerialization.dll
System.Private.Uri.dll
System.Private.Xml.dll
System.Private.Xml.Linq.dll
System.Reflection.DispatchProxy.dll
System.Reflection.dll
System.Reflection.Emit.dll
System.Reflection.Emit.ILGeneration.dll
System.Reflection.Emit.Lightweight.dll
System.Reflection.Extensions.dll
System.Reflection.Metadata.dll
System.Reflection.Primitives.dll
System.Reflection.TypeExtensions.dll
System.Resources.Reader.dll
System.Resources.ResourceManager.dll
System.Resources.Writer.dll
System.Runtime.CompilerServices.Unsafe.dll
System.Runtime.CompilerServices.VisualC.dll
System.Runtime.dll
System.Runtime.Extensions.dll
System.Runtime.Handles.dll
System.Runtime.InteropServices.dll
System.Runtime.InteropServices.RuntimeInformation.dll
System.Runtime.InteropServices.WindowsRuntime.dll
System.Runtime.Intrinsics.dll
System.Runtime.Loader.dll
System.Runtime.Numerics.dll
System.Runtime.Serialization.dll
System.Runtime.Serialization.Formatters.dll
System.Runtime.Serialization.Json.dll
System.Runtime.Serialization.Primitives.dll
System.Runtime.Serialization.Xml.dll
System.Runtime.WindowsRuntime.dll
System.Runtime.WindowsRuntime.UI.Xaml.dll
System.Security.AccessControl.dll
System.Security.Claims.dll
System.Security.Cryptography.Algorithms.dll
System.Security.Cryptography.Cng.dll
System.Security.Cryptography.Csp.dll
System.Security.Cryptography.Encoding.dll
System.Security.Cryptography.Native.Apple.a
System.Security.Cryptography.Native.Apple.dylib
System.Security.Cryptography.Native.OpenSsl.a
System.Security.Cryptography.Native.OpenSsl.dylib
System.Security.Cryptography.Native.OpenSsl.so
System.Security.Cryptography.OpenSsl.dll
System.Security.Cryptography.Primitives.dll
System.Security.Cryptography.X509Certificates.dll
System.Security.Cryptography.XCertificates.dll
System.Security.dll
System.Security.Principal.dll
System.Security.Principal.Windows.dll
System.Security.SecureString.dll
System.ServiceModel.Web.dll
System.ServiceProcess.dll
System.Text.Encoding.CodePages.dll
System.Text.Encoding.dll
System.Text.Encoding.Extensions.dll
System.Text.Encodings.Web.dll
System.Text.Json.dll
System.Text.RegularExpressions.dll
System.Threading.Channels.dll
System.Threading.dll
System.Threading.Overlapped.dll
System.Threading.Tasks.Dataflow.dll
System.Threading.Tasks.dll
System.Threading.Tasks.Extensions.dll
System.Threading.Tasks.Parallel.dll
System.Threading.Thread.dll
System.Threading.ThreadPool.dll
System.Threading.Timer.dll
System.Transactions.dll
System.Transactions.Local.dll
System.ValueTuple.dll
System.Web.dll
System.Web.HttpUtility.dll
System.Windows.dll
System.Xml.dll
System.Xml.Linq.dll
System.Xml.ReaderWriter.dll
System.Xml.Serialization.dll
System.Xml.XDocument.dll
System.Xml.XmlDocument.dll
System.Xml.XmlSerializer.dll
System.Xml.XPath.dll
System.Xml.XPath.XDocument.dll
ucrtbase.dll
WindowsBase.dll

View File

@@ -1,24 +0,0 @@
[
{
"HashValue": "<NO_RUNTIME_EXTERNALS_HASH>",
"DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noruntime-noexternals.tar.gz",
"TrimmedContents": {
"dotnetRuntime": "<RUNTIME_HASH>",
"externals": "<EXTERNALS_HASH>"
}
},
{
"HashValue": "<NO_RUNTIME_HASH>",
"DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noruntime.tar.gz",
"TrimmedContents": {
"dotnetRuntime": "<RUNTIME_HASH>"
}
},
{
"HashValue": "<NO_EXTERNALS_HASH>",
"DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noexternals.tar.gz",
"TrimmedContents": {
"externals": "<EXTERNALS_HASH>"
}
}
]

View File

@@ -1,24 +0,0 @@
[
{
"HashValue": "<NO_RUNTIME_EXTERNALS_HASH>",
"DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noruntime-noexternals.zip",
"TrimmedContents": {
"dotnetRuntime": "<RUNTIME_HASH>",
"externals": "<EXTERNALS_HASH>"
}
},
{
"HashValue": "<NO_RUNTIME_HASH>",
"DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noruntime.zip",
"TrimmedContents": {
"dotnetRuntime": "<RUNTIME_HASH>"
}
},
{
"HashValue": "<NO_EXTERNALS_HASH>",
"DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noexternals.zip",
"TrimmedContents": {
"externals": "<EXTERNALS_HASH>"
}
}
]

View File

@@ -98,14 +98,14 @@ namespace GitHub.Runner.Common
{
int logPageSize;
string logSizeEnv = Environment.GetEnvironmentVariable($"{hostType.ToUpperInvariant()}_LOGSIZE");
if (string.IsNullOrEmpty(logSizeEnv) || !int.TryParse(logSizeEnv, out logPageSize))
if (!string.IsNullOrEmpty(logSizeEnv) || !int.TryParse(logSizeEnv, out logPageSize))
{
logPageSize = _defaultLogPageSize;
}
int logRetentionDays;
string logRetentionDaysEnv = Environment.GetEnvironmentVariable($"{hostType.ToUpperInvariant()}_LOGRETENTION");
if (string.IsNullOrEmpty(logRetentionDaysEnv) || !int.TryParse(logRetentionDaysEnv, out logRetentionDays))
if (!string.IsNullOrEmpty(logRetentionDaysEnv) || !int.TryParse(logRetentionDaysEnv, out logRetentionDays))
{
logRetentionDays = _defaultLogRetentionDays;
}
@@ -200,17 +200,9 @@ namespace GitHub.Runner.Common
if (credData != null &&
credData.Data.TryGetValue("clientId", out var clientId))
{
_userAgents.Add(new ProductInfoHeaderValue("ClientId", clientId));
_userAgents.Add(new ProductInfoHeaderValue($"RunnerId", clientId));
}
}
var runnerFile = GetConfigFile(WellKnownConfigFile.Runner);
if (File.Exists(runnerFile))
{
var runnerSettings = IOUtil.LoadObject<RunnerSettings>(runnerFile);
_userAgents.Add(new ProductInfoHeaderValue("RunnerId", runnerSettings.AgentId.ToString(CultureInfo.InvariantCulture)));
_userAgents.Add(new ProductInfoHeaderValue("GroupId", runnerSettings.PoolId.ToString(CultureInfo.InvariantCulture)));
}
}
public string GetDirectory(WellKnownDirectory directory)

View File

@@ -38,8 +38,7 @@ namespace GitHub.Runner.Common
public async Task ConnectAsync(VssConnection jobConnection)
{
_connection = jobConnection;
int totalAttempts = 5;
int attemptCount = totalAttempts;
int attemptCount = 5;
var configurationStore = HostContext.GetService<IConfigurationStore>();
var runnerSettings = configurationStore.GetSettings();
@@ -57,21 +56,18 @@ namespace GitHub.Runner.Common
if (runnerSettings.IsHostedServer)
{
await CheckNetworkEndpointsAsync(attemptCount);
await CheckNetworkEndpointsAsync();
}
}
int attempt = totalAttempts - attemptCount;
TimeSpan backoff = BackoffTimerHelper.GetExponentialBackoff(attempt, TimeSpan.FromMilliseconds(100), TimeSpan.FromSeconds(3.2), TimeSpan.FromMilliseconds(100));
await Task.Delay(backoff);
await Task.Delay(100);
}
_taskClient = _connection.GetClient<TaskHttpClient>();
_hasConnection = true;
}
private async Task CheckNetworkEndpointsAsync(int attemptsLeft)
private async Task CheckNetworkEndpointsAsync()
{
try
{
@@ -83,8 +79,8 @@ namespace GitHub.Runner.Common
actionsClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents);
// Call the _apis/health endpoint, and include how many attempts are left as a URL query for easy tracking
var response = await actionsClient.GetAsync(new Uri(baseUri, $"_apis/health?_internalRunnerAttemptsLeft={attemptsLeft}"));
// Call the _apis/health endpoint
var response = await actionsClient.GetAsync(new Uri(baseUri, "_apis/health"));
Trace.Info($"Actions health status code: {response.StatusCode}");
}
}
@@ -104,8 +100,8 @@ namespace GitHub.Runner.Common
{
gitHubClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents);
// Call the api.github.com endpoint, and include how many attempts are left as a URL query for easy tracking
var response = await gitHubClient.GetAsync($"https://api.github.com?_internalRunnerAttemptsLeft={attemptsLeft}");
// Call the api.github.com endpoint
var response = await gitHubClient.GetAsync("https://api.github.com");
Trace.Info($"api.github.com status code: {response.StatusCode}");
}
}

View File

@@ -1,12 +1,14 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net6.0</TargetFramework>
<TargetFramework>netcoreapp3.1</TargetFramework>
<OutputType>Library</OutputType>
<RuntimeIdentifiers>win-x64;win-x86;linux-x64;linux-arm64;linux-arm;osx-x64</RuntimeIdentifiers>
<TargetLatestRuntimePatch>true</TargetLatestRuntimePatch>
<AssetTargetFallback>portable-net45+win8</AssetTargetFallback>
<NoWarn>NU1701;NU1603</NoWarn>
<Version>$(Version)</Version>
<TieredCompilationQuickJit>true</TieredCompilationQuickJit>
</PropertyGroup>
<ItemGroup>

View File

@@ -51,7 +51,7 @@ namespace GitHub.Runner.Common
Task<PackageMetadata> GetPackageAsync(string packageType, string platform, string version, bool includeToken, CancellationToken cancellationToken);
// agent update
Task<TaskAgent> UpdateAgentUpdateStateAsync(int agentPoolId, int agentId, string currentState, string trace);
Task<TaskAgent> UpdateAgentUpdateStateAsync(int agentPoolId, int agentId, string currentState);
}
public sealed class RunnerServer : RunnerService, IRunnerServer
@@ -341,10 +341,25 @@ namespace GitHub.Runner.Common
return _genericTaskAgentClient.GetPackageAsync(packageType, platform, version, includeToken, cancellationToken: cancellationToken);
}
public Task<TaskAgent> UpdateAgentUpdateStateAsync(int agentPoolId, int agentId, string currentState, string trace)
public Task<TaskAgent> UpdateAgentUpdateStateAsync(int agentPoolId, int agentId, string currentState)
{
CheckConnection(RunnerConnectionType.Generic);
return _genericTaskAgentClient.UpdateAgentUpdateStateAsync(agentPoolId, agentId, currentState, trace);
return _genericTaskAgentClient.UpdateAgentUpdateStateAsync(agentPoolId, agentId, currentState);
}
//-----------------------------------------------------------------
// Runner Auth Url
//-----------------------------------------------------------------
public Task<string> GetRunnerAuthUrlAsync(int runnerPoolId, int runnerId)
{
CheckConnection(RunnerConnectionType.MessageQueue);
return _messageTaskAgentClient.GetAgentAuthUrlAsync(runnerPoolId, runnerId);
}
public Task ReportRunnerAuthUrlErrorAsync(int runnerPoolId, int runnerId, string error)
{
CheckConnection(RunnerConnectionType.MessageQueue);
return _messageTaskAgentClient.ReportAgentAuthUrlMigrationErrorAsync(runnerPoolId, runnerId, error);
}
}
}

View File

@@ -243,7 +243,6 @@ namespace GitHub.Runner.Listener
validator: Validators.ServerUrlValidator);
}
#if OS_WINDOWS
public string GetWindowsLogonAccount(string defaultValue, string descriptionMsg)
{
return GetArgOrPrompt(
@@ -261,7 +260,7 @@ namespace GitHub.Runner.Listener
defaultValue: string.Empty,
validator: Validators.NonEmptyValidator);
}
#endif
public string GetWork()
{
return GetArgOrPrompt(

View File

@@ -1,5 +1,4 @@
#if OS_WINDOWS
#pragma warning disable CA1416
using System;
using System.Collections;
using System.Collections.Generic;
@@ -1328,5 +1327,4 @@ namespace GitHub.Runner.Listener.Configuration
public IntPtr hProfile;
}
}
#pragma warning restore CA1416
#endif

View File

@@ -67,8 +67,6 @@ namespace GitHub.Runner.Listener.Configuration
return !string.IsNullOrEmpty(value);
}
#if OS_WINDOWS
#pragma warning disable CA1416
public static bool NTAccountValidator(string arg)
{
if (string.IsNullOrEmpty(arg) || String.IsNullOrEmpty(arg.TrimStart('.', '\\')))
@@ -89,7 +87,5 @@ namespace GitHub.Runner.Listener.Configuration
return true;
}
#pragma warning restore CA1416
#endif
}
}

View File

@@ -1,5 +1,4 @@
#if OS_WINDOWS
#pragma warning disable CA1416
using System;
using System.IO;
using System.Linq;
@@ -170,5 +169,4 @@ namespace GitHub.Runner.Listener.Configuration
}
}
}
#pragma warning restore CA1416
#endif

View File

@@ -1,14 +1,15 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net6.0</TargetFramework>
<TargetFramework>netcoreapp3.1</TargetFramework>
<OutputType>Exe</OutputType>
<RuntimeIdentifiers>win-x64;win-x86;linux-x64;linux-arm64;linux-arm;osx-x64</RuntimeIdentifiers>
<TargetLatestRuntimePatch>true</TargetLatestRuntimePatch>
<AssetTargetFallback>portable-net45+win8</AssetTargetFallback>
<NoWarn>NU1701;NU1603</NoWarn>
<Version>$(Version)</Version>
<PredefinedCulturesOnly>false</PredefinedCulturesOnly>
<PublishReadyToRunComposite>true</PublishReadyToRunComposite>
<TieredCompilationQuickJit>true</TieredCompilationQuickJit>
<PublishReadyToRun>true</PublishReadyToRun>
</PropertyGroup>
<ItemGroup>
@@ -25,12 +26,6 @@
<PackageReference Include="System.ServiceProcess.ServiceController" Version="4.4.0" />
</ItemGroup>
<ItemGroup>
<EmbeddedResource Include="..\Misc\runnercoreassets">
<LogicalName>GitHub.Runner.Listener.runnercoreassets</LogicalName>
</EmbeddedResource>
</ItemGroup>
<PropertyGroup Condition=" '$(Configuration)' == 'Debug' ">
<DebugType>portable</DebugType>
</PropertyGroup>

View File

@@ -312,8 +312,6 @@ namespace GitHub.Runner.Listener
}
HostContext.WritePerfCounter("SessionCreated");
_term.WriteLine($"Current runner version: '{BuildConstants.RunnerPackage.Version}'");
_term.WriteLine($"{DateTime.UtcNow:u}: Listening for Jobs");
IJobDispatcher jobDispatcher = null;

View File

@@ -13,9 +13,6 @@ using GitHub.Services.WebApi;
using GitHub.Services.Common;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using System.Text;
using System.Collections.Generic;
using System.Reflection;
namespace GitHub.Runner.Listener
{
@@ -36,7 +33,6 @@ namespace GitHub.Runner.Listener
private IRunnerServer _runnerServer;
private int _poolId;
private int _agentId;
private readonly List<string> _updateTrace = new List<string>();
public bool Busy { get; private set; }
@@ -57,8 +53,6 @@ namespace GitHub.Runner.Listener
Busy = true;
try
{
var totalUpdateTime = Stopwatch.StartNew();
if (!await UpdateNeeded(updateMessage.TargetVersion, token))
{
Trace.Info($"Can't find available update package.");
@@ -66,7 +60,6 @@ namespace GitHub.Runner.Listener
}
Trace.Info($"An update is available.");
_updateTrace.Add($"RunnerPlatform: {_targetPackage.Platform}");
// Print console line that warn user not shutdown runner.
await UpdateRunnerUpdateStateAsync("Runner update in progress, do not shutdown runner.");
@@ -82,13 +75,12 @@ namespace GitHub.Runner.Listener
Trace.Info($"All running job has exited.");
// We need to keep runner backup around for macOS until we fixed https://github.com/actions/runner/issues/743
#if !OS_OSX
// delete runner backup
var stopWatch = Stopwatch.StartNew();
DeletePreviousVersionRunnerBackup(token);
Trace.Info($"Delete old version runner backup.");
stopWatch.Stop();
#endif
// generate update script from template
_updateTrace.Add($"DeleteRunnerBackupTime: {stopWatch.ElapsedMilliseconds}ms");
await UpdateRunnerUpdateStateAsync("Generate and execute update script.");
string updateScript = GenerateUpdateScript(restartInteractiveRunner);
@@ -106,21 +98,12 @@ namespace GitHub.Runner.Listener
invokeScript.Start();
Trace.Info($"Update script start running");
totalUpdateTime.Stop();
_updateTrace.Add($"TotalUpdateTime: {totalUpdateTime.ElapsedMilliseconds}ms");
await UpdateRunnerUpdateStateAsync("Runner will exit shortly for update, should be back online within 10 seconds.");
return true;
}
catch (Exception ex)
{
_updateTrace.Add(ex.ToString());
throw;
}
finally
{
await UpdateRunnerUpdateStateAsync("Runner update process finished.");
Busy = false;
}
}
@@ -175,23 +158,171 @@ namespace GitHub.Runner.Listener
IOUtil.DeleteDirectory(latestRunnerDirectory, token);
Directory.CreateDirectory(latestRunnerDirectory);
int runnerSuffix = 1;
string archiveFile = null;
var packageDownloadUrl = _targetPackage.DownloadUrl;
var packageHashValue = _targetPackage.HashValue;
_updateTrace.Add($"DownloadUrl: {packageDownloadUrl}");
bool downloadSucceeded = false;
try
{
archiveFile = await DownLoadRunner(latestRunnerDirectory, packageDownloadUrl, packageHashValue, token);
if (string.IsNullOrEmpty(archiveFile))
// Download the runner, using multiple attempts in order to be resilient against any networking/CDN issues
for (int attempt = 1; attempt <= Constants.RunnerDownloadRetryMaxAttempts; attempt++)
{
throw new TaskCanceledException($"Runner package '{packageDownloadUrl}' failed after {Constants.RunnerDownloadRetryMaxAttempts} download attempts");
// Generate an available package name, and do our best effort to clean up stale local zip files
while (true)
{
if (_targetPackage.Platform.StartsWith("win"))
{
archiveFile = Path.Combine(latestRunnerDirectory, $"runner{runnerSuffix}.zip");
}
else
{
archiveFile = Path.Combine(latestRunnerDirectory, $"runner{runnerSuffix}.tar.gz");
}
try
{
// delete .zip file
if (!string.IsNullOrEmpty(archiveFile) && File.Exists(archiveFile))
{
Trace.Verbose("Deleting latest runner package zip '{0}'", archiveFile);
IOUtil.DeleteFile(archiveFile);
}
break;
}
catch (Exception ex)
{
// couldn't delete the file for whatever reason, so generate another name
Trace.Warning("Failed to delete runner package zip '{0}'. Exception: {1}", archiveFile, ex);
runnerSuffix++;
}
}
// Allow a 15-minute package download timeout, which is good enough to update the runner from a 1 Mbit/s ADSL connection.
if (!int.TryParse(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_DOWNLOAD_TIMEOUT") ?? string.Empty, out int timeoutSeconds))
{
timeoutSeconds = 15 * 60;
}
Trace.Info($"Attempt {attempt}: save latest runner into {archiveFile}.");
using (var downloadTimeout = new CancellationTokenSource(TimeSpan.FromSeconds(timeoutSeconds)))
using (var downloadCts = CancellationTokenSource.CreateLinkedTokenSource(downloadTimeout.Token, token))
{
try
{
Trace.Info($"Download runner: begin download");
//open zip stream in async mode
using (HttpClient httpClient = new HttpClient(HostContext.CreateHttpClientHandler()))
{
if (!string.IsNullOrEmpty(_targetPackage.Token))
{
Trace.Info($"Adding authorization token ({_targetPackage.Token.Length} chars)");
httpClient.DefaultRequestHeaders.Authorization = new System.Net.Http.Headers.AuthenticationHeaderValue("Bearer", _targetPackage.Token);
}
Trace.Info($"Downloading {_targetPackage.DownloadUrl}");
using (FileStream fs = new FileStream(archiveFile, FileMode.Create, FileAccess.Write, FileShare.None, bufferSize: 4096, useAsync: true))
using (Stream result = await httpClient.GetStreamAsync(_targetPackage.DownloadUrl))
{
//81920 is the default used by System.IO.Stream.CopyTo and is under the large object heap threshold (85k).
await result.CopyToAsync(fs, 81920, downloadCts.Token);
await fs.FlushAsync(downloadCts.Token);
}
}
Trace.Info($"Download runner: finished download");
downloadSucceeded = true;
break;
}
catch (OperationCanceledException) when (token.IsCancellationRequested)
{
Trace.Info($"Runner download has been canceled.");
throw;
}
catch (Exception ex)
{
if (downloadCts.Token.IsCancellationRequested)
{
Trace.Warning($"Runner download has timed out after {timeoutSeconds} seconds");
}
Trace.Warning($"Failed to get package '{archiveFile}' from '{_targetPackage.DownloadUrl}'. Exception {ex}");
}
}
}
await ValidateRunnerHash(archiveFile, packageHashValue);
if (!downloadSucceeded)
{
throw new TaskCanceledException($"Runner package '{archiveFile}' failed after {Constants.RunnerDownloadRetryMaxAttempts} download attempts");
}
await ExtractRunnerPackage(archiveFile, latestRunnerDirectory, token);
// If we got this far, we know that we've successfully downloaded the runner package
// Validate Hash Matches if it is provided
using (FileStream stream = File.OpenRead(archiveFile))
{
if (!String.IsNullOrEmpty(_targetPackage.HashValue))
{
using (SHA256 sha256 = SHA256.Create())
{
byte[] srcHashBytes = await sha256.ComputeHashAsync(stream);
var hash = PrimitiveExtensions.ConvertToHexString(srcHashBytes);
if (hash != _targetPackage.HashValue)
{
// Hash did not match, we can't recover from this, just throw
throw new Exception($"Computed runner hash {hash} did not match expected Runner Hash {_targetPackage.HashValue} for {_targetPackage.Filename}");
}
Trace.Info($"Validated Runner Hash matches {_targetPackage.Filename} : {_targetPackage.HashValue}");
}
}
}
if (archiveFile.EndsWith(".zip", StringComparison.OrdinalIgnoreCase))
{
ZipFile.ExtractToDirectory(archiveFile, latestRunnerDirectory);
}
else if (archiveFile.EndsWith(".tar.gz", StringComparison.OrdinalIgnoreCase))
{
string tar = WhichUtil.Which("tar", trace: Trace);
if (string.IsNullOrEmpty(tar))
{
throw new NotSupportedException($"tar -xzf");
}
// tar -xzf
using (var processInvoker = HostContext.CreateService<IProcessInvoker>())
{
processInvoker.OutputDataReceived += new EventHandler<ProcessDataReceivedEventArgs>((sender, args) =>
{
if (!string.IsNullOrEmpty(args.Data))
{
Trace.Info(args.Data);
}
});
processInvoker.ErrorDataReceived += new EventHandler<ProcessDataReceivedEventArgs>((sender, args) =>
{
if (!string.IsNullOrEmpty(args.Data))
{
Trace.Error(args.Data);
}
});
int exitCode = await processInvoker.ExecuteAsync(latestRunnerDirectory, tar, $"-xzf \"{archiveFile}\"", null, token);
if (exitCode != 0)
{
throw new NotSupportedException($"Can't use 'tar -xzf' extract archive file: {archiveFile}. return code: {exitCode}.");
}
}
}
else
{
throw new NotSupportedException($"{archiveFile}");
}
Trace.Info($"Finished getting latest runner package at: {latestRunnerDirectory}.");
}
finally
{
@@ -211,204 +342,6 @@ namespace GitHub.Runner.Listener
}
}
await CopyLatestRunnerToRoot(latestRunnerDirectory, token);
}
private async Task<string> DownLoadRunner(string downloadDirectory, string packageDownloadUrl, string packageHashValue, CancellationToken token)
{
var stopWatch = Stopwatch.StartNew();
int runnerSuffix = 1;
string archiveFile = null;
bool downloadSucceeded = false;
// Download the runner, using multiple attempts in order to be resilient against any networking/CDN issues
for (int attempt = 1; attempt <= Constants.RunnerDownloadRetryMaxAttempts; attempt++)
{
// Generate an available package name, and do our best effort to clean up stale local zip files
while (true)
{
if (_targetPackage.Platform.StartsWith("win"))
{
archiveFile = Path.Combine(downloadDirectory, $"runner{runnerSuffix}.zip");
}
else
{
archiveFile = Path.Combine(downloadDirectory, $"runner{runnerSuffix}.tar.gz");
}
try
{
// delete .zip file
if (!string.IsNullOrEmpty(archiveFile) && File.Exists(archiveFile))
{
Trace.Verbose("Deleting latest runner package zip '{0}'", archiveFile);
IOUtil.DeleteFile(archiveFile);
}
break;
}
catch (Exception ex)
{
// couldn't delete the file for whatever reason, so generate another name
Trace.Warning("Failed to delete runner package zip '{0}'. Exception: {1}", archiveFile, ex);
runnerSuffix++;
}
}
// Allow a 15-minute package download timeout, which is good enough to update the runner from a 1 Mbit/s ADSL connection.
if (!int.TryParse(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_DOWNLOAD_TIMEOUT") ?? string.Empty, out int timeoutSeconds))
{
timeoutSeconds = 15 * 60;
}
Trace.Info($"Attempt {attempt}: save latest runner into {archiveFile}.");
using (var downloadTimeout = new CancellationTokenSource(TimeSpan.FromSeconds(timeoutSeconds)))
using (var downloadCts = CancellationTokenSource.CreateLinkedTokenSource(downloadTimeout.Token, token))
{
try
{
Trace.Info($"Download runner: begin download");
long downloadSize = 0;
//open zip stream in async mode
using (HttpClient httpClient = new HttpClient(HostContext.CreateHttpClientHandler()))
{
if (!string.IsNullOrEmpty(_targetPackage.Token))
{
Trace.Info($"Adding authorization token ({_targetPackage.Token.Length} chars)");
httpClient.DefaultRequestHeaders.Authorization = new System.Net.Http.Headers.AuthenticationHeaderValue("Bearer", _targetPackage.Token);
}
Trace.Info($"Downloading {packageDownloadUrl}");
using (FileStream fs = new FileStream(archiveFile, FileMode.Create, FileAccess.Write, FileShare.None, bufferSize: 4096, useAsync: true))
using (Stream result = await httpClient.GetStreamAsync(packageDownloadUrl))
{
//81920 is the default used by System.IO.Stream.CopyTo and is under the large object heap threshold (85k).
await result.CopyToAsync(fs, 81920, downloadCts.Token);
await fs.FlushAsync(downloadCts.Token);
downloadSize = fs.Length;
}
}
Trace.Info($"Download runner: finished download");
downloadSucceeded = true;
stopWatch.Stop();
_updateTrace.Add($"PackageDownloadTime: {stopWatch.ElapsedMilliseconds}ms");
_updateTrace.Add($"Attempts: {attempt}");
_updateTrace.Add($"PackageSize: {downloadSize / 1024 / 1024}MB");
break;
}
catch (OperationCanceledException) when (token.IsCancellationRequested)
{
Trace.Info($"Runner download has been canceled.");
throw;
}
catch (Exception ex)
{
if (downloadCts.Token.IsCancellationRequested)
{
Trace.Warning($"Runner download has timed out after {timeoutSeconds} seconds");
}
Trace.Warning($"Failed to get package '{archiveFile}' from '{packageDownloadUrl}'. Exception {ex}");
}
}
}
if (downloadSucceeded)
{
return archiveFile;
}
else
{
return null;
}
}
private async Task ValidateRunnerHash(string archiveFile, string packageHashValue)
{
var stopWatch = Stopwatch.StartNew();
// Validate Hash Matches if it is provided
using (FileStream stream = File.OpenRead(archiveFile))
{
if (!string.IsNullOrEmpty(packageHashValue))
{
using (SHA256 sha256 = SHA256.Create())
{
byte[] srcHashBytes = await sha256.ComputeHashAsync(stream);
var hash = PrimitiveExtensions.ConvertToHexString(srcHashBytes);
if (hash != packageHashValue)
{
// Hash did not match, we can't recover from this, just throw
throw new Exception($"Computed runner hash {hash} did not match expected Runner Hash {packageHashValue} for {_targetPackage.Filename}");
}
stopWatch.Stop();
Trace.Info($"Validated Runner Hash matches {_targetPackage.Filename} : {packageHashValue}");
_updateTrace.Add($"ValidateHashTime: {stopWatch.ElapsedMilliseconds}ms");
}
}
}
}
private async Task ExtractRunnerPackage(string archiveFile, string extractDirectory, CancellationToken token)
{
var stopWatch = Stopwatch.StartNew();
if (archiveFile.EndsWith(".zip", StringComparison.OrdinalIgnoreCase))
{
ZipFile.ExtractToDirectory(archiveFile, extractDirectory);
}
else if (archiveFile.EndsWith(".tar.gz", StringComparison.OrdinalIgnoreCase))
{
string tar = WhichUtil.Which("tar", trace: Trace);
if (string.IsNullOrEmpty(tar))
{
throw new NotSupportedException($"tar -xzf");
}
// tar -xzf
using (var processInvoker = HostContext.CreateService<IProcessInvoker>())
{
processInvoker.OutputDataReceived += new EventHandler<ProcessDataReceivedEventArgs>((sender, args) =>
{
if (!string.IsNullOrEmpty(args.Data))
{
Trace.Info(args.Data);
}
});
processInvoker.ErrorDataReceived += new EventHandler<ProcessDataReceivedEventArgs>((sender, args) =>
{
if (!string.IsNullOrEmpty(args.Data))
{
Trace.Error(args.Data);
}
});
int exitCode = await processInvoker.ExecuteAsync(extractDirectory, tar, $"-xzf \"{archiveFile}\"", null, token);
if (exitCode != 0)
{
throw new NotSupportedException($"Can't use 'tar -xzf' extract archive file: {archiveFile}. return code: {exitCode}.");
}
}
}
else
{
throw new NotSupportedException($"{archiveFile}");
}
stopWatch.Stop();
Trace.Info($"Finished getting latest runner package at: {extractDirectory}.");
_updateTrace.Add($"PackageExtractTime: {stopWatch.ElapsedMilliseconds}ms");
}
private Task CopyLatestRunnerToRoot(string latestRunnerDirectory, CancellationToken token)
{
var stopWatch = Stopwatch.StartNew();
// copy latest runner into runner root folder
// copy bin from _work/_update -> bin.version under root
string binVersionDir = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), $"{Constants.Path.BinDirectory}.{_targetPackage.Version}");
@@ -434,10 +367,6 @@ namespace GitHub.Runner.Listener
IOUtil.DeleteFile(destination);
file.CopyTo(destination, true);
}
stopWatch.Stop();
_updateTrace.Add($"CopyRunnerToRootTime: {stopWatch.ElapsedMilliseconds}ms");
return Task.CompletedTask;
}
private void DeletePreviousVersionRunnerBackup(CancellationToken token)
@@ -561,18 +490,9 @@ namespace GitHub.Runner.Listener
{
_terminal.WriteLine(currentState);
if (_updateTrace.Count > 0)
{
foreach (var trace in _updateTrace)
{
Trace.Info(trace);
}
}
try
{
await _runnerServer.UpdateAgentUpdateStateAsync(_poolId, _agentId, currentState, string.Join(Environment.NewLine, _updateTrace));
_updateTrace.Clear();
await _runnerServer.UpdateAgentUpdateStateAsync(_poolId, _agentId, currentState);
}
catch (VssResourceNotFoundException)
{

View File

@@ -1,14 +1,15 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net6.0</TargetFramework>
<TargetFramework>netcoreapp3.1</TargetFramework>
<OutputType>Exe</OutputType>
<RuntimeIdentifiers>win-x64;win-x86;linux-x64;linux-arm64;linux-arm;osx-x64</RuntimeIdentifiers>
<TargetLatestRuntimePatch>true</TargetLatestRuntimePatch>
<AssetTargetFallback>portable-net45+win8</AssetTargetFallback>
<NoWarn>NU1701;NU1603</NoWarn>
<Version>$(Version)</Version>
<PredefinedCulturesOnly>false</PredefinedCulturesOnly>
<PublishReadyToRunComposite>true</PublishReadyToRunComposite>
<TieredCompilationQuickJit>true</TieredCompilationQuickJit>
<PublishReadyToRun>true</PublishReadyToRun>
</PropertyGroup>
<ItemGroup>

View File

@@ -444,7 +444,7 @@ namespace GitHub.Runner.Plugins.Artifact
{
// We should never
context.Error($"Error '{ex.Message}' when downloading file '{fileToDownload}'. (Downloader {downloaderId})");
throw;
throw ex;
}
}
@@ -528,7 +528,7 @@ namespace GitHub.Runner.Plugins.Artifact
catch (Exception ex)
{
context.Output($"File error '{ex.Message}' when uploading file '{fileToUpload}'.");
throw;
throw ex;
}
}

View File

@@ -1,12 +1,14 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net6.0</TargetFramework>
<TargetFramework>netcoreapp3.1</TargetFramework>
<OutputType>Library</OutputType>
<RuntimeIdentifiers>win-x64;win-x86;linux-x64;linux-arm64;linux-arm;osx-x64</RuntimeIdentifiers>
<TargetLatestRuntimePatch>true</TargetLatestRuntimePatch>
<AssetTargetFallback>portable-net45+win8</AssetTargetFallback>
<NoWarn>NU1701;NU1603</NoWarn>
<Version>$(Version)</Version>
<TieredCompilationQuickJit>true</TieredCompilationQuickJit>
</PropertyGroup>
<ItemGroup>

View File

@@ -1,12 +1,14 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net6.0</TargetFramework>
<TargetFramework>netcoreapp3.1</TargetFramework>
<OutputType>Library</OutputType>
<RuntimeIdentifiers>win-x64;win-x86;linux-x64;linux-arm64;linux-arm;osx-x64</RuntimeIdentifiers>
<TargetLatestRuntimePatch>true</TargetLatestRuntimePatch>
<AssetTargetFallback>portable-net45+win8</AssetTargetFallback>
<NoWarn>NU1701;NU1603</NoWarn>
<Version>$(Version)</Version>
<TieredCompilationQuickJit>true</TieredCompilationQuickJit>
</PropertyGroup>
<ItemGroup>

View File

@@ -381,13 +381,6 @@ namespace GitHub.Runner.Worker
HostContext.SecretMasker.AddValue(command.Data);
Trace.Info($"Add new secret mask with length of {command.Data.Length}");
// Also add each individual line. Typically individual lines are processed from STDOUT of child processes.
var split = command.Data.Split(new[] { '\r', '\n' }, StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries);
foreach (var item in split)
{
HostContext.SecretMasker.AddValue(item);
}
}
}
}
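The block removed above registered every individual line of a multi-line secret (a PEM private key, for example) with the secret masker, because child processes usually emit such values one line at a time on STDOUT. A small sketch of that splitting logic against a stand-in masker (the real HostContext.SecretMasker wiring is not shown in this diff); note that StringSplitOptions.TrimEntries requires .NET 5 or later:

using System;
using System.Collections.Generic;

// Stand-in for the runner's secret masker: just collects values to redact.
var maskedValues = new List<string>();
void AddMask(string value) => maskedValues.Add(value);

string secret = "-----BEGIN KEY-----\nabc123\ndef456\n-----END KEY-----";  // illustrative secret
AddMask(secret);  // mask the full value

// Also mask each individual line so partial output from child processes is caught.
var lines = secret.Split(new[] { '\r', '\n' },
    StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries);
foreach (var line in lines)
{
    AddMask(line);
}
Console.WriteLine($"registered {maskedValues.Count} mask values");  // 5 in this example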

View File

@@ -267,19 +267,6 @@ namespace GitHub.Runner.Worker
_cachedEmbeddedPostSteps[parentStepId].Push(clonedAction);
}
}
else if (depth > 0)
{
// if we're in a composite action and haven't loaded the local action yet
// we assume it has a post step
if (!_cachedEmbeddedPostSteps.ContainsKey(parentStepId))
{
// If we haven't done so already, add the parent to the post steps
_cachedEmbeddedPostSteps[parentStepId] = new Stack<Pipelines.ActionStep>();
}
// Clone action so we can modify the condition without affecting the original
var clonedAction = action.Clone() as Pipelines.ActionStep;
_cachedEmbeddedPostSteps[parentStepId].Push(clonedAction);
}
}
}
@@ -443,7 +430,7 @@ namespace GitHub.Runner.Worker
{
for (var i = 0; i < compositeAction.Steps.Count; i++)
{
// Load stored Ids for later load actions
// Store Id's for later load actions
compositeAction.Steps[i].Id = _cachedEmbeddedStepIds[action.Id][i];
if (string.IsNullOrEmpty(executionContext.Global.Variables.Get("DistributedTask.EnableCompositeActions")) && compositeAction.Steps[i].Reference.Type != Pipelines.ActionSourceType.Script)
{
@@ -451,16 +438,6 @@ namespace GitHub.Runner.Worker
}
}
}
else
{
_cachedEmbeddedStepIds[action.Id] = new List<Guid>();
foreach (var compositeStep in compositeAction.Steps)
{
var guid = Guid.NewGuid();
compositeStep.Id = guid;
_cachedEmbeddedStepIds[action.Id].Add(guid);
}
}
}
else
{
@@ -1057,6 +1034,7 @@ namespace GitHub.Runner.Worker
}
}
// TODO: remove once we remove the DistributedTask.EnableCompositeActions FF
foreach (var step in compositeAction.Steps)
{
if (string.IsNullOrEmpty(executionContext.Global.Variables.Get("DistributedTask.EnableCompositeActions")) && step.Reference.Type != Pipelines.ActionSourceType.Script)
@@ -1199,8 +1177,6 @@ namespace GitHub.Runner.Worker
public string Pre { get; set; }
public string Post { get; set; }
public string NodeVersion { get; set; }
}
public sealed class PluginActionExecutionData : ActionExecutionData

View File

@@ -451,8 +451,7 @@ namespace GitHub.Runner.Worker
};
}
}
else if (string.Equals(usingToken.Value, "node12", StringComparison.OrdinalIgnoreCase)||
string.Equals(usingToken.Value, "node16", StringComparison.OrdinalIgnoreCase))
else if (string.Equals(usingToken.Value, "node12", StringComparison.OrdinalIgnoreCase))
{
if (string.IsNullOrEmpty(mainToken?.Value))
{
@@ -462,7 +461,6 @@ namespace GitHub.Runner.Worker
{
return new NodeJSActionExecutionData()
{
NodeVersion = usingToken.Value,
Script = mainToken.Value,
Pre = preToken?.Value,
InitCondition = preIfToken?.Value ?? "always()",
@@ -492,7 +490,7 @@ namespace GitHub.Runner.Worker
}
else
{
throw new ArgumentOutOfRangeException($"'using: {usingToken.Value}' is not supported, use 'docker', 'node12' or 'node16' instead.");
throw new ArgumentOutOfRangeException($"'using: {usingToken.Value}' is not supported, use 'docker' or 'node12' instead.");
}
}
else if (pluginToken != null)

View File

@@ -54,7 +54,7 @@ namespace GitHub.Runner.Worker.Container
_pathMappings.Add(new PathMapping(hostContext.GetDirectory(WellKnownDirectory.Externals), "/__e"));
if (this.IsJobContainer)
{
this.MountVolumes.Add(new MountVolume("/var/run/docker.sock", "/var/run/docker.sock"));
// this.MountVolumes.Add(new MountVolume("/var/run/docker.sock", "/var/run/docker.sock"));
}
#endif
if (container.Ports?.Count > 0)

View File

@@ -12,9 +12,88 @@ using GitHub.Runner.Sdk;
using GitHub.DistributedTask.Pipelines.ContextData;
using Microsoft.Win32;
using GitHub.DistributedTask.Pipelines.ObjectTemplating;
using System.Threading.Channels;
using GitHub.Services.WebApi;
using System.Text;
using System.Runtime.Serialization;
namespace GitHub.Runner.Worker
{
[DataContract]
public class ContainerEngineHandlerInput
{
[DataMember]
public string Command { get; set; }
[DataMember]
public ContainersCreationInput CreationInput { get; set; }
[DataMember]
public JobContainerExecInput ExecInput { get; set; }
[DataMember]
public ContainersRemoveInput RemoveInput { get; set; }
}
[DataContract]
public class ContainersCreationInput
{
[DataMember]
public List<ContainerInfo> Containers { get; set; }
}
[DataContract]
public class JobContainerExecInput
{
[DataMember]
public ContainerInfo JobContainer { get; set; }
[DataMember]
public string WorkingDirectory { get; set; }
[DataMember]
public string FileName { get; set; }
[DataMember]
public string Arguments { get; set; }
[DataMember]
public List<string> EnvironmentKeys { get; set; }
[DataMember]
public Dictionary<string, string> EnvironmentVariables { get; set; }
}
[DataContract]
public class ContainersRemoveInput
{
[DataMember]
public string Network { get; set; }
[DataMember]
public string JobContainerId { get; set; }
}
[DataContract]
public class ContainersCreationOutput
{
[DataMember]
public string Network { get; set; }
[DataMember]
public string JobContainerId { get; set; }
}
[DataContract]
public class ContainerEngineHandlerOutput
{
[DataMember]
public ContainersCreationOutput CreationOutput { get; set; }
}
[ServiceLocator(Default = typeof(ContainerOperationProvider))]
public interface IContainerOperationProvider : IRunnerService
{
@@ -24,25 +103,57 @@ namespace GitHub.Runner.Worker
public class ContainerOperationProvider : RunnerService, IContainerOperationProvider
{
private IDockerCommandManager _dockerManager;
private IDockerCommandManager _dockerManager = null;
public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);
_dockerManager = HostContext.GetService<IDockerCommandManager>();
// _dockerManager = HostContext.GetService<IDockerCommandManager>();
}
public async Task StartContainersAsync(IExecutionContext executionContext, object data)
{
Trace.Entering();
if (!Constants.Runner.Platform.Equals(Constants.OSPlatform.Linux))
{
throw new NotSupportedException("Container operations are only supported on Linux runners");
}
// if (!Constants.Runner.Platform.Equals(Constants.OSPlatform.Linux))
// {
// throw new NotSupportedException("Container operations are only supported on Linux runners");
// }
ArgUtil.NotNull(executionContext, nameof(executionContext));
List<ContainerInfo> containers = data as List<ContainerInfo>;
ArgUtil.NotNull(containers, nameof(containers));
foreach (var container in containers)
{
if (container.IsJobContainer)
{
// Configure job container - Mount workspace and tools, set up environment, and start long running process
var githubContext = executionContext.ExpressionValues["github"] as GitHubContext;
ArgUtil.NotNull(githubContext, nameof(githubContext));
var workingDirectory = githubContext["workspace"] as StringContextData;
ArgUtil.NotNullOrEmpty(workingDirectory, nameof(workingDirectory));
container.MountVolumes.Add(new MountVolume(HostContext.GetDirectory(WellKnownDirectory.Work), container.TranslateToContainerPath(HostContext.GetDirectory(WellKnownDirectory.Work))));
container.MountVolumes.Add(new MountVolume(HostContext.GetDirectory(WellKnownDirectory.Externals), container.TranslateToContainerPath(HostContext.GetDirectory(WellKnownDirectory.Externals)), true));
container.MountVolumes.Add(new MountVolume(HostContext.GetDirectory(WellKnownDirectory.Temp), container.TranslateToContainerPath(HostContext.GetDirectory(WellKnownDirectory.Temp))));
// container.MountVolumes.Add(new MountVolume(HostContext.GetDirectory(WellKnownDirectory.Actions), container.TranslateToContainerPath(HostContext.GetDirectory(WellKnownDirectory.Actions))));
container.MountVolumes.Add(new MountVolume(HostContext.GetDirectory(WellKnownDirectory.Tools), container.TranslateToContainerPath(HostContext.GetDirectory(WellKnownDirectory.Tools))));
var tempHomeDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Temp), "_github_home");
Directory.CreateDirectory(tempHomeDirectory);
container.MountVolumes.Add(new MountVolume(tempHomeDirectory, "/github/home"));
container.AddPathTranslateMapping(tempHomeDirectory, "/github/home");
container.ContainerEnvironmentVariables["HOME"] = container.TranslateToContainerPath(tempHomeDirectory);
var tempWorkflowDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Temp), "_github_workflow");
Directory.CreateDirectory(tempWorkflowDirectory);
container.MountVolumes.Add(new MountVolume(tempWorkflowDirectory, "/github/workflow"));
container.AddPathTranslateMapping(tempWorkflowDirectory, "/github/workflow");
container.ContainerWorkDirectory = container.TranslateToContainerPath(workingDirectory);
container.ContainerEntryPoint = "tail";
container.ContainerEntryPointArgs = "-f /dev/null";
}
}
var postJobStep = new JobExtensionRunner(runAsync: this.StopContainersAsync,
condition: $"{PipelineTemplateConstants.Always}()",
displayName: "Stop containers",
@@ -51,28 +162,87 @@ namespace GitHub.Runner.Worker
executionContext.Debug($"Register post job cleanup for stopping/deleting containers.");
executionContext.RegisterPostJobStep(postJobStep);
// Check whether we are inside a container.
            // Our container feature requires mapping the working directory from the host into the container.
            // If we are already inside a container, we will not be able to find the real working directory path on the host.
var podManHandler = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Bin), "kubectlHandler", "index.js");
if (File.Exists(podManHandler))
{
var podmanInput = new ContainerEngineHandlerInput()
{
Command = "Create",
CreationInput = new ContainersCreationInput()
{
Containers = containers
}
};
ContainerEngineHandlerOutput podmanOutput = null;
using (var processInvoker = HostContext.CreateService<IProcessInvoker>())
{
var redirectStandardIn = Channel.CreateUnbounded<string>(new UnboundedChannelOptions() { SingleReader = true, SingleWriter = true });
redirectStandardIn.Writer.TryWrite(JsonUtility.ToString(podmanInput));
processInvoker.OutputDataReceived += delegate (object sender, ProcessDataReceivedEventArgs message)
{
executionContext.Output(message.Data);
};
processInvoker.ErrorDataReceived += delegate (object sender, ProcessDataReceivedEventArgs message)
{
executionContext.Output(message.Data);
if (podmanOutput == null && message.Data.IndexOf("___CONTAINER_ENGINE_HANDLER_OUTPUT___") >= 0)
{
try
{
podmanOutput = JsonUtility.FromString<ContainerEngineHandlerOutput>(message.Data.Replace("___CONTAINER_ENGINE_HANDLER_OUTPUT___", ""));
}
catch (Exception ex)
{
executionContext.Error(ex);
}
}
};
// Execute the process. Exit code 0 should always be returned.
// A non-zero exit code indicates infrastructural failure.
// Task failure should be communicated over STDOUT using ## commands.
await processInvoker.ExecuteAsync(workingDirectory: HostContext.GetDirectory(WellKnownDirectory.Bin),
fileName: Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Externals), "node12", "bin", $"node{IOUtil.ExeExtension}"),
arguments: podManHandler,
environment: null,
requireExitCodeZero: false,
outputEncoding: Encoding.UTF8,
killProcessOnCancel: false,
redirectStandardIn: redirectStandardIn,
cancellationToken: executionContext.CancellationToken);
}
if (podmanOutput != null)
{
executionContext.JobContext.Container["network"] = new StringContextData(podmanOutput.CreationOutput.Network);
executionContext.JobContext.Container["id"] = new StringContextData(podmanOutput.CreationOutput.JobContainerId);
executionContext.Global.Container.ContainerId = podmanOutput.CreationOutput.JobContainerId;
}
}
else
{
// Check whether we are inside a container.
                // Our container feature requires mapping the working directory from the host into the container.
                // If we are already inside a container, we will not be able to find the real working directory path on the host.
#if OS_WINDOWS
#pragma warning disable CA1416
// service CExecSvc is Container Execution Agent.
ServiceController[] scServices = ServiceController.GetServices();
if (scServices.Any(x => String.Equals(x.ServiceName, "cexecsvc", StringComparison.OrdinalIgnoreCase) && x.Status == ServiceControllerStatus.Running))
{
throw new NotSupportedException("Container feature is not supported when runner is already running inside container.");
}
#pragma warning restore CA1416
#else
var initProcessCgroup = File.ReadLines("/proc/1/cgroup");
if (initProcessCgroup.Any(x => x.IndexOf(":/docker/", StringComparison.OrdinalIgnoreCase) >= 0))
{
throw new NotSupportedException("Container feature is not supported when runner is already running inside container.");
}
var initProcessCgroup = File.ReadLines("/proc/1/cgroup");
if (initProcessCgroup.Any(x => x.IndexOf(":/docker/", StringComparison.OrdinalIgnoreCase) >= 0))
{
throw new NotSupportedException("Container feature is not supported when runner is already running inside container.");
}
#endif
#if OS_WINDOWS
#pragma warning disable CA1416
// Check OS version (Windows server 1803 is required)
object windowsInstallationType = Registry.GetValue(@"HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\Windows NT\CurrentVersion", "InstallationType", defaultValue: null);
ArgUtil.NotNull(windowsInstallationType, nameof(windowsInstallationType));
@@ -91,71 +261,71 @@ namespace GitHub.Runner.Worker
{
throw new ArgumentOutOfRangeException(@"HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\Windows NT\CurrentVersion\ReleaseId");
}
#pragma warning restore CA1416
#endif
// Check docker client/server version
executionContext.Output("##[group]Checking docker version");
DockerVersion dockerVersion = await _dockerManager.DockerVersion(executionContext);
executionContext.Output("##[endgroup]");
// Check docker client/server version
executionContext.Output("##[group]Checking docker version");
DockerVersion dockerVersion = await _dockerManager.DockerVersion(executionContext);
executionContext.Output("##[endgroup]");
ArgUtil.NotNull(dockerVersion.ServerVersion, nameof(dockerVersion.ServerVersion));
ArgUtil.NotNull(dockerVersion.ClientVersion, nameof(dockerVersion.ClientVersion));
ArgUtil.NotNull(dockerVersion.ServerVersion, nameof(dockerVersion.ServerVersion));
ArgUtil.NotNull(dockerVersion.ClientVersion, nameof(dockerVersion.ClientVersion));
#if OS_WINDOWS
Version requiredDockerEngineAPIVersion = new Version(1, 30); // Docker-EE version 17.6
#else
Version requiredDockerEngineAPIVersion = new Version(1, 35); // Docker-CE version 17.12
Version requiredDockerEngineAPIVersion = new Version(1, 35); // Docker-CE version 17.12
#endif
if (dockerVersion.ServerVersion < requiredDockerEngineAPIVersion)
{
throw new NotSupportedException($"Min required docker engine API server version is '{requiredDockerEngineAPIVersion}', your docker ('{_dockerManager.DockerPath}') server version is '{dockerVersion.ServerVersion}'");
}
if (dockerVersion.ClientVersion < requiredDockerEngineAPIVersion)
{
throw new NotSupportedException($"Min required docker engine API client version is '{requiredDockerEngineAPIVersion}', your docker ('{_dockerManager.DockerPath}') client version is '{dockerVersion.ClientVersion}'");
}
// Clean up containers left by previous runs
executionContext.Output("##[group]Clean up resources from previous jobs");
var staleContainers = await _dockerManager.DockerPS(executionContext, $"--all --quiet --no-trunc --filter \"label={_dockerManager.DockerInstanceLabel}\"");
foreach (var staleContainer in staleContainers)
{
int containerRemoveExitCode = await _dockerManager.DockerRemove(executionContext, staleContainer);
if (containerRemoveExitCode != 0)
if (dockerVersion.ServerVersion < requiredDockerEngineAPIVersion)
{
executionContext.Warning($"Delete stale containers failed, docker rm fail with exit code {containerRemoveExitCode} for container {staleContainer}");
throw new NotSupportedException($"Min required docker engine API server version is '{requiredDockerEngineAPIVersion}', your docker ('{_dockerManager.DockerPath}') server version is '{dockerVersion.ServerVersion}'");
}
if (dockerVersion.ClientVersion < requiredDockerEngineAPIVersion)
{
throw new NotSupportedException($"Min required docker engine API client version is '{requiredDockerEngineAPIVersion}', your docker ('{_dockerManager.DockerPath}') client version is '{dockerVersion.ClientVersion}'");
}
}
int networkPruneExitCode = await _dockerManager.DockerNetworkPrune(executionContext);
if (networkPruneExitCode != 0)
{
executionContext.Warning($"Delete stale container networks failed, docker network prune fail with exit code {networkPruneExitCode}");
}
executionContext.Output("##[endgroup]");
// Clean up containers left by previous runs
executionContext.Output("##[group]Clean up resources from previous jobs");
var staleContainers = await _dockerManager.DockerPS(executionContext, $"--all --quiet --no-trunc --filter \"label={_dockerManager.DockerInstanceLabel}\"");
foreach (var staleContainer in staleContainers)
{
int containerRemoveExitCode = await _dockerManager.DockerRemove(executionContext, staleContainer);
if (containerRemoveExitCode != 0)
{
executionContext.Warning($"Delete stale containers failed, docker rm fail with exit code {containerRemoveExitCode} for container {staleContainer}");
}
}
// Create local docker network for this job to avoid port conflict when multiple runners run on same machine.
// All containers within a job join the same network
executionContext.Output("##[group]Create local container network");
var containerNetwork = $"github_network_{Guid.NewGuid().ToString("N")}";
await CreateContainerNetworkAsync(executionContext, containerNetwork);
executionContext.JobContext.Container["network"] = new StringContextData(containerNetwork);
executionContext.Output("##[endgroup]");
int networkPruneExitCode = await _dockerManager.DockerNetworkPrune(executionContext);
if (networkPruneExitCode != 0)
{
executionContext.Warning($"Delete stale container networks failed, docker network prune fail with exit code {networkPruneExitCode}");
}
executionContext.Output("##[endgroup]");
foreach (var container in containers)
{
container.ContainerNetwork = containerNetwork;
await StartContainerAsync(executionContext, container);
}
// Create local docker network for this job to avoid port conflict when multiple runners run on same machine.
// All containers within a job join the same network
executionContext.Output("##[group]Create local container network");
var containerNetwork = $"github_network_{Guid.NewGuid().ToString("N")}";
await CreateContainerNetworkAsync(executionContext, containerNetwork);
executionContext.JobContext.Container["network"] = new StringContextData(containerNetwork);
executionContext.Output("##[endgroup]");
executionContext.Output("##[group]Waiting for all services to be ready");
foreach (var container in containers.Where(c => !c.IsJobContainer))
{
await ContainerHealthcheck(executionContext, container);
foreach (var container in containers)
{
container.ContainerNetwork = containerNetwork;
await StartContainerAsync(executionContext, container);
}
executionContext.Output("##[group]Waiting for all services to be ready");
foreach (var container in containers.Where(c => !c.IsJobContainer))
{
await ContainerHealthcheck(executionContext, container);
}
executionContext.Output("##[endgroup]");
}
executionContext.Output("##[endgroup]");
}
public async Task StopContainersAsync(IExecutionContext executionContext, object data)
@@ -166,12 +336,69 @@ namespace GitHub.Runner.Worker
List<ContainerInfo> containers = data as List<ContainerInfo>;
ArgUtil.NotNull(containers, nameof(containers));
foreach (var container in containers)
var podManHandler = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Bin), "kubectlHandler", "index.js");
if (File.Exists(podManHandler))
{
await StopContainerAsync(executionContext, container);
var podmanInput = new ContainerEngineHandlerInput()
{
Command = "Remove",
RemoveInput = new ContainersRemoveInput()
{
Network = executionContext.JobContext.Container["network"].ToString(),
JobContainerId = executionContext.JobContext.Container["id"].ToString()
}
};
ContainerEngineHandlerOutput podmanOutput = null;
using (var processInvoker = HostContext.CreateService<IProcessInvoker>())
{
var redirectStandardIn = Channel.CreateUnbounded<string>(new UnboundedChannelOptions() { SingleReader = true, SingleWriter = true });
redirectStandardIn.Writer.TryWrite(JsonUtility.ToString(podmanInput));
processInvoker.OutputDataReceived += delegate (object sender, ProcessDataReceivedEventArgs message)
{
executionContext.Output(message.Data);
};
processInvoker.ErrorDataReceived += delegate (object sender, ProcessDataReceivedEventArgs message)
{
executionContext.Output(message.Data);
if (podmanOutput == null && message.Data.IndexOf("___CONTAINER_ENGINE_HANDLER_OUTPUT___") >= 0)
{
try
{
podmanOutput = JsonUtility.FromString<ContainerEngineHandlerOutput>(message.Data.Replace("___CONTAINER_ENGINE_HANDLER_OUTPUT___", ""));
}
catch (Exception ex)
{
executionContext.Error(ex);
}
}
};
// Execute the process. Exit code 0 should always be returned.
// A non-zero exit code indicates infrastructural failure.
// Task failure should be communicated over STDOUT using ## commands.
await processInvoker.ExecuteAsync(workingDirectory: HostContext.GetDirectory(WellKnownDirectory.Work),
fileName: Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Externals), "node12", "bin", $"node{IOUtil.ExeExtension}"),
arguments: podManHandler,
environment: null,
requireExitCodeZero: false,
outputEncoding: Encoding.UTF8,
killProcessOnCancel: false,
redirectStandardIn: redirectStandardIn,
cancellationToken: executionContext.CancellationToken);
}
}
else
{
foreach (var container in containers)
{
await StopContainerAsync(executionContext, container);
}
// Remove the container network
await RemoveContainerNetworkAsync(executionContext, containers.First().ContainerNetwork);
}
// Remove the container network
await RemoveContainerNetworkAsync(executionContext, containers.First().ContainerNetwork);
}
private async Task StartContainerAsync(IExecutionContext executionContext, ContainerInfo container)
@@ -338,18 +565,6 @@ namespace GitHub.Runner.Worker
if (!string.IsNullOrEmpty(container.ContainerId))
{
if(!container.IsJobContainer)
{
// Print logs for service container jobs (not the "action" job itself b/c that's already logged).
executionContext.Output($"Print service container logs: {container.ContainerDisplayName}");
int logsExitCode = await _dockerManager.DockerLogs(executionContext, container.ContainerId);
if (logsExitCode != 0)
{
executionContext.Warning($"Docker logs fail with exit code {logsExitCode}");
}
}
executionContext.Output($"Stop and remove container: {container.ContainerDisplayName}");
int rmExitCode = await _dockerManager.DockerRemove(executionContext, container.ContainerId);
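Taken together, the ContainerOperationProvider changes above route container lifecycle work through an external 'kubectlHandler' node script when one exists under the bin directory: the runner serializes a ContainerEngineHandlerInput (Command set to Create, Exec or Remove plus the matching input object) and writes it to the script's STDIN over the redirected channel, then watches STDERR for a line carrying the ___CONTAINER_ENGINE_HANDLER_OUTPUT___ marker and parses the remainder as ContainerEngineHandlerOutput. A rough sketch of the exchange, assuming the JSON shape JsonUtility would produce for these data contracts (all names and ids below are illustrative):

// Request written to the handler's STDIN for the Create command (illustrative wire format;
// exact property casing depends on the serializer settings used by JsonUtility).
string createRequest =
    "{\"Command\":\"Create\",\"CreationInput\":{\"Containers\":[ /* serialized ContainerInfo objects */ ]}}";

// Reply the handler is expected to print on STDERR as a single line:
string createReply =
    "___CONTAINER_ENGINE_HANDLER_OUTPUT___" +
    "{\"CreationOutput\":{\"Network\":\"github_network_abc\",\"JobContainerId\":\"job-container-123\"}}";

// The runner strips the marker and then deserializes the remainder, roughly:
//   var output = JsonUtility.FromString<ContainerEngineHandlerOutput>(jsonAfterMarker);
//   executionContext.JobContext.Container["id"] = new StringContextData(output.CreationOutput.JobContainerId);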

View File

@@ -40,7 +40,6 @@ namespace GitHub.Runner.Worker
string ScopeName { get; }
string SiblingScopeName { get; }
string ContextName { get; }
ActionRunStage Stage { get; }
Task ForceCompleted { get; }
TaskResult? Result { get; set; }
TaskResult? Outcome { get; set; }
@@ -63,7 +62,7 @@ namespace GitHub.Runner.Worker
// Only job level ExecutionContext has PostJobSteps
Stack<IStep> PostJobSteps { get; }
Dictionary<Guid, string> EmbeddedStepsWithPostRegistered { get; }
HashSet<Guid> EmbeddedStepsWithPostRegistered{ get; }
// Keep track of embedded steps states
Dictionary<Guid, Dictionary<string, string>> EmbeddedIntraActionState { get; }
@@ -77,8 +76,8 @@ namespace GitHub.Runner.Worker
// Initialize
void InitializeJob(Pipelines.AgentJobRequestMessage message, CancellationToken token);
void CancelToken();
IExecutionContext CreateChild(Guid recordId, string displayName, string refName, string scopeName, string contextName, ActionRunStage stage, Dictionary<string, string> intraActionState = null, int? recordOrder = null, IPagingLogger logger = null, bool isEmbedded = false, CancellationTokenSource cancellationTokenSource = null, Guid embeddedId = default(Guid), string siblingScopeName = null);
IExecutionContext CreateEmbeddedChild(string scopeName, string contextName, Guid embeddedId, ActionRunStage stage, Dictionary<string, string> intraActionState = null, string siblingScopeName = null);
IExecutionContext CreateChild(Guid recordId, string displayName, string refName, string scopeName, string contextName, Dictionary<string, string> intraActionState = null, int? recordOrder = null, IPagingLogger logger = null, bool isEmbedded = false, CancellationTokenSource cancellationTokenSource = null, Guid embeddedId = default(Guid), string siblingScopeName = null);
IExecutionContext CreateEmbeddedChild(string scopeName, string contextName, Guid embeddedId, Dictionary<string, string> intraActionState = null, string siblingScopeName = null);
// logging
long Write(string tag, string message);
@@ -145,7 +144,6 @@ namespace GitHub.Runner.Worker
public string ScopeName { get; private set; }
public string SiblingScopeName { get; private set; }
public string ContextName { get; private set; }
public ActionRunStage Stage { get; private set; }
public Task ForceCompleted => _forceCompleted.Task;
public CancellationToken CancellationToken => _cancellationTokenSource.Token;
public Dictionary<string, string> IntraActionState { get; private set; }
@@ -170,7 +168,7 @@ namespace GitHub.Runner.Worker
public HashSet<Guid> StepsWithPostRegistered { get; private set; }
// Only job level ExecutionContext has EmbeddedStepsWithPostRegistered
public Dictionary<Guid, string> EmbeddedStepsWithPostRegistered { get; private set; }
public HashSet<Guid> EmbeddedStepsWithPostRegistered { get; private set; }
public Dictionary<Guid, Dictionary<string, string>> EmbeddedIntraActionState { get; private set; }
@@ -267,19 +265,12 @@ namespace GitHub.Runner.Worker
string siblingScopeName = null;
if (this.IsEmbedded)
{
if (step is IActionRunner actionRunner)
if (step is IActionRunner actionRunner && !Root.EmbeddedStepsWithPostRegistered.Add(actionRunner.Action.Id))
{
if (Root.EmbeddedStepsWithPostRegistered.ContainsKey(actionRunner.Action.Id))
{
Trace.Info($"'post' of '{actionRunner.DisplayName}' already push to child post step stack.");
}
else
{
Root.EmbeddedStepsWithPostRegistered[actionRunner.Action.Id] = actionRunner.Condition;
}
return;
Trace.Info($"'post' of '{actionRunner.DisplayName}' already push to child post step stack.");
}
}
return;
}
else if (step is IActionRunner actionRunner && !Root.StepsWithPostRegistered.Add(actionRunner.Action.Id))
{
Trace.Info($"'post' of '{actionRunner.DisplayName}' already push to post step stack.");
@@ -294,7 +285,7 @@ namespace GitHub.Runner.Worker
Root.PostJobSteps.Push(step);
}
public IExecutionContext CreateChild(Guid recordId, string displayName, string refName, string scopeName, string contextName, ActionRunStage stage, Dictionary<string, string> intraActionState = null, int? recordOrder = null, IPagingLogger logger = null, bool isEmbedded = false, CancellationTokenSource cancellationTokenSource = null, Guid embeddedId = default(Guid), string siblingScopeName = null)
public IExecutionContext CreateChild(Guid recordId, string displayName, string refName, string scopeName, string contextName, Dictionary<string, string> intraActionState = null, int? recordOrder = null, IPagingLogger logger = null, bool isEmbedded = false, CancellationTokenSource cancellationTokenSource = null, Guid embeddedId = default(Guid), string siblingScopeName = null)
{
Trace.Entering();
@@ -303,7 +294,6 @@ namespace GitHub.Runner.Worker
child.Global = Global;
child.ScopeName = scopeName;
child.ContextName = contextName;
child.Stage = stage;
child.EmbeddedId = embeddedId;
child.SiblingScopeName = siblingScopeName;
child.JobTelemetry = JobTelemetry;
@@ -354,9 +344,9 @@ namespace GitHub.Runner.Worker
/// An embedded execution context shares the same record ID, record name, logger,
/// and a linked cancellation token.
/// </summary>
public IExecutionContext CreateEmbeddedChild(string scopeName, string contextName, Guid embeddedId, ActionRunStage stage, Dictionary<string, string> intraActionState = null, string siblingScopeName = null)
public IExecutionContext CreateEmbeddedChild(string scopeName, string contextName, Guid embeddedId, Dictionary<string, string> intraActionState = null, string siblingScopeName = null)
{
return Root.CreateChild(_record.Id, _record.Name, _record.Id.ToString("N"), scopeName, contextName, stage, logger: _logger, isEmbedded: true, cancellationTokenSource: CancellationTokenSource.CreateLinkedTokenSource(_cancellationTokenSource.Token), intraActionState: intraActionState, embeddedId: embeddedId, siblingScopeName: siblingScopeName);
return Root.CreateChild(_record.Id, _record.Name, _record.Id.ToString("N"), scopeName, contextName, logger: _logger, isEmbedded: true, cancellationTokenSource: CancellationTokenSource.CreateLinkedTokenSource(_cancellationTokenSource.Token), intraActionState: intraActionState, embeddedId: embeddedId, siblingScopeName: siblingScopeName);
}
public void Start(string currentOperation = null)
@@ -553,8 +543,8 @@ namespace GitHub.Runner.Worker
}
_record.WarningCount++;
}
else if (issue.Type == IssueType.Notice)
}
else if (issue.Type == IssueType.Notice)
{
// tracking line number for each issue in log file
@@ -726,10 +716,10 @@ namespace GitHub.Runner.Worker
StepsWithPostRegistered = new HashSet<Guid>();
// EmbeddedStepsWithPostRegistered for job ExecutionContext
EmbeddedStepsWithPostRegistered = new Dictionary<Guid, string>();
EmbeddedStepsWithPostRegistered = new HashSet<Guid>();
// EmbeddedIntraActionState for job ExecutionContext
EmbeddedIntraActionState = new Dictionary<Guid, Dictionary<string, string>>();
EmbeddedIntraActionState = new Dictionary<Guid, Dictionary<string,string>>();
// Job timeline record.
InitializeTimelineRecord(
@@ -947,7 +937,7 @@ namespace GitHub.Runner.Worker
}
var newGuid = Guid.NewGuid();
return CreateChild(newGuid, displayName, newGuid.ToString("N"), null, null, ActionRunStage.Post, intraActionState, _childTimelineRecordOrder - Root.PostJobSteps.Count, siblingScopeName: siblingScopeName);
return CreateChild(newGuid, displayName, newGuid.ToString("N"), null, null, intraActionState, _childTimelineRecordOrder - Root.PostJobSteps.Count, siblingScopeName: siblingScopeName);
}
}
@@ -980,7 +970,7 @@ namespace GitHub.Runner.Worker
// Do not add a format string overload. See comment on ExecutionContext.Write().
public static void InfrastructureError(this IExecutionContext context, string message)
{
context.AddIssue(new Issue() { Type = IssueType.Error, Message = message, IsInfrastructureIssue = true });
context.AddIssue(new Issue() { Type = IssueType.Error, Message = message, IsInfrastructureIssue = true});
}
// Do not add a format string overload. See comment on ExecutionContext.Write().

View File

@@ -24,19 +24,8 @@ namespace GitHub.Runner.Worker.Expressions
ArgUtil.NotNull(templateContext, nameof(templateContext));
var executionContext = templateContext.State[nameof(IExecutionContext)] as IExecutionContext;
ArgUtil.NotNull(executionContext, nameof(executionContext));
// Decide based on 'action_status' for composite MAIN steps and 'job.status' for pre, post and job-level steps
var isCompositeMainStep = executionContext.IsEmbedded && executionContext.Stage == ActionRunStage.Main;
if (isCompositeMainStep)
{
ActionResult actionStatus = EnumUtil.TryParse<ActionResult>(executionContext.GetGitHubContext("action_status")) ?? ActionResult.Success;
return actionStatus == ActionResult.Failure;
}
else
{
ActionResult jobStatus = executionContext.JobContext.Status ?? ActionResult.Success;
return jobStatus == ActionResult.Failure;
}
ActionResult jobStatus = executionContext.JobContext.Status ?? ActionResult.Success;
return jobStatus == ActionResult.Failure;
}
}
}

View File

@@ -24,19 +24,8 @@ namespace GitHub.Runner.Worker.Expressions
ArgUtil.NotNull(templateContext, nameof(templateContext));
var executionContext = templateContext.State[nameof(IExecutionContext)] as IExecutionContext;
ArgUtil.NotNull(executionContext, nameof(executionContext));
// Decide based on 'action_status' for composite MAIN steps and 'job.status' for pre, post and job-level steps
var isCompositeMainStep = executionContext.IsEmbedded && executionContext.Stage == ActionRunStage.Main;
if (isCompositeMainStep)
{
ActionResult actionStatus = EnumUtil.TryParse<ActionResult>(executionContext.GetGitHubContext("action_status")) ?? ActionResult.Success;
return actionStatus == ActionResult.Success;
}
else
{
ActionResult jobStatus = executionContext.JobContext.Status ?? ActionResult.Success;
return jobStatus == ActionResult.Success;
}
ActionResult jobStatus = executionContext.JobContext.Status ?? ActionResult.Success;
return jobStatus == ActionResult.Success;
}
}
}

View File

@@ -50,9 +50,8 @@ namespace GitHub.Runner.Worker.Handlers
// Only register post steps for steps that actually ran
foreach (var step in Data.PostSteps.ToList())
{
if (ExecutionContext.Root.EmbeddedStepsWithPostRegistered.ContainsKey(step.Id))
if (ExecutionContext.Root.EmbeddedStepsWithPostRegistered.Contains(step.Id))
{
step.Condition = ExecutionContext.Root.EmbeddedStepsWithPostRegistered[step.Id];
steps.Add(step);
}
else
@@ -125,7 +124,7 @@ namespace GitHub.Runner.Worker.Handlers
{
ArgUtil.NotNull(step, step.DisplayName);
var stepId = $"__{Guid.NewGuid()}";
step.ExecutionContext = ExecutionContext.CreateEmbeddedChild(childScopeName, stepId, Guid.NewGuid(), stage);
step.ExecutionContext = ExecutionContext.CreateEmbeddedChild(childScopeName, stepId, Guid.NewGuid());
embeddedSteps.Add(step);
}
}
@@ -144,7 +143,7 @@ namespace GitHub.Runner.Worker.Handlers
step.Stage = stage;
step.Condition = stepData.Condition;
ExecutionContext.Root.EmbeddedIntraActionState.TryGetValue(step.Action.Id, out var intraActionState);
step.ExecutionContext = ExecutionContext.CreateEmbeddedChild(childScopeName, stepData.ContextName, step.Action.Id, stage, intraActionState: intraActionState, siblingScopeName: siblingScopeName);
step.ExecutionContext = ExecutionContext.CreateEmbeddedChild(childScopeName, stepData.ContextName, step.Action.Id, intraActionState: intraActionState, siblingScopeName: siblingScopeName);
step.ExecutionContext.ExpressionValues["inputs"] = inputsData;
if (!String.IsNullOrEmpty(ExecutionContext.SiblingScopeName))
{
@@ -242,10 +241,6 @@ namespace GitHub.Runner.Worker.Handlers
step.ExecutionContext.ExpressionFunctions.Add(new FunctionInfo<FailureFunction>(PipelineTemplateConstants.Failure, 0, 0));
step.ExecutionContext.ExpressionFunctions.Add(new FunctionInfo<SuccessFunction>(PipelineTemplateConstants.Success, 0, 0));
// Set action_status to the success of the current composite action
var actionResult = ExecutionContext.Result?.ToActionResult() ?? ActionResult.Success;
step.ExecutionContext.SetGitHubContext("action_status", actionResult.ToString());
// Initialize env context
Trace.Info("Initialize Env context for embedded step");
#if OS_WINDOWS
@@ -300,100 +295,108 @@ namespace GitHub.Runner.Worker.Handlers
CancellationTokenRegistration? jobCancelRegister = null;
try
{
// Register job cancellation call back only if job cancellation token not been fire before each step run
if (!ExecutionContext.Root.CancellationToken.IsCancellationRequested)
{
// Test the condition again. The job was canceled after the condition was originally evaluated.
jobCancelRegister = ExecutionContext.Root.CancellationToken.Register(() =>
{
// Mark job as cancelled
ExecutionContext.Root.Result = TaskResult.Canceled;
ExecutionContext.Root.JobContext.Status = ExecutionContext.Root.Result?.ToActionResult();
step.ExecutionContext.Debug($"Re-evaluate condition on job cancellation for step: '{step.DisplayName}'.");
var conditionReTestTraceWriter = new ConditionTraceWriter(Trace, null); // host tracing only
var conditionReTestResult = false;
if (HostContext.RunnerShutdownToken.IsCancellationRequested)
{
step.ExecutionContext.Debug($"Skip Re-evaluate condition on runner shutdown.");
}
else
{
try
{
var templateEvaluator = step.ExecutionContext.ToPipelineTemplateEvaluator(conditionReTestTraceWriter);
var condition = new BasicExpressionToken(null, null, null, step.Condition);
conditionReTestResult = templateEvaluator.EvaluateStepIf(condition, step.ExecutionContext.ExpressionValues, step.ExecutionContext.ExpressionFunctions, step.ExecutionContext.ToExpressionState());
}
catch (Exception ex)
{
// Cancel the step since we get exception while re-evaluate step condition
Trace.Info("Caught exception from expression when re-test condition on job cancellation.");
step.ExecutionContext.Error(ex);
}
}
if (!conditionReTestResult)
{
// Cancel the step
Trace.Info("Cancel current running step.");
step.ExecutionContext.CancelToken();
}
});
}
else
{
if (ExecutionContext.Root.Result != TaskResult.Canceled)
{
// Mark job as cancelled
ExecutionContext.Root.Result = TaskResult.Canceled;
ExecutionContext.Root.JobContext.Status = ExecutionContext.Root.Result?.ToActionResult();
}
}
// Evaluate condition
step.ExecutionContext.Debug($"Evaluating condition for step: '{step.DisplayName}'");
var conditionTraceWriter = new ConditionTraceWriter(Trace, step.ExecutionContext);
var conditionResult = false;
var conditionEvaluateError = default(Exception);
if (HostContext.RunnerShutdownToken.IsCancellationRequested)
{
step.ExecutionContext.Debug($"Skip evaluate condition on runner shutdown.");
}
else
{
try
{
var templateEvaluator = step.ExecutionContext.ToPipelineTemplateEvaluator(conditionTraceWriter);
var condition = new BasicExpressionToken(null, null, null, step.Condition);
conditionResult = templateEvaluator.EvaluateStepIf(condition, step.ExecutionContext.ExpressionValues, step.ExecutionContext.ExpressionFunctions, step.ExecutionContext.ToExpressionState());
}
catch (Exception ex)
{
Trace.Info("Caught exception from expression.");
Trace.Error(ex);
conditionEvaluateError = ex;
}
}
if (!conditionResult && conditionEvaluateError == null)
{
// Condition is false
Trace.Info("Skipping step due to condition evaluation.");
step.ExecutionContext.Result = TaskResult.Skipped;
continue;
}
else if (conditionEvaluateError != null)
{
// Condition error
step.ExecutionContext.Error(conditionEvaluateError);
step.ExecutionContext.Result = TaskResult.Failed;
ExecutionContext.Result = TaskResult.Failed;
break;
}
else
// For main steps just run the action
if (stage == ActionRunStage.Main)
{
await RunStepAsync(step);
}
// We need to evaluate conditions for pre/post steps
else
{
// Register job cancellation call back only if job cancellation token not been fire before each step run
if (!ExecutionContext.Root.CancellationToken.IsCancellationRequested)
{
// Test the condition again. The job was canceled after the condition was originally evaluated.
jobCancelRegister = ExecutionContext.Root.CancellationToken.Register(() =>
{
// Mark job as cancelled
ExecutionContext.Root.Result = TaskResult.Canceled;
ExecutionContext.Root.JobContext.Status = ExecutionContext.Root.Result?.ToActionResult();
step.ExecutionContext.Debug($"Re-evaluate condition on job cancellation for step: '{step.DisplayName}'.");
var conditionReTestTraceWriter = new ConditionTraceWriter(Trace, null); // host tracing only
var conditionReTestResult = false;
if (HostContext.RunnerShutdownToken.IsCancellationRequested)
{
step.ExecutionContext.Debug($"Skip Re-evaluate condition on runner shutdown.");
}
else
{
try
{
var templateEvaluator = step.ExecutionContext.ToPipelineTemplateEvaluator(conditionReTestTraceWriter);
var condition = new BasicExpressionToken(null, null, null, step.Condition);
conditionReTestResult = templateEvaluator.EvaluateStepIf(condition, step.ExecutionContext.ExpressionValues, step.ExecutionContext.ExpressionFunctions, step.ExecutionContext.ToExpressionState());
}
catch (Exception ex)
{
// Cancel the step since we get exception while re-evaluate step condition
Trace.Info("Caught exception from expression when re-test condition on job cancellation.");
step.ExecutionContext.Error(ex);
}
}
if (!conditionReTestResult)
{
// Cancel the step
Trace.Info("Cancel current running step.");
step.ExecutionContext.CancelToken();
}
});
}
else
{
if (ExecutionContext.Root.Result != TaskResult.Canceled)
{
// Mark job as cancelled
ExecutionContext.Root.Result = TaskResult.Canceled;
ExecutionContext.Root.JobContext.Status = ExecutionContext.Root.Result?.ToActionResult();
}
}
// Evaluate condition
step.ExecutionContext.Debug($"Evaluating condition for step: '{step.DisplayName}'");
var conditionTraceWriter = new ConditionTraceWriter(Trace, step.ExecutionContext);
var conditionResult = false;
var conditionEvaluateError = default(Exception);
if (HostContext.RunnerShutdownToken.IsCancellationRequested)
{
step.ExecutionContext.Debug($"Skip evaluate condition on runner shutdown.");
}
else
{
try
{
var templateEvaluator = step.ExecutionContext.ToPipelineTemplateEvaluator(conditionTraceWriter);
var condition = new BasicExpressionToken(null, null, null, step.Condition);
conditionResult = templateEvaluator.EvaluateStepIf(condition, step.ExecutionContext.ExpressionValues, step.ExecutionContext.ExpressionFunctions, step.ExecutionContext.ToExpressionState());
}
catch (Exception ex)
{
Trace.Info("Caught exception from expression.");
Trace.Error(ex);
conditionEvaluateError = ex;
}
}
if (!conditionResult && conditionEvaluateError == null)
{
// Condition is false
Trace.Info("Skipping step due to condition evaluation.");
step.ExecutionContext.Result = TaskResult.Skipped;
continue;
}
else if (conditionEvaluateError != null)
{
// Condition error
step.ExecutionContext.Error(conditionEvaluateError);
step.ExecutionContext.Result = TaskResult.Failed;
ExecutionContext.Result = TaskResult.Failed;
break;
}
else
{
await RunStepAsync(step);
}
}
}
finally
{
@@ -409,6 +412,12 @@ namespace GitHub.Runner.Worker.Handlers
{
Trace.Info($"Update job result with current composite step result '{step.ExecutionContext.Result}'.");
ExecutionContext.Result = TaskResultUtil.MergeTaskResults(ExecutionContext.Result, step.ExecutionContext.Result.Value);
// We should run cleanup even if one of the cleanup step fails
if (stage != ActionRunStage.Post)
{
break;
}
}
}
}

View File

@@ -83,7 +83,7 @@ namespace GitHub.Runner.Worker.Handlers
HasPreStep = Data.HasPre,
HasPostStep = Data.HasPost,
IsEmbedded = ExecutionContext.IsEmbedded,
Type = Data.NodeVersion
Type = "node12"
};
ExecutionContext.Root.ActionsStepsTelemetry.Add(telemetry);
}
@@ -99,7 +99,7 @@ namespace GitHub.Runner.Worker.Handlers
workingDirectory = HostContext.GetDirectory(WellKnownDirectory.Work);
}
var nodeRuntimeVersion = await StepHost.DetermineNodeRuntimeVersion(ExecutionContext, Data.NodeVersion);
var nodeRuntimeVersion = await StepHost.DetermineNodeRuntimeVersion(ExecutionContext);
string file = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Externals), nodeRuntimeVersion, "bin", $"node{IOUtil.ExeExtension}");
// Format the arguments passed to node.

View File

@@ -21,9 +21,11 @@ namespace GitHub.Runner.Worker.Handlers
event EventHandler<ProcessDataReceivedEventArgs> OutputDataReceived;
event EventHandler<ProcessDataReceivedEventArgs> ErrorDataReceived;
IExecutionContext ExecutionContext { get; set; }
string ResolvePathForStepHost(string path);
Task<string> DetermineNodeRuntimeVersion(IExecutionContext executionContext, string preferredVersion);
Task<string> DetermineNodeRuntimeVersion(IExecutionContext executionContext);
Task<int> ExecuteAsync(string workingDirectory,
string fileName,
@@ -53,14 +55,16 @@ namespace GitHub.Runner.Worker.Handlers
public event EventHandler<ProcessDataReceivedEventArgs> OutputDataReceived;
public event EventHandler<ProcessDataReceivedEventArgs> ErrorDataReceived;
public IExecutionContext ExecutionContext { get; set; }
public string ResolvePathForStepHost(string path)
{
return path;
}
public Task<string> DetermineNodeRuntimeVersion(IExecutionContext executionContext, string preferredVersion)
public Task<string> DetermineNodeRuntimeVersion(IExecutionContext executionContext)
{
return Task.FromResult<string>(preferredVersion);
return Task.FromResult<string>("node12");
}
public async Task<int> ExecuteAsync(string workingDirectory,
@@ -99,6 +103,8 @@ namespace GitHub.Runner.Worker.Handlers
public event EventHandler<ProcessDataReceivedEventArgs> OutputDataReceived;
public event EventHandler<ProcessDataReceivedEventArgs> ErrorDataReceived;
public IExecutionContext ExecutionContext { get; set; }
public string ResolvePathForStepHost(string path)
{
// make sure container exist.
@@ -123,7 +129,7 @@ namespace GitHub.Runner.Worker.Handlers
}
}
public async Task<string> DetermineNodeRuntimeVersion(IExecutionContext executionContext, string preferredVersion)
public async Task<string> DetermineNodeRuntimeVersion(IExecutionContext executionContext)
{
// Best effort to determine a compatible node runtime
// There may be more variation in which libraries are linked than just musl/glibc,
@@ -148,14 +154,14 @@ namespace GitHub.Runner.Worker.Handlers
var msg = $"JavaScript Actions in Alpine containers are only supported on x64 Linux runners. Detected {os} {arch}";
throw new NotSupportedException(msg);
}
nodeExternal = $"{preferredVersion}_alpine";
nodeExternal = "node12_alpine";
executionContext.Debug($"Container distribution is alpine. Running JavaScript Action with external tool: {nodeExternal}");
return nodeExternal;
}
}
}
// Optimistically use the default
nodeExternal = preferredVersion;
nodeExternal = "node12";
executionContext.Debug($"Running JavaScript Action with default external tool: {nodeExternal}");
return nodeExternal;
}
@@ -174,69 +180,138 @@ namespace GitHub.Runner.Worker.Handlers
ArgUtil.NotNull(Container, nameof(Container));
ArgUtil.NotNullOrEmpty(Container.ContainerId, nameof(Container.ContainerId));
var dockerManager = HostContext.GetService<IDockerCommandManager>();
string dockerClientPath = dockerManager.DockerPath;
// Usage: docker exec [OPTIONS] CONTAINER COMMAND [ARG...]
IList<string> dockerCommandArgs = new List<string>();
dockerCommandArgs.Add($"exec");
// [OPTIONS]
dockerCommandArgs.Add($"-i");
dockerCommandArgs.Add($"--workdir {workingDirectory}");
foreach (var env in environment)
var podManHandler = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Bin), "kubectlHandler", "index.js");
if (File.Exists(podManHandler))
{
// e.g. -e MY_SECRET maps the value into the exec'ed process without exposing
// the value directly in the command
dockerCommandArgs.Add($"-e {env.Key}");
var podmanInput = new ContainerEngineHandlerInput()
{
Command = "Exec",
ExecInput = new JobContainerExecInput()
{
JobContainer = this.Container,
WorkingDirectory = workingDirectory,
FileName = fileName,
Arguments = arguments,
EnvironmentKeys = environment.Keys.ToList(),
EnvironmentVariables = environment.ToDictionary(x => x.Key, y => y.Value)
}
};
// make sure all env are using container path
foreach (var envKey in environment.Keys.ToList())
{
environment[envKey] = this.Container.TranslateToContainerPath(environment[envKey]);
}
// ContainerEngineHandlerOutput podmanOutput = null;
using (var processInvoker = HostContext.CreateService<IProcessInvoker>())
{
var redirectStandardIn = Channel.CreateUnbounded<string>(new UnboundedChannelOptions() { SingleReader = true, SingleWriter = true });
redirectStandardIn.Writer.TryWrite(JsonUtility.ToString(podmanInput));
// processInvoker.OutputDataReceived += delegate (object sender, ProcessDataReceivedEventArgs message)
// {
// ExecutionContext.Output(message.Data);
// };
// processInvoker.ErrorDataReceived += delegate (object sender, ProcessDataReceivedEventArgs message)
// {
// executionContext.Output(message.Data);
// if (podmanOutput == null && message.Data.IndexOf("___CONTAINER_ENGINE_HANDLER_OUTPUT___") >= 0)
// {
// try
// {
// podmanOutput = JsonUtility.FromString<ContainerEngineHandlerOutput>(message.Data.Replace("___CONTAINER_ENGINE_HANDLER_OUTPUT___", ""));
// }
// catch (Exception ex)
// {
// executionContext.Error(ex);
// }
// }
// };
processInvoker.OutputDataReceived += OutputDataReceived;
processInvoker.ErrorDataReceived += ErrorDataReceived;
// Execute the process. Exit code 0 should always be returned.
// A non-zero exit code indicates infrastructural failure.
// Task failure should be communicated over STDOUT using ## commands.
return await processInvoker.ExecuteAsync(workingDirectory: HostContext.GetDirectory(WellKnownDirectory.Work),
fileName: Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Externals), "node12", "bin", $"node{IOUtil.ExeExtension}"),
arguments: podManHandler,
environment: environment,
requireExitCodeZero: requireExitCodeZero,
outputEncoding: Encoding.UTF8,
killProcessOnCancel: killProcessOnCancel,
redirectStandardIn: redirectStandardIn,
cancellationToken: cancellationToken);
}
}
if (!string.IsNullOrEmpty(PrependPath))
else
{
// Prepend tool paths to container's PATH
var fullPath = !string.IsNullOrEmpty(Container.ContainerRuntimePath) ? $"{PrependPath}:{Container.ContainerRuntimePath}" : PrependPath;
dockerCommandArgs.Add($"-e PATH=\"{fullPath}\"");
}
var dockerManager = HostContext.GetService<IDockerCommandManager>();
string dockerClientPath = dockerManager.DockerPath;
// CONTAINER
dockerCommandArgs.Add($"{Container.ContainerId}");
// Usage: docker exec [OPTIONS] CONTAINER COMMAND [ARG...]
IList<string> dockerCommandArgs = new List<string>();
dockerCommandArgs.Add($"exec");
// COMMAND
dockerCommandArgs.Add(fileName);
// [OPTIONS]
dockerCommandArgs.Add($"-i");
dockerCommandArgs.Add($"--workdir {workingDirectory}");
foreach (var env in environment)
{
// e.g. -e MY_SECRET maps the value into the exec'ed process without exposing
// the value directly in the command
dockerCommandArgs.Add($"-e {env.Key}");
}
if (!string.IsNullOrEmpty(PrependPath))
{
// Prepend tool paths to container's PATH
var fullPath = !string.IsNullOrEmpty(Container.ContainerRuntimePath) ? $"{PrependPath}:{Container.ContainerRuntimePath}" : PrependPath;
dockerCommandArgs.Add($"-e PATH=\"{fullPath}\"");
}
// [ARG...]
dockerCommandArgs.Add(arguments);
// CONTAINER
dockerCommandArgs.Add($"{Container.ContainerId}");
string dockerCommandArgstring = string.Join(" ", dockerCommandArgs);
// COMMAND
dockerCommandArgs.Add(fileName);
// make sure all env are using container path
foreach (var envKey in environment.Keys.ToList())
{
environment[envKey] = this.Container.TranslateToContainerPath(environment[envKey]);
}
// [ARG...]
dockerCommandArgs.Add(arguments);
using (var processInvoker = HostContext.CreateService<IProcessInvoker>())
{
processInvoker.OutputDataReceived += OutputDataReceived;
processInvoker.ErrorDataReceived += ErrorDataReceived;
string dockerCommandArgstring = string.Join(" ", dockerCommandArgs);
// make sure all env are using container path
foreach (var envKey in environment.Keys.ToList())
{
environment[envKey] = this.Container.TranslateToContainerPath(environment[envKey]);
}
using (var processInvoker = HostContext.CreateService<IProcessInvoker>())
{
processInvoker.OutputDataReceived += OutputDataReceived;
processInvoker.ErrorDataReceived += ErrorDataReceived;
#if OS_WINDOWS
// It appears that node.exe outputs UTF8 when not in TTY mode.
outputEncoding = Encoding.UTF8;
#else
// Let .NET choose the default.
outputEncoding = null;
// Let .NET choose the default.
outputEncoding = null;
#endif
return await processInvoker.ExecuteAsync(workingDirectory: HostContext.GetDirectory(WellKnownDirectory.Work),
fileName: dockerClientPath,
arguments: dockerCommandArgstring,
environment: environment,
requireExitCodeZero: requireExitCodeZero,
outputEncoding: outputEncoding,
killProcessOnCancel: killProcessOnCancel,
redirectStandardIn: null,
inheritConsoleHandler: inheritConsoleHandler,
cancellationToken: cancellationToken);
return await processInvoker.ExecuteAsync(workingDirectory: HostContext.GetDirectory(WellKnownDirectory.Work),
fileName: dockerClientPath,
arguments: dockerCommandArgstring,
environment: environment,
requireExitCodeZero: requireExitCodeZero,
outputEncoding: outputEncoding,
killProcessOnCancel: killProcessOnCancel,
redirectStandardIn: null,
inheritConsoleHandler: inheritConsoleHandler,
cancellationToken: cancellationToken);
}
}
}
}
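In the fallback path above (no kubectlHandler present), the step is executed by composing a docker exec command line from the container id, working directory, environment keys and prepended PATH. As a concrete illustration of what dockerCommandArgstring ends up looking like (the container id, paths and script below are made up):

using System;
using System.Collections.Generic;

// Mirrors the argument assembly above with illustrative values.
var args = new List<string>
{
    "exec",
    "-i",
    "--workdir /__w/my-repo/my-repo",
    "-e NODE_OPTIONS",
    "-e GITHUB_ENV",
    "-e PATH=\"/opt/hostedtoolcache/node/16.13.0/x64/bin:/usr/local/bin\"",
    "0123456789abcdef",                                    // Container.ContainerId
    "node",                                                // fileName
    "/__w/_actions/some-org/some-action/v1/dist/index.js"  // arguments
};
Console.WriteLine("docker " + string.Join(" ", args));
// docker exec -i --workdir /__w/my-repo/my-repo -e NODE_OPTIONS -e GITHUB_ENV
//   -e PATH="..." 0123456789abcdef node /__w/_actions/some-org/some-action/v1/dist/index.js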

View File

@@ -55,7 +55,7 @@ namespace GitHub.Runner.Worker
ArgUtil.NotNull(message, nameof(message));
// Create a new timeline record for 'Set up job'
IExecutionContext context = jobContext.CreateChild(Guid.NewGuid(), "Set up job", $"{nameof(JobExtension)}_Init", null, null, ActionRunStage.Pre);
IExecutionContext context = jobContext.CreateChild(Guid.NewGuid(), "Set up job", $"{nameof(JobExtension)}_Init", null, null);
List<IStep> preJobSteps = new List<IStep>();
List<IStep> jobSteps = new List<IStep>();
@@ -142,12 +142,6 @@ namespace GitHub.Runner.Worker
Trace.Error(ex);
}
var secretSource = context.GetGitHubContext("secret_source");
if (!string.IsNullOrEmpty(secretSource))
{
context.Output($"Secret source: {secretSource}");
}
var repoFullName = context.GetGitHubContext("repository");
ArgUtil.NotNull(repoFullName, nameof(repoFullName));
context.Debug($"Primary repository: {repoFullName}");
@@ -312,13 +306,13 @@ namespace GitHub.Runner.Worker
JobExtensionRunner extensionStep = step as JobExtensionRunner;
ArgUtil.NotNull(extensionStep, extensionStep.DisplayName);
Guid stepId = Guid.NewGuid();
extensionStep.ExecutionContext = jobContext.CreateChild(stepId, extensionStep.DisplayName, null, null, stepId.ToString("N"), ActionRunStage.Pre);
extensionStep.ExecutionContext = jobContext.CreateChild(stepId, extensionStep.DisplayName, null, null, stepId.ToString("N"));
}
else if (step is IActionRunner actionStep)
{
ArgUtil.NotNull(actionStep, step.DisplayName);
Guid stepId = Guid.NewGuid();
actionStep.ExecutionContext = jobContext.CreateChild(stepId, actionStep.DisplayName, stepId.ToString("N"), null, null, ActionRunStage.Pre, intraActionStates[actionStep.Action.Id]);
actionStep.ExecutionContext = jobContext.CreateChild(stepId, actionStep.DisplayName, stepId.ToString("N"), null, null, intraActionStates[actionStep.Action.Id]);
}
}
@@ -329,7 +323,7 @@ namespace GitHub.Runner.Worker
{
ArgUtil.NotNull(actionStep, step.DisplayName);
intraActionStates.TryGetValue(actionStep.Action.Id, out var intraActionState);
actionStep.ExecutionContext = jobContext.CreateChild(actionStep.Action.Id, actionStep.DisplayName, actionStep.Action.Name, null, actionStep.Action.ContextName, ActionRunStage.Main, intraActionState);
actionStep.ExecutionContext = jobContext.CreateChild(actionStep.Action.Id, actionStep.DisplayName, actionStep.Action.Name, null, actionStep.Action.ContextName, intraActionState);
}
}
@@ -400,7 +394,7 @@ namespace GitHub.Runner.Worker
ArgUtil.NotNull(jobContext, nameof(jobContext));
// create a new timeline record node for 'Finalize job'
IExecutionContext context = jobContext.CreateChild(Guid.NewGuid(), "Complete job", $"{nameof(JobExtension)}_Final", null, null, ActionRunStage.Post);
IExecutionContext context = jobContext.CreateChild(Guid.NewGuid(), "Complete job", $"{nameof(JobExtension)}_Final", null, null);
using (var register = jobContext.CancellationToken.Register(() => { context.CancelToken(); }))
{
try

View File

@@ -106,7 +106,6 @@ namespace GitHub.Runner.Worker
}
jobContext.SetRunnerContext("os", VarUtil.OS);
jobContext.SetRunnerContext("arch", VarUtil.OSArchitecture);
var runnerSettings = HostContext.GetService<IConfigurationStore>().GetSettings();
jobContext.SetRunnerContext("name", runnerSettings.AgentName);

View File

@@ -1,14 +1,15 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net6.0</TargetFramework>
<TargetFramework>netcoreapp3.1</TargetFramework>
<OutputType>Exe</OutputType>
<RuntimeIdentifiers>win-x64;win-x86;linux-x64;linux-arm64;linux-arm;osx-x64</RuntimeIdentifiers>
<TargetLatestRuntimePatch>true</TargetLatestRuntimePatch>
<AssetTargetFallback>portable-net45+win8</AssetTargetFallback>
<NoWarn>NU1701;NU1603</NoWarn>
<Version>$(Version)</Version>
<PredefinedCulturesOnly>false</PredefinedCulturesOnly>
<PublishReadyToRunComposite>true</PublishReadyToRunComposite>
<TieredCompilationQuickJit>true</TieredCompilationQuickJit>
<PublishReadyToRun>true</PublishReadyToRun>
</PropertyGroup>
<ItemGroup>

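This project file hunk is where several of the API differences elsewhere in the diff line up: one side targets net6.0 and carries .NET 6-era publish settings (PredefinedCulturesOnly, PublishReadyToRunComposite), the other targets netcoreapp3.1. Calls that exist only on the newer target, such as StringSplitOptions.TrimEntries or HttpRequestMessage.Options, would typically be guarded while both targets are in play. A minimal sketch, assuming the NET6_0_OR_GREATER symbol that the modern SDK defines automatically when building the net6.0 target:

// Sketch only: guarding a .NET 5+ API while an older TargetFramework is still supported.
using System;
using System.Linq;

static class LineSplitter
{
    public static string[] SplitLines(string value)
    {
#if NET6_0_OR_GREATER
        // Trim while splitting (available on net5.0 and later).
        return value.Split(new[] { '\r', '\n' },
                           StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries);
#else
        // netcoreapp3.1: split first, then trim each entry.
        return value.Split(new[] { '\r', '\n' }, StringSplitOptions.RemoveEmptyEntries)
                    .Select(line => line.Trim())
                    .ToArray();
#endif
    }
}
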
View File

@@ -22,13 +22,12 @@ namespace GitHub.Runner.Worker
{
private readonly TimeSpan _workerStartTimeout = TimeSpan.FromSeconds(30);
private ManualResetEvent _completedCommand = new ManualResetEvent(false);
// Do not mask the values of these secrets
private static HashSet<String> SecretVariableMaskWhitelist = new HashSet<String>(StringComparer.OrdinalIgnoreCase)
{
private static HashSet<String> SecretVariableMaskWhitelist = new HashSet<String>(StringComparer.OrdinalIgnoreCase){
Constants.Variables.Actions.StepDebug,
Constants.Variables.Actions.RunnerDebug
};
};
public async Task<int> RunAsync(string pipeIn, string pipeOut)
{
@@ -139,10 +138,10 @@ namespace GitHub.Runner.Worker
HostContext.SecretMasker.AddValue(value);
// Also add each individual line. Typically individual lines are processed from STDOUT of child processes.
var split = value.Split(new[] { '\r', '\n' }, StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries);
var split = value.Split(new[] { '\r', '\n' }, StringSplitOptions.RemoveEmptyEntries);
foreach (var item in split)
{
HostContext.SecretMasker.AddValue(item);
HostContext.SecretMasker.AddValue(item.Trim());
}
}
}

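Both sides of the last hunk register the same values with the secret masker: the multi-line value as a whole, plus each individual line so that output from child processes is masked line by line. One side trims during the split (StringSplitOptions.TrimEntries), the other trims each entry afterwards; the results differ only for whitespace-only segments, which TrimEntries drops while the split-then-trim path hands an empty string to AddValue. A runnable sketch of that behavior, using a plain HashSet as a toy stand-in for the real secret masker:

// Toy stand-in for HostContext.SecretMasker, for illustration only.
using System;
using System.Collections.Generic;

class SecretMaskingDemo
{
    static void Main()
    {
        var masked = new HashSet<string>(StringComparer.Ordinal);
        void AddValue(string v) { if (!string.IsNullOrEmpty(v)) masked.Add(v); }

        var secret = "token-part-one\r\n  token-part-two  ";

        AddValue(secret);   // mask the value as a whole

        // Mask each individual line as well (split-then-trim, netcoreapp3.1-compatible).
        foreach (var line in secret.Split(new[] { '\r', '\n' }, StringSplitOptions.RemoveEmptyEntries))
        {
            AddValue(line.Trim());
        }

        foreach (var m in masked)
        {
            Console.WriteLine(m);   // prints the full value plus the two trimmed lines
        }
    }
}
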
View File

@@ -46,7 +46,7 @@
"runs": {
"one-of": [
"container-runs",
"node-runs",
"node12-runs",
"plugin-runs",
"composite-runs"
]
@@ -80,7 +80,7 @@
"loose-value-type": "string"
}
},
"node-runs": {
"node12-runs": {
"mapping": {
"properties": {
"using": "non-empty-string",
@@ -112,10 +112,10 @@
"item-type": "composite-step"
}
},
"composite-step": {
"composite-step":{
"one-of": [
"run-step",
"uses-step"
"uses-step"
]
},
"run-step": {
@@ -123,7 +123,6 @@
"properties": {
"name": "string-steps-context",
"id": "non-empty-string",
"if": "step-if",
"run": {
"type": "string-steps-context",
"required": true
@@ -142,7 +141,6 @@
"properties": {
"name": "string-steps-context",
"id": "non-empty-string",
"if": "step-if",
"uses": {
"type": "non-empty-string",
"required": true
@@ -218,24 +216,6 @@
"loose-value-type": "string"
}
},
"step-if": {
"context": [
"github",
"inputs",
"strategy",
"matrix",
"steps",
"job",
"runner",
"env",
"always(0,0)",
"failure(0,0)",
"cancelled(0,0)",
"success(0,0)",
"hashFiles(1,255)"
],
"string": {}
},
"step-with": {
"context": [
"github",
@@ -254,4 +234,4 @@
}
}
}
}
}

View File

@@ -28,6 +28,14 @@ namespace GitHub.Services.Common
}
}
public IDictionary<string, object> Properties
{
get
{
return m_request.Properties;
}
}
IEnumerable<String> IHttpHeaders.GetValues(String name)
{
IEnumerable<String> values;

View File

@@ -14,5 +14,10 @@ namespace GitHub.Services.Common
{
get;
}
IDictionary<string, object> Properties
{
get;
}
}
}

View File

@@ -21,7 +21,7 @@ namespace GitHub.Services.Common.Diagnostics
public static VssTraceActivity GetActivity(this HttpRequestMessage message)
{
Object traceActivity;
if (!message.Options.TryGetValue(VssTraceActivity.PropertyName, out traceActivity))
if (!message.Properties.TryGetValue(VssTraceActivity.PropertyName, out traceActivity))
{
return VssTraceActivity.Empty;
}

View File

@@ -169,7 +169,7 @@ namespace GitHub.Services.Common
}
// Add ourselves to the message so the underlying token issuers may use it if necessary
request.Options.Set(new HttpRequestOptionsKey<VssHttpMessageHandler>(VssHttpMessageHandler.PropertyName), this);
request.Properties[VssHttpMessageHandler.PropertyName] = this;
Boolean succeeded = false;
Boolean lastResponseDemandedProxyAuth = false;
@@ -409,7 +409,7 @@ namespace GitHub.Services.Common
// Read the completion option provided by the caller. If we don't find the property then we
// assume it is OK to buffer by default.
HttpCompletionOption completionOption;
if (!request.Options.TryGetValue(VssHttpRequestSettings.HttpCompletionOptionPropertyName, out completionOption))
if (!request.Properties.TryGetValue(VssHttpRequestSettings.HttpCompletionOptionPropertyName, out completionOption))
{
completionOption = HttpCompletionOption.ResponseContentRead;
}

View File

@@ -77,9 +77,9 @@ namespace GitHub.Services.Common
public static void SetTraceInfo(HttpRequestMessage message, VssHttpMessageHandlerTraceInfo traceInfo)
{
object existingTraceInfo;
if (!message.Options.TryGetValue(TfsTraceInfoKey, out existingTraceInfo))
if (!message.Properties.TryGetValue(TfsTraceInfoKey, out existingTraceInfo))
{
message.Options.Set(new HttpRequestOptionsKey<VssHttpMessageHandlerTraceInfo>(TfsTraceInfoKey), traceInfo);
message.Properties.Add(TfsTraceInfoKey, traceInfo);
}
}
@@ -92,7 +92,7 @@ namespace GitHub.Services.Common
{
VssHttpMessageHandlerTraceInfo traceInfo = null;
if (message.Options.TryGetValue(TfsTraceInfoKey, out object traceInfoObject))
if (message.Properties.TryGetValue(TfsTraceInfoKey, out object traceInfoObject))
{
traceInfo = traceInfoObject as VssHttpMessageHandlerTraceInfo;
}

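The last several files all show the same mechanical substitution: HttpRequestMessage.Properties, the older loosely typed dictionary (still present but marked obsolete on .NET 5 and later), versus the strongly typed HttpRequestMessage.Options / HttpRequestOptionsKey<T> API introduced in .NET 5. A minimal side-by-side sketch, using a made-up key name rather than the real VssHttpMessageHandler constants:

// Sketch: storing and reading per-request state with the old and the new API.
using System.Net.Http;

static class RequestStateDemo
{
    const string Key = "Example.TraceInfo";   // hypothetical key name

    // Older API (netcoreapp3.1 and earlier; obsolete on newer targets).
    static object OldApi(HttpRequestMessage request, object state)
    {
        request.Properties[Key] = state;
        request.Properties.TryGetValue(Key, out var roundTrip);
        return roundTrip;
    }

    // Newer API (.NET 5+): strongly typed keys, no raw dictionary access.
    static object NewApi(HttpRequestMessage request, object state)
    {
        var key = new HttpRequestOptionsKey<object>(Key);
        request.Options.Set(key, state);
        request.Options.TryGetValue(key, out var roundTrip);
        return roundTrip;
    }
}
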
Some files were not shown because too many files have changed in this diff.