Compare commits

..

33 Commits

Author SHA1 Message Date
Tingluo Huang  c645de9aee  Delete docker-image.yml  2021-12-13 23:25:57 -05:00
TingluoHuang  0e4f76ec4e  .  2021-10-28 22:59:19 -04:00
TingluoHuang  af18df4621  .  2021-10-28 21:39:55 -04:00
TingluoHuang  5215d95637  .  2021-10-28 21:31:52 -04:00
TingluoHuang  e750eb7e38  .  2021-10-28 21:20:48 -04:00
TingluoHuang  ca1f621077  .  2021-10-28 21:07:52 -04:00
TingluoHuang  80d0b58f3c  .  2021-10-28 19:57:37 -04:00
TingluoHuang  11ff2be7e9  .  2021-10-28 19:48:42 -04:00
TingluoHuang  3ce763338d  .  2021-10-28 19:40:25 -04:00
TingluoHuang  a45c0278e6  .  2021-10-28 19:18:21 -04:00
TingluoHuang  658d36c1bc  .  2021-10-28 19:01:29 -04:00
TingluoHuang  ca3b803237  .  2021-10-28 18:50:44 -04:00
TingluoHuang  4fa691f73e  .  2021-10-28 18:40:09 -04:00
TingluoHuang  dfcfae49e5  .  2021-10-14 16:57:24 -04:00
TingluoHuang  1235dc1cea  .  2021-10-14 16:53:58 -04:00
TingluoHuang  cc0d0bed90  .  2021-10-14 16:49:54 -04:00
TingluoHuang  0fac863568  .  2021-10-14 16:46:45 -04:00
TingluoHuang  42e7359f5c  .  2021-10-14 16:40:42 -04:00
TingluoHuang  5639175ecb  .  2021-10-14 16:33:36 -04:00
TingluoHuang  7128998d77  .  2021-10-14 16:29:59 -04:00
TingluoHuang  f37e9f80a6  .  2021-10-14 15:33:18 -04:00
TingluoHuang  0fa08423d2  .  2021-10-14 15:28:59 -04:00
TingluoHuang  029106a1dc  .  2021-10-14 15:11:17 -04:00
TingluoHuang  493a2a0bf7  .  2021-10-14 15:02:29 -04:00
TingluoHuang  43f983486e  .  2021-10-14 14:59:34 -04:00
TingluoHuang  f6053b616c  .  2021-10-14 14:55:12 -04:00
TingluoHuang  4f4608b710  .  2021-10-14 14:52:45 -04:00
TingluoHuang  28686c40d2  .  2021-10-14 14:50:28 -04:00
TingluoHuang  ce1679bb6f  .  2021-10-14 14:48:42 -04:00
TingluoHuang  0a7611b0b5  podman  2021-10-14 14:45:09 -04:00
TingluoHuang  b3fee33a92  ref_* context.  2021-10-13 09:58:07 -04:00
Ferenc Hammerl  d83ef5549e  Keep env vars alphabetical  2021-10-13 09:57:37 -04:00
TingluoHuang  fe6719d120  c  2021-10-13 09:57:37 -04:00
193 changed files with 31785 additions and 10012 deletions

View File

@@ -1,18 +1,12 @@
---
name: 🛑 Report a bug in the runner application
about: If you have issues with GitHub Actions, please follow the "support for GitHub Actions" link, below.
name: Bug report
about: Create a report to help us improve
title: ''
labels: bug
assignees: ''
---
<!--
👋 You're opening a bug report against the GitHub Actions **runner application**.
🛑 Please stop if you're not certain that the bug you're seeing is in the runner application - if you have general problems with actions, workflows, or runners, please see the [GitHub Community Support Forum](https://github.community/c/code-to-cloud/52) which is actively monitored. Using the forum ensures that we route your problem to the correct team. 😃
-->
**Describe the bug**
A clear and concise description of what the bug is.

View File

@@ -1,11 +0,0 @@
blank_issues_enabled: false
contact_links:
- name: ✅ Support for GitHub Actions
url: https://github.community/c/code-to-cloud/52
about: If you have questions about GitHub Actions or need support writing workflows, please ask in the GitHub Community Support forum.
- name: ✅ Feedback and suggestions for GitHub Actions
url: https://github.com/github/feedback/discussions/categories/actions-and-packages-feedback
about: If you have feedback or suggestions about GitHub Actions, please open a discussion (or add to an existing one) in the GitHub Actions Feedback. GitHub Actions Product Managers and Engineers monitor the feedback forum.
- name: ‼️ GitHub Security Bug Bounty
url: https://bounty.github.com/
about: Please report security vulnerabilities here.

View File

@@ -1,24 +1,19 @@
---
name: 🛑 Request a feature in the runner application
about: If you have feature requests for GitHub Actions, please use the "feedback and suggestions for GitHub Actions" link below.
name: Feature Request
about: Create a request to help us improve
title: ''
labels: enhancement
assignees: ''
---
<!--
👋 You're opening a request for an enhancement in the GitHub Actions **runner application**.
Thank you 🙇‍♀ for wanting to create a feature in this repository. Before you do, please ensure you are filing the issue in the right place. Issues should only be opened if the issue **relates to code in this repository**.
🛑 Please stop if you're not certain that the feature you want is in the runner application - if you have a suggestion for improving GitHub Actions, please see the [GitHub Actions Feedback](https://github.com/github/feedback/discussions/categories/actions-and-packages-feedback) discussion forum which is actively monitored. Using the forum ensures that we route your problem to the correct team. 😃
Some additional useful links:
* If you have found a security issue [please submit it here](https://hackerone.com/github)
* If you have questions or issues with the service, writing workflows or actions, then please [visit the GitHub Community Forum's Actions Board](https://github.community/t5/GitHub-Actions/bd-p/actions)
* If you are having an issue or have a question about GitHub Actions then please [contact customer support](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/about-github-actions#contacting-support)
* If you are having an issue or question about GitHub Actions then please [contact customer support](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/about-github-actions#contacting-support)
If you have a feature request that is relevant to this repository, the runner, then please include the information below:
-->
**Describe the enhancement**
A clear and concise description of the feature or enhancement you need.

View File

@@ -37,12 +37,12 @@ jobs:
devScript: ./dev.sh
- runtime: win-x64
os: windows-2019
os: windows-latest
devScript: ./dev
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v1
# Build runner layout
- name: Build & Layout Release
@@ -50,29 +50,6 @@ jobs:
${{ matrix.devScript }} layout Release ${{ matrix.runtime }}
working-directory: src
# Check runtime/externals hash
- name: Compute/Compare runtime and externals Hash
shell: bash
run: |
echo "Current dotnet runtime hash result: $DOTNET_RUNTIME_HASH"
echo "Current Externals hash result: $EXTERNALS_HASH"
NeedUpdate=0
if [ "$EXTERNALS_HASH" != "$(cat ./src/Misc/contentHash/externals/${{ matrix.runtime }})" ] ;then
echo Hash mismatch, Update ./src/Misc/contentHash/externals/${{ matrix.runtime }} to $EXTERNALS_HASH
NeedUpdate=1
fi
if [ "$DOTNET_RUNTIME_HASH" != "$(cat ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }})" ] ;then
echo Hash mismatch, Update ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }} to $DOTNET_RUNTIME_HASH
NeedUpdate=1
fi
exit $NeedUpdate
env:
DOTNET_RUNTIME_HASH: ${{hashFiles('**/_layout_trims/runtime/**/*')}}
EXTERNALS_HASH: ${{hashFiles('**/_layout_trims/externals/**/*')}}
# Run tests
- name: L0
run: |
@@ -90,11 +67,7 @@ jobs:
# Upload runner package tar.gz/zip as artifact
- name: Publish Artifact
if: github.event_name != 'pull_request'
uses: actions/upload-artifact@v2
uses: actions/upload-artifact@v1
with:
name: runner-package-${{ matrix.runtime }}
path: |
_package
_package_trims/trim_externals
_package_trims/trim_runtime
_package_trims/trim_runtime_externals
path: _package

View File

@@ -1,12 +1,7 @@
name: "Code Scanning - Action"
permissions:
security-events: write
on:
push:
branches:
- main
pull_request:
schedule:
- cron: '0 0 * * 0'

View File

@@ -51,21 +51,6 @@ jobs:
linux-arm-sha: ${{ steps.sha.outputs.linux-arm-sha256 }}
win-x64-sha: ${{ steps.sha.outputs.win-x64-sha256 }}
osx-x64-sha: ${{ steps.sha.outputs.osx-x64-sha256 }}
linux-x64-sha-noexternals: ${{ steps.sha_noexternals.outputs.linux-x64-sha256 }}
linux-arm64-sha-noexternals: ${{ steps.sha_noexternals.outputs.linux-arm64-sha256 }}
linux-arm-sha-noexternals: ${{ steps.sha_noexternals.outputs.linux-arm-sha256 }}
win-x64-sha-noexternals: ${{ steps.sha_noexternals.outputs.win-x64-sha256 }}
osx-x64-sha-noexternals: ${{ steps.sha_noexternals.outputs.osx-x64-sha256 }}
linux-x64-sha-noruntime: ${{ steps.sha_noruntime.outputs.linux-x64-sha256 }}
linux-arm64-sha-noruntime: ${{ steps.sha_noruntime.outputs.linux-arm64-sha256 }}
linux-arm-sha-noruntime: ${{ steps.sha_noruntime.outputs.linux-arm-sha256 }}
win-x64-sha-noruntime: ${{ steps.sha_noruntime.outputs.win-x64-sha256 }}
osx-x64-sha-noruntime: ${{ steps.sha_noruntime.outputs.osx-x64-sha256 }}
linux-x64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.linux-x64-sha256 }}
linux-arm64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.linux-arm64-sha256 }}
linux-arm-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.linux-arm-sha256 }}
win-x64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.win-x64-sha256 }}
osx-x64-sha-noruntime-noexternals: ${{ steps.sha_noruntime_noexternals.outputs.osx-x64-sha256 }}
strategy:
matrix:
runtime: [ linux-x64, linux-arm64, linux-arm, win-x64, osx-x64 ]
@@ -87,12 +72,12 @@ jobs:
devScript: ./dev.sh
- runtime: win-x64
os: windows-2019
os: windows-latest
devScript: ./dev
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v2
- uses: actions/checkout@v1
# Build runner layout
- name: Build & Layout Release
@@ -114,6 +99,14 @@ jobs:
${{ matrix.devScript }} package Release ${{ matrix.runtime }}
working-directory: src
# Upload runner package tar.gz/zip as artifact.
# Since each package name is unique, so we don't need to put ${{matrix}} info into artifact name
- name: Publish Artifact
if: github.event_name != 'pull_request'
uses: actions/upload-artifact@v1
with:
name: runner-packages
path: _package
# compute shas and set as job outputs to use in release notes
- run: brew install coreutils #needed for shasum util
if: ${{ matrix.os == 'macOS-latest' }}
@@ -127,91 +120,6 @@ jobs:
id: sha
name: Compute SHA256
working-directory: _package
- run: |
file=$(ls)
sha=$(sha256sum $file | awk '{ print $1 }')
echo "Computed sha256: $sha for $file"
echo "::set-output name=${{matrix.runtime}}-sha256::$sha"
echo "::set-output name=sha256::$sha"
shell: bash
id: sha_noexternals
name: Compute SHA256
working-directory: _package_trims/trim_externals
- run: |
file=$(ls)
sha=$(sha256sum $file | awk '{ print $1 }')
echo "Computed sha256: $sha for $file"
echo "::set-output name=${{matrix.runtime}}-sha256::$sha"
echo "::set-output name=sha256::$sha"
shell: bash
id: sha_noruntime
name: Compute SHA256
working-directory: _package_trims/trim_runtime
- run: |
file=$(ls)
sha=$(sha256sum $file | awk '{ print $1 }')
echo "Computed sha256: $sha for $file"
echo "::set-output name=${{matrix.runtime}}-sha256::$sha"
echo "::set-output name=sha256::$sha"
shell: bash
id: sha_noruntime_noexternals
name: Compute SHA256
working-directory: _package_trims/trim_runtime_externals
- name: Create trimmedpackages.json for ${{ matrix.runtime }}
if: matrix.runtime == 'win-x64'
uses: actions/github-script@0.3.0
with:
github-token: ${{secrets.GITHUB_TOKEN}}
script: |
const core = require('@actions/core')
const fs = require('fs');
const runnerVersion = fs.readFileSync('src/runnerversion', 'utf8').replace(/\n$/g, '')
var trimmedPackages = fs.readFileSync('src/Misc/trimmedpackages_zip.json', 'utf8').replace(/<RUNNER_VERSION>/g, runnerVersion).replace(/<RUNNER_PLATFORM>/g, '${{ matrix.runtime }}')
trimmedPackages = trimmedPackages.replace(/<RUNTIME_HASH>/g, '${{hashFiles('**/_layout_trims/runtime/**/*')}}')
trimmedPackages = trimmedPackages.replace(/<EXTERNALS_HASH>/g, '${{hashFiles('**/_layout_trims/externals/**/*')}}')
trimmedPackages = trimmedPackages.replace(/<NO_RUNTIME_EXTERNALS_HASH>/g, '${{steps.sha_noruntime_noexternals.outputs.sha256}}')
trimmedPackages = trimmedPackages.replace(/<NO_RUNTIME_HASH>/g, '${{steps.sha_noruntime.outputs.sha256}}')
trimmedPackages = trimmedPackages.replace(/<NO_EXTERNALS_HASH>/g, '${{steps.sha_noexternals.outputs.sha256}}')
console.log(trimmedPackages)
fs.writeFileSync('${{ matrix.runtime }}-trimmedpackages.json', trimmedPackages)
- name: Create trimmedpackages.json for ${{ matrix.runtime }}
if: matrix.runtime != 'win-x64'
uses: actions/github-script@0.3.0
with:
github-token: ${{secrets.GITHUB_TOKEN}}
script: |
const core = require('@actions/core')
const fs = require('fs');
const runnerVersion = fs.readFileSync('src/runnerversion', 'utf8').replace(/\n$/g, '')
var trimmedPackages = fs.readFileSync('src/Misc/trimmedpackages_targz.json', 'utf8').replace(/<RUNNER_VERSION>/g, runnerVersion).replace(/<RUNNER_PLATFORM>/g, '${{ matrix.runtime }}')
trimmedPackages = trimmedPackages.replace(/<RUNTIME_HASH>/g, '${{hashFiles('**/_layout_trims/runtime/**/*')}}')
trimmedPackages = trimmedPackages.replace(/<EXTERNALS_HASH>/g, '${{hashFiles('**/_layout_trims/externals/**/*')}}')
trimmedPackages = trimmedPackages.replace(/<NO_RUNTIME_EXTERNALS_HASH>/g, '${{steps.sha_noruntime_noexternals.outputs.sha256}}')
trimmedPackages = trimmedPackages.replace(/<NO_RUNTIME_HASH>/g, '${{steps.sha_noruntime.outputs.sha256}}')
trimmedPackages = trimmedPackages.replace(/<NO_EXTERNALS_HASH>/g, '${{steps.sha_noexternals.outputs.sha256}}')
console.log(trimmedPackages)
fs.writeFileSync('${{ matrix.runtime }}-trimmedpackages.json', trimmedPackages)
# Upload runner package tar.gz/zip as artifact.
# Since each package name is unique, so we don't need to put ${{matrix}} info into artifact name
- name: Publish Artifact
if: github.event_name != 'pull_request'
uses: actions/upload-artifact@v2
with:
name: runner-packages
path: |
_package
_package_trims/trim_externals
_package_trims/trim_runtime
_package_trims/trim_runtime_externals
${{ matrix.runtime }}-trimmedpackages.json
release:
needs: build
runs-on: ubuntu-latest
@@ -242,35 +150,9 @@ jobs:
releaseNote = releaseNote.replace(/<LINUX_X64_SHA>/g, '${{needs.build.outputs.linux-x64-sha}}')
releaseNote = releaseNote.replace(/<LINUX_ARM_SHA>/g, '${{needs.build.outputs.linux-arm-sha}}')
releaseNote = releaseNote.replace(/<LINUX_ARM64_SHA>/g, '${{needs.build.outputs.linux-arm64-sha}}')
releaseNote = releaseNote.replace(/<WIN_X64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.win-x64-sha-noexternals}}')
releaseNote = releaseNote.replace(/<OSX_X64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.osx-x64-sha-noexternals}}')
releaseNote = releaseNote.replace(/<LINUX_X64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.linux-x64-sha-noexternals}}')
releaseNote = releaseNote.replace(/<LINUX_ARM_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.linux-arm-sha-noexternals}}')
releaseNote = releaseNote.replace(/<LINUX_ARM64_SHA_NOEXTERNALS>/g, '${{needs.build.outputs.linux-arm64-sha-noexternals}}')
releaseNote = releaseNote.replace(/<WIN_X64_SHA_NORUNTIME>/g, '${{needs.build.outputs.win-x64-sha-noruntime}}')
releaseNote = releaseNote.replace(/<OSX_X64_SHA_NORUNTIME>/g, '${{needs.build.outputs.osx-x64-sha-noruntime}}')
releaseNote = releaseNote.replace(/<LINUX_X64_SHA_NORUNTIME>/g, '${{needs.build.outputs.linux-x64-sha-noruntime}}')
releaseNote = releaseNote.replace(/<LINUX_ARM_SHA_NORUNTIME>/g, '${{needs.build.outputs.linux-arm-sha-noruntime}}')
releaseNote = releaseNote.replace(/<LINUX_ARM64_SHA_NORUNTIME>/g, '${{needs.build.outputs.linux-arm64-sha-noruntime}}')
releaseNote = releaseNote.replace(/<WIN_X64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.win-x64-sha-noruntime-noexternals}}')
releaseNote = releaseNote.replace(/<OSX_X64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.osx-x64-sha-noruntime-noexternals}}')
releaseNote = releaseNote.replace(/<LINUX_X64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.linux-x64-sha-noruntime-noexternals}}')
releaseNote = releaseNote.replace(/<LINUX_ARM_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.linux-arm-sha-noruntime-noexternals}}')
releaseNote = releaseNote.replace(/<LINUX_ARM64_SHA_NORUNTIME_NOEXTERNALS>/g, '${{needs.build.outputs.linux-arm64-sha-noruntime-noexternals}}')
console.log(releaseNote)
core.setOutput('version', runnerVersion);
core.setOutput('note', releaseNote);
- name: Validate Packages HASH
working-directory: _package
run: |
ls -l
echo "${{needs.build.outputs.win-x64-sha}} actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}.zip" | shasum -a 256 -c
echo "${{needs.build.outputs.osx-x64-sha}} actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}.tar.gz" | shasum -a 256 -c
echo "${{needs.build.outputs.linux-x64-sha}} actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}.tar.gz" | shasum -a 256 -c
echo "${{needs.build.outputs.linux-arm-sha}} actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}.tar.gz" | shasum -a 256 -c
echo "${{needs.build.outputs.linux-arm64-sha}} actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz" | shasum -a 256 -c
# Create GitHub release
- uses: actions/create-release@master
id: createRelease
@@ -283,14 +165,14 @@ jobs:
body: |
${{ steps.releaseNote.outputs.note }}
# Upload release assets (full runner packages)
# Upload release assets
- name: Upload Release Asset (win-x64)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}.zip
asset_path: ${{ github.workspace }}/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}.zip
asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}.zip
asset_content_type: application/octet-stream
@@ -300,7 +182,7 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
asset_path: ${{ github.workspace }}/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
asset_content_type: application/octet-stream
@@ -310,7 +192,7 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
asset_path: ${{ github.workspace }}/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
asset_content_type: application/octet-stream
@@ -320,7 +202,7 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}.tar.gz
asset_path: ${{ github.workspace }}/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}.tar.gz
asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}.tar.gz
asset_content_type: application/octet-stream
@@ -330,210 +212,6 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
asset_path: ${{ github.workspace }}/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
asset_content_type: application/octet-stream
# Upload release assets (trim externals)
- name: Upload Release Asset (win-x64-noexternals)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noexternals.zip
asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noexternals.zip
asset_content_type: application/octet-stream
- name: Upload Release Asset (linux-x64-noexternals)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
asset_content_type: application/octet-stream
- name: Upload Release Asset (osx-x64-noexternals)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
asset_content_type: application/octet-stream
- name: Upload Release Asset (linux-arm-noexternals)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
asset_content_type: application/octet-stream
- name: Upload Release Asset (linux-arm64-noexternals)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_externals/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noexternals.tar.gz
asset_content_type: application/octet-stream
# Upload release assets (trim runtime)
- name: Upload Release Asset (win-x64-noruntime)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noruntime.zip
asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noruntime.zip
asset_content_type: application/octet-stream
- name: Upload Release Asset (linux-x64-noruntime)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
asset_content_type: application/octet-stream
- name: Upload Release Asset (osx-x64-noruntime)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
asset_content_type: application/octet-stream
- name: Upload Release Asset (linux-arm-noruntime)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
asset_content_type: application/octet-stream
- name: Upload Release Asset (linux-arm64-noruntime)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noruntime.tar.gz
asset_content_type: application/octet-stream
# Upload release assets (trim runtime and externals)
- name: Upload Release Asset (win-x64-noruntime-noexternals)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.zip
asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.zip
asset_content_type: application/octet-stream
- name: Upload Release Asset (linux-x64-noruntime-noexternals)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
asset_content_type: application/octet-stream
- name: Upload Release Asset (osx-x64-noruntime-noexternals)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
asset_content_type: application/octet-stream
- name: Upload Release Asset (linux-arm-noruntime-noexternals)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
asset_content_type: application/octet-stream
- name: Upload Release Asset (linux-arm64-noruntime-noexternals)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/_package_trims/trim_runtime_externals/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-noruntime-noexternals.tar.gz
asset_content_type: application/octet-stream
# Upload release assets (trimmedpackages.json)
- name: Upload Release Asset (win-x64-trimmedpackages.json)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/win-x64-trimmedpackages.json
asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
asset_content_type: application/octet-stream
- name: Upload Release Asset (linux-x64-trimmedpackages.json)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/linux-x64-trimmedpackages.json
asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
asset_content_type: application/octet-stream
- name: Upload Release Asset (osx-x64-trimmedpackages.json)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/osx-x64-trimmedpackages.json
asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
asset_content_type: application/octet-stream
- name: Upload Release Asset (linux-arm-trimmedpackages.json)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/linux-arm-trimmedpackages.json
asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
asset_content_type: application/octet-stream
- name: Upload Release Asset (linux-arm64-trimmedpackages.json)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/linux-arm64-trimmedpackages.json
asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}-trimmedpackages.json
asset_content_type: application/octet-stream

2
.gitignore vendored
View File

@@ -19,9 +19,7 @@
node_modules
_downloads
_layout
_layout_trims
_package
_package_trims
_dotnetsdk
TestResults
TestLogs

6
.vscode/launch.json vendored
View File

@@ -13,7 +13,6 @@
"cwd": "${workspaceFolder}/src",
"console": "integratedTerminal",
"requireExactSource": false,
"targetArchitecture": "x86_64"
},
{
"name": "Run",
@@ -26,7 +25,6 @@
"cwd": "${workspaceFolder}/src",
"console": "integratedTerminal",
"requireExactSource": false,
"targetArchitecture": "x86_64"
},
{
"name": "Configure",
@@ -40,7 +38,6 @@
"cwd": "${workspaceFolder}/src",
"console": "integratedTerminal",
"requireExactSource": false,
"targetArchitecture": "x86_64"
},
{
"name": "Debug Worker",
@@ -48,7 +45,6 @@
"request": "attach",
"processName": "Runner.Worker",
"requireExactSource": false,
"targetArchitecture": "x86_64"
},
{
"name": "Attach Debugger",
@@ -56,8 +52,6 @@
"request": "attach",
"processId": "${command:pickProcess}",
"requireExactSource": false,
"targetArchitecture": "x86_64"
},
],
}

View File

@@ -5,6 +5,7 @@
# GitHub Actions Runner
[![Actions Status](https://github.com/actions/runner/workflows/Runner%20CI/badge.svg)](https://github.com/actions/runner/actions)
[![Runner E2E Test](https://github.com/actions/runner/workflows/Runner%20E2E%20Test/badge.svg)](https://github.com/actions/runner/actions)
The runner is the application that runs a job from a GitHub Actions workflow. It is used by GitHub Actions in the [hosted virtual environments](https://github.com/actions/virtual-environments), or you can [self-host the runner](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/about-self-hosted-runners) in your own environment.

View File

@@ -10,7 +10,7 @@ Compilation failures during a CI build should surface good error messages.
For example, the actual compile errors from the typescript compiler should bubble as issues in the UI. And not simply "tsc exited with exit code 1".
VSCode has an extensible model for solving this type of problem. VSCode allows users to configure which [problems matchers](https://code.visualstudio.com/docs/editor/tasks#_defining-a-problem-matcher) to use, when scanning output. For example, a user can apply the `tsc` problem matcher to receive a rich error output experience in VSCode, when compiling their typescript project.
VSCode has an extensible model for solving this type of problem. VSCode allows users to configure which problems matchers to use, when scanning output. For example, a user can apply the `tsc` problem matcher to receive a rich error output experience in VSCode, when compiling their typescript project.
The problem-matcher concept fits well with "setup" actions. For example, the `setup-nodejs` action will download node.js, add it to the PATH, and register the `tsc` problem matcher. For the duration of the job, the `tsc` problem matcher will be applied against the output.
@@ -18,23 +18,21 @@ The problem-matcher concept fits well with "setup" actions. For example, the `se
### Registration
#### Using `::` command
#### Using `##` command
`::add-matcher::path-to-problem-matcher-config.json`
`##[add-matcher]path-to-problem-matcher-config.json`
Using a `::` command allows for flexibility:
Using a `##` command allows for flexibility:
- Ad hoc scripts can register problem matchers
- Allows problem matchers to be conditionally registered
Note, if a matcher with the same name is registered a second time, it will clobber the first instance.
Note, at some point the syntax changed from `##` to `::`.
#### Unregister using `::` command
#### Unregister using `##` command
A way out for rare cases where scoping is a problem.
`::remove-matcher::owner`
`##[remove-matcher]owner`
For this to be usable, the `owner` needs to be discoverable. Therefore, debug print the owner on registration.
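To make the registration flow concrete, here is a minimal workflow sketch using the `::` command form described above; the matcher config path `.github/problem-matchers/tsc.json` and the owner name `tsc` are hypothetical:
```
steps:
  # Register a matcher from an ad hoc script step (hypothetical config path).
  - name: Register tsc problem matcher
    run: echo "::add-matcher::.github/problem-matchers/tsc.json"

  # Output from later steps is scanned against the registered matcher, so
  # compiler errors surface as annotations instead of "tsc exited with exit code 1".
  - name: Compile
    run: npx tsc -p .

  # Unregister by owner once the matcher should no longer apply.
  - name: Remove tsc problem matcher
    run: echo "::remove-matcher::tsc"
```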
@@ -106,7 +104,7 @@ message: ; expected
fromPath: C:\myrepo\myproject\ConsoleApp1\ClassLibrary1\ClassLibrary1.csproj
```
Additionally the line will appear red in the web UI (prefix with `::error`).
Additionally the line will appear red in the web UI (prefix with `##[error]`).
Note, an error does not imply task failure. Exit codes communicate failure.

View File

@@ -24,7 +24,7 @@ The runner will look for a file `.setup_info` under the runner's root directory,
}
]
```
The runner will use `::group` and `::endgroup` to fold all detail info into an expandable group.
The runner will use `##[group]` and `##[endgroup]` to fold all detail info into an expandable group.
Both [virtual-environments](https://github.com/actions/virtual-environments) and self-hosted runners can use this mechanism to add extra logging info to the `Set up job` step's log.

View File

@@ -1,71 +0,0 @@
# ADR 1438: Support Conditionals In Composite Actions
**Date**: 2021-10-13
**Status**: Accepted
## Context
We recently shipped composite actions, which allows you to reuse individual steps inside an action.
However, one of the [most requested features](https://github.com/actions/runner/issues/834) has been a way to support the `if` keyword.
### Goals
- We want to keep consistent with current behavior
- We want to support conditionals via the `if` keyword
- Our built-in functions like `success` should be implementable without calling them; for example, you can do `job.status == success` rather than `success()` currently.
### How does composite currently work?
Currently, we have limited conditional support in composite actions for `pre` and `post` steps.
These are based on the `job status`, and support keywords like `always()`, `failed()`, `success()` and `cancelled()`.
However, generic or main steps do **not** support conditionals.
By default, in a regular workflow, a step runs on the `success()` condition, which looks at the **job status**, sees if it is successful, and runs.
By default, in a composite action, main steps run until a single step fails in that composite action, and then the composite action is halted early. It does **not** care about the job status.
Pre and post steps in composite actions use the job status to determine if they should run.
### How do we go forward?
Well, if we think about what composite actions are currently doing when invoking main steps, they are checking if the current composite action is successful.
Let's formalize that concept into a "real" idea.
- We will add an `action_status` field to the github context to mimic the [job's context status](https://docs.github.com/en/actions/learn-github-actions/contexts#job-context).
- We have an existing concept that does this: `action_path`, which is only set for composite actions on the github context.
- In a composite action during a main step, the `success()` function will check if `action_status == success`, rather than `job_status == success`. Failure will work the same way (see the sketch after this list).
- Pre and post steps in composite actions will not change; they will continue to check the job status.
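As a sketch of how this could look in a composite action's `action.yml` (the file contents below are illustrative, not an existing action), main steps would use `if` against the action's own status:
```
name: 'Composite conditionals sketch'
description: 'Illustrative example only'
runs:
  using: 'composite'
  steps:
    - run: exit 1            # a failing main step marks this composite action as failed
      shell: bash
    - if: success()          # evaluated against action_status, not the job status
      run: echo "skipped, because an earlier step in this composite action failed"
      shell: bash
    - if: always()           # still runs regardless of the composite action's status
      run: echo "this prints"
      shell: bash
```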
### Nested Scenario
For nested composite actions, we will follow the existing behavior: you only care about your current composite action, not any parents.
For example, let's imagine a scenario with a simple nested composite action
```
- Job
- Regular Step
- Composite Action
- runs: exit 1
- if: always()
uses: A child composite action
- if: success()
runs: echo "this should print"
- runs: echo "this should also print"
- if: success()
runs: echo "this will not print as the current composite action has failed already"
```
The child composite action's steps should run in this example: the child composite action has not yet failed, so it should run all steps until a step fails. This is consistent with how a composite action currently works in production if the main job fails but a composite action is invoked with `if: always()` or `if: failure()`.
### Other options explored
We could add the `current_step_status` to the job context rather than `__status` to the steps context; however, this comes with two major downsides:
- We need to support the field for every type of step, because it's non-trivial to remove a field from the job context once it has been added (it's readonly)
- For all actions besides composite it would only ever be `success`
- It's weird to have a `current_step` value on the job context
- We also explored a `__status` on the steps context.
- The `__` is required to prevent us from colliding with a step with id: status
- This felt wrong because the naming was not smooth, and did not fit into current conventions.
### Consequences
- github context has a new field for the status of the current composite action.
- We support conditionals in composite actions
- We keep the existing behavior for all users, but allow them to expand that functionality.

View File

@@ -6,35 +6,13 @@
Make sure the runner has access to actions service for GitHub.com or GitHub Enterprise Server
- For GitHub.com
- The runner needs to access `https://api.github.com` for downloading actions.
- The runner needs to access `https://vstoken.actions.githubusercontent.com/_apis/.../` for requesting an access token.
- The runner needs to access `https://pipelines.actions.githubusercontent.com/_apis/.../` for receiving workflow jobs.
These can be tested by running the following `curl` commands from your self-hosted runner machine:
```
curl -v https://api.github.com/api/v3/zen
curl -v https://vstoken.actions.githubusercontent.com/_apis/health
curl -v https://pipelines.actions.githubusercontent.com/_apis/health
```
- The runner needs to access https://api.github.com for downloading actions.
- The runner needs to access https://vstoken.actions.githubusercontent.com/_apis/.../ for requesting an access token.
- The runner needs to access https://pipelines.actions.githubusercontent.com/_apis/.../ for receiving workflow jobs.
- For GitHub Enterprise Server
- The runner needs to access `https://[hostname]/api/v3` for downloading actions.
- The runner needs to access `https://[hostname]/_services/vstoken/_apis/.../` for requesting an access token.
- The runner needs to access `https://[hostname]/_services/pipelines/_apis/.../` for receiving workflow jobs.
These can be tested by running the following `curl` commands from your self-hosted runner machine, replacing `[hostname]` with the hostname of your appliance, for instance `github.example.com`:
```
curl -v https://[hostname]/api/v3/zen
curl -v https://[hostname]/_services/vstoken/_apis/health
curl -v https://[hostname]/_services/pipelines/_apis/health
```
A common cause of these connectivity issues is if your GitHub Enterprise Server appliance is using [the self-signed certificate that is enabled the first time](https://docs.github.com/en/enterprise-server/admin/configuration/configuring-network-settings/configuring-tls) your appliance is started. As self-signed certificates are not trusted by web browsers and Git clients, these clients (including the GitHub Actions runner) will report certificate warnings.
We recommend [uploading a certificate signed by a trusted authority](https://docs.github.com/en/enterprise-server/admin/configuration/configuring-network-settings/configuring-tls) to GitHub Enterprise Server, or enabling the built-in [Let's Encrypt support](https://docs.github.com/en/enterprise-server/admin/configuration/configuring-network-settings/configuring-tls).
- The runner needs to access https://myGHES.com/api/v3 for downloading actions.
- The runner needs to access https://myGHES.com/_services/vstoken/_apis/.../ for requesting an access token.
- The runner needs to access https://myGHES.com/_services/pipelines/_apis/.../ for receiving workflow jobs.
## What is checked?
@@ -64,4 +42,4 @@ Make sure the runner has access to actions service for GitHub.com or GitHub Ente
## Still not working?
Contact [GitHub Support](https://support.github.com) if you have further questions, or log an issue at https://github.com/actions/runner if you think it's a runner issue.
Contact GitHub customer service or log an issue at https://github.com/actions/runner if you think it's a runner issue.

View File

@@ -4,9 +4,9 @@
Make sure the built-in node.js has access to GitHub.com or GitHub Enterprise Server.
The runner carries its own copy of node.js executable under `<runner_root>/externals/node16/`.
The runner carries its own copy of node.js executable under `<runner_root>/externals/node12/`.
All javascript base Actions will get executed by the built-in `node` at `<runner_root>/externals/node16/`.
All javascript base Actions will get executed by the built-in `node` at `<runner_root>/externals/node12/`.
> Not the `node` from `$PATH`

View File

@@ -23,10 +23,6 @@ An ADR is an Architectural Decision Record. This allows consensus on the direct
![Win](res/win_sm.png) ![*nix](res/linux_sm.png) Git for Windows and Linux [Install Here](https://git-scm.com/downloads) (needed for dev sh script)
![*nix](res/linux_sm.png) cURL [Install here](https://curl.se/download.html) (needed for external sh script)
![Win](res/win_sm.png) Visual Studio 2017 or newer [Install here](https://visualstudio.microsoft.com) (needed for dev sh script)
## Quickstart: Run a job from a real repository
If you just want to get from building the source code to using it to execute an action, you will need:

Binary file not shown (image changed: 138 KiB before, 158 KiB after).

View File

@@ -23,8 +23,8 @@ You might see something like this which indicate a dependency's missing.
./config.sh
libunwind.so.8 => not found
libunwind-x86_64.so.8 => not found
Dependencies is missing for Dotnet Core 6.0
Execute ./bin/installdependencies.sh to install any missing Dotnet Core 6.0 dependencies.
Dependencies is missing for Dotnet Core 3.0
Execute ./bin/installdependencies.sh to install any missing Dotnet Core 3.0 dependencies.
```
You can easily correct the problem by executing `./bin/installdependencies.sh`.
The `installdependencies.sh` script should install all required dependencies on all supported Linux versions

74
job.yml Normal file
View File

@@ -0,0 +1,74 @@
apiVersion: rbac.authorization.k8s.io/v1
kind: Role
metadata:
name: pod-admin
namespace: default
rules:
- apiGroups: [""]
resources: ["pods", "pods/log", "pods/attach", "pods/exec"]
verbs: ["get", "list", "watch", "create", "update", "patch", "delete"]
---
apiVersion: rbac.authorization.k8s.io/v1
kind: RoleBinding
metadata:
name: default-pod-admin
namespace: default
roleRef:
apiGroup: rbac.authorization.k8s.io
kind: Role
name: pod-admin
subjects:
- kind: ServiceAccount
name: default
namespace: default
---
apiVersion: batch/v1
kind: Job
metadata:
namespace: default
name: actions-runners
spec:
template:
spec:
# hostNetwork: true
volumes:
- name: runner-working
emptyDir: {}
containers:
- name: k8srunner
image: huangtingluo/kube-runner:v0
imagePullPolicy: Always
volumeMounts:
- mountPath: /actions-runner/_work
name: runner-working
env:
- name: GITHUB_PAT
value: ghp_
- name: RUNNER_CONFIG_URL
value: https://github.com/bbq-beets/ting-test
- name: K8S_NODE_NAME
valueFrom:
fieldRef:
fieldPath: spec.nodeName
- name: K8S_POD_NAME
valueFrom:
fieldRef:
fieldPath: metadata.name
- name: K8S_POD_NAMESPACE
valueFrom:
fieldRef:
fieldPath: metadata.namespace
- name: K8S_POD_IP
valueFrom:
fieldRef:
fieldPath: status.podIP
- name: K8S_POD_SERVICE_ACCOUNT
valueFrom:
fieldRef:
fieldPath: spec.serviceAccountName
restartPolicy: Never
backoffLimit: 1
completions: 1
parallelism: 1

View File

@@ -1,13 +1,11 @@
## Features
## Bugs
- Fixed a crash on runner startup (#1770)
- Fixed an issue where ephemeral runners did not restart after upgrading (#1396)
## Misc
- Clarified the type of step running when running job started or completed hooks (#1769)
## Windows x64
We recommend configuring the runner in a root folder of the Windows drive (e.g. "C:\actions-runner"). This will help avoid issues related to service identity folder permissions and long file path restrictions on Windows.
@@ -78,21 +76,3 @@ The SHA-256 checksums for the packages included in this build are shown below:
- actions-runner-linux-x64-<RUNNER_VERSION>.tar.gz <!-- BEGIN SHA linux-x64 --><LINUX_X64_SHA><!-- END SHA linux-x64 -->
- actions-runner-linux-arm64-<RUNNER_VERSION>.tar.gz <!-- BEGIN SHA linux-arm64 --><LINUX_ARM64_SHA><!-- END SHA linux-arm64 -->
- actions-runner-linux-arm-<RUNNER_VERSION>.tar.gz <!-- BEGIN SHA linux-arm --><LINUX_ARM_SHA><!-- END SHA linux-arm -->
- actions-runner-win-x64-<RUNNER_VERSION>-noexternals.zip <!-- BEGIN SHA win-x64_noexternals --><WIN_X64_SHA_NOEXTERNALS><!-- END SHA win-x64_noexternals -->
- actions-runner-osx-x64-<RUNNER_VERSION>-noexternals.tar.gz <!-- BEGIN SHA osx-x64_noexternals --><OSX_X64_SHA_NOEXTERNALS><!-- END SHA osx-x64_noexternals -->
- actions-runner-linux-x64-<RUNNER_VERSION>-noexternals.tar.gz <!-- BEGIN SHA linux-x64_noexternals --><LINUX_X64_SHA_NOEXTERNALS><!-- END SHA linux-x64_noexternals -->
- actions-runner-linux-arm64-<RUNNER_VERSION>-noexternals.tar.gz <!-- BEGIN SHA linux-arm64_noexternals --><LINUX_ARM64_SHA_NOEXTERNALS><!-- END SHA linux-arm64_noexternals -->
- actions-runner-linux-arm-<RUNNER_VERSION>-noexternals.tar.gz <!-- BEGIN SHA linux-arm_noexternals --><LINUX_ARM_SHA_NOEXTERNALS><!-- END SHA linux-arm_noexternals -->
- actions-runner-win-x64-<RUNNER_VERSION>-noruntime.zip <!-- BEGIN SHA win-x64_noruntime --><WIN_X64_SHA_NORUNTIME><!-- END SHA win-x64_noruntime -->
- actions-runner-osx-x64-<RUNNER_VERSION>-noruntime.tar.gz <!-- BEGIN SHA osx-x64_noruntime --><OSX_X64_SHA_NORUNTIME><!-- END SHA osx-x64_noruntime -->
- actions-runner-linux-x64-<RUNNER_VERSION>-noruntime.tar.gz <!-- BEGIN SHA linux-x64_noruntime --><LINUX_X64_SHA_NORUNTIME><!-- END SHA linux-x64_noruntime -->
- actions-runner-linux-arm64-<RUNNER_VERSION>-noruntime.tar.gz <!-- BEGIN SHA linux-arm64_noruntime --><LINUX_ARM64_SHA_NORUNTIME><!-- END SHA linux-arm64_noruntime -->
- actions-runner-linux-arm-<RUNNER_VERSION>-noruntime.tar.gz <!-- BEGIN SHA linux-arm_noruntime --><LINUX_ARM_SHA_NORUNTIME><!-- END SHA linux-arm_noruntime -->
- actions-runner-win-x64-<RUNNER_VERSION>-noruntime-noexternals.zip <!-- BEGIN SHA win-x64_noruntime_noexternals --><WIN_X64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA win-x64_noruntime_noexternals -->
- actions-runner-osx-x64-<RUNNER_VERSION>-noruntime-noexternals.tar.gz <!-- BEGIN SHA osx-x64_noruntime_noexternals --><OSX_X64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA osx-x64_noruntime_noexternals -->
- actions-runner-linux-x64-<RUNNER_VERSION>-noruntime-noexternals.tar.gz <!-- BEGIN SHA linux-x64_noruntime_noexternals --><LINUX_X64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA linux-x64_noruntime_noexternals -->
- actions-runner-linux-arm64-<RUNNER_VERSION>-noruntime-noexternals.tar.gz <!-- BEGIN SHA linux-arm64_noruntime_noexternals --><LINUX_ARM64_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA linux-arm64_noruntime_noexternals -->
- actions-runner-linux-arm-<RUNNER_VERSION>-noruntime-noexternals.tar.gz <!-- BEGIN SHA linux-arm_noruntime_noexternals --><LINUX_ARM_SHA_NORUNTIME_NOEXTERNALS><!-- END SHA linux-arm_noruntime_noexternals -->

View File

@@ -13,7 +13,7 @@ set -e
flags_found=false
while getopts 's:g:n:r:u:l:' opt; do
while getopts 's:g:n:u:l:' opt; do
flags_found=true
case $opt in
@@ -26,9 +26,6 @@ while getopts 's:g:n:r:u:l:' opt; do
n)
runner_name=$OPTARG
;;
r)
runner_group=$OPTARG
;;
u)
svc_user=$OPTARG
;;
@@ -47,7 +44,6 @@ Usage:
-s required scope: repo (:owner/:repo) or org (:organization)
-g optional ghe_hostname: the fully qualified domain name of your GitHub Enterprise Server deployment
-n optional name of the runner, defaults to hostname
-r optional name of the runner group to add the runner to, defaults to the Default group
-u optional user svc will run as, defaults to current
-l optional list of labels (split by comma) applied on the runner"
exit 0
@@ -63,7 +59,6 @@ if ! "$flags_found"; then
runner_name=${3:-$(hostname)}
svc_user=${4:-$USER}
labels=${5}
runner_group=${6}
fi
# apply defaults
@@ -169,8 +164,8 @@ fi
echo
echo "Configuring ${runner_name} @ $runner_url"
echo "./config.sh --unattended --url $runner_url --token *** --name $runner_name ${labels:+--labels $labels} ${runner_group:+--runnergroup \"$runner_group\"}"
sudo -E -u ${svc_user} ./config.sh --unattended --url $runner_url --token $RUNNER_TOKEN --name $runner_name ${labels:+--labels $labels} ${runner_group:+--runnergroup "$runner_group"}
echo "./config.sh --unattended --url $runner_url --token *** --name $runner_name --labels $labels"
sudo -E -u ${svc_user} ./config.sh --unattended --url $runner_url --token $RUNNER_TOKEN --name $runner_name --labels $labels
#---------------------------------------
# Configuring as a service

78
src/Dockerfile Normal file
View File

@@ -0,0 +1,78 @@
FROM mcr.microsoft.com/dotnet/sdk:3.1 AS Build
# ENV RUNNER_CONFIG_URL=""
# ENV GITHUB_PAT=""
# ENV RUNNER_NAME=""
# ENV RUNNER_GROUP=""
# ENV RUNNER_LABELS=""
# ENV GITHUB_RUNNER_SCOPE=""
# ENV GITHUB_SERVER_URL=""
# ENV GITHUB_API_URL=""
# ENV K8S_HOST_IP=""
RUN apt-get update --fix-missing \
&& apt-get install -y --no-install-recommends \
curl \
# jq \
# git \
apt-utils \
apt-transport-https \
unzip \
net-tools\
gnupg2\
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*
# Install kubectl
# RUN curl -s https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key add - && \
# echo "deb https://apt.kubernetes.io/ kubernetes-xenial main" | tee -a /etc/apt/sources.list.d/kubernetes.list && \
# apt-get update && apt-get -y install --no-install-recommends kubectl
# Install docker
# RUN curl -fsSL https://get.docker.com -o get-docker.sh
# RUN sh get-docker.sh
# Allow runner to run as root
# ENV RUNNER_ALLOW_RUNASROOT=1
# Directory for runner to operate in
RUN mkdir /actions-runner
RUN mkdir /actions-runner/src
WORKDIR /actions-runner/src
COPY ./ /actions-runner/src
RUN /actions-runner/src/dev.sh l
FROM mcr.microsoft.com/dotnet/core/runtime-deps:3.1
ENV RUNNER_CONFIG_URL=""
ENV GITHUB_PAT=""
RUN apt-get update --fix-missing \
&& apt-get install -y --no-install-recommends \
curl \
# jq \
# git \
# apt-utils \
# apt-transport-https \
# unzip \
# net-tools\
gnupg2\
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*
# Install kubectl
RUN curl -s https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key add - && \
echo "deb https://apt.kubernetes.io/ kubernetes-xenial main" | tee -a /etc/apt/sources.list.d/kubernetes.list && \
apt-get update && apt-get -y install --no-install-recommends kubectl
# Allow runner to run as root
ENV RUNNER_ALLOW_RUNASROOT=1
# Directory for runner to operate in
RUN mkdir /actions-runner
WORKDIR /actions-runner
COPY --from=Build /actions-runner/_layout /actions-runner
ENTRYPOINT ["./entrypoint.sh"]

View File

@@ -0,0 +1,3 @@
dist/
lib/
node_modules/

View File

@@ -0,0 +1,59 @@
{
"plugins": ["jest", "@typescript-eslint"],
"extends": ["plugin:github/es6"],
"parser": "@typescript-eslint/parser",
"parserOptions": {
"ecmaVersion": 9,
"sourceType": "module",
"project": "./tsconfig.json"
},
"rules": {
"eslint-comments/no-use": "off",
"import/no-namespace": "off",
"no-console": "off",
"no-unused-vars": "off",
"@typescript-eslint/no-unused-vars": "error",
"@typescript-eslint/explicit-member-accessibility": ["error", {"accessibility": "no-public"}],
"@typescript-eslint/no-require-imports": "error",
"@typescript-eslint/array-type": "error",
"@typescript-eslint/await-thenable": "error",
"@typescript-eslint/ban-ts-ignore": "error",
"camelcase": "off",
"@typescript-eslint/camelcase": "error",
"@typescript-eslint/class-name-casing": "error",
"@typescript-eslint/explicit-function-return-type": ["error", {"allowExpressions": true}],
"@typescript-eslint/func-call-spacing": ["error", "never"],
"@typescript-eslint/generic-type-naming": ["error", "^[A-Z][A-Za-z]*$"],
"@typescript-eslint/no-array-constructor": "error",
"@typescript-eslint/no-empty-interface": "error",
"@typescript-eslint/no-explicit-any": "error",
"@typescript-eslint/no-extraneous-class": "error",
"@typescript-eslint/no-for-in-array": "error",
"@typescript-eslint/no-inferrable-types": "error",
"@typescript-eslint/no-misused-new": "error",
"@typescript-eslint/no-namespace": "error",
"@typescript-eslint/no-non-null-assertion": "warn",
"@typescript-eslint/no-object-literal-type-assertion": "error",
"@typescript-eslint/no-unnecessary-qualifier": "error",
"@typescript-eslint/no-unnecessary-type-assertion": "error",
"@typescript-eslint/no-useless-constructor": "error",
"@typescript-eslint/no-var-requires": "error",
"@typescript-eslint/prefer-for-of": "warn",
"@typescript-eslint/prefer-function-type": "warn",
"@typescript-eslint/prefer-includes": "error",
"@typescript-eslint/prefer-interface": "error",
"@typescript-eslint/prefer-string-starts-ends-with": "error",
"@typescript-eslint/promise-function-async": "error",
"@typescript-eslint/require-array-sort-compare": "error",
"@typescript-eslint/restrict-plus-operands": "error",
"semi": "off",
"@typescript-eslint/semi": ["error", "never"],
"@typescript-eslint/type-annotation-spacing": "error",
"@typescript-eslint/unbound-method": "error"
},
"env": {
"node": true,
"es6": true,
"jest/globals": true
}
}

View File

@@ -0,0 +1,3 @@
dist/
lib/
node_modules/

View File

@@ -0,0 +1,11 @@
{
"printWidth": 80,
"tabWidth": 2,
"useTabs": false,
"semi": false,
"singleQuote": true,
"trailingComma": "none",
"bracketSpacing": false,
"arrowParens": "avoid",
"parser": "typescript"
}

View File

@@ -0,0 +1 @@
To update kubeInnerHandler under `Misc/layoutbin` run `npm install && npm run all`

File diff suppressed because it is too large.

View File

@@ -0,0 +1,36 @@
{
"name": "kubeInnerHandler",
"version": "1.0.0",
"description": "GitHub Actions",
"main": "lib/kubeInnerHandler.js",
"scripts": {
"build": "tsc",
"format": "prettier --write **/*.ts",
"format-check": "prettier --check **/*.ts",
"lint": "eslint src/**/*.ts",
"pack": "ncc build -o ../../layoutbin/kubeInnerHandler",
"all": "npm run build && npm run format && npm run lint && npm run pack"
},
"repository": {
"type": "git",
"url": "git+https://github.com/actions/runner.git"
},
"keywords": [
"actions"
],
"author": "GitHub Actions",
"license": "MIT",
"dependencies": {
"@actions/exec": "^1.1.0",
"@actions/core": "^1.6.0"
},
"devDependencies": {
"@types/node": "^12.7.12",
"@typescript-eslint/parser": "^2.8.0",
"@zeit/ncc": "^0.20.5",
"eslint": "^6.8.0",
"eslint-plugin-github": "^2.0.0",
"prettier": "^1.19.1",
"typescript": "^3.6.4"
}
}

View File

@@ -0,0 +1,49 @@
import * as exec from '@actions/exec'
import * as core from '@actions/core'
import * as events from 'events'
import * as readline from 'readline'
async function run(): Promise<void> {
let input = ''
const rl = readline.createInterface({
input: process.stdin
})
rl.on('line', line => {
core.debug(`Line from STDIN: ${line}`)
input = line
})
await events.once(rl, 'close')
core.debug(input)
const execInput = JSON.parse(input)
core.debug(JSON.stringify(execInput))
// podman exec -i --workdir /__w/canary/canary
// -e GITHUB_JOB -e GITHUB_REF -e GITHUB_SHA -e GITHUB_REPOSITORY
// -e GITHUB_REPOSITORY_OWNER -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER
// -e GITHUB_RETENTION_DAYS -e GITHUB_RUN_ATTEMPT -e GITHUB_ACTOR
// -e GITHUB_WORKFLOW -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GITHUB_EVENT_NAME
// -e GITHUB_SERVER_URL -e GITHUB_API_URL -e GITHUB_GRAPHQL_URL
// -e GITHUB_WORKSPACE -e GITHUB_ACTION -e GITHUB_EVENT_PATH -e GITHUB_ACTION_REPOSITORY
// -e GITHUB_ACTION_REF -e GITHUB_PATH -e GITHUB_ENV -e RUNNER_DEBUG
// -e RUNNER_OS -e RUNNER_NAME -e RUNNER_TOOL_CACHE
// -e RUNNER_TEMP -e RUNNER_WORKSPACE
// eccdf520697a035599d6e8c8dc801f004fdd3797cdce88f590aba3669a88d9bc sh -e /__w/_temp/d3b30383-719c-4e76-a16f-8f85443352be.sh
const execArgs = []
const args = (<string>execInput.arguments).split(' ')
core.debug(JSON.stringify(args))
execArgs.push(...args)
core.debug(JSON.stringify(execArgs))
await exec.exec(execInput.fileName, execArgs, {
env: execInput.environmentVariables
})
}
run()
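For a rough idea of how this handler is driven, a local smoke test might look like the lines below. This is only a sketch: the payload field names (fileName, arguments, environmentVariables) come from the code above, while the concrete values, the PATH entry, and the compiled entry point lib/kubeInnerHandler.js (per the package.json) are assumptions.

# hypothetical one-line JSON payload piped over stdin; all values are placeholders
echo '{"fileName":"/bin/sh","arguments":"-e /tmp/step.sh","environmentVariables":{"PATH":"/usr/bin:/bin","CI":"true"}}' \
  | node lib/kubeInnerHandler.js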

View File

@@ -0,0 +1,12 @@
{
"compilerOptions": {
"target": "es6", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019' or 'ESNEXT'. */
"module": "commonjs", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', or 'ESNext'. */
"outDir": "./lib", /* Redirect output structure to the directory. */
"rootDir": "./src", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */
"strict": true, /* Enable all strict type-checking options. */
"noImplicitAny": true, /* Raise error on expressions and declarations with an implied 'any' type. */
"esModuleInterop": true /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */
},
"exclude": ["node_modules", "**/*.test.ts"]
}

View File

@@ -0,0 +1,3 @@
dist/
lib/
node_modules/

View File

@@ -0,0 +1,59 @@
{
"plugins": ["jest", "@typescript-eslint"],
"extends": ["plugin:github/es6"],
"parser": "@typescript-eslint/parser",
"parserOptions": {
"ecmaVersion": 9,
"sourceType": "module",
"project": "./tsconfig.json"
},
"rules": {
"eslint-comments/no-use": "off",
"import/no-namespace": "off",
"no-console": "off",
"no-unused-vars": "off",
"@typescript-eslint/no-unused-vars": "error",
"@typescript-eslint/explicit-member-accessibility": ["error", {"accessibility": "no-public"}],
"@typescript-eslint/no-require-imports": "error",
"@typescript-eslint/array-type": "error",
"@typescript-eslint/await-thenable": "error",
"@typescript-eslint/ban-ts-ignore": "error",
"camelcase": "off",
"@typescript-eslint/camelcase": "error",
"@typescript-eslint/class-name-casing": "error",
"@typescript-eslint/explicit-function-return-type": ["error", {"allowExpressions": true}],
"@typescript-eslint/func-call-spacing": ["error", "never"],
"@typescript-eslint/generic-type-naming": ["error", "^[A-Z][A-Za-z]*$"],
"@typescript-eslint/no-array-constructor": "error",
"@typescript-eslint/no-empty-interface": "error",
"@typescript-eslint/no-explicit-any": "error",
"@typescript-eslint/no-extraneous-class": "error",
"@typescript-eslint/no-for-in-array": "error",
"@typescript-eslint/no-inferrable-types": "error",
"@typescript-eslint/no-misused-new": "error",
"@typescript-eslint/no-namespace": "error",
"@typescript-eslint/no-non-null-assertion": "warn",
"@typescript-eslint/no-object-literal-type-assertion": "error",
"@typescript-eslint/no-unnecessary-qualifier": "error",
"@typescript-eslint/no-unnecessary-type-assertion": "error",
"@typescript-eslint/no-useless-constructor": "error",
"@typescript-eslint/no-var-requires": "error",
"@typescript-eslint/prefer-for-of": "warn",
"@typescript-eslint/prefer-function-type": "warn",
"@typescript-eslint/prefer-includes": "error",
"@typescript-eslint/prefer-interface": "error",
"@typescript-eslint/prefer-string-starts-ends-with": "error",
"@typescript-eslint/promise-function-async": "error",
"@typescript-eslint/require-array-sort-compare": "error",
"@typescript-eslint/restrict-plus-operands": "error",
"semi": "off",
"@typescript-eslint/semi": ["error", "never"],
"@typescript-eslint/type-annotation-spacing": "error",
"@typescript-eslint/unbound-method": "error"
},
"env": {
"node": true,
"es6": true,
"jest/globals": true
}
}

View File

@@ -0,0 +1,3 @@
dist/
lib/
node_modules/

View File

@@ -0,0 +1,11 @@
{
"printWidth": 80,
"tabWidth": 2,
"useTabs": false,
"semi": false,
"singleQuote": true,
"trailingComma": "none",
"bracketSpacing": false,
"arrowParens": "avoid",
"parser": "typescript"
}

View File

@@ -0,0 +1 @@
To update kubectlHandler under `Misc/layoutbin`, run `npm install && npm run all`.

File diff suppressed because it is too large

View File

@@ -0,0 +1,36 @@
{
"name": "kubectlHandler",
"version": "1.0.0",
"description": "GitHub Actions",
"main": "lib/kubectlHandler.js",
"scripts": {
"build": "tsc",
"format": "prettier --write **/*.ts",
"format-check": "prettier --check **/*.ts",
"lint": "eslint src/**/*.ts",
"pack": "ncc build -o ../../layoutbin/kubectlHandler",
"all": "npm run build && npm run format && npm run lint && npm run pack"
},
"repository": {
"type": "git",
"url": "git+https://github.com/actions/runner.git"
},
"keywords": [
"actions"
],
"author": "GitHub Actions",
"license": "MIT",
"dependencies": {
"@actions/exec": "^1.1.0",
"@actions/core": "^1.6.0"
},
"devDependencies": {
"@types/node": "^12.7.12",
"@typescript-eslint/parser": "^2.8.0",
"@zeit/ncc": "^0.20.5",
"eslint": "^6.8.0",
"eslint-plugin-github": "^2.0.0",
"prettier": "^1.19.1",
"typescript": "^3.6.4"
}
}

View File

@@ -0,0 +1,156 @@
import * as exec from '@actions/exec'
import * as core from '@actions/core'
import * as events from 'events'
import * as readline from 'readline'
async function run(): Promise<void> {
let input = ''
const rl = readline.createInterface({
input: process.stdin
})
rl.on('line', line => {
core.debug(`Line from STDIN: ${line}`)
input = line
})
await events.once(rl, 'close')
core.debug(input)
const inputJson = JSON.parse(input)
core.debug(JSON.stringify(inputJson))
const command = inputJson.command
if (command === 'Create') {
const creationInput = inputJson.creationInput
core.debug(JSON.stringify(creationInput))
const containers = creationInput.containers
const jobContainer = containers[0]
// const networkName = 'actions_podman_network'
// // podman network create {network} -> track and return `network` for ${{job.container.network}}
// await exec.exec('podman', ['network', 'create', networkName])
const containerImage = `${jobContainer.containerImage}`
// podman pull docker.io/library/{image}
// await exec.exec('podman', ['pull', containerImage])
// kubectl run e088c842be1f46b394212618408aaba0_node1016jessie_6196c9
// --image=node:10.16-jessie
// -- tail -f /dev/null
const runArgs = ['run', 'job-container']
// runArgs.push(`--workdir=${jobContainer.containerWorkDirectory}`)
// runArgs.push(`--network=${networkName}`)
// for (const mountVolume of jobContainer.mountVolumes) {
// runArgs.push(
// `-v=${mountVolume.sourceVolumePath}:${mountVolume.targetVolumePath}`
// )
// }
runArgs.push(`--image=${containerImage}`)
runArgs.push(`--`)
runArgs.push(`tail`)
runArgs.push(`-f`)
runArgs.push(`/dev/null`)
core.debug(JSON.stringify(runArgs))
// const containerId = await exec.getExecOutput('podman', [
// 'create',
// // `--workdir ${jobContainer.containerWorkDirectory}`,
// `--network=${networkName}`,
// // `-v=/Users/ting/Desktop/runner/_layout/_work:/__w`,
// `--entrypoint=${jobContainer.containerEntryPoint}`,
// `${containerImage}`,
// `${jobContainer.containerEntryPointArgs}`
// ])
await exec.exec('kubectl', runArgs)
// get PATH inside the container
const waitArgs = ['wait', '--for=condition=Ready', 'pod/job-container']
await exec.exec('kubectl', waitArgs)
// output containerId for ${{job.container.id}}
// copy over node.js
const cpNodeArgs = [
'cp',
'/actions-runner/externals/node12/bin',
'job-container:/__runner_util/'
]
await exec.exec('kubectl', cpNodeArgs)
// copy over innerhandler
const cpKubeInnerArgs = [
'cp',
'/actions-runner/bin/kubeInnerHandler',
'job-container:/__runner_util/kubeInnerHandler'
]
await exec.exec('kubectl', cpKubeInnerArgs)
// copy over _work
const cpWorkArgs = ['cp', '/actions-runner/_work', 'job-container:/__w/']
await exec.exec('kubectl', cpWorkArgs)
const creationOutput = {
JobContainerId: 'job-container',
Network: 'job-container'
}
const output = JSON.stringify({CreationOutput: creationOutput})
core.debug(output)
process.stderr.write(
`___CONTAINER_ENGINE_HANDLER_OUTPUT___${output}___CONTAINER_ENGINE_HANDLER_OUTPUT___`
)
} else if (command === 'Remove') {
const removeInput = inputJson.removeInput
core.debug(JSON.stringify(removeInput))
// const jobContainerId = removeInput.jobContainerId
// await exec.exec('kubectl', ['delete', 'pod', jobContainerId, '--force'])
// await exec.exec('podman', ['network', 'rm', '-f', network])
} else if (command === 'Exec') {
const execInput = inputJson.execInput
core.debug(JSON.stringify(execInput))
// podman exec -i --workdir /__w/canary/canary
// -e GITHUB_JOB -e GITHUB_REF -e GITHUB_SHA -e GITHUB_REPOSITORY
// -e GITHUB_REPOSITORY_OWNER -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER
// -e GITHUB_RETENTION_DAYS -e GITHUB_RUN_ATTEMPT -e GITHUB_ACTOR
// -e GITHUB_WORKFLOW -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GITHUB_EVENT_NAME
// -e GITHUB_SERVER_URL -e GITHUB_API_URL -e GITHUB_GRAPHQL_URL
// -e GITHUB_WORKSPACE -e GITHUB_ACTION -e GITHUB_EVENT_PATH -e GITHUB_ACTION_REPOSITORY
// -e GITHUB_ACTION_REF -e GITHUB_PATH -e GITHUB_ENV -e RUNNER_DEBUG
// -e RUNNER_OS -e RUNNER_NAME -e RUNNER_TOOL_CACHE
// -e RUNNER_TEMP -e RUNNER_WORKSPACE
// eccdf520697a035599d6e8c8dc801f004fdd3797cdce88f590aba3669a88d9bc sh -e /__w/_temp/d3b30383-719c-4e76-a16f-8f85443352be.sh
const cpTempArgs = [
'cp',
'/actions-runner/_work/_temp',
'job-container:/__w/'
]
await exec.exec('kubectl', cpTempArgs)
const execArgs = ['exec']
execArgs.push(execInput.jobContainer.containerId)
execArgs.push('-i')
execArgs.push('-t')
execArgs.push('--')
execArgs.push('/__runner_util/node')
execArgs.push('/__runner_util/kubeInnerHandler')
core.debug(JSON.stringify(execArgs))
await exec.exec('kubectl', execArgs, {
input: Buffer.from(JSON.stringify(execInput))
})
}
}
run()
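As a sketch of how this handler might be exercised locally, a minimal Create payload built from the fields the code reads could be piped in as shown below. This assumes kubectl is on PATH and pointed at a cluster, the compiled entry point is lib/kubectlHandler.js per the package.json above, and the image name is a placeholder; the later kubectl cp steps also assume the runner layout under /actions-runner.

# hypothetical Create payload; the handler reads only containerImage on this path
echo '{"command":"Create","creationInput":{"containers":[{"containerImage":"node:12-buster"}]}}' \
  | node lib/kubectlHandler.js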

View File

@@ -0,0 +1,12 @@
{
"compilerOptions": {
"target": "es6", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019' or 'ESNEXT'. */
"module": "commonjs", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', or 'ESNext'. */
"outDir": "./lib", /* Redirect output structure to the directory. */
"rootDir": "./src", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */
"strict": true, /* Enable all strict type-checking options. */
"noImplicitAny": true, /* Raise error on expressions and declarations with an implied 'any' type. */
"esModuleInterop": true /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */
},
"exclude": ["node_modules", "**/*.test.ts"]
}

View File

@@ -0,0 +1,3 @@
dist/
lib/
node_modules/

View File

@@ -0,0 +1,59 @@
{
"plugins": ["jest", "@typescript-eslint"],
"extends": ["plugin:github/es6"],
"parser": "@typescript-eslint/parser",
"parserOptions": {
"ecmaVersion": 9,
"sourceType": "module",
"project": "./tsconfig.json"
},
"rules": {
"eslint-comments/no-use": "off",
"import/no-namespace": "off",
"no-console": "off",
"no-unused-vars": "off",
"@typescript-eslint/no-unused-vars": "error",
"@typescript-eslint/explicit-member-accessibility": ["error", {"accessibility": "no-public"}],
"@typescript-eslint/no-require-imports": "error",
"@typescript-eslint/array-type": "error",
"@typescript-eslint/await-thenable": "error",
"@typescript-eslint/ban-ts-ignore": "error",
"camelcase": "off",
"@typescript-eslint/camelcase": "error",
"@typescript-eslint/class-name-casing": "error",
"@typescript-eslint/explicit-function-return-type": ["error", {"allowExpressions": true}],
"@typescript-eslint/func-call-spacing": ["error", "never"],
"@typescript-eslint/generic-type-naming": ["error", "^[A-Z][A-Za-z]*$"],
"@typescript-eslint/no-array-constructor": "error",
"@typescript-eslint/no-empty-interface": "error",
"@typescript-eslint/no-explicit-any": "error",
"@typescript-eslint/no-extraneous-class": "error",
"@typescript-eslint/no-for-in-array": "error",
"@typescript-eslint/no-inferrable-types": "error",
"@typescript-eslint/no-misused-new": "error",
"@typescript-eslint/no-namespace": "error",
"@typescript-eslint/no-non-null-assertion": "warn",
"@typescript-eslint/no-object-literal-type-assertion": "error",
"@typescript-eslint/no-unnecessary-qualifier": "error",
"@typescript-eslint/no-unnecessary-type-assertion": "error",
"@typescript-eslint/no-useless-constructor": "error",
"@typescript-eslint/no-var-requires": "error",
"@typescript-eslint/prefer-for-of": "warn",
"@typescript-eslint/prefer-function-type": "warn",
"@typescript-eslint/prefer-includes": "error",
"@typescript-eslint/prefer-interface": "error",
"@typescript-eslint/prefer-string-starts-ends-with": "error",
"@typescript-eslint/promise-function-async": "error",
"@typescript-eslint/require-array-sort-compare": "error",
"@typescript-eslint/restrict-plus-operands": "error",
"semi": "off",
"@typescript-eslint/semi": ["error", "never"],
"@typescript-eslint/type-annotation-spacing": "error",
"@typescript-eslint/unbound-method": "error"
},
"env": {
"node": true,
"es6": true,
"jest/globals": true
}
}

View File

@@ -0,0 +1,3 @@
dist/
lib/
node_modules/

View File

@@ -0,0 +1,11 @@
{
"printWidth": 80,
"tabWidth": 2,
"useTabs": false,
"semi": false,
"singleQuote": true,
"trailingComma": "none",
"bracketSpacing": false,
"arrowParens": "avoid",
"parser": "typescript"
}

View File

@@ -0,0 +1 @@
To update podmanHandler under `Misc/layoutbin`, run `npm install && npm run all`.

File diff suppressed because it is too large

View File

@@ -0,0 +1,36 @@
{
"name": "podmanHandler",
"version": "1.0.0",
"description": "GitHub Actions",
"main": "lib/podmanHandler.js",
"scripts": {
"build": "tsc",
"format": "prettier --write **/*.ts",
"format-check": "prettier --check **/*.ts",
"lint": "eslint src/**/*.ts",
"pack": "ncc build -o ../../layoutbin/podmanHandler",
"all": "npm run build && npm run format && npm run lint && npm run pack"
},
"repository": {
"type": "git",
"url": "git+https://github.com/actions/runner.git"
},
"keywords": [
"actions"
],
"author": "GitHub Actions",
"license": "MIT",
"dependencies": {
"@actions/exec": "^1.1.0",
"@actions/core": "^1.6.0"
},
"devDependencies": {
"@types/node": "^12.7.12",
"@typescript-eslint/parser": "^2.8.0",
"@zeit/ncc": "^0.20.5",
"eslint": "^6.8.0",
"eslint-plugin-github": "^2.0.0",
"prettier": "^1.19.1",
"typescript": "^3.6.4"
}
}

View File

@@ -0,0 +1,150 @@
import * as exec from '@actions/exec'
import * as core from '@actions/core'
import * as events from 'events'
import * as readline from 'readline'
async function run(): Promise<void> {
let input = ''
const rl = readline.createInterface({
input: process.stdin
})
rl.on('line', line => {
core.debug(`Line from STDIN: ${line}`)
input = line
})
await events.once(rl, 'close')
core.debug(input)
const inputJson = JSON.parse(input)
core.debug(JSON.stringify(inputJson))
const command = inputJson.command
if (command === 'Create') {
const creationInput = inputJson.creationInput
core.debug(JSON.stringify(creationInput))
const containers = creationInput.containers
const jobContainer = containers[0]
const networkName = 'actions_podman_network'
// podman network create {network} -> track and return `network` for ${{job.container.network}}
await exec.exec('podman', ['network', 'create', networkName])
const containerImage = `docker.io/library/${jobContainer.containerImage}`
// podman pull docker.io/library/{image}
await exec.exec('podman', ['pull', containerImage])
// podman create --name e088c842be1f46b394212618408aaba0_node1016jessie_6196c9
// --label fa4e14
// --workdir /__w/canary/canary
// --network github_network_f98a6e1e96e74d919d814c165641cba3
// -e "HOME=/github/home" -e GITHUB_ACTIONS=true -e CI=true
// -v "/var/run/docker.sock":"/var/run/docker.sock"
// -v "/home/runner/work":"/__w"
// -v "/home/runner/runners/2.283.2/externals":"/__e":ro
// -v "/home/runner/work/_temp":"/__w/_temp"
// -v "/home/runner/work/_actions":"/__w/_actions"
// -v "/opt/hostedtoolcache":"/__t"
// -v "/home/runner/work/_temp/_github_home":"/github/home"
// -v "/home/runner/work/_temp/_github_workflow":"/github/workflow"
// --entrypoint "tail" node:10.16-jessie "-f" "/dev/null"
const createArgs = ['create']
createArgs.push(`--workdir=${jobContainer.containerWorkDirectory}`)
createArgs.push(`--network=${networkName}`)
for (const mountVolume of jobContainer.mountVolumes) {
createArgs.push(
`-v=${mountVolume.sourceVolumePath}:${mountVolume.targetVolumePath}`
)
}
createArgs.push(`--entrypoint=tail`)
createArgs.push(containerImage)
createArgs.push(`-f`)
createArgs.push(`/dev/null`)
core.debug(JSON.stringify(createArgs))
// const containerId = await exec.getExecOutput('podman', [
//   'create',
//   // `--workdir ${jobContainer.containerWorkDirectory}`,
//   `--network=${networkName}`,
//   // `-v=/Users/ting/Desktop/runner/_layout/_work:/__w`,
//   `--entrypoint=${jobContainer.containerEntryPoint}`,
//   `${containerImage}`,
//   `${jobContainer.containerEntryPointArgs}`
// ])
const containerId = await exec.getExecOutput('podman', createArgs)
core.debug(JSON.stringify(containerId))
// podman start {containerId}
await exec.exec('podman', ['start', containerId.stdout.trim()])
// get PATH inside the container
// output containerId for ${{job.container.id}}
const creationOutput = {
JobContainerId: containerId.stdout.trim(),
Network: networkName
}
const output = JSON.stringify({CreationOutput: creationOutput})
core.debug(output)
process.stderr.write(
`___CONTAINER_ENGINE_HANDLER_OUTPUT___${output}___CONTAINER_ENGINE_HANDLER_OUTPUT___`
)
} else if (command === 'Remove') {
const removeInput = inputJson.removeInput
core.debug(JSON.stringify(removeInput))
const jobContainerId = removeInput.jobContainerId
const network = removeInput.network
await exec.exec('podman', ['rm', '-f', jobContainerId])
await exec.exec('podman', ['network', 'rm', '-f', network])
} else if (command === 'Exec') {
const execInput = inputJson.execInput
core.debug(JSON.stringify(execInput))
// podman exec -i --workdir /__w/canary/canary
// -e GITHUB_JOB -e GITHUB_REF -e GITHUB_SHA -e GITHUB_REPOSITORY
// -e GITHUB_REPOSITORY_OWNER -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER
// -e GITHUB_RETENTION_DAYS -e GITHUB_RUN_ATTEMPT -e GITHUB_ACTOR
// -e GITHUB_WORKFLOW -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GITHUB_EVENT_NAME
// -e GITHUB_SERVER_URL -e GITHUB_API_URL -e GITHUB_GRAPHQL_URL
// -e GITHUB_WORKSPACE -e GITHUB_ACTION -e GITHUB_EVENT_PATH -e GITHUB_ACTION_REPOSITORY
// -e GITHUB_ACTION_REF -e GITHUB_PATH -e GITHUB_ENV -e RUNNER_DEBUG
// -e RUNNER_OS -e RUNNER_NAME -e RUNNER_TOOL_CACHE
// -e RUNNER_TEMP -e RUNNER_WORKSPACE
// eccdf520697a035599d6e8c8dc801f004fdd3797cdce88f590aba3669a88d9bc sh -e /__w/_temp/d3b30383-719c-4e76-a16f-8f85443352be.sh
const execArgs = ['exec']
execArgs.push('-i')
execArgs.push(`--workdir=${execInput.workingDirectory}`)
for (const envKey of execInput.environmentKeys) {
execArgs.push(`-e=${envKey}`)
}
execArgs.push(execInput.jobContainer.containerId)
execArgs.push(execInput.fileName)
const args = (<string>execInput.arguments).split(' ')
core.debug(JSON.stringify(args))
execArgs.push(...args)
core.debug(JSON.stringify(execArgs))
await exec.exec('podman', execArgs)
}
await exec.exec('podman', ['network', 'ls'])
await exec.exec('podman', ['ps', '-a'])
}
run()
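Similarly, a hypothetical Exec payload for this handler might look like the following. The field names (workingDirectory, environmentKeys, jobContainer.containerId, fileName, arguments) are taken from the code above; the container id, script path, and the compiled entry point lib/podmanHandler.js are placeholders or assumptions, and podman must be available on PATH.

# hypothetical Exec payload; <container-id> would come from a prior Create call
echo '{"command":"Exec","execInput":{"workingDirectory":"/__w/repo/repo","environmentKeys":["CI"],"jobContainer":{"containerId":"<container-id>"},"fileName":"sh","arguments":"-e /__w/_temp/step.sh"}}' \
  | node lib/podmanHandler.js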

View File

@@ -0,0 +1,12 @@
{
"compilerOptions": {
"target": "es6", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019' or 'ESNEXT'. */
"module": "commonjs", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', or 'ESNext'. */
"outDir": "./lib", /* Redirect output structure to the directory. */
"rootDir": "./src", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */
"strict": true, /* Enable all strict type-checking options. */
"noImplicitAny": true, /* Raise error on expressions and declarations with an implied 'any' type. */
"esModuleInterop": true /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */
},
"exclude": ["node_modules", "**/*.test.ts"]
}

View File

@@ -1 +0,0 @@
de62d296708908cfd1236e58869aebbc2bae8a8c3d629276968542626c508e37

View File

@@ -1 +0,0 @@
44fcd0422dd98ed17d2c8e9057ff2260c50165f20674236a4ae7d2645a07df25

View File

@@ -1 +0,0 @@
e57652cf322ee16ce3af4f9e58f80858746b9e1e60279e991a3b3d9a6baf8d79

View File

@@ -1 +0,0 @@
bdd247b2ff3f51095524412e2ac588e7a87af805e114d6caf2368366ee7be1ea

View File

@@ -1 +0,0 @@
d23a0cb9f20c0aa1cddb7a39567cd097020cdeb06a1e952940601d1a405c53b8

View File

@@ -1 +0,0 @@
6ed30a2c1ee403a610d63e82bb230b9ba846a9c25cec9e4ea8672fb6ed4e1a51

View File

@@ -1 +0,0 @@
711c30c51ec52c9b7a9a2eb399d6ab2ab5ee1dc72de11879f2f36f919f163d78

View File

@@ -1 +0,0 @@
a49479ca4b4988a06c097e8d22c51fd08a11c13f40807366236213d0e008cf6a

View File

@@ -1 +0,0 @@
8e97df75230b843462a9b4c578ccec604ee4b4a1066120c85b04374317fa372b

View File

@@ -1 +0,0 @@
f75a671e5a188c76680739689aa75331a2c09d483dce9c80023518c48fd67a18

View File

@@ -1,6 +1,6 @@
{
"plugins": ["@typescript-eslint"],
"extends": ["plugin:github/recommended"],
"plugins": ["jest", "@typescript-eslint"],
"extends": ["plugin:github/es6"],
"parser": "@typescript-eslint/parser",
"parserOptions": {
"ecmaVersion": 9,
@@ -17,16 +17,13 @@
"@typescript-eslint/no-require-imports": "error",
"@typescript-eslint/array-type": "error",
"@typescript-eslint/await-thenable": "error",
"@typescript-eslint/naming-convention": [
"error",
{
"selector": "default",
"format": ["camelCase"]
}
],
"@typescript-eslint/ban-ts-ignore": "error",
"camelcase": "off",
"@typescript-eslint/camelcase": "error",
"@typescript-eslint/class-name-casing": "error",
"@typescript-eslint/explicit-function-return-type": ["error", {"allowExpressions": true}],
"@typescript-eslint/func-call-spacing": ["error", "never"],
"@typescript-eslint/generic-type-naming": ["error", "^[A-Z][A-Za-z]*$"],
"@typescript-eslint/no-array-constructor": "error",
"@typescript-eslint/no-empty-interface": "error",
"@typescript-eslint/no-explicit-any": "error",
@@ -36,6 +33,7 @@
"@typescript-eslint/no-misused-new": "error",
"@typescript-eslint/no-namespace": "error",
"@typescript-eslint/no-non-null-assertion": "warn",
"@typescript-eslint/no-object-literal-type-assertion": "error",
"@typescript-eslint/no-unnecessary-qualifier": "error",
"@typescript-eslint/no-unnecessary-type-assertion": "error",
"@typescript-eslint/no-useless-constructor": "error",
@@ -43,19 +41,19 @@
"@typescript-eslint/prefer-for-of": "warn",
"@typescript-eslint/prefer-function-type": "warn",
"@typescript-eslint/prefer-includes": "error",
"@typescript-eslint/prefer-interface": "error",
"@typescript-eslint/prefer-string-starts-ends-with": "error",
"@typescript-eslint/promise-function-async": "error",
"@typescript-eslint/require-array-sort-compare": "error",
"@typescript-eslint/restrict-plus-operands": "error",
"semi": "off",
"@typescript-eslint/semi": ["error", "never"],
"@typescript-eslint/type-annotation-spacing": "error",
"@typescript-eslint/unbound-method": "error",
"filenames/match-regex" : "off",
"github/no-then" : 1, // warning
"semi": "off"
"@typescript-eslint/unbound-method": "error"
},
"env": {
"node": true,
"es6": true
"es6": true,
"jest/globals": true
}
}

File diff suppressed because it is too large

View File

@@ -25,10 +25,10 @@
},
"devDependencies": {
"@types/node": "^12.7.12",
"@typescript-eslint/parser": "^5.15.0",
"@typescript-eslint/parser": "^2.8.0",
"@zeit/ncc": "^0.20.5",
"eslint": "^8.11.0",
"eslint-plugin-github": "^4.3.5",
"eslint": "^6.8.0",
"eslint-plugin-github": "^2.0.0",
"prettier": "^1.19.1",
"typescript": "^3.6.4"
}

View File

@@ -1,9 +1,9 @@
import * as glob from '@actions/glob'
import * as crypto from 'crypto'
import * as fs from 'fs'
import * as glob from '@actions/glob'
import * as path from 'path'
import * as stream from 'stream'
import * as util from 'util'
import * as path from 'path'
async function run(): Promise<void> {
// arg0 -> node
@@ -45,7 +45,7 @@ async function run(): Promise<void> {
result.end()
if (hasMatch) {
console.log(`Found ${count} files to hash.`)
console.log(`Find ${count} files to hash.`)
console.error(`__OUTPUT__${result.digest('hex')}__OUTPUT__`)
} else {
console.error(`__OUTPUT____OUTPUT__`)
@@ -53,11 +53,3 @@ async function run(): Promise<void> {
}
run()
.then(out => {
console.log(out)
process.exit(0)
})
.catch(err => {
console.error(err)
process.exit(1)
})

View File

@@ -3,8 +3,7 @@ PACKAGERUNTIME=$1
PRECACHE=$2
NODE_URL=https://nodejs.org/dist
NODE12_VERSION="12.22.7"
NODE16_VERSION="16.13.0"
NODE12_VERSION="12.13.1"
get_abs_path() {
# exploits the fact that pwd will print abs path when no args
@@ -127,8 +126,6 @@ function acquireExternalTool() {
if [[ "$PACKAGERUNTIME" == "win-x64" || "$PACKAGERUNTIME" == "win-x86" ]]; then
acquireExternalTool "$NODE_URL/v${NODE12_VERSION}/$PACKAGERUNTIME/node.exe" node12/bin
acquireExternalTool "$NODE_URL/v${NODE12_VERSION}/$PACKAGERUNTIME/node.lib" node12/bin
acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/$PACKAGERUNTIME/node.exe" node16/bin
acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/$PACKAGERUNTIME/node.lib" node16/bin
if [[ "$PRECACHE" != "" ]]; then
acquireExternalTool "https://github.com/microsoft/vswhere/releases/download/2.6.7/vswhere.exe" vswhere
fi
@@ -137,23 +134,18 @@ fi
# Download the external tools only for OSX.
if [[ "$PACKAGERUNTIME" == "osx-x64" ]]; then
acquireExternalTool "$NODE_URL/v${NODE12_VERSION}/node-v${NODE12_VERSION}-darwin-x64.tar.gz" node12 fix_nested_dir
acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-darwin-x64.tar.gz" node16 fix_nested_dir
fi
# Download the external tools for Linux PACKAGERUNTIMEs.
if [[ "$PACKAGERUNTIME" == "linux-x64" ]]; then
acquireExternalTool "$NODE_URL/v${NODE12_VERSION}/node-v${NODE12_VERSION}-linux-x64.tar.gz" node12 fix_nested_dir
acquireExternalTool "https://vstsagenttools.blob.core.windows.net/tools/nodejs/${NODE12_VERSION}/alpine/x64/node-v${NODE12_VERSION}-alpine-x64.tar.gz" node12_alpine
acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-linux-x64.tar.gz" node16 fix_nested_dir
acquireExternalTool "https://vstsagenttools.blob.core.windows.net/tools/nodejs/${NODE16_VERSION}/alpine/x64/node-v${NODE16_VERSION}-alpine-x64.tar.gz" node16_alpine
acquireExternalTool "https://vstsagenttools.blob.core.windows.net/tools/nodejs/${NODE12_VERSION}/alpine/x64/node-${NODE12_VERSION}-alpine-x64.tar.gz" node12_alpine
fi
if [[ "$PACKAGERUNTIME" == "linux-arm64" ]]; then
acquireExternalTool "$NODE_URL/v${NODE12_VERSION}/node-v${NODE12_VERSION}-linux-arm64.tar.gz" node12 fix_nested_dir
acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-linux-arm64.tar.gz" node16 fix_nested_dir
fi
if [[ "$PACKAGERUNTIME" == "linux-arm" ]]; then
acquireExternalTool "$NODE_URL/v${NODE12_VERSION}/node-v${NODE12_VERSION}-linux-armv7l.tar.gz" node12 fix_nested_dir
acquireExternalTool "$NODE_URL/v${NODE16_VERSION}/node-v${NODE16_VERSION}-linux-armv7l.tar.gz" node16 fix_nested_dir
fi

View File

@@ -3,135 +3,94 @@
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
var childProcess = require("child_process");
var path = require("path");
var path = require("path")
var supported = ["linux", "darwin"];
var supported = ['linux', 'darwin']
if (supported.indexOf(process.platform) == -1) {
console.log("Unsupported platform: " + process.platform);
console.log("Supported platforms are: " + supported.toString());
console.log('Unsupported platform: ' + process.platform);
console.log('Supported platforms are: ' + supported.toString());
process.exit(1);
}
var stopping = false;
var listener = null;
var exitServiceAfterNFailures = Number(
process.env.GITHUB_ACTIONS_SERVICE_EXIT_AFTER_N_FAILURES
);
if (exitServiceAfterNFailures <= 0) {
exitServiceAfterNFailures = NaN;
}
var consecutiveFailureCount = 0;
var gracefulShutdown = function () {
console.log("Shutting down runner listener");
stopping = true;
if (listener) {
console.log("Sending SIGINT to runner listener to stop");
listener.kill("SIGINT");
console.log("Sending SIGKILL to runner listener");
setTimeout(() => listener.kill("SIGKILL"), 30000).unref();
}
};
var runService = function () {
var listenerExePath = path.join(__dirname, "../bin/Runner.Listener");
var listenerExePath = path.join(__dirname, '../bin/Runner.Listener');
var interactive = process.argv[2] === "interactive";
if (!stopping) {
try {
if (interactive) {
console.log("Starting Runner listener interactively");
listener = childProcess.spawn(listenerExePath, ["run"], {
env: process.env,
});
console.log('Starting Runner listener interactively');
listener = childProcess.spawn(listenerExePath, ['run'], { env: process.env });
} else {
console.log("Starting Runner listener with startup type: service");
listener = childProcess.spawn(
listenerExePath,
["run", "--startuptype", "service"],
{ env: process.env }
);
console.log('Starting Runner listener with startup type: service');
listener = childProcess.spawn(listenerExePath, ['run', '--startuptype', 'service'], { env: process.env });
}
console.log(`Started listener process, pid: ${listener.pid}`);
listener.stdout.on("data", (data) => {
if (data.toString("utf8").includes("Listening for Jobs")) {
consecutiveFailureCount = 0;
}
process.stdout.write(data.toString("utf8"));
listener.stdout.on('data', (data) => {
process.stdout.write(data.toString('utf8'));
});
listener.stderr.on("data", (data) => {
process.stdout.write(data.toString("utf8"));
listener.stderr.on('data', (data) => {
process.stdout.write(data.toString('utf8'));
});
listener.on("error", (err) => {
console.log(`Runner listener fail to start with error ${err.message}`);
});
listener.on("close", (code) => {
listener.on('close', (code) => {
console.log(`Runner listener exited with error code ${code}`);
if (code === 0) {
console.log(
"Runner listener exit with 0 return code, stop the service, no retry needed."
);
console.log('Runner listener exit with 0 return code, stop the service, no retry needed.');
stopping = true;
} else if (code === 1) {
console.log(
"Runner listener exit with terminated error, stop the service, no retry needed."
);
console.log('Runner listener exit with terminated error, stop the service, no retry needed.');
stopping = true;
} else if (code === 2) {
console.log(
"Runner listener exit with retryable error, re-launch runner in 5 seconds."
);
consecutiveFailureCount = 0;
} else if (code === 3 || code === 4) {
console.log(
"Runner listener exit because of updating, re-launch runner in 5 seconds."
);
consecutiveFailureCount = 0;
console.log('Runner listener exit with retryable error, re-launch runner in 5 seconds.');
} else if (code === 3) {
console.log('Runner listener exit because of updating, re-launch runner in 5 seconds.');
} else {
var messagePrefix = "Runner listener exit with undefined return code";
consecutiveFailureCount++;
if (
!isNaN(exitServiceAfterNFailures) &&
consecutiveFailureCount >= exitServiceAfterNFailures
) {
console.error(
`${messagePrefix}, exiting service after ${consecutiveFailureCount} consecutive failures`
);
gracefulShutdown();
return;
} else {
console.log(`${messagePrefix}, re-launch runner in 5 seconds.`);
}
console.log('Runner listener exit with undefined return code, re-launch runner in 5 seconds.');
}
if (!stopping) {
setTimeout(runService, 5000);
}
});
} catch (ex) {
console.log(ex);
}
}
};
}
runService();
console.log("Started running service");
console.log('Started running service');
process.on("SIGINT", () => {
gracefulShutdown();
var gracefulShutdown = function (code) {
console.log('Shutting down runner listener');
stopping = true;
if (listener) {
console.log('Sending SIGINT to runner listener to stop');
listener.kill('SIGINT');
console.log('Sending SIGKILL to runner listener');
setTimeout(() => listener.kill('SIGKILL'), 30000);
}
}
process.on('SIGINT', () => {
gracefulShutdown(0);
});
process.on("SIGTERM", () => {
gracefulShutdown();
process.on('SIGTERM', () => {
gracefulShutdown(0);
});

View File

@@ -17,13 +17,7 @@ RUNNER_ROOT=`pwd`
LAUNCH_PATH="${HOME}/Library/LaunchAgents"
PLIST_PATH="${LAUNCH_PATH}/${SVC_NAME}.plist"
TEMPLATE_PATH=$GITHUB_ACTIONS_RUNNER_SERVICE_TEMPLATE
IS_CUSTOM_TEMPLATE=0
if [[ -z $TEMPLATE_PATH ]]; then
TEMPLATE_PATH=./bin/actions.runner.plist.template
else
IS_CUSTOM_TEMPLATE=1
fi
TEMP_PATH=./bin/actions.runner.plist.temp
CONFIG_PATH=.service
@@ -35,11 +29,7 @@ function failed()
}
if [ ! -f "${TEMPLATE_PATH}" ]; then
if [[ $IS_CUSTOM_TEMPLATE = 0 ]]; then
failed "Must run from runner root or install is corrupt"
else
failed "Service file at '$GITHUB_ACTIONS_RUNNER_SERVICE_TEMPLATE' using GITHUB_ACTIONS_RUNNER_SERVICE_TEMPLATE env variable is not found"
fi
fi
function install()
@@ -63,7 +53,7 @@ function install()
mkdir -p "${log_path}" || failed "failed to create ${log_path}"
echo Creating ${PLIST_PATH}
sed "s/{{User}}/${USER:-$SUDO_USER}/g; s/{{SvcName}}/$SVC_NAME/g; s@{{RunnerRoot}}@${RUNNER_ROOT}@g; s@{{UserHome}}@$HOME@g;" "${TEMPLATE_PATH}" > "${TEMP_PATH}" || failed "failed to create replacement temp file"
sed "s/{{User}}/${SUDO_USER:-$USER}/g; s/{{SvcName}}/$SVC_NAME/g; s@{{RunnerRoot}}@${RUNNER_ROOT}@g; s@{{UserHome}}@$HOME@g;" "${TEMPLATE_PATH}" > "${TEMP_PATH}" || failed "failed to create replacement temp file"
mv "${TEMP_PATH}" "${PLIST_PATH}" || failed "failed to copy plist"
# Since we started with sudo, runsvc.sh will be owned by root. Change this to current login user.

View File

@@ -43,32 +43,6 @@ module.exports =
/************************************************************************/
/******/ ({
/***/ 82:
/***/ (function(__unusedmodule, exports) {
"use strict";
// We use any as a valid input type
/* eslint-disable @typescript-eslint/no-explicit-any */
Object.defineProperty(exports, "__esModule", { value: true });
/**
* Sanitizes an input into a string so it can be passed into issueCommand safely
* @param input input to sanitize into a string
*/
function toCommandValue(input) {
if (input === null || input === undefined) {
return '';
}
else if (typeof input === 'string' || input instanceof String) {
return input;
}
return JSON.stringify(input);
}
exports.toCommandValue = toCommandValue;
//# sourceMappingURL=utils.js.map
/***/ }),
/***/ 87:
/***/ (function(module) {
@@ -1004,42 +978,6 @@ function regExpEscape (s) {
}
/***/ }),
/***/ 102:
/***/ (function(__unusedmodule, exports, __webpack_require__) {
"use strict";
// For internal use, subject to change.
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
// We use any as a valid input type
/* eslint-disable @typescript-eslint/no-explicit-any */
const fs = __importStar(__webpack_require__(747));
const os = __importStar(__webpack_require__(87));
const utils_1 = __webpack_require__(82);
function issueCommand(command, message) {
const filePath = process.env[`GITHUB_${command}`];
if (!filePath) {
throw new Error(`Unable to find environment variable for file command ${command}`);
}
if (!fs.existsSync(filePath)) {
throw new Error(`Missing file at path: ${filePath}`);
}
fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, {
encoding: 'utf8'
});
}
exports.issueCommand = issueCommand;
//# sourceMappingURL=file-command.js.map
/***/ }),
/***/ 281:
@@ -1557,12 +1495,12 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const glob = __importStar(__webpack_require__(281));
const crypto = __importStar(__webpack_require__(417));
const fs = __importStar(__webpack_require__(747));
const glob = __importStar(__webpack_require__(281));
const path = __importStar(__webpack_require__(622));
const stream = __importStar(__webpack_require__(413));
const util = __importStar(__webpack_require__(669));
const path = __importStar(__webpack_require__(622));
function run() {
var e_1, _a;
return __awaiter(this, void 0, void 0, function* () {
@@ -1613,7 +1551,7 @@ function run() {
}
result.end();
if (hasMatch) {
console.log(`Found ${count} files to hash.`);
console.log(`Find ${count} files to hash.`);
console.error(`__OUTPUT__${result.digest('hex')}__OUTPUT__`);
}
else {
@@ -1621,15 +1559,7 @@ function run() {
}
});
}
run()
.then(out => {
console.log(out);
process.exit(0);
})
.catch(err => {
console.error(err);
process.exit(1);
});
run();
/***/ }),
@@ -1757,25 +1687,17 @@ module.exports = require("crypto");
"use strict";
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const os = __importStar(__webpack_require__(87));
const utils_1 = __webpack_require__(82);
const os = __webpack_require__(87);
/**
* Commands
*
* Command Format:
* ::name key=value,key=value::message
* ##[name key=value;key=value]message
*
* Examples:
* ::warning::This is the message
* ::set-env name=MY_VAR::some value
* ##[warning]This is the user warning message
* ##[set-secret name=mypassword]definitelyNotAPassword!
*/
function issueCommand(command, properties, message) {
const cmd = new Command(command, properties, message);
@@ -1800,39 +1722,34 @@ class Command {
let cmdStr = CMD_STRING + this.command;
if (this.properties && Object.keys(this.properties).length > 0) {
cmdStr += ' ';
let first = true;
for (const key in this.properties) {
if (this.properties.hasOwnProperty(key)) {
const val = this.properties[key];
if (val) {
if (first) {
first = false;
}
else {
cmdStr += ',';
}
cmdStr += `${key}=${escapeProperty(val)}`;
// safely append the val - avoid blowing up when attempting to
// call .replace() if message is not a string for some reason
cmdStr += `${key}=${escape(`${val || ''}`)},`;
}
}
}
}
cmdStr += `${CMD_STRING}${escapeData(this.message)}`;
cmdStr += CMD_STRING;
// safely append the message - avoid blowing up when attempting to
// call .replace() if message is not a string for some reason
const message = `${this.message || ''}`;
cmdStr += escapeData(message);
return cmdStr;
}
}
function escapeData(s) {
return utils_1.toCommandValue(s)
.replace(/%/g, '%25')
.replace(/\r/g, '%0D')
.replace(/\n/g, '%0A');
return s.replace(/\r/g, '%0D').replace(/\n/g, '%0A');
}
function escapeProperty(s) {
return utils_1.toCommandValue(s)
.replace(/%/g, '%25')
function escape(s) {
return s
.replace(/\r/g, '%0D')
.replace(/\n/g, '%0A')
.replace(/:/g, '%3A')
.replace(/,/g, '%2C');
.replace(/]/g, '%5D')
.replace(/;/g, '%3B');
}
//# sourceMappingURL=command.js.map
@@ -1852,19 +1769,10 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
result["default"] = mod;
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const command_1 = __webpack_require__(431);
const file_command_1 = __webpack_require__(102);
const utils_1 = __webpack_require__(82);
const os = __importStar(__webpack_require__(87));
const path = __importStar(__webpack_require__(622));
const os = __webpack_require__(87);
const path = __webpack_require__(622);
/**
* The code to exit an action
*/
@@ -1885,21 +1793,11 @@ var ExitCode;
/**
* Sets env variable for this action and future actions in the job
* @param name the name of the variable to set
* @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify
* @param val the value of the variable
*/
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function exportVariable(name, val) {
const convertedVal = utils_1.toCommandValue(val);
process.env[name] = convertedVal;
const filePath = process.env['GITHUB_ENV'] || '';
if (filePath) {
const delimiter = '_GitHubActionsFileCommandDelimeter_';
const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`;
file_command_1.issueCommand('ENV', commandValue);
}
else {
command_1.issueCommand('set-env', { name }, convertedVal);
}
process.env[name] = val;
command_1.issueCommand('set-env', { name }, val);
}
exports.exportVariable = exportVariable;
/**
@@ -1915,13 +1813,7 @@ exports.setSecret = setSecret;
* @param inputPath
*/
function addPath(inputPath) {
const filePath = process.env['GITHUB_PATH'] || '';
if (filePath) {
file_command_1.issueCommand('PATH', inputPath);
}
else {
command_1.issueCommand('add-path', {}, inputPath);
}
process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;
}
exports.addPath = addPath;
@@ -1944,22 +1836,12 @@ exports.getInput = getInput;
* Sets the value of an output.
*
* @param name name of the output to set
* @param value value to store. Non-string values will be converted to a string via JSON.stringify
* @param value value to store
*/
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function setOutput(name, value) {
command_1.issueCommand('set-output', { name }, value);
}
exports.setOutput = setOutput;
/**
* Enables or disables the echoing of commands into stdout for the rest of the step.
* Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.
*
*/
function setCommandEcho(enabled) {
command_1.issue('echo', enabled ? 'on' : 'off');
}
exports.setCommandEcho = setCommandEcho;
//-----------------------------------------------------------------------
// Results
//-----------------------------------------------------------------------
@@ -1976,13 +1858,6 @@ exports.setFailed = setFailed;
//-----------------------------------------------------------------------
// Logging Commands
//-----------------------------------------------------------------------
/**
* Gets whether Actions Step Debug is on or not
*/
function isDebug() {
return process.env['RUNNER_DEBUG'] === '1';
}
exports.isDebug = isDebug;
/**
* Writes debug message to user log
* @param message debug message
@@ -1993,18 +1868,18 @@ function debug(message) {
exports.debug = debug;
/**
* Adds an error issue
* @param message error issue message. Errors will be converted to string via toString()
* @param message error issue message
*/
function error(message) {
command_1.issue('error', message instanceof Error ? message.toString() : message);
command_1.issue('error', message);
}
exports.error = error;
/**
* Adds an warning issue
* @param message warning issue message. Errors will be converted to string via toString()
* @param message warning issue message
*/
function warning(message) {
command_1.issue('warning', message instanceof Error ? message.toString() : message);
command_1.issue('warning', message);
}
exports.warning = warning;
/**
@@ -2062,9 +1937,8 @@ exports.group = group;
* Saves state for current action, the state can only be retrieved by this action's post job execution.
*
* @param name name of the state to store
* @param value value to store. Non-string values will be converted to a string via JSON.stringify
* @param value value to store
*/
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function saveState(name, value) {
command_1.issueCommand('save-state', { name }, value);
}

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -0,0 +1,49 @@
// Job container creation
// podman network create {network} -> track and return `network` for ${{job.container.network}}
// podman pull docker.io/library/{image}
// podman create --name e088c842be1f46b394212618408aaba0_node1016jessie_6196c9
// --label fa4e14
// --workdir /__w/canary/canary
// --network github_network_f98a6e1e96e74d919d814c165641cba3
// -e "HOME=/github/home" -e GITHUB_ACTIONS=true -e CI=true
// -v "/var/run/docker.sock":"/var/run/docker.sock"
// -v "/home/runner/work":"/__w"
// -v "/home/runner/runners/2.283.2/externals":"/__e":ro
// -v "/home/runner/work/_temp":"/__w/_temp"
// -v "/home/runner/work/_actions":"/__w/_actions"
// -v "/opt/hostedtoolcache":"/__t"
// -v "/home/runner/work/_temp/_github_home":"/github/home"
// -v "/home/runner/work/_temp/_github_workflow":"/github/workflow"
// --entrypoint "tail" node:10.16-jessie "-f" "/dev/null"
// podman start {containerId}
// get PATH inside the container
// output containerId for ${{job.container.id}}
// Job container stop
// podman rm --force {containerId}
// podman network rm {network}
// Run step
// podman exec -i --workdir /__w/canary/canary
// -e GITHUB_JOB -e GITHUB_REF -e GITHUB_SHA -e GITHUB_REPOSITORY
// -e GITHUB_REPOSITORY_OWNER -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER
// -e GITHUB_RETENTION_DAYS -e GITHUB_RUN_ATTEMPT -e GITHUB_ACTOR
// -e GITHUB_WORKFLOW -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GITHUB_EVENT_NAME
// -e GITHUB_SERVER_URL -e GITHUB_API_URL -e GITHUB_GRAPHQL_URL
// -e GITHUB_WORKSPACE -e GITHUB_ACTION -e GITHUB_EVENT_PATH -e GITHUB_ACTION_REPOSITORY
// -e GITHUB_ACTION_REF -e GITHUB_PATH -e GITHUB_ENV -e RUNNER_DEBUG
// -e RUNNER_OS -e RUNNER_NAME -e RUNNER_TOOL_CACHE
// -e RUNNER_TEMP -e RUNNER_WORKSPACE
// eccdf520697a035599d6e8c8dc801f004fdd3797cdce88f590aba3669a88d9bc sh -e /__w/_temp/d3b30383-719c-4e76-a16f-8f85443352be.sh
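Condensed into runnable form, the lifecycle sketched above is roughly the following shell sequence (a sketch only; the network name, image, paths, and env vars are placeholders taken from the notes):

podman network create github_network_example
podman pull docker.io/library/node:10.16-jessie
id=$(podman create --workdir /__w/canary/canary --network github_network_example --entrypoint tail docker.io/library/node:10.16-jessie -f /dev/null)
podman start "$id"
podman exec -i --workdir /__w/canary/canary -e CI "$id" sh -e /__w/_temp/step.sh
podman rm --force "$id"
podman network rm github_network_example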

File diff suppressed because it is too large

View File

@@ -10,11 +10,10 @@ if [ -f ".path" ]; then
echo ".path=${PATH}"
fi
nodever=${GITHUB_ACTIONS_RUNNER_FORCED_NODE_VERSION:-node16}
# insert anything to set up env when running as a service
# run the host process which keeps the listener alive
./externals/$nodever/bin/node ./bin/RunnerService.js &
./externals/node12/bin/node ./bin/RunnerService.js &
PID=$!
wait $PID
trap - TERM INT

View File

@@ -10,13 +10,7 @@ arg_2=${2}
RUNNER_ROOT=`pwd`
UNIT_PATH=/etc/systemd/system/${SVC_NAME}
TEMPLATE_PATH=$GITHUB_ACTIONS_RUNNER_SERVICE_TEMPLATE
IS_CUSTOM_TEMPLATE=0
if [[ -z $TEMPLATE_PATH ]]; then
TEMPLATE_PATH=./bin/actions.runner.service.template
else
IS_CUSTOM_TEMPLATE=1
fi
TEMP_PATH=./bin/actions.runner.service.temp
CONFIG_PATH=.service
@@ -37,11 +31,7 @@ function failed()
}
if [ ! -f "${TEMPLATE_PATH}" ]; then
if [[ $IS_CUSTOM_TEMPLATE = 0 ]]; then
failed "Must run from runner root or install is corrupt"
else
failed "Service file at '$GITHUB_ACTIONS_RUNNER_SERVICE_TEMPLATE' using GITHUB_ACTIONS_RUNNER_SERVICE_TEMPLATE env variable is not found"
fi
fi
#check if we run as root

View File

@@ -30,13 +30,13 @@ date "+[%F %T-%4N] Waiting for $runnerprocessname ($runnerpid) to complete" >> "
while [ -e /proc/$runnerpid ]
do
date "+[%F %T-%4N] Process $runnerpid still running" >> "$logfile" 2>&1
"$rootfolder"/safe_sleep.sh 2
sleep 2
done
date "+[%F %T-%4N] Process $runnerpid finished running" >> "$logfile" 2>&1
# start re-organize folders
date "+[%F %T-%4N] Sleep 1 more second to make sure process exited" >> "$logfile" 2>&1
"$rootfolder"/safe_sleep.sh 1
sleep 1
# the folder structure under runner root will be
# ./bin -> bin.2.100.0 (junction folder)
@@ -125,7 +125,7 @@ attemptedtargetedfix=0
currentplatform=$(uname | awk '{print tolower($0)}')
if [[ "$currentplatform" == 'darwin' && restartinteractiverunner -eq 0 ]]; then
# We needed a fix for https://github.com/actions/runner/issues/743
# We will recreate the ./externals/nodeXY/bin/node of the past runner version that launched the runnerlistener service
# We will recreate the ./externals/node12/bin/node of the past runner version that launched the runnerlistener service
# Otherwise mac gatekeeper kills the processes we spawn on creation as we are running a process with no backing file
# We need the pid for the nodejs loop, get that here, its the parent of the runner C# pid
@@ -135,13 +135,7 @@ if [[ "$currentplatform" == 'darwin' && restartinteractiverunner -eq 0 ]]; then
then
# inspect the open file handles to find the node process
# we can't actually inspect the process using ps because it uses relative paths and doesn't follow symlinks
nodever="node16"
path=$(lsof -a -g "$procgroup" -F n | grep $nodever/bin/node | grep externals | tail -1 | cut -c2-)
if [[ $? -ne 0 || -z "$path" ]] # Fallback if RunnerService.js was started with node12
then
nodever="node12"
path=$(lsof -a -g "$procgroup" -F n | grep $nodever/bin/node | grep externals | tail -1 | cut -c2-)
fi
path=$(lsof -a -g "$procgroup" -F n | grep node12/bin/node | grep externals | tail -1 | cut -c2-)
if [[ $? -eq 0 && -n "$path" ]]
then
# trim the last 5 characters of the path '/node'
@@ -154,7 +148,7 @@ if [[ "$currentplatform" == 'darwin' && restartinteractiverunner -eq 0 ]]; then
then
date "+[%F %T-%4N] Creating fallback node at path $path" >> "$logfile" 2>&1
mkdir -p "$trimmedpath"
cp "$rootfolder/externals/$nodever/bin/node" "$path"
cp "$rootfolder/externals/node12/bin/node" "$path"
else
date "+[%F %T-%4N] Path for fallback node exists, skipping creating $path" >> "$logfile" 2>&1
fi
@@ -166,15 +160,61 @@ if [[ "$currentplatform" == 'darwin' && restartinteractiverunner -eq 0 ]]; then
date "+[%F %T-%4N] DarwinRunnerUpgrade: Failed to find runner path. Path: $path, pgid: $procgroup, root: $rootfolder" >> "$logfile" 2>&1
date "+[%F %T-%4N] DarwinRunnerUpgrade: Failed to find runner path. Path: $path, pgid: $procgroup, root: $rootfolder" >> "$telemetryfile" 2>&1
fi
else
runproc=$(ps x -o pgid,command | grep "run.sh" | grep -v grep | awk '{print $1}')
if [[ $? -eq 0 && -n "$runproc" ]]
then
date "+[%F %T-%4N] Running as ephemeral using run.sh, no need to recreate node folder" >> "$logfile" 2>&1
else
date "+[%F %T-%4N] DarwinRunnerUpgrade: Failed to find runner pgid. pgid: $procgroup, root: $rootfolder" >> "$logfile" 2>&1
date "+[%F %T-%4N] DarwinRunnerUpgrade: Failed to find runner pgid. pgid: $procgroup, root: $rootfolder" >> "$telemetryfile" 2>&1
fi
if [ $attemptedtargetedfix -eq 0 ]
then
date "+[%F %T-%4N] DarwinRunnerUpgrade: Defaulting to old macOS service fix" >> "$logfile" 2>&1
date "+[%F %T-%4N] DarwinRunnerUpgrade: Defaulting to old macOS service fix" >> "$telemetryfile" 2>&1
if [[ ! -e "$rootfolder/externals.2.280.3/node12/bin/node" ]]
then
mkdir -p "$rootfolder/externals.2.280.3/node12/bin"
cp "$rootfolder/externals/node12/bin/node" "$rootfolder/externals.2.280.3/node12/bin/node"
fi
if [[ ! -e "$rootfolder/externals.2.280.2/node12/bin/node" ]]
then
mkdir -p "$rootfolder/externals.2.280.2/node12/bin"
cp "$rootfolder/externals/node12/bin/node" "$rootfolder/externals.2.280.2/node12/bin/node"
fi
if [[ ! -e "$rootfolder/externals.2.280.1/node12/bin/node" ]]
then
mkdir -p "$rootfolder/externals.2.280.1/node12/bin"
cp "$rootfolder/externals/node12/bin/node" "$rootfolder/externals.2.280.1/node12/bin/node"
fi
# GHES 3.2
if [[ ! -e "$rootfolder/externals.2.279.0/node12/bin/node" ]]
then
mkdir -p "$rootfolder/externals.2.279.0/node12/bin"
cp "$rootfolder/externals/node12/bin/node" "$rootfolder/externals.2.279.0/node12/bin/node"
fi
# GHES 3.1.2 or later
if [[ ! -e "$rootfolder/externals.2.278.0/node12/bin/node" ]]
then
mkdir -p "$rootfolder/externals.2.278.0/node12/bin"
cp "$rootfolder/externals/node12/bin/node" "$rootfolder/externals.2.278.0/node12/bin/node"
fi
# GHES 3.1.0
if [[ ! -e "$rootfolder/externals.2.276.1/node12/bin/node" ]]
then
mkdir -p "$rootfolder/externals.2.276.1/node12/bin"
cp "$rootfolder/externals/node12/bin/node" "$rootfolder/externals.2.276.1/node12/bin/node"
fi
# GHES 3.0
if [[ ! -e "$rootfolder/externals.2.273.5/node12/bin/node" ]]
then
mkdir -p "$rootfolder/externals.2.273.5/node12/bin"
cp "$rootfolder/externals/node12/bin/node" "$rootfolder/externals.2.273.5/node12/bin/node"
fi
fi
fi

View File

@@ -8,7 +8,7 @@ if [ $user_id -eq 0 -a -z "$RUNNER_ALLOW_RUNASROOT" ]; then
exit 1
fi
# Check dotnet Core 6.0 dependencies for Linux
# Check dotnet core 3.0 dependencies for Linux
if [[ (`uname` == "Linux") ]]
then
command -v ldd > /dev/null
@@ -18,25 +18,25 @@ then
exit 1
fi
message="Execute sudo ./bin/installdependencies.sh to install any missing Dotnet Core 6.0 dependencies."
message="Execute sudo ./bin/installdependencies.sh to install any missing Dotnet Core 3.0 dependencies."
ldd ./bin/libcoreclr.so | grep 'not found'
if [ $? -eq 0 ]; then
echo "Dependencies is missing for Dotnet Core 6.0"
echo "Dependencies is missing for Dotnet Core 3.0"
echo $message
exit 1
fi
ldd ./bin/libSystem.Security.Cryptography.Native.OpenSsl.so | grep 'not found'
ldd ./bin/System.Security.Cryptography.Native.OpenSsl.so | grep 'not found'
if [ $? -eq 0 ]; then
echo "Dependencies is missing for Dotnet Core 6.0"
echo "Dependencies is missing for Dotnet Core 3.0"
echo $message
exit 1
fi
ldd ./bin/libSystem.IO.Compression.Native.so | grep 'not found'
ldd ./bin/System.IO.Compression.Native.so | grep 'not found'
if [ $? -eq 0 ]; then
echo "Dependencies is missing for Dotnet Core 6.0"
echo "Dependencies is missing for Dotnet Core 3.0"
echo $message
exit 1
fi
@@ -54,7 +54,7 @@ then
libpath=${LD_LIBRARY_PATH:-}
$LDCONFIG_COMMAND -NXv ${libpath//:/ } 2>&1 | grep libicu >/dev/null 2>&1
if [ $? -ne 0 ]; then
echo "Libicu's dependencies is missing for Dotnet Core 6.0"
echo "Libicu's dependencies is missing for Dotnet Core 3.0"
echo $message
exit 1
fi

View File

@@ -0,0 +1,68 @@
#!/bin/bash
set -euo pipefail
function fatal() {
echo "error: $1" >&2
exit 1
}
[ -n "${GITHUB_PAT:-""}" ] || fatal "GITHUB_PAT variable must be set"
[ -n "${RUNNER_CONFIG_URL:-""}" ] || fatal "RUNNER_CONFIG_URL variable must be set"
# [ -n "${RUNNER_NAME:-""}" ] || fatal "RUNNER_NAME variable must be set"
# if [ -n "${RUNNER_NAME}" ]; then
# # Use container id to gen unique runner name if name not provide
# CONTAINER_ID=$(cat /proc/self/cgroup | head -n 1 | tr '/' '\n' | tail -1 | cut -c1-12)
# RUNNER_NAME="actions-runner-${CONTAINER_ID}"
# fi
# if the scope has a slash, it's a repo runner
# orgs_or_repos="orgs"
# if [[ "$GITHUB_RUNNER_SCOPE" == *\/* ]]; then
# orgs_or_repos="repos"
# fi
# RUNNER_REG_URL="${GITHUB_SERVER_URL:=https://github.com}/${GITHUB_RUNNER_SCOPE}"
# echo "Runner Name : ${RUNNER_NAME}"
echo "Registration URL : ${RUNNER_CONFIG_URL}"
# echo "GitHub API URL : ${GITHUB_API_URL:=https://api.github.com}"
# echo "Runner Labels : ${RUNNER_LABELS:=""}"
# TODO: if api url is not default, validate it ends in /api/v3
# RUNNER_LABELS_ARG=""
# if [ -n "${RUNNER_LABELS}" ]; then
# RUNNER_LABELS_ARG="--labels ${RUNNER_LABELS}"
# fi
# RUNNER_GROUP_ARG=""
# if [ -n "${RUNNER_GROUP}" ]; then
# RUNNER_GROUP_ARG="--runnergroup ${RUNNER_GROUP}"
# fi
# if [ -n "${K8S_HOST_IP}" ]; then
# export http_proxy=http://$K8S_HOST_IP:9090
# fi
# curl -v -s -X POST ${GITHUB_API_URL}/${orgs_or_repos}/${GITHUB_RUNNER_SCOPE}/actions/runners/registration-token -H "authorization: token $GITHUB_PAT" -H "accept: application/vnd.github.everest-preview+json"
# Generate registration token
# RUNNER_REG_TOKEN=$(curl -s -X POST ${GITHUB_API_URL}/${orgs_or_repos}/${GITHUB_RUNNER_SCOPE}/actions/runners/registration-token -H "authorization: token $GITHUB_PAT" -H "accept: application/vnd.github.everest-preview+json" | jq -r '.token')
# Create the runner and configure it
./config.sh --unattended --url $RUNNER_CONFIG_URL --pat $GITHUB_PAT --replace --ephemeral
# while (! docker version ); do
# # Docker takes a few seconds to initialize
# echo "Waiting for Docker to launch..."
# sleep 1
# done
# unset env
unset RUNNER_CONFIG_URL
unset GITHUB_PAT
# Run it
./run.sh
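A hypothetical invocation of this entrypoint, assuming it is saved as entrypoint.sh in the runner root directory (both values below are placeholders; never commit a real token):

export RUNNER_CONFIG_URL="https://github.com/my-org/my-repo"
export GITHUB_PAT="ghp_exampleexampleexample"
./entrypoint.sh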

View File

@@ -1,39 +0,0 @@
@echo off
"%~dp0\bin\Runner.Listener.exe" run %*
rem using `if %ERRORLEVEL% EQU N` instead of `if ERRORLEVEL N`
rem `if ERRORLEVEL N` means: error level is N or MORE
if %ERRORLEVEL% EQU 0 (
echo "Runner listener exit with 0 return code, stop the service, no retry needed."
exit /b 0
)
if %ERRORLEVEL% EQU 1 (
echo "Runner listener exit with terminated error, stop the service, no retry needed."
exit /b 0
)
if %ERRORLEVEL% EQU 2 (
echo "Runner listener exit with retryable error, re-launch runner in 5 seconds."
ping 127.0.0.1 -n 6 -w 1000 >NUL
exit /b 1
)
if %ERRORLEVEL% EQU 3 (
rem Sleep 5 seconds to wait for the runner update process finish
echo "Runner listener exit because of updating, re-launch runner in 5 seconds"
ping 127.0.0.1 -n 6 -w 1000 >NUL
exit /b 1
)
if %ERRORLEVEL% EQU 4 (
rem Sleep 5 seconds to wait for the ephemeral runner update process finish
echo "Runner listener exit because of updating, re-launch ephemeral runner in 5 seconds"
ping 127.0.0.1 -n 6 -w 1000 >NUL
exit /b 1
)
echo "Exiting after unknown error code: %ERRORLEVEL%"
exit /b 0

View File

@@ -1,46 +0,0 @@
#!/bin/bash
# Validate not sudo
user_id=`id -u`
if [ $user_id -eq 0 -a -z "$RUNNER_ALLOW_RUNASROOT" ]; then
echo "Must not run interactively with sudo"
exit 1
fi
# Run
shopt -s nocasematch
SOURCE="${BASH_SOURCE[0]}"
while [ -h "$SOURCE" ]; do # resolve $SOURCE until the file is no longer a symlink
DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
SOURCE="$(readlink "$SOURCE")"
[[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
done
DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
"$DIR"/bin/Runner.Listener run $*
returnCode=$?
if [[ $returnCode == 0 ]]; then
echo "Runner listener exit with 0 return code, stop the service, no retry needed."
exit 0
elif [[ $returnCode == 1 ]]; then
echo "Runner listener exit with terminated error, stop the service, no retry needed."
exit 0
elif [[ $returnCode == 2 ]]; then
echo "Runner listener exit with retryable error, re-launch runner in 5 seconds."
"$DIR"/safe_sleep.sh 5
exit 2
elif [[ $returnCode == 3 ]]; then
# Sleep 5 seconds to wait for the runner update process finish
echo "Runner listener exit because of updating, re-launch runner in 5 seconds"
"$DIR"/safe_sleep.sh 5
exit 2
elif [[ $returnCode == 4 ]]; then
# Sleep 5 seconds to wait for the ephemeral runner update process finish
echo "Runner listener exit because of updating, re-launch ephemeral runner in 5 seconds"
"$DIR"/safe_sleep.sh 5
exit 2
else
echo "Exiting with unknown error code: ${returnCode}"
exit 0
fi

View File

@@ -13,19 +13,21 @@ if defined VERBOSE_ARG (
rem Unblock files in the root of the layout folder. E.g. .cmd files.
powershell.exe -NoLogo -Sta -NoProfile -NonInteractive -ExecutionPolicy Unrestricted -Command "$VerbosePreference = %VERBOSE_ARG% ; Get-ChildItem -LiteralPath '%~dp0' | ForEach-Object { Write-Verbose ('Unblock: {0}' -f $_.FullName) ; $_ } | Unblock-File | Out-Null"
if /i "%~1" equ "localRun" (
rem ********************************************************************************
rem Local run.
rem ********************************************************************************
"%~dp0bin\Runner.Listener.exe" %*
) else (
rem ********************************************************************************
rem Run.
rem ********************************************************************************
"%~dp0bin\Runner.Listener.exe" run %*
:launch_helper
copy "%~dp0run-helper.cmd.template" "%~dp0run-helper.cmd" /Y
call "%~dp0run-helper.cmd" %*
if %ERRORLEVEL% EQU 1 (
echo "Restarting runner..."
goto :launch_helper
) else (
echo "Exiting runner..."
exit /b 0
rem Return code 4 means the run once runner received an update message.
rem Sleep 5 seconds to wait for the update process finish and run the runner again.
if ERRORLEVEL 4 (
timeout /t 5 /nobreak > NUL
"%~dp0bin\Runner.Listener.exe" run %*
)
)

View File

@@ -1,5 +1,12 @@
#!/bin/bash
# Validate not sudo
user_id=`id -u`
if [ $user_id -eq 0 -a -z "$RUNNER_ALLOW_RUNASROOT" ]; then
echo "Must not run interactively with sudo"
exit 1
fi
# Change directory to the script root directory
# https://stackoverflow.com/questions/59895/getting-the-source-directory-of-a-bash-script-from-within
SOURCE="${BASH_SOURCE[0]}"
@@ -9,16 +16,49 @@ while [ -h "$SOURCE" ]; do # resolve $SOURCE until the file is no longer a symli
[[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
done
DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
cp -f "$DIR"/run-helper.sh.template "$DIR"/run-helper.sh
# run the helper process which keeps the listener alive
while :;
do
"$DIR"/run-helper.sh $*
returnCode=$?
if [[ $returnCode -eq 2 ]]; then
echo "Restarting runner..."
# Do not "cd $DIR". For localRun, the current directory is expected to be the repo location on disk.
# Run
shopt -s nocasematch
if [[ "$1" == "localRun" ]]; then
"$DIR"/bin/Runner.Listener $*
else
echo "Exiting runner..."
exit 0
fi
"$DIR"/bin/Runner.Listener run $*
# Return code 3 means the run once runner received an update message.
# Sleep 5 seconds to wait for the update process finish
returnCode=$?
if [[ $returnCode == 3 ]]; then
if [ ! -x "$(command -v sleep)" ]; then
if [ ! -x "$(command -v ping)" ]; then
COUNT="0"
while [[ $COUNT != 5000 ]]; do
echo "SLEEP" > /dev/null
COUNT=$[$COUNT+1]
done
else
ping -c 5 127.0.0.1 > /dev/null
fi
else
sleep 5
fi
elif [[ $returnCode == 4 ]]; then
if [ ! -x "$(command -v sleep)" ]; then
if [ ! -x "$(command -v ping)" ]; then
COUNT="0"
while [[ $COUNT != 5000 ]]; do
echo "SLEEP" > /dev/null
COUNT=$[$COUNT+1]
done
else
ping -c 5 127.0.0.1 > /dev/null
fi
else
sleep 5
fi
"$DIR"/bin/Runner.Listener run $*
else
exit $returnCode
fi
fi
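
Because the compare view interleaves removed and added lines, the new helper-loop shape of run.sh is hard to read in place. The sketch below reconstructs only the added side from the lines shown in this hunk; it is illustrative, not a verbatim copy of the resulting file, and it assumes DIR has already been resolved as in the preceding lines.

# Reconstruction of the added helper loop (sketch only; assumes DIR is set as above).
cp -f "$DIR"/run-helper.sh.template "$DIR"/run-helper.sh
# run the helper process which keeps the listener alive
while :; do
    "$DIR"/run-helper.sh $*
    returnCode=$?
    if [[ $returnCode -eq 2 ]]; then
        echo "Restarting runner..."
    else
        echo "Exiting runner..."
        exit 0
    fi
done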

View File

@@ -1,6 +0,0 @@
#!/bin/bash
SECONDS=0
while [[ $SECONDS != $1 ]]; do
:
done

View File

@@ -1,57 +0,0 @@
actions.runner.plist.template
actions.runner.service.template
checkScripts/downloadCert.js
checkScripts/makeWebRequest.js
darwin.svc.sh.template
hashFiles/index.js
installdependencies.sh
macos-run-invoker.js
Microsoft.IdentityModel.Logging.dll
Microsoft.IdentityModel.Tokens.dll
Minimatch.dll
Newtonsoft.Json.Bson.dll
Newtonsoft.Json.dll
Runner.Common.deps.json
Runner.Common.dll
Runner.Common.pdb
Runner.Listener
Runner.Listener.deps.json
Runner.Listener.dll
Runner.Listener.exe
Runner.Listener.pdb
Runner.Listener.runtimeconfig.json
Runner.PluginHost
Runner.PluginHost.deps.json
Runner.PluginHost.dll
Runner.PluginHost.exe
Runner.PluginHost.pdb
Runner.PluginHost.runtimeconfig.json
Runner.Plugins.deps.json
Runner.Plugins.dll
Runner.Plugins.pdb
Runner.Sdk.deps.json
Runner.Sdk.dll
Runner.Sdk.pdb
Runner.Worker
Runner.Worker.deps.json
Runner.Worker.dll
Runner.Worker.exe
Runner.Worker.pdb
Runner.Worker.runtimeconfig.json
RunnerService.exe
RunnerService.exe.config
RunnerService.js
RunnerService.pdb
runsvc.sh
Sdk.deps.json
Sdk.dll
Sdk.pdb
System.IdentityModel.Tokens.Jwt.dll
System.Net.Http.Formatting.dll
System.Security.Cryptography.Pkcs.dll
System.Security.Cryptography.ProtectedData.dll
System.ServiceProcess.ServiceController.dll
systemd.svc.sh.template
update.cmd.template
update.sh.template
YamlDotNet.dll

View File

@@ -1,263 +0,0 @@
api-ms-win-core-console-l1-1-0.dll
api-ms-win-core-console-l1-2-0.dll
api-ms-win-core-datetime-l1-1-0.dll
api-ms-win-core-debug-l1-1-0.dll
api-ms-win-core-errorhandling-l1-1-0.dll
api-ms-win-core-file-l1-1-0.dll
api-ms-win-core-file-l1-2-0.dll
api-ms-win-core-file-l2-1-0.dll
api-ms-win-core-handle-l1-1-0.dll
api-ms-win-core-heap-l1-1-0.dll
api-ms-win-core-interlocked-l1-1-0.dll
api-ms-win-core-libraryloader-l1-1-0.dll
api-ms-win-core-localization-l1-2-0.dll
api-ms-win-core-memory-l1-1-0.dll
api-ms-win-core-namedpipe-l1-1-0.dll
api-ms-win-core-processenvironment-l1-1-0.dll
api-ms-win-core-processthreads-l1-1-0.dll
api-ms-win-core-processthreads-l1-1-1.dll
api-ms-win-core-profile-l1-1-0.dll
api-ms-win-core-rtlsupport-l1-1-0.dll
api-ms-win-core-string-l1-1-0.dll
api-ms-win-core-synch-l1-1-0.dll
api-ms-win-core-synch-l1-2-0.dll
api-ms-win-core-sysinfo-l1-1-0.dll
api-ms-win-core-timezone-l1-1-0.dll
api-ms-win-core-util-l1-1-0.dll
api-ms-win-crt-conio-l1-1-0.dll
api-ms-win-crt-convert-l1-1-0.dll
api-ms-win-crt-environment-l1-1-0.dll
api-ms-win-crt-filesystem-l1-1-0.dll
api-ms-win-crt-heap-l1-1-0.dll
api-ms-win-crt-locale-l1-1-0.dll
api-ms-win-crt-math-l1-1-0.dll
api-ms-win-crt-multibyte-l1-1-0.dll
api-ms-win-crt-private-l1-1-0.dll
api-ms-win-crt-process-l1-1-0.dll
api-ms-win-crt-runtime-l1-1-0.dll
api-ms-win-crt-stdio-l1-1-0.dll
api-ms-win-crt-string-l1-1-0.dll
api-ms-win-crt-time-l1-1-0.dll
api-ms-win-crt-utility-l1-1-0.dll
clrcompression.dll
clretwrc.dll
clrjit.dll
coreclr.dll
createdump
createdump.exe
dbgshim.dll
hostfxr.dll
hostpolicy.dll
libclrjit.dylib
libclrjit.so
libcoreclr.dylib
libcoreclr.so
libcoreclrtraceptprovider.so
libdbgshim.dylib
libdbgshim.so
libhostfxr.dylib
libhostfxr.so
libhostpolicy.dylib
libhostpolicy.so
libmscordaccore.dylib
libmscordaccore.so
libmscordbi.dylib
libmscordbi.so
Microsoft.CSharp.dll
Microsoft.DiaSymReader.Native.amd64.dll
Microsoft.VisualBasic.Core.dll
Microsoft.VisualBasic.dll
Microsoft.Win32.Primitives.dll
Microsoft.Win32.Registry.dll
mscordaccore.dll
mscordaccore_amd64_amd64_6.0.21.52210.dll
mscordbi.dll
mscorlib.dll
mscorrc.debug.dll
mscorrc.dll
msquic.dll
netstandard.dll
SOS_README.md
System.AppContext.dll
System.Buffers.dll
System.Collections.Concurrent.dll
System.Collections.dll
System.Collections.Immutable.dll
System.Collections.NonGeneric.dll
System.Collections.Specialized.dll
System.ComponentModel.Annotations.dll
System.ComponentModel.DataAnnotations.dll
System.ComponentModel.dll
System.ComponentModel.EventBasedAsync.dll
System.ComponentModel.Primitives.dll
System.ComponentModel.TypeConverter.dll
System.Configuration.dll
System.Console.dll
System.Core.dll
System.Data.Common.dll
System.Data.DataSetExtensions.dll
System.Data.dll
System.Diagnostics.Contracts.dll
System.Diagnostics.Debug.dll
System.Diagnostics.DiagnosticSource.dll
System.Diagnostics.FileVersionInfo.dll
System.Diagnostics.Process.dll
System.Diagnostics.StackTrace.dll
System.Diagnostics.TextWriterTraceListener.dll
System.Diagnostics.Tools.dll
System.Diagnostics.TraceSource.dll
System.Diagnostics.Tracing.dll
System.dll
System.Drawing.dll
System.Drawing.Primitives.dll
System.Dynamic.Runtime.dll
System.Formats.Asn1.dll
System.Globalization.Calendars.dll
System.Globalization.dll
System.Globalization.Extensions.dll
System.Globalization.Native.dylib
System.Globalization.Native.so
System.IO.Compression.Brotli.dll
System.IO.Compression.dll
System.IO.Compression.FileSystem.dll
System.IO.Compression.Native.a
System.IO.Compression.Native.dll
System.IO.Compression.Native.dylib
System.IO.Compression.Native.so
System.IO.Compression.ZipFile.dll
System.IO.dll
System.IO.FileSystem.AccessControl.dll
System.IO.FileSystem.dll
System.IO.FileSystem.DriveInfo.dll
System.IO.FileSystem.Primitives.dll
System.IO.FileSystem.Watcher.dll
System.IO.IsolatedStorage.dll
System.IO.MemoryMappedFiles.dll
System.IO.Pipes.AccessControl.dll
System.IO.Pipes.dll
System.IO.UnmanagedMemoryStream.dll
System.Linq.dll
System.Linq.Expressions.dll
System.Linq.Parallel.dll
System.Linq.Queryable.dll
System.Memory.dll
System.Native.a
System.Native.dylib
System.Native.so
System.Net.dll
System.Net.Http.dll
System.Net.Http.Json.dll
System.Net.Http.Native.a
System.Net.Http.Native.dylib
System.Net.Http.Native.so
System.Net.HttpListener.dll
System.Net.Mail.dll
System.Net.NameResolution.dll
System.Net.NetworkInformation.dll
System.Net.Ping.dll
System.Net.Primitives.dll
System.Net.Quic.dll
System.Net.Requests.dll
System.Net.Security.dll
System.Net.Security.Native.a
System.Net.Security.Native.dylib
System.Net.Security.Native.so
System.Net.ServicePoint.dll
System.Net.Sockets.dll
System.Net.WebClient.dll
System.Net.WebHeaderCollection.dll
System.Net.WebProxy.dll
System.Net.WebSockets.Client.dll
System.Net.WebSockets.dll
System.Numerics.dll
System.Numerics.Vectors.dll
System.ObjectModel.dll
System.Private.CoreLib.dll
System.Private.DataContractSerialization.dll
System.Private.Uri.dll
System.Private.Xml.dll
System.Private.Xml.Linq.dll
System.Reflection.DispatchProxy.dll
System.Reflection.dll
System.Reflection.Emit.dll
System.Reflection.Emit.ILGeneration.dll
System.Reflection.Emit.Lightweight.dll
System.Reflection.Extensions.dll
System.Reflection.Metadata.dll
System.Reflection.Primitives.dll
System.Reflection.TypeExtensions.dll
System.Resources.Reader.dll
System.Resources.ResourceManager.dll
System.Resources.Writer.dll
System.Runtime.CompilerServices.Unsafe.dll
System.Runtime.CompilerServices.VisualC.dll
System.Runtime.dll
System.Runtime.Extensions.dll
System.Runtime.Handles.dll
System.Runtime.InteropServices.dll
System.Runtime.InteropServices.RuntimeInformation.dll
System.Runtime.InteropServices.WindowsRuntime.dll
System.Runtime.Intrinsics.dll
System.Runtime.Loader.dll
System.Runtime.Numerics.dll
System.Runtime.Serialization.dll
System.Runtime.Serialization.Formatters.dll
System.Runtime.Serialization.Json.dll
System.Runtime.Serialization.Primitives.dll
System.Runtime.Serialization.Xml.dll
System.Runtime.WindowsRuntime.dll
System.Runtime.WindowsRuntime.UI.Xaml.dll
System.Security.AccessControl.dll
System.Security.Claims.dll
System.Security.Cryptography.Algorithms.dll
System.Security.Cryptography.Cng.dll
System.Security.Cryptography.Csp.dll
System.Security.Cryptography.Encoding.dll
System.Security.Cryptography.Native.Apple.a
System.Security.Cryptography.Native.Apple.dylib
System.Security.Cryptography.Native.OpenSsl.a
System.Security.Cryptography.Native.OpenSsl.dylib
System.Security.Cryptography.Native.OpenSsl.so
System.Security.Cryptography.OpenSsl.dll
System.Security.Cryptography.Primitives.dll
System.Security.Cryptography.X509Certificates.dll
System.Security.Cryptography.XCertificates.dll
System.Security.dll
System.Security.Principal.dll
System.Security.Principal.Windows.dll
System.Security.SecureString.dll
System.ServiceModel.Web.dll
System.ServiceProcess.dll
System.Text.Encoding.CodePages.dll
System.Text.Encoding.dll
System.Text.Encoding.Extensions.dll
System.Text.Encodings.Web.dll
System.Text.Json.dll
System.Text.RegularExpressions.dll
System.Threading.Channels.dll
System.Threading.dll
System.Threading.Overlapped.dll
System.Threading.Tasks.Dataflow.dll
System.Threading.Tasks.dll
System.Threading.Tasks.Extensions.dll
System.Threading.Tasks.Parallel.dll
System.Threading.Thread.dll
System.Threading.ThreadPool.dll
System.Threading.Timer.dll
System.Transactions.dll
System.Transactions.Local.dll
System.ValueTuple.dll
System.Web.dll
System.Web.HttpUtility.dll
System.Windows.dll
System.Xml.dll
System.Xml.Linq.dll
System.Xml.ReaderWriter.dll
System.Xml.Serialization.dll
System.Xml.XDocument.dll
System.Xml.XmlDocument.dll
System.Xml.XmlSerializer.dll
System.Xml.XPath.dll
System.Xml.XPath.XDocument.dll
ucrtbase.dll
WindowsBase.dll

View File

@@ -1,24 +0,0 @@
[
{
"HashValue": "<NO_RUNTIME_EXTERNALS_HASH>",
"DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noruntime-noexternals.tar.gz",
"TrimmedContents": {
"dotnetRuntime": "<RUNTIME_HASH>",
"externals": "<EXTERNALS_HASH>"
}
},
{
"HashValue": "<NO_RUNTIME_HASH>",
"DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noruntime.tar.gz",
"TrimmedContents": {
"dotnetRuntime": "<RUNTIME_HASH>"
}
},
{
"HashValue": "<NO_EXTERNALS_HASH>",
"DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noexternals.tar.gz",
"TrimmedContents": {
"externals": "<EXTERNALS_HASH>"
}
}
]

View File

@@ -1,24 +0,0 @@
[
{
"HashValue": "<NO_RUNTIME_EXTERNALS_HASH>",
"DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noruntime-noexternals.zip",
"TrimmedContents": {
"dotnetRuntime": "<RUNTIME_HASH>",
"externals": "<EXTERNALS_HASH>"
}
},
{
"HashValue": "<NO_RUNTIME_HASH>",
"DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noruntime.zip",
"TrimmedContents": {
"dotnetRuntime": "<RUNTIME_HASH>"
}
},
{
"HashValue": "<NO_EXTERNALS_HASH>",
"DownloadUrl": "https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-<RUNNER_PLATFORM>-<RUNNER_VERSION>-noexternals.zip",
"TrimmedContents": {
"externals": "<EXTERNALS_HASH>"
}
}
]

View File

@@ -33,9 +33,6 @@ namespace GitHub.Runner.Common
[DataMember(EmitDefaultValue = false)]
public string PoolName { get; set; }
[DataMember(EmitDefaultValue = false)]
public bool DisableUpdate { get; set; }
[DataMember(EmitDefaultValue = false)]
public bool Ephemeral { get; set; }

View File

@@ -129,7 +129,6 @@ namespace GitHub.Runner.Common
public static readonly string Ephemeral = "ephemeral";
public static readonly string Help = "help";
public static readonly string Replace = "replace";
public static readonly string DisableUpdate = "disableupdate";
public static readonly string Once = "once"; // Keep this around since customers still relies on it
public static readonly string RunAsService = "runasservice";
public static readonly string Unattended = "unattended";
@@ -156,8 +155,7 @@ namespace GitHub.Runner.Common
public static readonly string LowDiskSpace = "LOW_DISK_SPACE";
public static readonly string UnsupportedCommand = "UNSUPPORTED_COMMAND";
public static readonly string UnsupportedCommandMessageDisabled = "The `{0}` command is disabled. Please upgrade to using Environment Files or opt into unsecure command execution by setting the `ACTIONS_ALLOW_UNSECURE_COMMANDS` environment variable to `true`. For more information see: https://github.blog/changelog/2020-10-01-github-actions-deprecating-set-env-and-add-path-commands/";
public static readonly string UnsupportedStopCommandTokenDisabled = "You cannot use a endToken that is an empty string, the string 'pause-logging', or another workflow command. For more information see: https://docs.github.com/actions/learn-github-actions/workflow-commands-for-github-actions#example-stopping-and-starting-workflow-commands or opt into insecure command execution by setting the `ACTIONS_ALLOW_UNSECURE_STOPCOMMAND_TOKENS` environment variable to `true`.";
public static readonly string UnsupportedSummarySize = "$GITHUB_STEP_SUMMARY upload aborted, supports content up to a size of {0}k, got {1}k. For more information see: https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-markdown-summary";
public static readonly string UnsupportedStopCommandTokenDisabled = "You cannot use a endToken that is an empty string, the string 'pause-logging', or another workflow command. For more information see: https://docs.github.com/en/actions/learn-github-actions/workflow-commands-for-github-actions#example-stopping-and-starting-workflow-commands or opt into insecure command execution by setting the `ACTIONS_ALLOW_UNSECURE_STOPCOMMAND_TOKENS` environment variable to `true`.";
}
public static class RunnerEvent
@@ -189,12 +187,6 @@ namespace GitHub.Runner.Common
public static readonly string Success = "success";
}
public static class Hooks
{
public static readonly string JobStartedStepName = "Set up runner";
public static readonly string JobCompletedStepName = "Complete runner";
}
public static class Path
{
public static readonly string ActionsDirectory = "_actions";
@@ -226,15 +218,11 @@ namespace GitHub.Runner.Common
public static readonly string AllowUnsupportedStopCommandTokens = "ACTIONS_ALLOW_UNSECURE_STOPCOMMAND_TOKENS";
public static readonly string RunnerDebug = "ACTIONS_RUNNER_DEBUG";
public static readonly string StepDebug = "ACTIONS_STEP_DEBUG";
public static readonly string AllowActionsUseUnsecureNodeVersion = "ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION";
}
public static class Agent
{
public static readonly string ToolsDirectory = "agent.ToolsDirectory";
// Set this env var to "node12" to downgrade the node version for internal functions (e.g hashfiles). This does NOT affect the version of node actions.
public static readonly string ForcedInternalNodeVersion = "ACTIONS_RUNNER_FORCED_INTERNAL_NODE_VERSION";
}
public static class System

View File

@@ -60,7 +60,6 @@ namespace GitHub.Runner.Common
case "GitHub.Runner.Worker.IFileCommandExtension":
Add<T>(extensions, "GitHub.Runner.Worker.AddPathFileCommand, Runner.Worker");
Add<T>(extensions, "GitHub.Runner.Worker.SetEnvFileCommand, Runner.Worker");
Add<T>(extensions, "GitHub.Runner.Worker.CreateStepSummaryCommand, Runner.Worker");
break;
case "GitHub.Runner.Listener.Check.ICheckExtension":
Add<T>(extensions, "GitHub.Runner.Listener.Check.InternetCheck, Runner.Listener");

View File

@@ -98,14 +98,14 @@ namespace GitHub.Runner.Common
{
int logPageSize;
string logSizeEnv = Environment.GetEnvironmentVariable($"{hostType.ToUpperInvariant()}_LOGSIZE");
if (string.IsNullOrEmpty(logSizeEnv) || !int.TryParse(logSizeEnv, out logPageSize))
if (!string.IsNullOrEmpty(logSizeEnv) || !int.TryParse(logSizeEnv, out logPageSize))
{
logPageSize = _defaultLogPageSize;
}
int logRetentionDays;
string logRetentionDaysEnv = Environment.GetEnvironmentVariable($"{hostType.ToUpperInvariant()}_LOGRETENTION");
if (string.IsNullOrEmpty(logRetentionDaysEnv) || !int.TryParse(logRetentionDaysEnv, out logRetentionDays))
if (!string.IsNullOrEmpty(logRetentionDaysEnv) || !int.TryParse(logRetentionDaysEnv, out logRetentionDays))
{
logRetentionDays = _defaultLogRetentionDays;
}
@@ -193,11 +193,6 @@ namespace GitHub.Runner.Common
_trace.Info($"No proxy settings were found based on environmental variables (http_proxy/https_proxy/HTTP_PROXY/HTTPS_PROXY)");
}
if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_TLS_NO_VERIFY")))
{
_trace.Warning($"Runner is running under insecure mode: HTTPS server certifcate validation has been turned off by GITHUB_ACTIONS_RUNNER_TLS_NO_VERIFY environment variable.");
}
var credFile = GetConfigFile(WellKnownConfigFile.Credentials);
if (File.Exists(credFile))
{
@@ -205,19 +200,9 @@ namespace GitHub.Runner.Common
if (credData != null &&
credData.Data.TryGetValue("clientId", out var clientId))
{
_userAgents.Add(new ProductInfoHeaderValue("ClientId", clientId));
_userAgents.Add(new ProductInfoHeaderValue($"RunnerId", clientId));
}
}
var runnerFile = GetConfigFile(WellKnownConfigFile.Runner);
if (File.Exists(runnerFile))
{
var runnerSettings = IOUtil.LoadObject<RunnerSettings>(runnerFile);
_userAgents.Add(new ProductInfoHeaderValue("RunnerId", runnerSettings.AgentId.ToString(CultureInfo.InvariantCulture)));
_userAgents.Add(new ProductInfoHeaderValue("GroupId", runnerSettings.PoolId.ToString(CultureInfo.InvariantCulture)));
}
_userAgents.Add(new ProductInfoHeaderValue("CommitSHA", BuildConstants.Source.CommitHash));
}
public string GetDirectory(WellKnownDirectory directory)

View File

@@ -1,4 +1,3 @@
using System;
using System.Net.Http;
using GitHub.Runner.Sdk;
@@ -14,14 +13,7 @@ namespace GitHub.Runner.Common
{
public HttpClientHandler CreateClientHandler(RunnerWebProxy webProxy)
{
var client = new HttpClientHandler() { Proxy = webProxy };
if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_TLS_NO_VERIFY")))
{
client.ServerCertificateCustomValidationCallback = HttpClientHandler.DangerousAcceptAnyServerCertificateValidator;
}
return client;
return new HttpClientHandler() { Proxy = webProxy };
}
}
}

View File

@@ -1,36 +1,32 @@
using GitHub.DistributedTask.WebApi;
using GitHub.DistributedTask.WebApi;
using System;
using System.Collections.Generic;
using System.IO;
using System.Net.Http;
using System.Net.WebSockets;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using GitHub.Runner.Sdk;
using GitHub.Services.Common;
using GitHub.Services.WebApi;
using Newtonsoft.Json;
namespace GitHub.Runner.Common
{
[ServiceLocator(Default = typeof(JobServer))]
public interface IJobServer : IRunnerService, IAsyncDisposable
public interface IJobServer : IRunnerService
{
Task ConnectAsync(VssConnection jobConnection);
void InitializeWebsocketClient(ServiceEndpoint serviceEndpoint);
// logging and console
Task<TaskLog> AppendLogContentAsync(Guid scopeIdentifier, string hubName, Guid planId, int logId, Stream uploadStream, CancellationToken cancellationToken);
Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long? startLine, CancellationToken cancellationToken);
Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, CancellationToken cancellationToken);
Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long startLine, CancellationToken cancellationToken);
Task<TaskAttachment> CreateAttachmentAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, String type, String name, Stream uploadStream, CancellationToken cancellationToken);
Task<TaskLog> CreateLogAsync(Guid scopeIdentifier, string hubName, Guid planId, TaskLog log, CancellationToken cancellationToken);
Task<Timeline> CreateTimelineAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, CancellationToken cancellationToken);
Task<List<TimelineRecord>> UpdateTimelineRecordsAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, IEnumerable<TimelineRecord> records, CancellationToken cancellationToken);
Task RaisePlanEventAsync<T>(Guid scopeIdentifier, string hubName, Guid planId, T eventData, CancellationToken cancellationToken) where T : JobEvent;
Task<Timeline> GetTimelineAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, CancellationToken cancellationToken);
Task<ActionDownloadInfoCollection> ResolveActionDownloadInfoAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid jobId, ActionReferenceList actions, CancellationToken cancellationToken);
Task<ActionDownloadInfoCollection> ResolveActionDownloadInfoAsync(Guid scopeIdentifier, string hubName, Guid planId, ActionReferenceList actions, CancellationToken cancellationToken);
}
public sealed class JobServer : RunnerService, IJobServer
@@ -38,26 +34,11 @@ namespace GitHub.Runner.Common
private bool _hasConnection;
private VssConnection _connection;
private TaskHttpClient _taskClient;
private ClientWebSocket _websocketClient;
private ServiceEndpoint _serviceEndpoint;
private int totalBatchedLinesAttemptedByWebsocket = 0;
private int failedAttemptsToPostBatchedLinesByWebsocket = 0;
private static readonly TimeSpan _minDelayForWebsocketReconnect = TimeSpan.FromMilliseconds(100);
private static readonly TimeSpan _maxDelayForWebsocketReconnect = TimeSpan.FromMilliseconds(500);
private static readonly int _minWebsocketFailurePercentageAllowed = 50;
private static readonly int _minWebsocketBatchedLinesCountToConsider = 5;
private Task _websocketConnectTask;
public async Task ConnectAsync(VssConnection jobConnection)
{
_connection = jobConnection;
int totalAttempts = 5;
int attemptCount = totalAttempts;
int attemptCount = 5;
var configurationStore = HostContext.GetService<IConfigurationStore>();
var runnerSettings = configurationStore.GetSettings();
@@ -75,21 +56,18 @@ namespace GitHub.Runner.Common
if (runnerSettings.IsHostedServer)
{
await CheckNetworkEndpointsAsync(attemptCount);
await CheckNetworkEndpointsAsync();
}
}
int attempt = totalAttempts - attemptCount;
TimeSpan backoff = BackoffTimerHelper.GetExponentialBackoff(attempt, TimeSpan.FromMilliseconds(100), TimeSpan.FromSeconds(3.2), TimeSpan.FromMilliseconds(100));
await Task.Delay(backoff);
await Task.Delay(100);
}
_taskClient = _connection.GetClient<TaskHttpClient>();
_hasConnection = true;
}
private async Task CheckNetworkEndpointsAsync(int attemptsLeft)
private async Task CheckNetworkEndpointsAsync()
{
try
{
@@ -101,8 +79,8 @@ namespace GitHub.Runner.Common
actionsClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents);
// Call the _apis/health endpoint, and include how many attempts are left as a URL query for easy tracking
var response = await actionsClient.GetAsync(new Uri(baseUri, $"_apis/health?_internalRunnerAttemptsLeft={attemptsLeft}"));
// Call the _apis/health endpoint
var response = await actionsClient.GetAsync(new Uri(baseUri, "_apis/health"));
Trace.Info($"Actions health status code: {response.StatusCode}");
}
}
@@ -122,8 +100,8 @@ namespace GitHub.Runner.Common
{
gitHubClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents);
// Call the api.github.com endpoint, and include how many attempts are left as a URL query for easy tracking
var response = await gitHubClient.GetAsync($"https://api.github.com?_internalRunnerAttemptsLeft={attemptsLeft}");
// Call the api.github.com endpoint
var response = await gitHubClient.GetAsync("https://api.github.com");
Trace.Info($"api.github.com status code: {response.StatusCode}");
}
}
@@ -135,19 +113,6 @@ namespace GitHub.Runner.Common
}
}
public void InitializeWebsocketClient(ServiceEndpoint serviceEndpoint)
{
this._serviceEndpoint = serviceEndpoint;
InitializeWebsocketClient(TimeSpan.Zero);
}
public ValueTask DisposeAsync()
{
_websocketClient?.CloseOutputAsync(WebSocketCloseStatus.NormalClosure, "Shutdown", CancellationToken.None);
GC.SuppressFinalize(this);
return ValueTask.CompletedTask;
}
private void CheckConnection()
{
if (!_hasConnection)
@@ -156,48 +121,6 @@ namespace GitHub.Runner.Common
}
}
private void InitializeWebsocketClient(TimeSpan delay)
{
if (_serviceEndpoint.Authorization != null &&
_serviceEndpoint.Authorization.Parameters.TryGetValue(EndpointAuthorizationParameters.AccessToken, out var accessToken) &&
!string.IsNullOrEmpty(accessToken))
{
if (_serviceEndpoint.Data.TryGetValue("FeedStreamUrl", out var feedStreamUrl) && !string.IsNullOrEmpty(feedStreamUrl))
{
// let's ensure we use the right scheme
feedStreamUrl = feedStreamUrl.Replace("https://", "wss://").Replace("http://", "ws://");
Trace.Info($"Creating websocket client ..." + feedStreamUrl);
this._websocketClient = new ClientWebSocket();
this._websocketClient.Options.SetRequestHeader("Authorization", $"Bearer {accessToken}");
this._websocketConnectTask = ConnectWebSocketClient(feedStreamUrl, delay);
}
else
{
Trace.Info($"No FeedStreamUrl found, so we will use Rest API calls for sending feed data");
}
}
else
{
Trace.Info($"No access token from the service endpoint");
}
}
private async Task ConnectWebSocketClient(string feedStreamUrl, TimeSpan delay)
{
try
{
Trace.Info($"Attempting to start websocket client with delay {delay}.");
await Task.Delay(delay);
await this._websocketClient.ConnectAsync(new Uri(feedStreamUrl), default(CancellationToken));
Trace.Info($"Successfully started websocket client.");
}
catch(Exception ex)
{
Trace.Info("Exception caught during websocket client connect, fallback of HTTP would be used now instead of websocket.");
Trace.Error(ex);
}
}
//-----------------------------------------------------------------
// Feedback: WebConsole, TimelineRecords and Logs
//-----------------------------------------------------------------
@@ -208,72 +131,16 @@ namespace GitHub.Runner.Common
return _taskClient.AppendLogContentAsync(scopeIdentifier, hubName, planId, logId, uploadStream, cancellationToken: cancellationToken);
}
public async Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long? startLine, CancellationToken cancellationToken)
public Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, CancellationToken cancellationToken)
{
CheckConnection();
var pushedLinesViaWebsocket = false;
if (_websocketConnectTask != null)
{
await _websocketConnectTask;
return _taskClient.AppendTimelineRecordFeedAsync(scopeIdentifier, hubName, planId, timelineId, timelineRecordId, stepId, lines, cancellationToken: cancellationToken);
}
// "_websocketClient != null" implies either: We have a successful connection OR we have to attempt sending again and then reconnect
// ...in other words, if websocket client is null, we will skip sending to websocket and just use rest api calls to send data
if (_websocketClient != null)
public Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long startLine, CancellationToken cancellationToken)
{
var linesWrapper = startLine.HasValue? new TimelineRecordFeedLinesWrapper(stepId, lines, startLine.Value): new TimelineRecordFeedLinesWrapper(stepId, lines);
var jsonData = StringUtil.ConvertToJson(linesWrapper);
try
{
totalBatchedLinesAttemptedByWebsocket++;
var jsonDataBytes = Encoding.UTF8.GetBytes(jsonData);
// break the message into chunks of 1024 bytes
for (var i = 0; i < jsonDataBytes.Length; i += 1 * 1024)
{
var lastChunk = i + (1 * 1024) >= jsonDataBytes.Length;
var chunk = new ArraySegment<byte>(jsonDataBytes, i, Math.Min(1 * 1024, jsonDataBytes.Length - i));
await _websocketClient.SendAsync(chunk, WebSocketMessageType.Text, endOfMessage:lastChunk, cancellationToken);
}
pushedLinesViaWebsocket = true;
}
catch (Exception ex)
{
failedAttemptsToPostBatchedLinesByWebsocket++;
Trace.Info($"Caught exception during append web console line to websocket, let's fallback to sending via non-websocket call (total calls: {totalBatchedLinesAttemptedByWebsocket}, failed calls: {failedAttemptsToPostBatchedLinesByWebsocket}, websocket state: {this._websocketClient?.State}).");
Trace.Error(ex);
if (totalBatchedLinesAttemptedByWebsocket > _minWebsocketBatchedLinesCountToConsider)
{
// let's consider failure percentage
if (failedAttemptsToPostBatchedLinesByWebsocket * 100 / totalBatchedLinesAttemptedByWebsocket > _minWebsocketFailurePercentageAllowed)
{
Trace.Info($"Exhausted websocket allowed retries, we will not attempt websocket connection for this job to post lines again.");
_websocketClient?.CloseOutputAsync(WebSocketCloseStatus.InternalServerError, "Shutdown due to failures", cancellationToken);
// By setting it to null, we will ensure that we never try websocket path again for this job
_websocketClient = null;
}
}
if (_websocketClient != null)
{
var delay = BackoffTimerHelper.GetRandomBackoff(_minDelayForWebsocketReconnect, _maxDelayForWebsocketReconnect);
Trace.Info($"Websocket is not open, let's attempt to connect back again with random backoff {delay} ms (total calls: {totalBatchedLinesAttemptedByWebsocket}, failed calls: {failedAttemptsToPostBatchedLinesByWebsocket}).");
InitializeWebsocketClient(delay);
}
}
}
if (!pushedLinesViaWebsocket)
{
if (startLine.HasValue)
{
await _taskClient.AppendTimelineRecordFeedAsync(scopeIdentifier, hubName, planId, timelineId, timelineRecordId, stepId, lines, startLine.Value, cancellationToken: cancellationToken);
}
else
{
await _taskClient.AppendTimelineRecordFeedAsync(scopeIdentifier, hubName, planId, timelineId, timelineRecordId, stepId, lines, cancellationToken: cancellationToken);
}
}
CheckConnection();
return _taskClient.AppendTimelineRecordFeedAsync(scopeIdentifier, hubName, planId, timelineId, timelineRecordId, stepId, lines, startLine, cancellationToken: cancellationToken);
}
public Task<TaskAttachment> CreateAttachmentAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, string type, string name, Stream uploadStream, CancellationToken cancellationToken)
@@ -315,10 +182,10 @@ namespace GitHub.Runner.Common
//-----------------------------------------------------------------
// Action download info
//-----------------------------------------------------------------
public Task<ActionDownloadInfoCollection> ResolveActionDownloadInfoAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid jobId, ActionReferenceList actions, CancellationToken cancellationToken)
public Task<ActionDownloadInfoCollection> ResolveActionDownloadInfoAsync(Guid scopeIdentifier, string hubName, Guid planId, ActionReferenceList actions, CancellationToken cancellationToken)
{
CheckConnection();
return _taskClient.ResolveActionDownloadInfoAsync(scopeIdentifier, hubName, planId, jobId, actions, cancellationToken: cancellationToken);
return _taskClient.ResolveActionDownloadInfoAsync(scopeIdentifier, hubName, planId, actions, cancellationToken: cancellationToken);
}
}
}

View File

@@ -1,13 +1,14 @@
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common.Util;
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Collections.Concurrent;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Sdk;
using Pipelines = GitHub.DistributedTask.Pipelines;
using GitHub.Runner.Sdk;
namespace GitHub.Runner.Common
{
@@ -75,7 +76,6 @@ namespace GitHub.Runner.Common
// at the same time we can cut the load to server after the build run for more than 60s
private int _webConsoleLineAggressiveDequeueCount = 0;
private const int _webConsoleLineAggressiveDequeueLimit = 4 * 60;
private const int _webConsoleLineQueueSizeLimit = 1024;
private bool _webConsoleLineAggressiveDequeue = true;
private bool _firstConsoleOutputs = true;
@@ -89,10 +89,6 @@ namespace GitHub.Runner.Common
{
Trace.Entering();
var serviceEndPoint = jobRequest.Resources.Endpoints.Single(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
_jobServer.InitializeWebsocketClient(serviceEndPoint);
if (_queueInProcess)
{
Trace.Info("No-opt, all queue process tasks are running.");
@@ -160,29 +156,14 @@ namespace GitHub.Runner.Common
await ProcessTimelinesUpdateQueueAsync(runOnce: true);
Trace.Info("Timeline update queue drained.");
Trace.Info($"Disposing job server ...");
await _jobServer.DisposeAsync();
Trace.Info("All queue process tasks have been stopped, and all queues are drained.");
}
public void QueueWebConsoleLine(Guid stepRecordId, string line, long? lineNumber)
{
// We only process 500 lines of the queue every time.
// If the queue is backing up due to slow Http request or flood of output from step,
// we will drop the output to avoid extra memory consumption from the runner since the live console feed is best effort.
if (!string.IsNullOrEmpty(line) && _webConsoleLineQueue.Count < _webConsoleLineQueueSizeLimit)
{
Trace.Verbose("Enqueue web console line queue: {0}", line);
if (line.Length > 1024)
{
Trace.Verbose("Web console line is more than 1024 chars, truncate to first 1024 chars");
line = $"{line.Substring(0, 1024)}...";
}
_webConsoleLineQueue.Enqueue(new ConsoleLineInfo(stepRecordId, line, lineNumber));
}
}
public void QueueFileUpload(Guid timelineId, Guid timelineRecordId, string type, string name, string path, bool deleteSource)
{
@@ -249,6 +230,12 @@ namespace GitHub.Runner.Common
stepRecordIds.Add(lineInfo.StepRecordId);
}
if (!string.IsNullOrEmpty(lineInfo.Line) && lineInfo.Line.Length > 1024)
{
Trace.Verbose("Web console line is more than 1024 chars, truncate to first 1024 chars");
lineInfo.Line = $"{lineInfo.Line.Substring(0, 1024)}...";
}
stepsConsoleLines[lineInfo.StepRecordId].Add(new TimelineRecordLogLine(lineInfo.Line, lineInfo.LineNumber));
linesCounter++;
@@ -299,7 +286,15 @@ namespace GitHub.Runner.Common
{
try
{
await _jobServer.AppendTimelineRecordFeedAsync(_scopeIdentifier, _hubName, _planId, _jobTimelineId, _jobTimelineRecordId, stepRecordId, batch.Select(logLine => logLine.Line).ToList(), batch[0].LineNumber, default(CancellationToken));
// we will not requeue failed batch, since the web console lines are time sensitive.
if (batch[0].LineNumber.HasValue)
{
await _jobServer.AppendTimelineRecordFeedAsync(_scopeIdentifier, _hubName, _planId, _jobTimelineId, _jobTimelineRecordId, stepRecordId, batch.Select(logLine => logLine.Line).ToList(), batch[0].LineNumber.Value, default(CancellationToken));
}
else
{
await _jobServer.AppendTimelineRecordFeedAsync(_scopeIdentifier, _hubName, _planId, _jobTimelineId, _jobTimelineRecordId, stepRecordId, batch.Select(logLine => logLine.Line).ToList(), default(CancellationToken));
}
if (_firstConsoleOutputs)
{

View File

@@ -1,12 +1,14 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net6.0</TargetFramework>
<TargetFramework>netcoreapp3.1</TargetFramework>
<OutputType>Library</OutputType>
<RuntimeIdentifiers>win-x64;win-x86;linux-x64;linux-arm64;linux-arm;osx-x64</RuntimeIdentifiers>
<TargetLatestRuntimePatch>true</TargetLatestRuntimePatch>
<AssetTargetFallback>portable-net45+win8</AssetTargetFallback>
<NoWarn>NU1701;NU1603</NoWarn>
<Version>$(Version)</Version>
<TieredCompilationQuickJit>true</TieredCompilationQuickJit>
</PropertyGroup>
<ItemGroup>

View File

@@ -51,7 +51,7 @@ namespace GitHub.Runner.Common
Task<PackageMetadata> GetPackageAsync(string packageType, string platform, string version, bool includeToken, CancellationToken cancellationToken);
// agent update
Task<TaskAgent> UpdateAgentUpdateStateAsync(int agentPoolId, int agentId, string currentState, string trace);
Task<TaskAgent> UpdateAgentUpdateStateAsync(int agentPoolId, int agentId, string currentState);
}
public sealed class RunnerServer : RunnerService, IRunnerServer
@@ -341,10 +341,25 @@ namespace GitHub.Runner.Common
return _genericTaskAgentClient.GetPackageAsync(packageType, platform, version, includeToken, cancellationToken: cancellationToken);
}
public Task<TaskAgent> UpdateAgentUpdateStateAsync(int agentPoolId, int agentId, string currentState, string trace)
public Task<TaskAgent> UpdateAgentUpdateStateAsync(int agentPoolId, int agentId, string currentState)
{
CheckConnection(RunnerConnectionType.Generic);
return _genericTaskAgentClient.UpdateAgentUpdateStateAsync(agentPoolId, agentId, currentState, trace);
return _genericTaskAgentClient.UpdateAgentUpdateStateAsync(agentPoolId, agentId, currentState);
}
//-----------------------------------------------------------------
// Runner Auth Url
//-----------------------------------------------------------------
public Task<string> GetRunnerAuthUrlAsync(int runnerPoolId, int runnerId)
{
CheckConnection(RunnerConnectionType.MessageQueue);
return _messageTaskAgentClient.GetAgentAuthUrlAsync(runnerPoolId, runnerId);
}
public Task ReportRunnerAuthUrlErrorAsync(int runnerPoolId, int runnerId, string error)
{
CheckConnection(RunnerConnectionType.MessageQueue);
return _messageTaskAgentClient.ReportAgentAuthUrlMigrationErrorAsync(runnerPoolId, runnerId, error);
}
}
}

View File

@@ -1,16 +0,0 @@
using System;
using System.Collections.ObjectModel;
namespace GitHub.Runner.Common.Util
{
public static class NodeUtil
{
private const string _defaultNodeVersion = "node16";
public static readonly ReadOnlyCollection<string> BuiltInNodeVersions = new(new[] {"node12", "node16"});
public static string GetInternalNodeVersion()
{
var forcedNodeVersion = Environment.GetEnvironmentVariable(Constants.Variables.Agent.ForcedInternalNodeVersion);
return !string.IsNullOrEmpty(forcedNodeVersion) && BuiltInNodeVersions.Contains(forcedNodeVersion) ? forcedNodeVersion : _defaultNodeVersion;
}
}
}

View File

@@ -10,7 +10,6 @@ using System.Net.NetworkInformation;
using System.Threading;
using System.Threading.Tasks;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using GitHub.Services.Common;
@@ -315,12 +314,12 @@ namespace GitHub.Runner.Listener.Check
});
var downloadCertScript = Path.Combine(hostContext.GetDirectory(WellKnownDirectory.Bin), "checkScripts", "downloadCert");
var node = Path.Combine(hostContext.GetDirectory(WellKnownDirectory.Externals), NodeUtil.GetInternalNodeVersion(), "bin", $"node{IOUtil.ExeExtension}");
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Run '{node} \"{downloadCertScript}\"' ");
var node12 = Path.Combine(hostContext.GetDirectory(WellKnownDirectory.Externals), "node12", "bin", $"node{IOUtil.ExeExtension}");
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Run '{node12} \"{downloadCertScript}\"' ");
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} {StringUtil.ConvertToJson(env)}");
await processInvoker.ExecuteAsync(
hostContext.GetDirectory(WellKnownDirectory.Root),
node,
node12,
$"\"{downloadCertScript}\"",
env,
true,

View File

@@ -6,7 +6,6 @@ using System.Net;
using System.Threading;
using System.Threading.Tasks;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
namespace GitHub.Runner.Listener.Check
@@ -145,12 +144,12 @@ namespace GitHub.Runner.Listener.Check
});
var makeWebRequestScript = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Bin), "checkScripts", "makeWebRequest.js");
var node = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Externals), NodeUtil.GetInternalNodeVersion(), "bin", $"node{IOUtil.ExeExtension}");
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Run '{node} \"{makeWebRequestScript}\"' ");
var node12 = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Externals), "node12", "bin", $"node{IOUtil.ExeExtension}");
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} Run '{node12} \"{makeWebRequestScript}\"' ");
result.Logs.Add($"{DateTime.UtcNow.ToString("O")} {StringUtil.ConvertToJson(env)}");
await processInvoker.ExecuteAsync(
HostContext.GetDirectory(WellKnownDirectory.Root),
node,
node12,
$"\"{makeWebRequestScript}\"",
env,
true,

View File

@@ -29,7 +29,6 @@ namespace GitHub.Runner.Listener
{
Constants.Runner.CommandLine.Flags.Check,
Constants.Runner.CommandLine.Flags.Commit,
Constants.Runner.CommandLine.Flags.DisableUpdate,
Constants.Runner.CommandLine.Flags.Ephemeral,
Constants.Runner.CommandLine.Flags.Help,
Constants.Runner.CommandLine.Flags.Once,
@@ -69,7 +68,6 @@ namespace GitHub.Runner.Listener
public bool Unattended => TestFlag(Constants.Runner.CommandLine.Flags.Unattended);
public bool Version => TestFlag(Constants.Runner.CommandLine.Flags.Version);
public bool Ephemeral => TestFlag(Constants.Runner.CommandLine.Flags.Ephemeral);
public bool DisableUpdate => TestFlag(Constants.Runner.CommandLine.Flags.DisableUpdate);
// Keep this around since customers still relies on it
public bool RunOnce => TestFlag(Constants.Runner.CommandLine.Flags.Once);
@@ -245,7 +243,6 @@ namespace GitHub.Runner.Listener
validator: Validators.ServerUrlValidator);
}
#if OS_WINDOWS
public string GetWindowsLogonAccount(string defaultValue, string descriptionMsg)
{
return GetArgOrPrompt(
@@ -263,7 +260,7 @@ namespace GitHub.Runner.Listener
defaultValue: string.Empty,
validator: Validators.NonEmptyValidator);
}
#endif
public string GetWork()
{
return GetArgOrPrompt(

View File

@@ -54,7 +54,7 @@ namespace GitHub.Runner.Listener.Configuration
Trace.Info(nameof(LoadSettings));
if (!IsConfigured())
{
throw new NonRetryableException("Not configured. Run config.(sh/cmd) to configure the runner.");
throw new InvalidOperationException("Not configured. Run config.(sh/cmd) to configure the runner.");
}
RunnerSettings settings = _store.GetSettings();
@@ -196,7 +196,6 @@ namespace GitHub.Runner.Listener.Configuration
TaskAgent agent;
while (true)
{
runnerSettings.DisableUpdate = command.DisableUpdate;
runnerSettings.Ephemeral = command.Ephemeral;
runnerSettings.AgentName = command.GetRunnerName();
@@ -214,22 +213,11 @@ namespace GitHub.Runner.Listener.Configuration
if (command.GetReplace())
{
// Update existing agent with new PublicKey, agent version.
agent = UpdateExistingAgent(agent, publicKey, userLabels, runnerSettings.Ephemeral, command.DisableUpdate);
agent = UpdateExistingAgent(agent, publicKey, userLabels, runnerSettings.Ephemeral);
try
{
agent = await _runnerServer.ReplaceAgentAsync(runnerSettings.PoolId, agent);
if (command.DisableUpdate &&
command.DisableUpdate != agent.DisableUpdate)
{
throw new NotSupportedException("The GitHub server does not support configuring a self-hosted runner with 'DisableUpdate' flag.");
}
if (command.Ephemeral &&
command.Ephemeral != agent.Ephemeral)
{
throw new NotSupportedException("The GitHub server does not support configuring a self-hosted runner with 'Ephemeral' flag.");
}
_term.WriteSuccessMessage("Successfully replaced the runner");
break;
}
@@ -248,22 +236,11 @@ namespace GitHub.Runner.Listener.Configuration
else
{
// Create a new agent.
agent = CreateNewAgent(runnerSettings.AgentName, publicKey, userLabels, runnerSettings.Ephemeral, command.DisableUpdate);
agent = CreateNewAgent(runnerSettings.AgentName, publicKey, userLabels, runnerSettings.Ephemeral);
try
{
agent = await _runnerServer.AddAgentAsync(runnerSettings.PoolId, agent);
if (command.DisableUpdate &&
command.DisableUpdate != agent.DisableUpdate)
{
throw new NotSupportedException("The GitHub server does not support configuring a self-hosted runner with 'DisableUpdate' flag.");
}
if (command.Ephemeral &&
command.Ephemeral != agent.Ephemeral)
{
throw new NotSupportedException("The GitHub server does not support configuring a self-hosted runner with 'Ephemeral' flag.");
}
_term.WriteSuccessMessage("Runner successfully added");
break;
}
@@ -489,7 +466,7 @@ namespace GitHub.Runner.Listener.Configuration
}
private TaskAgent UpdateExistingAgent(TaskAgent agent, RSAParameters publicKey, ISet<string> userLabels, bool ephemeral, bool disableUpdate)
private TaskAgent UpdateExistingAgent(TaskAgent agent, RSAParameters publicKey, ISet<string> userLabels, bool ephemeral)
{
ArgUtil.NotNull(agent, nameof(agent));
agent.Authorization = new TaskAgentAuthorization
@@ -501,7 +478,6 @@ namespace GitHub.Runner.Listener.Configuration
agent.Version = BuildConstants.RunnerPackage.Version;
agent.OSDescription = RuntimeInformation.OSDescription;
agent.Ephemeral = ephemeral;
agent.DisableUpdate = disableUpdate;
agent.MaxParallelism = 1;
agent.Labels.Clear();
@@ -518,7 +494,7 @@ namespace GitHub.Runner.Listener.Configuration
return agent;
}
private TaskAgent CreateNewAgent(string agentName, RSAParameters publicKey, ISet<string> userLabels, bool ephemeral, bool disableUpdate)
private TaskAgent CreateNewAgent(string agentName, RSAParameters publicKey, ISet<string> userLabels, bool ephemeral)
{
TaskAgent agent = new TaskAgent(agentName)
{
@@ -530,7 +506,6 @@ namespace GitHub.Runner.Listener.Configuration
Version = BuildConstants.RunnerPackage.Version,
OSDescription = RuntimeInformation.OSDescription,
Ephemeral = ephemeral,
DisableUpdate = disableUpdate
};
agent.Labels.Add(new AgentLabel("self-hosted", LabelType.System));
@@ -613,9 +588,6 @@ namespace GitHub.Runner.Listener.Configuration
throw new ArgumentException($"'{githubUrl}' should point to an org or repository.");
}
int retryCount = 0;
while(retryCount < 3)
{
using (var httpClientHandler = HostContext.CreateHttpClientHandler())
using (var httpClient = new HttpClient(httpClientHandler))
{
@@ -624,8 +596,7 @@ namespace GitHub.Runner.Listener.Configuration
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("basic", base64EncodingToken);
httpClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents);
httpClient.DefaultRequestHeaders.Accept.ParseAdd("application/vnd.github.v3+json");
try
{
var response = await httpClient.PostAsync(githubApiUrl, new StringContent(string.Empty));
if (response.IsSuccessStatusCode)
@@ -634,32 +605,16 @@ namespace GitHub.Runner.Listener.Configuration
var jsonResponse = await response.Content.ReadAsStringAsync();
return StringUtil.ConvertFromJson<GitHubRunnerRegisterToken>(jsonResponse);
}
else if(response.StatusCode == System.Net.HttpStatusCode.NotFound)
{
// It doesn't make sense to retry in this case, so just stop
break;
}
else
{
_term.WriteError($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}'");
var errorResponse = await response.Content.ReadAsStringAsync();
_term.WriteError(errorResponse);
response.EnsureSuccessStatusCode();
}
}
catch(Exception ex) when (retryCount < 2)
{
retryCount++;
Trace.Error($"Failed to get JIT runner token -- Atempt: {retryCount}");
Trace.Error(ex);
Trace.Info("Retrying in 5 seconds");
}
}
var backOff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(1), TimeSpan.FromSeconds(5));
await Task.Delay(backOff);
}
return null;
}
}
}
private async Task<GitHubAuthResult> GetTenantCredential(string githubUrl, string githubToken, string runnerEvent)
{
@@ -674,9 +629,6 @@ namespace GitHub.Runner.Listener.Configuration
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/actions/runner-registration";
}
int retryCount = 0;
while (retryCount < 3)
{
using (var httpClientHandler = HostContext.CreateHttpClientHandler())
using (var httpClient = new HttpClient(httpClientHandler))
{
@@ -689,8 +641,6 @@ namespace GitHub.Runner.Listener.Configuration
{"runner_event", runnerEvent}
};
try
{
var response = await httpClient.PostAsync(githubApiUrl, new StringContent(StringUtil.ConvertToJson(bodyObject), null, "application/json"));
if (response.IsSuccessStatusCode)
@@ -699,32 +649,15 @@ namespace GitHub.Runner.Listener.Configuration
var jsonResponse = await response.Content.ReadAsStringAsync();
return StringUtil.ConvertFromJson<GitHubAuthResult>(jsonResponse);
}
else if(response.StatusCode == System.Net.HttpStatusCode.NotFound)
{
// It doesn't make sense to retry in this case, so just stop
break;
}
else
{
_term.WriteError($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}'");
var errorResponse = await response.Content.ReadAsStringAsync();
_term.WriteError(errorResponse);
// Something else bad happened, let's go to our retry logic
response.EnsureSuccessStatusCode();
}
}
catch(Exception ex) when (retryCount < 2)
{
retryCount++;
Trace.Error($"Failed to get tenant credentials -- Atempt: {retryCount}");
Trace.Error(ex);
Trace.Info("Retrying in 5 seconds");
}
}
var backOff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(1), TimeSpan.FromSeconds(5));
await Task.Delay(backOff);
}
return null;
}
}
}
}
}

View File

@@ -1,5 +1,4 @@
#if OS_WINDOWS
#pragma warning disable CA1416
using System;
using System.Collections;
using System.Collections.Generic;
@@ -142,7 +141,7 @@ namespace GitHub.Runner.Listener.Configuration
Trace.Entering();
LocalGroupInfo groupInfo = new LocalGroupInfo();
groupInfo.Name = groupName;
groupInfo.Comment = StringUtil.Format("Built-in group used by GitHub Actions Runner.");
groupInfo.Comment = StringUtil.Format("Built-in group used by Team Foundation Server.");
int returnCode = NetLocalGroupAdd(null, // computer name
1, // 1 means include comment
@@ -1328,5 +1327,4 @@ namespace GitHub.Runner.Listener.Configuration
public IntPtr hProfile;
}
}
#pragma warning restore CA1416
#endif

View File

@@ -48,12 +48,13 @@ namespace GitHub.Runner.Listener.Configuration
string repoOrOrgName = regex.Replace(settings.RepoOrOrgName, "-");
serviceName = StringUtil.Format(serviceNamePattern, repoOrOrgName, settings.AgentName);
if (serviceName.Length > MaxServiceNameLength)
if (serviceName.Length > 80)
{
Trace.Verbose($"Calculated service name is too long (> {MaxServiceNameLength} chars). Trying again by calculating a shorter name.");
// Add 5 to add -xxxx random number on the end
int exceededCharLength = serviceName.Length - MaxServiceNameLength + 5;
string repoOrOrgNameSubstring = StringUtil.SubstringPrefix(repoOrOrgName, MaxRepoOrgCharacters);
Trace.Verbose($"Calculated service name is too long (> 80 chars). Trying again by calculating a shorter name.");
int exceededCharLength = serviceName.Length - 80;
string repoOrOrgNameSubstring = StringUtil.SubstringPrefix(repoOrOrgName, 45);
exceededCharLength -= repoOrOrgName.Length - repoOrOrgNameSubstring.Length;
@@ -65,10 +66,6 @@ namespace GitHub.Runner.Listener.Configuration
runnerNameSubstring = StringUtil.SubstringPrefix(settings.AgentName, settings.AgentName.Length - exceededCharLength);
}
// Let's add a suffix with a random number to reduce the chance of collisions between runner names once we truncate
var random = new Random();
var num = random.Next(1000, 9999).ToString();
runnerNameSubstring +=$"-{num}";
serviceName = StringUtil.Format(serviceNamePattern, repoOrOrgNameSubstring, runnerNameSubstring);
}
@@ -76,12 +73,5 @@ namespace GitHub.Runner.Listener.Configuration
Trace.Info($"Service name '{serviceName}' display name '{serviceDisplayName}' will be used for service configuration.");
}
#if (OS_LINUX || OS_OSX)
const int MaxServiceNameLength = 150;
const int MaxRepoOrgCharacters = 70;
#elif OS_WINDOWS
const int MaxServiceNameLength = 80;
const int MaxRepoOrgCharacters = 45;
#endif
}
}

Some files were not shown because too many files have changed in this diff.