Compare commits


10 Commits

| Author | SHA1 | Message | Date |
| --- | --- | --- | --- |
| Julio Barba | 857770cd3d | TEST | 2019-12-13 14:47:09 -05:00 |
| Tingluo Huang | 38b03139f1 | c | 2019-12-12 15:04:38 -05:00 |
| Tingluo Huang | c1154c0ec9 | c | 2019-12-12 14:43:50 -05:00 |
| Tingluo Huang | d7f2f3085c | c | 2019-12-12 14:42:15 -05:00 |
| Tingluo Huang | 1404a73762 | rm aad | 2019-12-12 14:42:02 -05:00 |
| Tingluo Huang | 3ea3b5ff59 | c | 2019-12-12 14:05:25 -05:00 |
| Tingluo Huang | b37aa3254f | c | 2019-12-12 14:05:25 -05:00 |
| Tingluo Huang | 9b5eab9c81 | c | 2019-12-12 14:05:25 -05:00 |
| Tingluo Huang | 132d06dc9b | localrun | 2019-12-12 14:05:25 -05:00 |
| Tingluo Huang | f2db563c89 | delete un-used code. | 2019-12-12 14:05:25 -05:00 |
430 changed files with 9970 additions and 49880 deletions

10
.github/ISSUE_TEMPLATE.md vendored Normal file

@@ -0,0 +1,10 @@
## Runner Version and Platform
Version of your runner?
OS of the machine running the runner? OSX/Windows/Linux/...
## What's not working?
Please include error messages and screenshots.
## Runner and Worker's Diagnostic Logs
Logs are located in the runner's `_diag` folder. The runner logs are prefixed with `Runner_` and the worker logs are prefixed with `Worker_`. All sensitive information should already be masked out, but please double-check before pasting here.


@@ -1,34 +0,0 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: bug
assignees: ''
---
**Describe the bug**
A clear and concise description of what the bug is.
**To Reproduce**
Steps to reproduce the behavior:
1. Go to '...'
2. Run '....'
3. See error
**Expected behavior**
A clear and concise description of what you expected to happen.
## Runner Version and Platform
Version of your runner?
OS of the machine running the runner? OSX/Windows/Linux/...
## What's not working?
Please include error messages and screenshots.
## Job Log Output
If applicable, include the relevant part of the job / step log output here. All sensitive information should already be masked out, but please double-check before pasting here.
## Runner and Worker's Diagnostic Logs
If applicable, add relevant diagnostic log information. Logs are located in the runner's `_diag` folder. The runner logs are prefixed with `Runner_` and the worker logs are prefixed with `Worker_`. Each job run correlates to a worker log. All sensitive information should already be masked out, but please double-check before pasting here.


@@ -1,27 +0,0 @@
---
name: Feature Request
about: Create a request to help us improve
title: ''
labels: enhancement
assignees: ''
---
Thank you 🙇‍♀ for wanting to create a feature in this repository. Before you do, please ensure you are filing the issue in the right place. Issues should only be opened here if the issue **relates to code in this repository**.
* If you have found a security issue [please submit it here](https://hackerone.com/github)
* If you have questions or issues with the service, writing workflows or actions, then please [visit the GitHub Community Forum's Actions Board](https://github.community/t5/GitHub-Actions/bd-p/actions)
* If you are having an issue or question about GitHub Actions then please [contact customer support](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/about-github-actions#contacting-support)
If you have a feature request that is relevant to this repository, the runner, then please include the information below:
**Describe the enhancement**
A clear and concise description of the feature or enhancement you need.
**Code Snippet**
If applicable, add a code snippet.
**Additional information**
Add any other context about the feature here.
NOTE: if the feature request has been agreed upon then the assignee will create an ADR. See docs/adrs/README.md


@@ -5,13 +5,9 @@ on:
branches:
- master
- releases/*
paths-ignore:
- '**.md'
pull_request:
branches:
- '*'
paths-ignore:
- '**.md'
jobs:
build:


@@ -1,35 +0,0 @@
name: "Code Scanning - Action"
on:
push:
schedule:
- cron: '0 0 * * 0'
jobs:
CodeQL-Build:
strategy:
fail-fast: false
# CodeQL runs on ubuntu-latest, windows-latest, and macos-latest
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v2
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v1
# Override language selection by uncommenting this and choosing your languages
# with:
# languages: go, javascript, csharp, python, cpp, java
- name: Manual build
run : |
./dev.sh layout Release linux-x64
working-directory: src
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v1


@@ -3,47 +3,10 @@ name: Runner CD
on:
push:
paths:
- releaseVersion
- src/runnerversion_block # Change this to src/runnerversion when we are ready.
jobs:
check:
if: startsWith(github.ref, 'refs/heads/releases/') || github.ref == 'refs/heads/master'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
# Make sure ./releaseVersion matches ./src/runnerversion
# Query GitHub releases to ensure the version is not already used
- name: Check version
uses: actions/github-script@0.3.0
with:
github-token: ${{secrets.GITHUB_TOKEN}}
script: |
const core = require('@actions/core')
const fs = require('fs');
const runnerVersion = fs.readFileSync('${{ github.workspace }}/src/runnerversion', 'utf8').replace(/\n$/g, '')
const releaseVersion = fs.readFileSync('${{ github.workspace }}/releaseVersion', 'utf8').replace(/\n$/g, '')
if (runnerVersion != releaseVersion) {
console.log('Request Release Version: ' + releaseVersion + '\nCurrent Runner Version: ' + runnerVersion)
core.setFailed('Version mismatch! Make sure ./releaseVersion match ./src/runnerVersion')
return
}
try {
const release = await github.repos.getReleaseByTag({
owner: '${{ github.event.repository.owner.name }}',
repo: '${{ github.event.repository.name }}',
tag: 'v' + runnerVersion
})
core.setFailed('Release with same tag already created: ' + release.data.html_url)
} catch (e) {
// We are good to create the release if a release with the same tag doesn't exist
if (e.status != 404) {
throw e
}
}
build:
needs: check
strategy:
matrix:
runtime: [ linux-x64, linux-arm64, linux-arm, win-x64, osx-x64 ]
@@ -89,7 +52,7 @@ jobs:
- name: Package Release
if: github.event_name != 'pull_request'
run: |
${{ matrix.devScript }} package Release ${{ matrix.runtime }}
${{ matrix.devScript }} package Release
working-directory: src
# Upload runner package tar.gz/zip as artifact.
@@ -103,17 +66,14 @@ jobs:
release:
needs: build
runs-on: ubuntu-latest
runs-on: linux-latest
steps:
- uses: actions/checkout@v2
# Download runner package tar.gz/zip produced by 'build' job
- name: Download Artifact
uses: actions/download-artifact@v1
with:
name: runner-packages
path: ./
# Create ReleaseNote file
- name: Create ReleaseNote
@@ -122,74 +82,103 @@ jobs:
with:
github-token: ${{secrets.GITHUB_TOKEN}}
script: |
const core = require('@actions/core')
const fs = require('fs');
const runnerVersion = fs.readFileSync('${{ github.workspace }}/src/runnerversion', 'utf8').replace(/\n$/g, '')
const releaseNote = fs.readFileSync('${{ github.workspace }}/releaseNote.md', 'utf8').replace(/<RUNNER_VERSION>/g, runnerVersion)
console.log(releaseNote)
// Get runner version from ./src/runnerVersion file
const versionContent = await github.repos.getContents({
owner: '${{ github.event.repository.owner.name }}',
repo: '${{ github.event.repository.name }}',
path: 'src/runnerversion',
ref: '${{ github.sha }}'
})
const runnerVersion = Buffer.from(versionContent.data.content, 'base64').toString()
console.log("Runner Version " + runnerVersion)
core.setOutput('version', runnerVersion);
core.setOutput('note', releaseNote);
// Query GitHub release ensure version is bumped
const latestRelease = await github.repos.getLatestRelease({
owner: '${{ github.event.repository.owner.name }}',
repo: '${{ github.event.repository.name }}'
})
console.log(latestRelease.data.name)
const latestReleaseVersion = latestRelease.data.name.substring(1)
const vLatest = latestReleaseVersion.split('.')
const vNew = runnerVersion.split('.')
let versionBumped = true
for (let i = 0; i < 3; ++i) {
var v1 = parseInt(vLatest[i], 10);
var v2 = parseInt(vNew[i], 10);
if (v2 > v1) {
console.log(runnerVersion + " > " + latestReleaseVersion + "(Latest)")
break
}
if (v1 > v2) {
versionBumped = false
core.setFailed(runnerVersion + " < " + latestReleaseVersion + "(Latest)")
break
}
}
// Generate release note
if (versionBumped) {
const releaseNoteContent = await github.repos.getContents({
owner: '${{ github.event.repository.owner.name }}',
repo: '${{ github.event.repository.name }}',
path: 'releaseNote.md',
ref: '${{ github.sha }}'
})
const releaseNote = Buffer.from(releaseNoteContent.data.content, 'base64').toString().replace("<RUNNER_VERSION>", runnerVersion)
console.log(releaseNote)
core.setOutput('note', releaseNote);
}
# Create GitHub release
- uses: actions/create-release@master
- uses: actions/create-release@v1
id: createRelease
name: Create ${{ steps.releaseNote.outputs.version }} Runner Release
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
tag_name: "v${{ steps.releaseNote.outputs.version }}"
release_name: "v${{ steps.releaseNote.outputs.version }}"
body: |
${{ steps.releaseNote.outputs.note }}
body: ${{ steps.releaseNote.outputs.note }}
prerelease: true
# Upload release assets
- name: Upload Release Asset (win-x64)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}.zip
asset_path: ./actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}.zip
asset_name: actions-runner-win-x64-${{ steps.releaseNote.outputs.version }}.zip
asset_content_type: application/octet-stream
- name: Upload Release Asset (linux-x64)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
asset_path: ./actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}.zip
asset_name: actions-runner-linux-x64-${{ steps.releaseNote.outputs.version }}.zip
asset_content_type: application/octet-stream
- name: Upload Release Asset (osx-x64)
- name: Upload Release Asset (mac-x64)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
asset_name: actions-runner-osx-x64-${{ steps.releaseNote.outputs.version }}.tar.gz
asset_path: ./actions-runner-mac-x64-${{ steps.releaseNote.outputs.version }}.zip
asset_name: actions-runner-mac-x64-${{ steps.releaseNote.outputs.version }}.zip
asset_content_type: application/octet-stream
- name: Upload Release Asset (linux-arm)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}.tar.gz
asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}.tar.gz
asset_path: ./actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}.zip
asset_name: actions-runner-linux-arm-${{ steps.releaseNote.outputs.version }}.zip
asset_content_type: application/octet-stream
- name: Upload Release Asset (linux-arm64)
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.createRelease.outputs.upload_url }}
asset_path: ${{ github.workspace }}/actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}.tar.gz
asset_path: ./actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}.zip
asset_name: actions-runner-linux-arm64-${{ steps.releaseNote.outputs.version }}.zip
asset_content_type: application/octet-stream

8
.gitignore vendored

@@ -1,19 +1,12 @@
# build output
**/bin
**/obj
**/libs
**/lib
# editors
**/*.xproj
**/*.xproj.user
**/.vs
**/.vscode
**/*.error
**/*.json.pretty
.idea/
# output
node_modules
_downloads
_layout
@@ -26,3 +19,4 @@ TestLogs
#generated
src/Runner.Sdk/BuildConstants.cs


@@ -3,23 +3,23 @@ Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio Version 16
VisualStudioVersion = 16.0.29411.138
MinimumVisualStudioVersion = 10.0.40219.1
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Runner.Common", "Runner.Common\Runner.Common.csproj", "{084289A3-CD7A-42E0-9219-4348B4B7E19B}"
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Runner.Common", "src\Runner.Common\Runner.Common.csproj", "{084289A3-CD7A-42E0-9219-4348B4B7E19B}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Runner.Listener", "Runner.Listener\Runner.Listener.csproj", "{7D461AEE-BF2A-4855-BD96-56921160B36A}"
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Runner.Listener", "src\Runner.Listener\Runner.Listener.csproj", "{7D461AEE-BF2A-4855-BD96-56921160B36A}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Runner.PluginHost", "Runner.PluginHost\Runner.PluginHost.csproj", "{D0320EB1-CB6D-4179-BFDC-2F2B664A370C}"
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Runner.PluginHost", "src\Runner.PluginHost\Runner.PluginHost.csproj", "{D0320EB1-CB6D-4179-BFDC-2F2B664A370C}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Runner.Plugins", "Runner.Plugins\Runner.Plugins.csproj", "{C23AFD6F-4DCD-4243-BC61-865BE31B9168}"
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Runner.Plugins", "src\Runner.Plugins\Runner.Plugins.csproj", "{C23AFD6F-4DCD-4243-BC61-865BE31B9168}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Runner.Sdk", "Runner.Sdk\Runner.Sdk.csproj", "{D0484633-DA97-4C34-8E47-1DADE212A57A}"
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Runner.Sdk", "src\Runner.Sdk\Runner.Sdk.csproj", "{D0484633-DA97-4C34-8E47-1DADE212A57A}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "RunnerService", "Runner.Service\Windows\RunnerService.csproj", "{D12EBD71-0464-46D0-8394-40BCFBA0A6F2}"
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "RunnerService", "src\Runner.Service\Windows\RunnerService.csproj", "{D12EBD71-0464-46D0-8394-40BCFBA0A6F2}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Runner.Worker", "Runner.Worker\Runner.Worker.csproj", "{C2F5B9FA-2621-411F-8EB2-273ED276F503}"
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Runner.Worker", "src\Runner.Worker\Runner.Worker.csproj", "{C2F5B9FA-2621-411F-8EB2-273ED276F503}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Sdk", "Sdk\Sdk.csproj", "{D2EE812B-E4DF-49BB-AE87-12BC49949B5F}"
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Sdk", "src\Sdk\Sdk.csproj", "{D2EE812B-E4DF-49BB-AE87-12BC49949B5F}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Test", "Test\Test.csproj", "{C932061F-F6A1-4F1E-B854-A6C6B30DC3EF}"
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Test", "src\Test\Test.csproj", "{C932061F-F6A1-4F1E-B854-A6C6B30DC3EF}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution


@@ -1,5 +1,5 @@
The MIT License (MIT)
Copyright (c) 2019 GitHub
Copyright (c) Microsoft Corporation
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
@@ -17,4 +17,4 @@ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
SOFTWARE.


@@ -1,25 +1,31 @@
# GitHub Actions Runner
<p align="center">
<img src="docs/res/github-graph.png">
</p>
# GitHub Actions Runner
[![Actions Status](https://github.com/actions/runner/workflows/Runner%20CI/badge.svg)](https://github.com/actions/runner/actions)
The runner is the application that runs a job from a GitHub Actions workflow. The runner can run on the [hosted machine pools](https://github.com/actions/virtual-environments) or run on [self-hosted environments](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/about-self-hosted-runners).
## Get Started
For more information about installing and using self-hosted runners, see [Adding self-hosted runners](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/adding-self-hosted-runners) and [Using self-hosted runners in a workflow](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/using-self-hosted-runners-in-a-workflow)
![win](docs/res/win_sm.png) [Pre-reqs](docs/start/envwin.md) | [Download](https://github.com/actions/runner/releases/latest)
Runner releases:
![macOS](docs/res/apple_sm.png) [Pre-reqs](docs/start/envosx.md) | [Download](https://github.com/actions/runner/releases/latest)
![win](docs/res/win_sm.png) [Pre-reqs](docs/start/envwin.md) | [Download](https://github.com/actions/runner/releases)
![linux](docs/res/linux_sm.png) [Pre-reqs](docs/start/envlinux.md) | [Download](https://github.com/actions/runner/releases/latest)
![macOS](docs/res/apple_sm.png) [Pre-reqs](docs/start/envosx.md) | [Download](https://github.com/actions/runner/releases)
**Configure:**
![linux](docs/res/linux_sm.png) [Pre-reqs](docs/start/envlinux.md) | [Download](https://github.com/actions/runner/releases)
*MacOS and Linux*
```bash
./config.sh
```
*Windows*
```bash
config.cmd
```
## Contribute
We accept contributions in the form of issues and pull requests. [Read more here](docs/contribute.md) before contributing.
For developers that want to contribute, [read here](docs/contribute.md) on how to build and test.

32
assets.json Normal file

@@ -0,0 +1,32 @@
[
{
"name": "actions-runner-win-x64-<RUNNER_VERSION>.zip",
"platform": "win-x64",
"version": "<RUNNER_VERSION>",
"downloadUrl": "https://githubassets.azureedge.net/runners/<RUNNER_VERSION>/actions-runner-win-x64-<RUNNER_VERSION>.zip"
},
{
"name": "actions-runner-osx-x64-<RUNNER_VERSION>.tar.gz",
"platform": "osx-x64",
"version": "<RUNNER_VERSION>",
"downloadUrl": "https://githubassets.azureedge.net/runners/<RUNNER_VERSION>/actions-runner-osx-x64-<RUNNER_VERSION>.tar.gz"
},
{
"name": "actions-runner-linux-x64-<RUNNER_VERSION>.tar.gz",
"platform": "linux-x64",
"version": "<RUNNER_VERSION>",
"downloadUrl": "https://githubassets.azureedge.net/runners/<RUNNER_VERSION>/actions-runner-linux-x64-<RUNNER_VERSION>.tar.gz"
},
{
"name": "actions-runner-linux-arm64-<RUNNER_VERSION>.tar.gz",
"platform": "linux-arm64",
"version": "<RUNNER_VERSION>",
"downloadUrl": "https://githubassets.azureedge.net/runners/<RUNNER_VERSION>/actions-runner-linux-arm64-<RUNNER_VERSION>.tar.gz"
},
{
"name": "actions-runner-linux-arm-<RUNNER_VERSION>.tar.gz",
"platform": "linux-arm",
"version": "<RUNNER_VERSION>",
"downloadUrl": "https://githubassets.azureedge.net/runners/<RUNNER_VERSION>/actions-runner-linux-arm-<RUNNER_VERSION>.tar.gz"
}
]

237
azure-pipelines-release.yml Normal file

@@ -0,0 +1,237 @@
stages:
- stage: Build
jobs:
################################################################################
- job: build_windows_agent_x64
################################################################################
displayName: Windows Agent (x64)
pool:
vmImage: vs2017-win2016
steps:
# Steps template for windows platform
- template: windows.template.yml
parameters:
targetRuntime: win-x64
# Package dotnet core windows dependency (VC++ Redistributable)
- powershell: |
Write-Host "Downloading 'VC++ Redistributable' package."
$outDir = Join-Path -Path $env:TMP -ChildPath ([Guid]::NewGuid())
New-Item -Path $outDir -ItemType directory
$outFile = Join-Path -Path $outDir -ChildPath "ucrt.zip"
Invoke-WebRequest -Uri https://vstsagenttools.blob.core.windows.net/tools/ucrt/ucrt_x64.zip -OutFile $outFile
Write-Host "Unzipping 'VC++ Redistributable' package to agent layout."
$unzipDir = Join-Path -Path $outDir -ChildPath "unzip"
Add-Type -AssemblyName System.IO.Compression.FileSystem
[System.IO.Compression.ZipFile]::ExtractToDirectory($outFile, $unzipDir)
$agentLayoutBin = Join-Path -Path $(Build.SourcesDirectory) -ChildPath "_layout\bin"
Copy-Item -Path $unzipDir -Destination $agentLayoutBin -Force
displayName: Package UCRT
# Create agent package zip
- script: dev.cmd package Release win-x64
workingDirectory: src
displayName: Package Release
# Upload agent package zip as build artifact
- task: PublishBuildArtifacts@1
displayName: Publish Artifact (Windows x64)
inputs:
pathToPublish: _package
artifactName: runners
artifactType: container
################################################################################
- job: build_linux_agent_x64
################################################################################
displayName: Linux Agent (x64)
pool:
vmImage: ubuntu-16.04
steps:
# Steps template for non-windows platform
- template: nonwindows.template.yml
parameters:
targetRuntime: linux-x64
# Create agent package zip
- script: ./dev.sh package Release linux-x64
workingDirectory: src
displayName: Package Release
# Upload agent package zip as build artifact
- task: PublishBuildArtifacts@1
displayName: Publish Artifact (Linux x64)
inputs:
pathToPublish: _package
artifactName: runners
artifactType: container
################################################################################
- job: build_linux_agent_arm64
################################################################################
displayName: Linux Agent (arm64)
pool:
vmImage: ubuntu-16.04
steps:
# Steps template for non-windows platform
- template: nonwindows.template.yml
parameters:
targetRuntime: linux-arm64
# Create agent package zip
- script: ./dev.sh package Release linux-arm64
workingDirectory: src
displayName: Package Release
# Upload agent package zip as build artifact
- task: PublishBuildArtifacts@1
displayName: Publish Artifact (Linux ARM64)
inputs:
pathToPublish: _package
artifactName: runners
artifactType: container
################################################################################
- job: build_linux_agent_arm
################################################################################
displayName: Linux Agent (arm)
pool:
vmImage: ubuntu-16.04
steps:
# Steps template for non-windows platform
- template: nonwindows.template.yml
parameters:
targetRuntime: linux-arm
# Create agent package zip
- script: ./dev.sh package Release linux-arm
workingDirectory: src
displayName: Package Release
# Upload agent package zip as build artifact
- task: PublishBuildArtifacts@1
displayName: Publish Artifact (Linux ARM)
inputs:
pathToPublish: _package
artifactName: runners
artifactType: container
################################################################################
- job: build_osx_agent_x64
################################################################################
displayName: macOS Agent (x64)
pool:
vmImage: macOS-10.13
steps:
# Steps template for non-windows platform
- template: nonwindows.template.yml
parameters:
targetRuntime: osx-x64
# Create agent package zip
- script: ./dev.sh package Release osx-x64
workingDirectory: src
displayName: Package Release
# Upload agent package zip as build artifact
- task: PublishBuildArtifacts@1
displayName: Publish Artifact (OSX x64)
inputs:
pathToPublish: _package
artifactName: runners
artifactType: container
- stage: Release
dependsOn: Build
jobs:
################################################################################
- job: publish_agent_packages
################################################################################
displayName: Publish Agents (Windows/Linux/OSX)
pool:
name: ProductionRMAgents
steps:
# Download all agent packages from all previous phases
- task: DownloadBuildArtifacts@0
displayName: Download Agent Packages
inputs:
artifactName: runners
# Upload agent packages to Azure blob storage and refresh Azure CDN
- powershell: |
Write-Host "Preloading Azure modules." # This is for better performance, to avoid module-autoloading.
Import-Module AzureRM, AzureRM.profile, AzureRM.Storage, Azure.Storage, AzureRM.Cdn -ErrorAction Ignore -PassThru
Enable-AzureRmAlias -Scope CurrentUser
$uploadFiles = New-Object System.Collections.ArrayList
$certificateThumbprint = (Get-ItemProperty -Path "$(ServicePrincipalReg)").ServicePrincipalCertThumbprint
$clientId = (Get-ItemProperty -Path "$(ServicePrincipalReg)").ServicePrincipalClientId
Write-Host "##vso[task.setsecret]$certificateThumbprint"
Write-Host "##vso[task.setsecret]$clientId"
Login-AzureRmAccount -ServicePrincipal -CertificateThumbprint $certificateThumbprint -ApplicationId $clientId -TenantId $(GitHubTenantId)
Select-AzureRmSubscription -SubscriptionId $(GitHubSubscriptionId)
$storage = Get-AzureRmStorageAccount -ResourceGroupName githubassets -AccountName githubassets
Get-ChildItem -LiteralPath "$(System.ArtifactsDirectory)/runners" | ForEach-Object {
$versionDir = $_.Name.Trim('.zip').Trim('.tar.gz')
$versionDir = $versionDir.SubString($versionDir.LastIndexOf('-') + 1)
Write-Host "##vso[task.setvariable variable=ReleaseAgentVersion;]$versionDir"
Write-Host "Uploading $_ to BlobStorage githubassets/runners/$versionDir"
Set-AzureStorageBlobContent -Context $storage.Context -Container runners -File "$(System.ArtifactsDirectory)/runners/$_" -Blob "$versionDir/$_" -Force
$uploadFiles.Add("/runners/$versionDir/$_")
}
Write-Host "Get CDN info"
Get-AzureRmCdnEndpoint -ProfileName githubassets -ResourceGroupName githubassets
Write-Host "Purge Azure CDN Cache"
Unpublish-AzureRmCdnEndpointContent -EndpointName githubassets -ProfileName githubassets -ResourceGroupName githubassets -PurgeContent $uploadFiles
Write-Host "Pull assets through Azure CDN"
$uploadFiles | ForEach-Object {
$downloadUrl = "https://githubassets.azureedge.net" + $_
Write-Host $downloadUrl
Invoke-WebRequest -Uri $downloadUrl -OutFile $_.SubString($_.LastIndexOf('/') + 1)
}
displayName: Upload to Azure Blob
# Create agent release on Github
- powershell: |
Write-Host "Creating github release."
$releaseNotes = [System.IO.File]::ReadAllText("$(Build.SourcesDirectory)\releaseNote.md").Replace("<RUNNER_VERSION>","$(ReleaseAgentVersion)")
$releaseData = @{
tag_name = "v$(ReleaseAgentVersion)";
target_commitish = "$(Build.SourceVersion)";
name = "v$(ReleaseAgentVersion)";
body = $releaseNotes;
draft = $false;
prerelease = $true;
}
$releaseParams = @{
Uri = "https://api.github.com/repos/actions/runner/releases";
Method = 'POST';
Headers = @{
Authorization = 'Basic ' + [Convert]::ToBase64String([Text.Encoding]::ASCII.GetBytes("github:$(GithubToken)"));
}
ContentType = 'application/json';
Body = (ConvertTo-Json $releaseData -Compress)
}
[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12
$releaseCreated = Invoke-RestMethod @releaseParams
Write-Host $releaseCreated
$releaseId = $releaseCreated.id
Get-ChildItem -LiteralPath "$(System.ArtifactsDirectory)/runners" | ForEach-Object {
Write-Host "Uploading $_ as GitHub release assets"
$assetsParams = @{
Uri = "https://uploads.github.com/repos/actions/runner/releases/$releaseId/assets?name=$($_.Name)"
Method = 'POST';
Headers = @{
Authorization = 'Basic ' + [Convert]::ToBase64String([Text.Encoding]::ASCII.GetBytes("github:$(GithubToken)"));
}
ContentType = 'application/octet-stream';
Body = [System.IO.File]::ReadAllBytes($_.FullName)
}
Invoke-RestMethod @assetsParams
}
displayName: Create agent release on Github

95
azure-pipelines.yml Normal file

@@ -0,0 +1,95 @@
jobs:
################################################################################
- job: build_windows_x64_agent
################################################################################
displayName: Windows Agent (x64)
pool:
vmImage: vs2017-win2016
steps:
# Steps template for windows platform
- template: windows.template.yml
# Package dotnet core windows dependency (VC++ Redistributable)
- powershell: |
Write-Host "Downloading 'VC++ Redistributable' package."
$outDir = Join-Path -Path $env:TMP -ChildPath ([Guid]::NewGuid())
New-Item -Path $outDir -ItemType directory
$outFile = Join-Path -Path $outDir -ChildPath "ucrt.zip"
Invoke-WebRequest -Uri https://vstsagenttools.blob.core.windows.net/tools/ucrt/ucrt_x64.zip -OutFile $outFile
Write-Host "Unzipping 'VC++ Redistributable' package to agent layout."
$unzipDir = Join-Path -Path $outDir -ChildPath "unzip"
Add-Type -AssemblyName System.IO.Compression.FileSystem
[System.IO.Compression.ZipFile]::ExtractToDirectory($outFile, $unzipDir)
$agentLayoutBin = Join-Path -Path $(Build.SourcesDirectory) -ChildPath "_layout\bin"
Copy-Item -Path $unzipDir -Destination $agentLayoutBin -Force
displayName: Package UCRT
condition: and(succeeded(), ne(variables['build.reason'], 'PullRequest'))
# Create agent package zip
- script: dev.cmd package Release
workingDirectory: src
displayName: Package Release
condition: and(succeeded(), ne(variables['build.reason'], 'PullRequest'))
# Upload agent package zip as build artifact
- task: PublishBuildArtifacts@1
displayName: Publish Artifact (Windows x64)
condition: and(succeeded(), ne(variables['build.reason'], 'PullRequest'))
inputs:
pathToPublish: _package
artifactName: agent
artifactType: container
################################################################################
- job: build_linux_x64_agent
################################################################################
displayName: Linux Agent (x64)
pool:
vmImage: ubuntu-16.04
steps:
# Steps template for non-windows platform
- template: nonwindows.template.yml
# Create agent package zip
- script: ./dev.sh package Release
workingDirectory: src
displayName: Package Release
condition: and(succeeded(), ne(variables['build.reason'], 'PullRequest'))
# Upload agent package zip as build artifact
- task: PublishBuildArtifacts@1
displayName: Publish Artifact (Linux x64)
condition: and(succeeded(), ne(variables['build.reason'], 'PullRequest'))
inputs:
pathToPublish: _package
artifactName: agent
artifactType: container
################################################################################
- job: build_osx_agent
################################################################################
displayName: macOS Agent (x64)
pool:
vmImage: macOS-10.14
steps:
# Steps template for non-windows platform
- template: nonwindows.template.yml
# Create agent package zip
- script: ./dev.sh package Release
workingDirectory: src
displayName: Package Release
condition: and(succeeded(), ne(variables['build.reason'], 'PullRequest'))
# Upload agent package zip as build artifact
- task: PublishBuildArtifacts@1
displayName: Publish Artifact (OSX)
condition: and(succeeded(), ne(variables['build.reason'], 'PullRequest'))
inputs:
pathToPublish: _package
artifactName: agent
artifactType: container


@@ -1,61 +0,0 @@
# ADR 263: Self Hosted Runner Proxies
**Date**: 2019-11-13
**Status**: Accepted
## Context
- Proxy support is required for some enterprises and organizations to start using their own self hosted runners
- While there is not a standard convention, many applications support setting proxies via the environment variables `http_proxy`, `https_proxy`, and `no_proxy`, such as curl, wget, perl, python, docker, git, R, etc.
- Some of these applications use `HTTPS_PROXY` versus `https_proxy`, but most understand or primarily support the lowercase variant
## Decision
We will update the Runner to use the conventional environment variables for proxies: `http_proxy`, `https_proxy` and `no_proxy` if they are set.
These are described in detail below:
- `https_proxy` a proxy URL for all https traffic. It may contain basic authentication credentials. For example:
- http://proxy.com
- http://127.0.0.1:8080
- http://user:password@proxy.com
- `http_proxy` a proxy URL for all http traffic. It may contain basic authentication credentials. For example:
- http://proxy.com
- http://127.0.0.1:8080
- http://user:password@proxy.com
- `no_proxy` a comma-separated list of hosts that should not use the proxy. An optional port may be specified
- `google.com`
- `yahoo.com:443`
- `google.com,bing.com`
We won't use `http_proxy` for https traffic when `https_proxy` is not set; this behavior lines up with libcurl-based tools (curl, git) and wget.
Otherwise, action authors and workflow users would need to adjust to differences between the runner's proxy convention and the tools used by their actions and scripts.
Example:
A customer sets `http_proxy=http://127.0.0.1:8888` and configures the runner against `https://github.com/owner/repo`. With an `https_proxy` -> `http_proxy` fallback, the runner would connect to the server without any problem. However, if the user runs `git push` to `https://github.com/owner/repo`, `git` won't use the proxy, since it requires `https_proxy` to be set for any https traffic.
> `golang`, `node.js` and other dev tools from the Linux community use `http_proxy` for both http and https traffic, based on my research.
A majority of our users are on Linux, where these variables are commonly required by various programs. By reading these values, we simplify proxy setup for self-hosted runners and expose it in a way users are already familiar with.
A password provided for a proxy will be masked in the logs.
We will support the lowercase and uppercase variants, with lowercase taking priority if both are set.
### No Proxy Format
While exact implementations of `no_proxy` handling differ per application, most applications accept a comma-separated list of hosts. Some accept wildcard characters (*). We are going to do exact case-insensitive matches, and not support wildcards at this time.
For example:
- example.com will match example.com, foo.example.com, foo.bar.example.com
- foo.example.com will match bar.foo.example.com and foo.example.com
We will not support IP addresses for `no_proxy`, only hostnames.
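To make the matching rules concrete, here is a minimal Python sketch of the proxy selection and `no_proxy` matching described above (function names are illustrative, not the runner's implementation, and the optional port in a `no_proxy` entry is simply ignored):
```python
import os
from urllib.parse import urlparse

def get_proxy_env(name):
    # Lowercase takes priority over uppercase when both are set.
    return os.environ.get(name.lower()) or os.environ.get(name.upper())

def bypass_proxy(host, no_proxy):
    # Exact, case-insensitive hostname matching; no wildcards and no IP addresses.
    host = host.lower()
    for entry in (e.strip().lower() for e in no_proxy.split(',') if e.strip()):
        entry = entry.split(':')[0]  # drop the optional port for this sketch
        if host == entry or host.endswith('.' + entry):
            return True
    return False

def proxy_for(url):
    parts = urlparse(url)
    if bypass_proxy(parts.hostname or '', get_proxy_env('no_proxy') or ''):
        return None
    # No https_proxy -> http_proxy fallback, matching libcurl-based tools.
    name = 'https_proxy' if parts.scheme == 'https' else 'http_proxy'
    return get_proxy_env(name)

print(proxy_for('https://github.com/owner/repo'))
```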
## Consequences
1. Enterprises and organizations needing proxy support will be able to embrace self hosted runners
2. Users will need to set these environmental variables before configuring the runner in order to use a proxy during configuration
3. The runner will read from the environmental variables during config and runtime and use the provided proxy if it exists
4. Users may need to pass these environmental variables into other applications if they do not natively take these variables
5. Action authors may need to update their workflows to react to these environment variables
6. We will document the way of setting environmental variables for runners using the environmental variables and how the runner uses them
7. Like all other secrets, users will be able to figure out the proxy password relatively easily if they can modify a workflow file running on a self-hosted machine


@@ -1,62 +0,0 @@
# ADR 0274: Step outcome and conclusion
**Date**: 2020-01-13
**Status**: Accepted
## Context
This ADR proposes adding `steps.<id>.outcome` and `steps.<id>.conclusion` to the steps context.
This allows a downstream step to run based on whether a previous step succeeded or failed.
Reminder: currently the steps context contains `steps.<id>.outputs`.
## Decision
For steps that have completed, populate `steps.<id>.outcome` and `steps.<id>.conclusion` with one of the following values:
- `success`
- `failure`
- `cancelled`
- `skipped`
When a continue-on-error step fails, the outcome will be `failure` even though the final conclusion is `success`.
### Example
```yaml
steps:
- id: experimental
continue-on-error: true
run: ./build.sh experimental
- if: ${{ steps.experimental.outcome == 'success' }}
run: ./publish.sh experimental
```
### Terminology
The runs API uses the term `conclusion`.
Therefore we use a different term `outcome` for the value prior to continue-on-error.
The following is a snippet from the runs API response payload:
```json
"steps": [
{
"name": "Set up job",
"status": "completed",
"conclusion": "success",
"number": 1,
"started_at": "2020-01-09T11:06:16.000-05:00",
"completed_at": "2020-01-09T11:06:18.000-05:00"
},
```
## Consequences
- Update runner
- Update [docs](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/contexts-and-expression-syntax-for-github-actions#steps-context)


@@ -1,263 +0,0 @@
# ADR 0276: Problem Matchers
**Date** 2019-06-05
**Status** Accepted
## Context
Compilation failures during a CI build should surface good error messages.
For example, the actual compile errors from the TypeScript compiler should bubble up as issues in the UI, and not simply "tsc exited with exit code 1".
VSCode has an extensible model for solving this type of problem. VSCode allows users to configure which problem matchers to use when scanning output. For example, a user can apply the `tsc` problem matcher to receive a rich error output experience in VSCode when compiling their TypeScript project.
The problem-matcher concept fits well with "setup" actions. For example, the `setup-nodejs` action will download node.js, add it to the PATH, and register the `tsc` problem matcher. For the duration of the job, the `tsc` problem matcher will be applied against the output.
## Decision
### Registration
#### Using `##` command
`##[add-matcher]path-to-problem-matcher-config.json`
Using a `##` command allows for flexibility:
- Ad hoc scripts can register problem matchers
- Allows problem matchers to be conditionally registered
Note, if a matcher with the same name is registered a second time, it will clobber the first instance.
#### Unregister using `##` command
A way out for rare cases where scoping is a problem.
`##[remove-matcher]owner`
For this to be usable, the `owner` needs to be discoverable. Therefore, debug print the owner on registration.
### Single line matcher
Consider the output:
```
[...]
Build FAILED.
"C:\temp\problemmatcher\myproject\ConsoleApp1\ConsoleApp1.sln" (default target) (1) ->
"C:\temp\problemmatcher\myproject\ConsoleApp1\ConsoleApp1\ConsoleApp1.csproj" (default target) (2) ->
"C:\temp\problemmatcher\myproject\ConsoleApp1\ClassLibrary1\ClassLibrary1.csproj" (default target) (3) ->
(CoreCompile target) ->
Class1.cs(16,24): warning CS0612: 'ClassLibrary1.Helpers.MyHelper.Name' is obsolete [C:\temp\problemmatcher\myproject\ConsoleApp1\ClassLibrary1\ClassLibrary1.csproj]
"C:\temp\problemmatcher\myproject\ConsoleApp1\ConsoleApp1.sln" (default target) (1) ->
"C:\temp\problemmatcher\myproject\ConsoleApp1\ConsoleApp1\ConsoleApp1.csproj" (default target) (2) ->
"C:\temp\problemmatcher\myproject\ConsoleApp1\ClassLibrary1\ClassLibrary1.csproj" (default target) (3) ->
(CoreCompile target) ->
Helpers\MyHelper.cs(16,30): error CS1002: ; expected [C:\temp\problemmatcher\myproject\ConsoleApp1\ClassLibrary1\ClassLibrary1.csproj]
1 Warning(s)
1 Error(s)
```
The below match configuration uses a regular expression to discover problem lines. And the match groups are mapped into issue-properties.
```json
"owner": "msbuild",
"pattern": [
{
"regexp": "^\\s*([^:]+)\\((\\d+),(\\d+)\\): (error|warning) ([^:]+): (.*) \\[(.+)\\]$",
"file": 1,
"line": 2,
"column": 3,
"severity": 4,
"code": 5,
"message": 6,
"fromPath": 7
}
]
```
The above output and match configuration produces the following matches:
```
line: Class1.cs(16,24): warning CS0612: 'ClassLibrary1.Helpers.MyHelper.Name' is obsolete [C:\myrepo\myproject\ConsoleApp1\ClassLibrary1\ClassLibrary1.csproj]
file: Class1.cs
line: 16
column: 24
severity: warning
code: CS0612
message: 'ClassLibrary1.Helpers.MyHelper.Name' is obsolete
fromPath: C:\myrepo\myproject\ConsoleApp1\ClassLibrary1\ClassLibrary1.csproj
```
```
line: Helpers\MyHelper.cs(16,30): error CS1002: ; expected [C:\myrepo\myproject\ConsoleApp1\ClassLibrary1\ClassLibrary1.csproj]
file: Helpers\MyHelper.cs
line: 16
column: 30
severity: error
code: CS1002
message: ; expected
fromPath: C:\myrepo\myproject\ConsoleApp1\ClassLibrary1\ClassLibrary1.csproj
```
Additionally the line will appear red in the web UI (prefixed with `##[error]`).
Note, an error does not imply task failure. Exit codes communicate failure.
Note, strip color codes when evaluating regular expressions.
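To illustrate how the match groups map onto issue properties, here is a small Python sketch (not the runner's code) that applies the msbuild pattern above to one of the output lines:
```python
import re

pattern = {
    "regexp": r"^\s*([^:]+)\((\d+),(\d+)\): (error|warning) ([^:]+): (.*) \[(.+)\]$",
    "file": 1, "line": 2, "column": 3, "severity": 4, "code": 5, "message": 6, "fromPath": 7,
}

def match_line(line):
    # Apply the regular expression and map each capture group to an issue property.
    m = re.match(pattern["regexp"], line)
    if not m:
        return None
    return {key: m.group(idx) for key, idx in pattern.items() if key != "regexp"}

issue = match_line(
    r"Helpers\MyHelper.cs(16,30): error CS1002: ; expected "
    r"[C:\myrepo\myproject\ConsoleApp1\ClassLibrary1\ClassLibrary1.csproj]"
)
print(issue)  # {'file': 'Helpers\\MyHelper.cs', 'line': '16', 'column': '30', ...}
```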
### Multi-line matcher
Consider the below output from ESLint in stylish mode. The file name is printed once, yet multiple error lines are printed.
```
test.js
1:0 error Missing "use strict" statement strict
5:10 error 'addOne' is defined but never used no-unused-vars
✖ 2 problems (2 errors, 0 warnings)
```
The below match configuration uses multiple regular expressions, for the multiple lines.
And the last pattern of a multiline matcher can specify the `loop` property. This allows multiple errors to be discovered.
```json
"owner": "eslint-stylish",
"pattern": [
{
"regexp": "^([^\\s].*)$",
"file": 1
},
{
"regexp": "^\\s+(\\d+):(\\d+)\\s+(error|warning|info)\\s+(.*)\\s\\s+(.*)$",
"line": 1,
"column": 2,
"severity": 3,
"message": 4,
"code": 5,
"loop": true
}
]
```
The above output and match configuration produces two matches:
```
line: 1:0 error Missing "use strict" statement strict
file: test.js
line: 1
column: 0
severity: error
message: Missing "use strict" statement
code: strict
```
```
line: 5:10 error 'addOne' is defined but never used no-unused-vars
file: test.js
line: 5
column: 10
severity: error
message: 'addOne' is defined but never used
code: no-unused-vars
```
Note, in the above example only the error line will appear red in the web UI. The "file" line will not appear red.
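The `loop` semantics can be sketched as a small state machine: the first pattern captures the file, and the last pattern keeps emitting issues until a line fails to match. A minimal Python illustration (assumed behavior, not the runner's implementation):
```python
import re

file_pattern = re.compile(r"^([^\s].*)$")
issue_pattern = re.compile(r"^\s+(\d+):(\d+)\s+(error|warning|info)\s+(.*)\s\s+(.*)$")

def match_eslint_stylish(lines):
    issues, current_file = [], None
    for line in lines:
        m = issue_pattern.match(line)
        if m and current_file:
            # The last pattern has loop:true, so it keeps matching until a line fails.
            line_no, column, severity, message, code = m.groups()
            issues.append({"file": current_file, "line": line_no, "column": column,
                           "severity": severity, "message": message.rstrip(), "code": code})
            continue
        m = file_pattern.match(line)
        current_file = m.group(1) if m else None
    return issues

output = [
    "test.js",
    '  1:0  error  Missing "use strict" statement  strict',
    "  5:10  error  'addOne' is defined but never used  no-unused-vars",
]
print(match_eslint_stylish(output))  # two issues, both attributed to test.js
```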
### Other details
#### Configuration `owner`
Can be used to stomp over or remove.
#### Rooting the file
The goal of the file information is to provide a hyperlink in the UI.
Solving this problem means:
- Rooting the file when unrooted:
- Use the `fromPath` if specified (assume file path)
- Use the `github.workspace` (where the repo is cloned on disk)
- Match against a repository to determine the relative path within the repo
This is a place where we diverge from VSCode. VSCode task configurations are specific to the local workspace (the workspace root is known or can be specified). We're solving a more generic problem, so we need more information - specifically the `fromPath` property - in order to accurately root the path.
In order to avoid creating inaccurate hyperlinks on the error issues, the agent will verify the file exists and is in the main repository. Otherwise omit the file property from the error issue and debug trace what happened.
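A rough Python sketch of that rooting logic, under the assumption that `from_path`, `workspace`, and `repo_root` are absolute paths (illustrative only):
```python
import os

def root_issue_file(file, from_path, workspace, repo_root):
    if not os.path.isabs(file):
        # Prefer the fromPath's directory when the matcher captured one,
        # otherwise fall back to github.workspace (where the repo is cloned).
        base = os.path.dirname(from_path) if from_path else workspace
        file = os.path.join(base, file)
    file = os.path.normpath(file)
    # Only keep the file reference when it exists inside the main repository,
    # so we never produce a broken hyperlink on the error issue.
    if not os.path.exists(file):
        return None
    repo_root = os.path.normpath(repo_root)
    inside_repo = os.path.commonpath([repo_root, file]) == repo_root
    return file if inside_repo else None
```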
#### Supported severity levels
Ordinal ignore case:
- `warning`
- `error`
Coalesce empty with "error". For any other values, omit logging an issue and debug trace what happened.
#### Default severity level
Problem matchers are unable to interpret severity strings other than `warning` and `error`. The `severity` match group expects `warning` or `error` (case insensitive).
However some tools indicate error/warning in different ways. For example `flake8` uses codes like `E100`, `W200`, and `F300` (error, warning, fatal, respectively).
Therefore, allow a property `severity`, sibling to `owner`, which identifies the default severity for the problem matcher. This allows two problem matchers to be registered - one for warnings and one for errors.
For example, given the following `flake8` output:
```
./bootcamp/settings.py:156:80: E501 line too long (94 > 79 characters)
./bootcamp/settings.py:165:5: F403 'from local_settings import *' used; unable to detect undefined names
```
Two problem matchers can be used:
```json
{
"problemMatcher": [
{
"owner": "flake8",
"pattern": [
{
"regexp": "^(.+):(\\d+):(\\d+): ([EF]\\d+) (.+)$",
"file": 1,
"line": 2,
"column": 3,
"code": 4,
"message": 5
}
]
},
{
"owner": "flake8-warnings",
"severity": "warning",
"pattern": [
{
"regexp": "^(.+):(\\d+):(\\d+): (W\\d+) (.+)$",
"file": 1,
"line": 2,
"column": 3,
"code": 4,
"message": 5
}
]
}
]
}
```
#### Mitigate regular expression denial of service (ReDos)
If a matcher exceeds a 1 second timeout when processing a line, retry up to three times total.
After three unsuccessful attempts, warn and eject the matcher. The matcher will not run again for the duration of the job.
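A sketch of that bookkeeping in Python — note it only measures elapsed time after the fact; a real implementation would need to interrupt the regular expression engine to enforce the timeout preemptively:
```python
import re
import time

TIMEOUT_SECONDS = 1.0
MAX_ATTEMPTS = 3

class ProblemMatcher:
    def __init__(self, owner, regexp):
        self.owner = owner
        self.pattern = re.compile(regexp)
        self.slow_attempts = 0
        self.ejected = False

    def try_match(self, line):
        if self.ejected:
            return None
        started = time.monotonic()
        result = self.pattern.match(line)
        if time.monotonic() - started > TIMEOUT_SECONDS:
            self.slow_attempts += 1
            if self.slow_attempts >= MAX_ATTEMPTS:
                # Warn and stop running this matcher for the rest of the job.
                self.ejected = True
                print(f"##[warning]Removing problem matcher '{self.owner}' after repeated timeouts")
            return None
        return result
```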
### Where we diverge from VSCode
- We added the `fromPath` concept for rooting paths. This is done differently in VSCode, since a task is the scope (root path well known). For us, the job is the scope.
- VSCode allows additional activation info for background tasks that are always running (recompile on file changes). They allow regular expressions to define when the matcher scope begins and ends. This is an interesting concept that we could leverage to help solve our scoping problem.
## Consequences
- Setup actions should register problem matchers


@@ -1,93 +0,0 @@
# ADR 0277: Run action shell option
**Date** 2019-07-09
**Status** Accepted
## Context
Run actions run scripts using a platform-specific shell:
`bash -eo pipefail` on non-Windows, and `cmd.exe /c /d /s` on Windows
The `shell` option overrides this to allow different flags or completely different shells/interpreters
A small example is:
```yml
jobs:
bash-job:
actions:
- run: echo "Hello"
shell: bash
python-job:
actions:
- run: print("Hello")
shell: python {0}
```
## Decision
___
### Shell option
The keyword being used is `shell`
`shell` can be either:
1. Builtins / Explicitly supported keywords. It is useful to support at least `cmd`, and `powershell` on Windows. Because `cmd my_cmd_script` and `powershell my_ps1_script` are not valid the same way many Linux/cross-platform interpreters are, e.g. `bash myscript` or `python myscript`. Those tools (and potentially others) also require the correct file extension to run, or must be run in a particular way to get the exit codes consistently, so we must have first class knowledge about them. We provide default templates for these keywords as follows:
- `cmd`: Default is: `%ComSpec% /D /E:ON /V:OFF /S /C "CALL "{0}""` where the script name is automatically appended with `.cmd` and substituted for `{0}`
- Note this is equivalent to the default Windows behavior if no shell option is given
- `pwsh`: Default is: `pwsh -command "& '{0}'"` where the script is automatically appended with `.ps1`
- `powershell`: Default is: `powershell -command "& '{0}'"` where the script is automatically appended with `.ps1`
- `bash`: Uses `bash --noprofile --norc -eo pipefail {0}`
- The default behavior on non-Windows if no shell is given is to attempt this first
- `sh`: Uses `sh -e {0}`
- This is the default behavior on non-Windows if no shell is given, AND `bash` (see above) was not located on the PATH
- `python`: `python {0}`
- **NOTE**: The exact command run may vary by machine. We only provide default arguments and a command format for the listed shells. While the above behavior is expected on hosted machines, private runners may vary. For example, `sh` (or other commands) may actually be a link to `/bin/dash`, `/bin/bash`, or others
1. A template string: `command [...options] {0} [...more_options]`
- As above, the file name of the temporary script will be templated in. This gives users more control to have options at any location relative to the script path
- The first whitespace-delimited word of the string will be interpreted as the command
- e.g. `python {0} arg1 arg2` or similar can be used if passing args is needed. Some shells will require other options after the filename for various reasons
Note that (1) simply provides defaults that are executed with the same mechanism as (2). That is:
- A temporary script file is generated, and the path to that file is templated into the string at `{0}`
- The first word of the formatted string is assumed to be a command, and we attempt to locate its full path
- The fully qualified path to the command, plus the remaining arguments, is executed
- e.g. `shell: bash` expands to `/bin/bash --noprofile --norc -eo pipefail /runner/_layout/_work/_temp/f8d4fb2b-19d9-47e6-a786-4cc538d52761.sh` on my private runner
At this time, **THE LIST OF WELL-KNOWN SHELL OPTIONS IS**:
- cmd - Windows (hosted vs2017, vs2019) only
- powershell - Windows (hosted vs2017, vs2019) only
- sh - All hosted platforms
- pwsh - All hosted platforms
- bash - All hosted platforms
- python - All hosted platforms. Can use setup-python to configure which python will be used
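A minimal Python sketch of the template expansion described above — a temporary script file is generated, `{0}` is replaced with its path, and the first word is resolved as the command. The template strings mirror the defaults listed earlier; everything else (function names, PATH lookup details) is illustrative:
```python
import shutil

# Default templates for a few of the well-known keywords; {0} is replaced with the script path.
BUILTIN_TEMPLATES = {
    "bash": "bash --noprofile --norc -eo pipefail {0}",
    "sh": "sh -e {0}",
    "pwsh": "pwsh -command \"& '{0}'\"",
    "python": "python {0}",
}

def expand_shell(shell, script_path):
    # Anything that is not a well-known keyword is treated as a template string.
    template = BUILTIN_TEMPLATES.get(shell, shell)
    command_line = template.format(script_path)
    # The first whitespace-delimited word is the command; resolve it on the PATH.
    command, _, args = command_line.partition(" ")
    return shutil.which(command) or command, args

print(expand_shell("bash", "/runner/_work/_temp/step.sh"))
print(expand_shell("python {0} arg1 arg2", "/runner/_work/_temp/step.py"))
```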
___
### Containers
For container jobs, `shell` should just work the same as above, transparently. We will simply `exec` the command in the job container, passing the same arguments in
___
### Exit codes / Error action preference
For builtin shells, we provide defaults that make the most sense for CI, running within Actions, and being executed by our runner
bash/sh:
- Fail-fast behavior using `set -eo pipefail` is the default for the `bash` and `sh` builtins, and the default when no shell option is given on non-Windows platforms
- Users can opt out of fail-fast and take full control easily by providing a template string to the shell option, e.g.: `bash {0}`.
- sh-like shells exit with the exit code of the last command executed in a script, which is our default behavior. The runner therefore reports the status of the step as fail/succeed based on this exit code
powershell/pwsh
- Fail-fast behavior when possible. For `pwsh` and `powershell` builtins, we will prepend `$ErrorActionPreference = 'stop'` to script contents
- We append `if ((Test-Path -LiteralPath variable:\LASTEXITCODE)) { exit $LASTEXITCODE }` to powershell scripts to get Action statuses to reflect the script's last exit code
- Users can always opt out by not using the builtins, and providing a shell option like: `pwsh -File {0}`, or `powershell -Command "& '{0}'"`, depending on need
cmd
- There doesn't seem to be a way to fully opt in to fail-fast behavior other than writing your script to check each error code and respond accordingly, so we can't actually provide that behavior by default; it is completely up to the user to write this behavior into their script
- cmd.exe will exit (return the error code to the runner) with the errorlevel of the last program it executed. This is internally consistent with the previous default behavior (sh, pwsh) and is the cmd.exe default, so we keep that behavior
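For the powershell/pwsh builtins, the prepend/append behavior above can be sketched as simple script composition (illustrative; the real runner writes the wrapped contents to the temporary `.ps1` file):
```python
def wrap_pwsh_script(user_script):
    # Prepend the fail-fast preference, append the exit-code propagation line.
    prepend = "$ErrorActionPreference = 'stop'"
    append = "if ((Test-Path -LiteralPath variable:\\LASTEXITCODE)) { exit $LASTEXITCODE }"
    return "\n".join([prepend, user_script, append])

print(wrap_pwsh_script("./build.ps1"))
```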
## Consequences
Valid `shell` options will depend on the hosted images. We will need to maintain tight image compatibility.
First-class support for a shell will require a major version schema change to modify. We cannot remove or modify the behavior of a well-known supported option. However, adding first-class support for new shells is backwards compatible. For instance, we can add a well-known `python` option, because non-well-known options would have always needed to include `{0}`, e.g. `python {0}`


@@ -1,60 +0,0 @@
# ADR 0278: Env Context
**Date**: 2019-09-30
**Status**: Accepted
## Context
Users want to reference workflow variables defined in the workflow YAML file for an action's input, display name, and condition.
## Decision
### Add `env` context in the runner
The runner will create and populate the `env` context for every job execution using the following logic:
1. On job start, create the `env` context with any environment variables in the job message; these are the env values defined in the job- and workflow-level `env` sections of the customer's YAML file.
2. Update the `env` context when the customer uses `::set-env::` to set an env at the runner level.
3. Update the `env` context with the step's `env` block before each step runs.
The `env` context is only available on the runner; customers can't use the `env` context in any server-side evaluation, just like the `runner` context.
Example yaml:
```yaml
env:
env1: 10
env2: 20
env3: 30
jobs:
build:
env:
env1: 100
env2: 200
runs-on: ubuntu-latest
steps:
- run: |
echo ${{ env.env1 }} // 1000
echo $env1 // 1000
echo $env2 // 200
echo $env3 // 30
if: env.env2 == 200 // true
name: ${{ env.env1 }}_${{ env.env2 }} //1000_200
env:
env1: 1000
```
### Don't populate the `env` context with environment variables from the runner machine.
With job containers and container actions, the `env` context may not have the value the customer wants, which will cause confusion.
Ex:
```yaml
build:
runs-on: ubuntu-latest <- $USER=runner in hosted machine
container: ubuntu:16.04 <- $USER=root in container
steps:
- run: echo ${{env.USER}} <- what should customer expect this output? runner/root
- uses: docker://ubuntu:18.04
with:
args: echo ${{env.USER}} <- what should customer expect this output? runner/root
```


@@ -1,71 +0,0 @@
# ADR 0279: HashFiles Expression Function
**Date**: 2019-09-30
**Status**: Accepted
## Context
The first-party action `actions/cache` needs an input, an explicit `key`, used for restoring and saving the cache. For package caching, the most common `key` might be the hash of the contents of all `package-lock.json` files under the `node_modules` folder.
There are several different ways to get the hash `key` input for the `actions/cache` action.
1. Customers calculate the `key` themselves from a different action; customers won't like this since it needs an extra step to use the cache feature
```yaml
steps:
- run: |
hash=some_linux_hash_method(file1, file2, file3)
echo ::set-output name=hash::$hash
id: createHash
- uses: actions/cache@v1
with:
key: ${{ steps.createHash.outputs.hash }}
```
2. Make the `key` input of `actions/cache` follow a certain convention to calculate the hash; this limits the `key` input to a certain format the customer may not want.
```yaml
steps:
- uses: actions/cache@v1
with:
key: ${{ runner.os }}|${{ github.workspace }}|**/package-lock.json
```
## Decision
### Add a hashFiles() function to the expression engine to calculate files' hashes
`hashFiles()` will only be allowed on the runner side since it needs to read files on disk; using `hashFiles()` in any server-side evaluated expression will cause runtime errors.
`hashFiles()` will only support hashing files under `$GITHUB_WORKSPACE`, since the expression is evaluated on the runner; if the customer uses a job container or a container action, the runner won't have access to the file system inside the container.
`hashFiles()` will only take one parameter:
- `hashFiles('**/package-lock.json')` // Search files under $GITHUB_WORKSPACE and calculate a hash for them
**Question: Do we need to support more than one match pattern?**
Ex: `hashFiles('**/package-lock.json', '!toolkit/core/package-lock.json', '!toolkit/io/package-lock.json')`
Answer: Only support a single match pattern for GA; we can always add more later.
This gives customers a better experience with the `actions/cache` action's input.
```yaml
steps:
- uses: actions/cache@v1
with:
key: ${{hashFiles('**/package-lock.json')}}-${{github.ref}}-${{runner.os}}
```
For the search pattern, we will use basic globbing (`*`, `?`, and `[]`) and globstar (`**`).
Additional pattern details:
- Root relative paths with `github.workspace` (the main repo)
- Make `*` match files that start with `.`
- Case insensitive on Windows
- Accept `\` or `/` path separators on Windows
Hashing logic (sketched below):
1. Get all files under `$GITHUB_WORKSPACE`.
2. Use the search pattern to filter down to the files that match. (The search pattern only applies to file paths, not folder paths.)
3. Sort all matched files by full file path in alphabetical order.
4. Use the SHA256 algorithm to hash each matched file and store the hash result.
5. Use SHA256 to hash all stored files' hash results to get the final 64-character hash result.
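A minimal Python sketch of those hashing steps, using basic globbing only (the real function also supports globstar; the names and glob handling are illustrative):
```python
import hashlib
import pathlib
from fnmatch import fnmatch

def hash_files(workspace, pattern):
    workspace = pathlib.Path(workspace)
    # Steps 1-3: collect files, filter by the search pattern, sort by full path.
    matched = sorted(
        p for p in workspace.rglob("*")
        if p.is_file() and fnmatch(str(p.relative_to(workspace)).replace("\\", "/"), pattern)
    )
    # Steps 4-5: hash each file, then hash the stored digests together.
    combined = hashlib.sha256()
    for path in matched:
        combined.update(hashlib.sha256(path.read_bytes()).digest())
    return combined.hexdigest()  # 64-character result

print(hash_files(".", "**/package-lock.json"))
```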
**Question: Should we include the folder structure info into the hash?**
Answer: No


@@ -1,30 +0,0 @@
# ADR 0280: Echoing of Command Input
**Date**: 2019-11-04
**Status**: Accepted
## Context
Command echoing as a default behavior tends to clutter the user logs, so we want to switch to a system where users have to opt in to see this information.
Commands will still be echoed when there are errors processing them. This is so the end user has more context on why the command failed, which helps with troubleshooting.
Echo output in the user logs can be explicitly controlled by the new commands `::echo::on` and `::echo::off`. By default, echoing is enabled if `ACTIONS_STEP_DEBUG` secret is enabled, otherwise echoing is disabled.
## Decision
- The only commands that currently echo output are
- `remove-matcher`
- `add-matcher`
- `add-path`
- These will no longer echo the command, if processed successfully
- All commands echo the input when any of these conditions is fulfilled:
1. When such commands fail with an error
2. When `::echo::on` is set
3. When the `ACTIONS_STEP_DEBUG` is set, and echoing hasn't been explicitly disabled with `::echo::off`
- There are a few commands that won't be echoed, even when echo is enabled. These are (as of 2019/11/04):
- `add-mask`
- `debug`
- `warning`
- `error`
- The commands above will not echo, either because echoing the command would leak secrets (e.g. `add-mask`), or because it would not add any additional troubleshooting information to the logs (e.g. `debug`). It's expected that future commands will follow these "echo-suppressing" guidelines as well. Echo-suppressed commands are still free to output other information to the logs, as deemed fit.
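For example, a hypothetical `run` step could explicitly opt in to echoing while registering a problem matcher (the matcher path below is made up):
```bash
# Turn command echoing on, register a matcher (its input is now echoed in the
# step log), then turn echoing back off for the rest of the step.
echo "::echo::on"
echo "::add-matcher::.github/my-problem-matcher.json"
echo "::echo::off"
```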

View File

@@ -1,48 +0,0 @@
# ADR 0297: Base64 Masking Trailing Characters
**Date** 2020-01-21
**Status** Proposed
## Context
The Runner registers a number of Value Encoders, which mask various encodings of a provided secret. Currently, we register 3 base64 encoders:
- The base64 encoded secret
- The secret with the first character removed then base64 encoded
- The secret with the first two characters removed then base64 encoded
This gives us good coverage across the board for secrets and secrets with a prefix (i.e. `base64($user:$pass)`).
However, we don't have great coverage for cases where the secret has a string appended to it before it is base64 encoded (i.e. `base64($pass\n)`).
Most notably we've seen this as a result of user error, where a user accidentally appends a newline or space character before encoding their secret in base64.
## Decision
### Trim end characters
We are going to modify all existing base64 encoders to trim information before registering as a secret.
We will trim:
- `=` from the end of all base64 strings. This is a padding character that contains no information.
- Based on the number of `=`'s at the end of a base64 string, a malicious user could predict the length of the original secret modulo 3.
- If a user saw `***==`, they would know the secret could be 1,4,7,10... characters.
- If a string contains `=` we will also trim the last non-padding character from the base64 secret.
- This character can change if a string is appended to the secret before the encoding.
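As a concrete illustration (a minimal shell sketch; the secret value is made up), appending a single newline before encoding changes both the padding and the last non-padding character:
```bash
printf 'mysecret'   | base64   # bXlzZWNyZXQ=
printf 'mysecret\n' | base64   # bXlzZWNyZXQK
```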
### Register a fourth encoder
We will also add back in the original base64 encoded secret encoder for four total encoders:
- The base64 encoded secret
- The base64 encoded secret trimmed
- The secret with the first character removed then base64 encoded and trimmed
- The secret with the first two characters removed then base64 encoded and trimmed
This allows us to fully cover the most common scenario where a user base64 encodes their secret and expects the entire thing to be masked.
This will result in us only revealing length or bit information when a prefix or suffix is added to a secret before encoding.
## Consequences
- In the case where a secret has a prefix or suffix added before base64 encoding, we may now reveal up to 20 bits of information and the length of the original string modulo 3, rather than the original 16 bits and no length information
- Secrets with a suffix appended before encoding will now be masked across the board. Previously they were only masked if the secret's length was a multiple of 3 characters
- Performance will suffer in a negligible way

View File

@@ -1,35 +0,0 @@
# ADR 354: Expose runner machine info
**Date**: 2020-03-02
**Status**: Pending
## Context
- Provide a mechanism in the runner to include extra information in `Set up job` step's log.
Ex: Include OS/Software info from Hosted image.
## Decision
The runner will look for a file named `.setup_info` under the runner's root directory. The file should be a JSON file with a simple schema.
```json
[
{
"group": "OS Detail",
"detail": "........"
},
{
"group": "Software Detail",
"detail": "........"
}
]
```
The runner will use `##[group]` and `##[endgroup]` to fold all detail info into an expandable group.
Both [virtual-environments](https://github.com/actions/virtual-environments) and self-hosted runners can use this mechanism to add extra logging info to the `Set up job` step's log.
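For example, a hypothetical provisioning snippet (all paths and version strings below are made up) could generate the file like this:
```bash
# Drop a .setup_info file into the runner's root directory so the "Set up job"
# step shows the extra detail as collapsed ##[group] sections.
RUNNER_ROOT=/opt/actions-runner   # wherever the runner is installed
cat > "${RUNNER_ROOT}/.setup_info" <<'EOF'
[
  { "group": "OS Detail", "detail": "Ubuntu 18.04.4 LTS (kernel 5.3.0)" },
  { "group": "Software Detail", "detail": "git 2.25.1, docker 19.03.8" }
]
EOF
```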
## Consequences
1. Change the runner to read and parse, on a best-effort basis, the `.setup_info` file under the runner root directory.
2. [virtual-environments](https://github.com/actions/virtual-environments) generates the file during image generation.
3. Change the MMS provisioner to properly copy the file to the runner root directory at runtime.

View File

@@ -1,75 +0,0 @@
# ADR 361: Wrapper Action
**Date**: 2020-03-06
**Status**: Pending
## Context
In addition to an action's regular execution, an action author may want their action to have a chance to participate in:
- Job initialization
My action collects machine resource usage (CPU/RAM/Disk) during a workflow job execution; we need to start the perf recorder at the beginning of the job.
- Job cleanup
My action dirties the local workspace or machine environment during execution; we need to clean up these changes at the end of the job.
Ex: `actions/checkout@v2` writes `github.token` into the local `.git/config` during execution; it has a post-job cleanup defined to undo the changes.
## Decision
### Add `pre` and `post` execution to action
Node Action Example:
```yaml
name: 'My action with pre'
description: 'My action with pre'
runs:
using: 'node12'
pre: 'setup.js'
pre-if: 'success()' # Optional
main: 'index.js'
post: 'cleanup.js'
post-if: 'success()' # Optional
```
Container Action Example:
```yaml
name: 'My action with pre'
description: 'My action with pre'
runs:
using: 'docker'
image: 'mycontainer:latest'
pre-entrypoint: 'setup.sh'
pre-if: 'success()' # Optional
entrypoint: 'entrypoint.sh'
post-entrypoint: 'cleanup.sh'
post-if: 'success()' # Optional
```
Both `pre` and `post` have their default `pre-if`/`post-if` set to `always()`.
Defaulting `pre-if` to `always()` makes sure that, no matter how `main`'s condition evaluates at runtime, `pre` has already run.
`pre` steps execute in the order the steps are defined.
`pre` will always be added to the job's step list during job setup.
> Actions referenced from the local repository (`./my-action`) won't get `pre` set up correctly, since the repository hasn't been checked out during job initialization.
> We can't use the GitHub API to download the repository, since there is about a 3-minute delay between `git push` and the new commit becoming available for download via the GitHub API.
`post` will be pushed onto a `poststeps` stack lazily when the action's `pre` or `main` execution passes its `if` condition check and is about to run; you can't have an action that only contains a `post`. We will pop and run each `post` after all `pre` and `main` steps have finished, so for two actions A and B (defined in that order) the execution order is A.pre, B.pre, A.main, B.main, B.post, A.post.
> Currently `post` works for both repository actions (`org/repo@v1`) and local actions (`./my-action`)
Valid action:
- only has `main`
- has `pre` and `main`
- has `main` and `post`
- has `pre`, `main` and `post`
Invalid action:
- only has `pre`
- only has `post`
- has `pre` and `post`
Potential downsides of introducing `pre`:
- Extra magic w.r.t. step order. Users should control the step order, especially once we introduce templates.
- Eliminates the possibility of lazily downloading the action tarball: since `pre` runs by default, we have to download the tarball just to check whether the action defines a `pre`.
- `pre` doesn't work with local actions. We suggest customers use local actions for testing their action changes (e.g. CI for their action) to avoid the delay between `git push` and the GitHub repo tarball download API.
- The condition on `pre` can't be controlled using dynamic step outputs; `pre` executes too early.

View File

@@ -1,56 +0,0 @@
# ADR 0397: Support adding custom labels during runner config
**Date**: 2020-03-30
**Status**: Approved
## Context
Since configuring self-hosted runners is commonly automated via scripts, labels need to be able to be added during configuration. The runner currently registers the built-in labels (os, arch) during registration but does not accept labels via command-line args to extend the registered set.
See Issue: https://github.com/actions/runner/issues/262
This is another version of [ADR275](https://github.com/actions/runner/pull/275)
## Decision
This ADR proposes that we add a `--labels` option to `config`, which could be used to add custom additional labels to the configured runner.
For example, to add a single extra label the operator could run:
```bash
./config.sh --labels mylabel
```
> Note: the current runner command line parsing and envvar override algorithm only supports a single argument (key).
This would add the label `mylabel` to the runner, and enable users to select the runner in their workflow using this label:
```yaml
runs-on: [self-hosted, mylabel]
```
To add multiple labels the operator could run:
```bash
./config.sh --labels mylabel,anotherlabel
```
> Note: the current runner command line parsing and envvar override algorithm only supports a single argument (key).
This would add the label `mylabel` and `anotherlabel` to the runner, and enable users to select the runner in their workflow using this label:
```yaml
runs-on: [self-hosted, mylabel, anotherlabel]
```
It would not be possible to remove labels from an existing runner using `config.sh`, instead labels would have to be removed using the GitHub UI.
The labels argument will be split on commas, then trimmed, and empty strings discarded. That effectively means: don't use commas in label names for unattended config. Alternatively we could choose to support escaping commas, but that's a nice-to-have.
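A minimal shell illustration of that parsing rule (the label values are made up):
```bash
# Split on commas, trim surrounding whitespace, drop empty entries.
# Prints: gpu, x64, self-built (one per line).
echo "gpu, x64,,self-built" | tr ',' '\n' | sed 's/^ *//;s/ *$//' | grep -v '^$'
```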
## Replace
If the runner already exists and the option to replace it is chosen (interactively or via unattended config, as in this scenario), then the labels will be replaced / overwritten (not merged).
## Overriding built-in labels
Note that it is possible to register "built-in" hosted labels like `ubuntu-latest`, and this is not considered an error. This is an effective way for the org / runner admin to dictate by policy, through registration, that this set of runners will be used without having to edit all the workflow files now and in the future.
We will also not impose other restrictions, such as limiting the explicit addition of os / arch labels or validating them. We will assume that explicit labels were added for a reason; not restricting them offers the most flexibility and future-proofing / compatibility.
## Consequences
The ability to add custom labels to a self-hosted runner would enable most scenarios where job runner selection based on runner capabilities or characteristics is required.

View File

@@ -1,19 +0,0 @@
# ADRs
ADR, short for "Architecture Decision Record" is a way of capturing important architectural decisions, along with their context and consequences.
This folder includes ADRs for the actions runner. ADRs are proposed in the form of a pull request, and they commonly follow this format:
* **Title**: short present tense imperative phrase, less than 50 characters, like a git commit message.
* **Status**: proposed, accepted, rejected, deprecated, superseded, etc.
* **Context**: what is the issue that we're seeing that is motivating this decision or change.
* **Decision**: what is the change that we're actually proposing or doing.
* **Consequences**: what becomes easier or more difficult to do because of this change.
---
- More information about ADRs can be found [here](https://github.com/joelparkerhenderson/architecture_decision_record).

View File

@@ -1,57 +0,0 @@
# Automate Configuring Self-Hosted Runners
## Export PAT
Before running any of these sample scripts, create a GitHub PAT and export it before running the script
```bash
export RUNNER_CFG_PAT=yourPAT
```
## Create running as a service
**Scenario**: Run on a machine or VM (not container) which automates:
- Resolving latest released runner
- Download and extract latest
- Acquire a registration token
- Configure the runner
- Run as a systemd (linux) or Launchd (osx) service
:point_right: [Sample script here](../scripts/create-latest-svc.sh) :point_left:
Run as a one-liner. NOTE: replace with yourorg/yourrepo (repo level) or just yourorg (org level)
```bash
curl -s https://raw.githubusercontent.com/actions/runner/automate/scripts/create-latest-svc.sh | bash -s yourorg/yourrepo
```
## Uninstall running as service
**Scenario**: Run on a machine or VM (not container) which automates:
- Stops and uninstalls the systemd (linux) or Launchd (osx) service
- Acquires a removal token
- Removes the runner
:point_right: [Sample script here](../scripts/remove-svc.sh) :point_left:
Repo level one liner. NOTE: replace with yourorg/yourrepo (repo level) or just yourorg (org level)
```bash
curl -s https://raw.githubusercontent.com/actions/runner/automate/scripts/remove-svc.sh | bash -s yourorg/yourrepo
```
### Delete an offline runner
**Scenario**: Deletes a registered runner that is offline:
- Ensures the runner is offline
- Resolves id from name
- Deletes the runner
:point_right: [Sample script here](../scripts/delete.sh) :point_left:
Repo level one-liner. NOTE: replace with yourorg/yourrepo (repo level) or just yourorg (org level) and replace runnername
```bash
curl -s https://raw.githubusercontent.com/actions/runner/automate/scripts/delete.sh | bash -s yourorg/yourrepo runnername
```

View File

@@ -1,31 +1,10 @@
# Contributions
# Contribution guide for developers
We welcome contributions in the form of issues and pull requests. We view the contributions and the process as the same for github and external contributors.
> IMPORTANT: Building your own runner is critical for the dev inner loop process when contributing changes. However, only runners built and distributed by GitHub (releases) are supported in production. Be aware that workflows and orchestrations run service side with the runner being a remote process to run steps. For that reason, the service can pull the runner forward so customizations can be lost.
## Issues
Log issues for both bugs and enhancement requests. Logging issues is important for the open community.
Issues in this repository should be for the runner application. Note that the virtual machine images (including the developer toolsets) installed on the Actions hosted machine pools are located [in this repository](https://github.com/actions/virtual-environments)
## Enhancements and Feature Requests
We ask that, before significant effort is put into code changes, we have agreement on taking the change.
1. Create a feature request. Once agreed, we will take the enhancement
2. Create an ADR to agree on the details of the change.
An ADR is an Architectural Decision Record. This allows consensus on the direction forward and also serves as a record of the change and motivation. [Read more here](adrs/README.md)
## Development Life Cycle
### Required Dev Dependencies
## Required Dev Dependencies
![Win](res/win_sm.png) Git for Windows [Install Here](https://git-scm.com/downloads) (needed for dev sh script)
### To Build, Test, Layout
## To Build, Test, Layout
Navigate to the `src` directory and run the following command:
@@ -35,27 +14,27 @@ Navigate to the `src` directory and run the following command:
**Commands:**
* `layout` (`l`): Run first time to create a full runner layout in `{root}/_layout`
* `build` (`b`): Build everything and update runner layout folder
* `test` (`t`): Build runner binaries and run unit tests
* `layout` (`l`): Run first time to create a full agent layout in `{root}/_layout`
* `build` (`b`): Build everything and update agent layout folder
* `test` (`t`): Build agent binaries and run unit tests
Sample developer flow:
```bash
git clone https://github.com/actions/runner
cd ./src
./dev.(sh/cmd) layout # the runner that built from source is in {root}/_layout
./dev.(sh/cmd) layout # the agent that build from source is in {root}/_layout
<make code changes>
./dev.(sh/cmd) build # {root}/_layout will get updated
./dev.(sh/cmd) test # run all unit tests before git commit/push
```
### Editors
## Editors
[Using Visual Studio Code](https://code.visualstudio.com/)
[Using Visual Studio 2019](https://www.visualstudio.com/vs/)
[Using Visual Studio Code](https://code.visualstudio.com/)
### Styling
## Styling
We use the .NET Foundation and CoreCLR style guidelines [located here](
https://github.com/dotnet/corefx/blob/master/Documentation/coding-guidelines/coding-style.md)

View File

@@ -1,61 +0,0 @@
# Runner Authentication and Authorization
## Goals
- Support runner installs in untrusted domains.
- The account that configures or runs the runner process is not relevant for accessing GitHub resources.
- Accessing GitHub resources is done with a per-job token which expires when job completes.
- The token is granted to trusted parts of the system including the runner, actions and script steps specified by the workflow author as trusted.
- All OAuth tokens that come from the Token Service that the runner uses to access Actions Service resources are the same. It's just the scope and expiration of the token that may vary.
## Configuration
Configuring a self-hosted runner is [covered here in the documentation](https://help.github.com/en/actions/hosting-your-own-runners/adding-self-hosted-runners).
Configuration is done with the user being authenticated via a time-limited, GitHub runner registration token.
*Your credentials are never used for registering the runner with the service.*
![Self-hosted runner config](../res/self-hosted-config.png)
During configuration, an RSA public/private key pair is created and the private key is stored in a file on disk. On Windows, the content is protected with DPAPI (machine-level encrypted - the runner is only valid on that machine) and on Linux/OSX with `chmod` permissions.
Using your credentials, the runner is registered with the service by sending the public key to the service, which adds that runner to the pool and stores the public key; the Token Service will generate a `clientId` associated with the public key.
## Start and Listen
After configuring the runner, the runner can be started interactively (`./run.cmd` or `./run.sh`) or as a service.
![Self-hosted runner start](../res/self-hosted-start.png)
On start, the runner listener process loads the RSA private key (on Windows, decrypting with the machine-key DPAPI) and asks the Token Service for an OAuth token, authenticating the request with a JWT signed with the RSA private key.
The server then responds with an OAuth token that grants permission to access the message queue (HTTP long poll), allowing the runner to acquire the messages it will eventually run.
## Run a workflow
When a workflow is run, its labels are evaluated, it is matched to a runner and a message is placed in a queue of messages for that runner.
The runner then starts listening for jobs via the message queue HTTP long poll.
The message is encrypted with the runner's public key, stored during runner configuration.
![Runner workflow run](../res/workflow-run.png)
A workflow is queued as a result of a triggered [event](https://help.github.com/en/actions/reference/events-that-trigger-workflows). Workflows can be scheduled to [run at specific UTC times](https://help.github.com/en/actions/reference/events-that-trigger-workflows#scheduled-events-schedule) using POSIX `cron` syntax.
An [OAuth token](http://self-issued.info/docs/draft-ietf-oauth-json-web-token.html) is generated, granting limited access to the host in Actions Service associated with the github.com repository/organization.
The lifetime of the OAuth token is the lifetime of the run or at most the [job timeout (default: 6 hours)](https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions#jobsjob_idtimeout-minutes), plus 10 additional minutes.
## Accessing GitHub resources
The job message sent to the runner contains the OAuth token to talk back to the Actions Service.
The runner listener parent process will spawn a runner worker process for that job and send it the job message over IPC.
The token is never persisted.
Each action is run as a unique subprocess.
The encrypted access token will be provided as an environment variable in each action subprocess.
The token is registered with the runner as a secret and scrubbed from the logs as they are written.
Authentication in a workflow run to github.com can be accomplished by using the [`GITHUB_TOKEN`](https://help.github.com/en/actions/configuring-and-managing-workflows/authenticating-with-the-github_token#about-the-github_token-secret) secret. This token expires after 60 minutes. Please note that this token is different from the OAuth token that the runner uses to talk to the Actions Service.
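A minimal sketch (not taken from the docs above) of using that secret from a `run` step to call the GitHub API for the current repository; the workflow has to map the secret into the environment, e.g. `env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}`:
```bash
# GITHUB_REPOSITORY is set by the runner; GITHUB_TOKEN must be mapped in the
# workflow file as noted above.
curl -s -H "Authorization: token ${GITHUB_TOKEN}" \
  "https://api.github.com/repos/${GITHUB_REPOSITORY}"
```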
## Hosted runner authentication
Hosted runner authentication differs from self-hosted authentication in that runners do not undergo a registration process, but instead, the hosted runners get the OAuth token directly by reading the `.credentials` file. The scope of this particular token is limited for a given workflow job execution, and the token is revoked as soon as the job is finished.
![Hosted runner config and start](../res/hosted-config-start.png)

Binary file not shown (removed image, 31 KiB)

View File

@@ -1,52 +0,0 @@
# Markup used to generate the runner auth diagrams: https://websequencediagrams.com
title Runner Configuration (self-hosted only)
note left of Runner: GitHub repo URL as input
Runner->github.com: Retrieve Actions Service access using runner registration token
github.com->Runner: Access token for Actions Service
note left of Runner: Generate RSA key pair
note left of Runner: Store encrypted RSA private key on disk
Runner->Actions Service: Register runner using Actions Service access token
note right of Runner: Runner name, RSA public key sent
note right of Actions Service: Public key stored
Actions Service->Token Service: Register runner as an app along with the RSA public key
note right of Token Service: Public key stored
Token Service->Actions Service: Client Id for the runner application
Actions Service->Runner: Client Id and Token Endpoint URL
note left of Runner: Store runner configuration info into .runner file
note left of Runner: Store Token registration info into .credentials file
title Runner Start and Running (self-hosted only)
Runner.Listener->Runner.Listener: Start
note left of Runner.Listener: Load config info from .runner
note left of Runner.Listener: Load token registration from .credentials
Runner.Listener->Token Service: Exchange OAuth token (happens every 50 mins)
note right of Runner.Listener: Construct JWT token, use Client Id signed by RSA private key
note left of Actions Service: Find corresponding RSA public key, use Client Id\nVerify JWT token's signature
Token Service->Runner.Listener: OAuth token with limited permission and valid for 50 mins
Runner.Listener->Actions Service: Connect to Actions Service with OAuth token
Actions Service->Runner.Listener: Workflow job
title Running workflow
Runner.Listener->Service (Message Queue): Get message
note right of Runner.Listener: Authenticate with exchanged OAuth token
Event->Actions Service: Queue workflow
Actions Service->Actions Service: Generate OAuth token per job
Actions Service->Actions Service: Build job message with the OAuth token
Actions Service->Actions Service: Encrypt job message with the target runner's public key
Actions Service->Service (Message Queue): Send encrypted job message to runner
Service (Message Queue)->Runner.Listener: Send job
note right of Runner.Listener: Decrypt message with runner's private key
Runner.Listener->Runner.Worker: Create worker process per job and run the job
title Runner Configuration, Start and Running (hosted only)
Machine Management Service->Runner.Listener: Construct .runner configuration file, store token in .credentials
Runner.Listener->Runner.Listener: Start
note left of Runner.Listener: Load config info from .runner
note left of Runner.Listener: Load OAuth token from .credentials
Runner.Listener->Actions Service: Connect to Actions Service with OAuth token in .credentials
Actions Service->Runner.Listener: Workflow job

Binary file not shown (removed image, 98 KiB)

Binary file not shown (removed image, 43 KiB)

Binary file not shown (removed image, 46 KiB)

View File

@@ -28,7 +28,7 @@ Execute ./bin/installdependencies.sh to install any missing Dotnet Core 3.0 depe
```
You can easily correct the problem by executing `./bin/installdependencies.sh`.
The `installdependencies.sh` script should install all required dependencies on all supported Linux versions
> Note: The `installdependencies.sh` script will try to use the default package management mechanism on your Linux flavor (ex. `yum`/`apt-get`/`apt`).
> Note: The `installdependencies.sh` script will try to use the default package management mechanism on your Linux flavor (ex. `yum`/`apt-get`/`apt`). You might need to deal with errors coming from the package management mechanism related to your setup, like [#1353](https://github.com/Microsoft/vsts-agent/issues/1353)
### Full dependencies list
@@ -40,7 +40,7 @@ Debian based OS (Debian, Ubuntu, Linux Mint)
- libssl1.1, libssl1.0.2 or libssl1.0.0
- libicu63, libicu60, libicu57 or libicu55
Fedora based OS (Fedora, Red Hat Enterprise Linux, CentOS, Oracle Linux 7)
Fedora based OS (Fedora, Redhat, Centos, Oracle Linux 7)
- lttng-ust
- openssl-libs

View File

@@ -9,4 +9,4 @@
- Windows Server 2016 64-bit
- Windows Server 2019 64-bit
## [More .NET Core Prerequisites Information](https://docs.microsoft.com/en-us/dotnet/core/windows-prerequisites?tabs=netcore30)
## [More .Net Core Prerequisites Information](https://docs.microsoft.com/en-us/dotnet/core/windows-prerequisites?tabs=netcore30)

7
images/arm/Dockerfile Normal file
View File

@@ -0,0 +1,7 @@
FROM mcr.microsoft.com/dotnet/core/runtime-deps:2.1
RUN apt-get update \
&& apt-get install -y --no-install-recommends \
curl \
git \
&& rm -rf /var/lib/apt/lists/*

150
images/centos6/Dockerfile Normal file
View File

@@ -0,0 +1,150 @@
FROM centos:6
# Install dependencies
RUN yum install -y \
centos-release-SCL \
epel-release \
wget \
unzip \
&& \
rpm --import http://linuxsoft.cern.ch/cern/slc6X/x86_64/RPM-GPG-KEY-cern && \
wget -O /etc/yum.repos.d/slc6-devtoolset.repo http://linuxsoft.cern.ch/cern/devtoolset/slc6-devtoolset.repo && \
yum install -y \
"perl(Time::HiRes)" \
autoconf \
cmake \
cmake3 \
devtoolset-2-toolchain \
doxygen \
expat-devel \
gcc \
gcc-c++ \
gdb \
gettext-devel \
krb5-devel \
libedit-devel \
libidn-devel \
libmetalink-devel \
libnghttp2-devel \
libssh2-devel \
libunwind-devel \
libuuid-devel \
lttng-ust-devel \
lzma \
ncurses-devel \
openssl-devel \
perl-devel \
python-argparse \
python27 \
readline-devel \
swig \
xz \
zlib-devel \
&& \
yum clean all
# Build and install clang and lldb 3.9.1
RUN wget ftp://sourceware.org/pub/binutils/snapshots/binutils-2.29.1.tar.xz && \
wget http://releases.llvm.org/3.9.1/cfe-3.9.1.src.tar.xz && \
wget http://releases.llvm.org/3.9.1/llvm-3.9.1.src.tar.xz && \
wget http://releases.llvm.org/3.9.1/lldb-3.9.1.src.tar.xz && \
wget http://releases.llvm.org/3.9.1/compiler-rt-3.9.1.src.tar.xz && \
\
tar -xf binutils-2.29.1.tar.xz && \
tar -xf llvm-3.9.1.src.tar.xz && \
mkdir llvm-3.9.1.src/tools/clang && \
mkdir llvm-3.9.1.src/tools/lldb && \
mkdir llvm-3.9.1.src/projects/compiler-rt && \
tar -xf cfe-3.9.1.src.tar.xz --strip 1 -C llvm-3.9.1.src/tools/clang && \
tar -xf lldb-3.9.1.src.tar.xz --strip 1 -C llvm-3.9.1.src/tools/lldb && \
tar -xf compiler-rt-3.9.1.src.tar.xz --strip 1 -C llvm-3.9.1.src/projects/compiler-rt && \
rm binutils-2.29.1.tar.xz && \
rm cfe-3.9.1.src.tar.xz && \
rm lldb-3.9.1.src.tar.xz && \
rm llvm-3.9.1.src.tar.xz && \
rm compiler-rt-3.9.1.src.tar.xz && \
\
mkdir llvmbuild && \
cd llvmbuild && \
scl enable python27 devtoolset-2 \
' \
cmake3 \
-DCMAKE_CXX_COMPILER=/opt/rh/devtoolset-2/root/usr/bin/g++ \
-DCMAKE_C_COMPILER=/opt/rh/devtoolset-2/root/usr/bin/gcc \
-DCMAKE_LINKER=/opt/rh/devtoolset-2/root/usr/bin/ld \
-DCMAKE_BUILD_TYPE=Release \
-DLLVM_LIBDIR_SUFFIX=64 \
-DLLVM_ENABLE_EH=1 \
-DLLVM_ENABLE_RTTI=1 \
-DLLVM_BINUTILS_INCDIR=../binutils-2.29.1/include \
../llvm-3.9.1.src \
&& \
make -j $(($(getconf _NPROCESSORS_ONLN)+1)) && \
make install \
' && \
cd .. && \
rm -r llvmbuild && \
rm -r llvm-3.9.1.src && \
rm -r binutils-2.29.1
# Build and install curl 7.45.0
RUN wget https://curl.haxx.se/download/curl-7.45.0.tar.lzma && \
tar -xf curl-7.45.0.tar.lzma && \
rm curl-7.45.0.tar.lzma && \
cd curl-7.45.0 && \
scl enable python27 devtoolset-2 \
' \
./configure \
--disable-dict \
--disable-ftp \
--disable-gopher \
--disable-imap \
--disable-ldap \
--disable-ldaps \
--disable-libcurl-option \
--disable-manual \
--disable-pop3 \
--disable-rtsp \
--disable-smb \
--disable-smtp \
--disable-telnet \
--disable-tftp \
--enable-ipv6 \
--enable-optimize \
--enable-symbol-hiding \
--with-ca-bundle=/etc/pki/tls/certs/ca-bundle.crt \
--with-nghttp2 \
--with-gssapi \
--with-ssl \
--without-librtmp \
&& \
make install \
' && \
cd .. && \
rm -r curl-7.45.0
# Install ICU 57.1
RUN wget http://download.icu-project.org/files/icu4c/57.1/icu4c-57_1-RHEL6-x64.tgz && \
tar -xf icu4c-57_1-RHEL6-x64.tgz -C / && \
rm icu4c-57_1-RHEL6-x64.tgz
# Compile and install a version of the git that supports the features that cli repo build needs
# NOTE: The git needs to be built after the curl so that it can use the libcurl to add https
# protocol support.
RUN \
wget https://www.kernel.org/pub/software/scm/git/git-2.9.5.tar.gz && \
tar -xf git-2.9.5.tar.gz && \
rm git-2.9.5.tar.gz && \
cd git-2.9.5 && \
make configure && \
./configure --prefix=/usr/local --without-tcltk && \
make -j $(nproc --all) all && \
make install && \
cd .. && \
rm -r git-2.9.5
ENV LD_LIBRARY_PATH=/usr/local/lib

33
nonwindows.template.yml Normal file
View File

@@ -0,0 +1,33 @@
parameters:
targetRuntime: ''
steps:
# Build agent layout
- script: ./dev.sh layout Release ${{ parameters.targetRuntime }}
workingDirectory: src
displayName: Build & Layout Release ${{ parameters.targetRuntime }}
# Run test
- script: ./dev.sh test
workingDirectory: src
displayName: Test
condition: and(ne('${{ parameters.targetRuntime }}', 'linux-arm64'), ne('${{ parameters.targetRuntime }}', 'linux-arm'))
# # Publish test results
# - task: PublishTestResults@2
# displayName: Publish Test Results **/*.trx
# condition: always()
# inputs:
# testRunner: VSTest
# testResultsFiles: '**/*.trx'
# testRunTitle: 'Agent Tests'
# # Upload test log
# - task: PublishBuildArtifacts@1
# displayName: Publish Test logs
# condition: always()
# inputs:
# pathToPublish: src/Test/TestLogs
# artifactName: $(System.JobId)
# artifactType: container

View File

@@ -1,70 +1,67 @@
## Features
- N/A
- Added the "severity" keyword to allow action authors to set the default severity for problem matchers (#203)
## Bugs
- Handle `jq` returns "null" if the field does not exist in create-latest-svc.sh (#478)
- Switch GITHUB_URL to GITHUB_SERVER_URL (#482)
- Fix problem matcher for GHES (#488)
- Fix container action inputs validation warning (#490)
- Fix post step display name (#490)
- Fix worker crash due to exception from evaluating step.env (#490)
- Fixed generated self-hosted runner names to never go over 80 characters (helps Windows customers) (#193)
- Fixed `PrepareActions_DownloadActionFromGraph` test by pointing to an active Actions repository (#205)
## Misc
- N/A
- Updated the publish and download artifact actions to use the v2 endpoint (#188)
- Updated the service name on self-hosted runner name to include repository or organization information (#193)
## Windows x64
We recommend configuring the runner in a root folder of the Windows drive (e.g. "C:\actions-runner"). This will help avoid issues related to service identity folder permissions and long file path restrictions on Windows.
The following snippet needs to be run in `powershell`:
``` powershell
# Create a folder under the drive root
We recommend configuring the runner under "<DRIVE>:\actions-runner". This will help avoid issues related to service identity folder permissions and long file path restrictions on Windows
```
// Create a folder under the drive root
mkdir \actions-runner ; cd \actions-runner
# Download the latest runner package
Invoke-WebRequest -Uri https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-win-x64-<RUNNER_VERSION>.zip -OutFile actions-runner-win-x64-<RUNNER_VERSION>.zip
# Extract the installer
// Download the latest runner package
Invoke-WebRequest -Uri https://githubassets.azureedge.net/runners/<RUNNER_VERSION>/actions-runner-win-x64-<RUNNER_VERSION>.zip -OutFile actions-runner-win-x64-<RUNNER_VERSION>.zip
// Extract the installer
Add-Type -AssemblyName System.IO.Compression.FileSystem ;
[System.IO.Compression.ZipFile]::ExtractToDirectory("$PWD\actions-runner-win-x64-<RUNNER_VERSION>.zip", "$PWD")
[System.IO.Compression.ZipFile]::ExtractToDirectory("$HOME\Downloads\actions-runner-win-x64-<RUNNER_VERSION>.zip", "$PWD")
```
## OSX
``` bash
# Create a folder
// Create a folder
mkdir actions-runner && cd actions-runner
# Download the latest runner package
curl -O -L https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-osx-x64-<RUNNER_VERSION>.tar.gz
# Extract the installer
// Download the latest runner package
curl -O https://githubassets.azureedge.net/runners/<RUNNER_VERSION>/actions-runner-osx-x64-<RUNNER_VERSION>.tar.gz
// Extract the installer
tar xzf ./actions-runner-osx-x64-<RUNNER_VERSION>.tar.gz
```
## Linux x64
``` bash
# Create a folder
// Create a folder
mkdir actions-runner && cd actions-runner
# Download the latest runner package
curl -O -L https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-linux-x64-<RUNNER_VERSION>.tar.gz
# Extract the installer
// Download the latest runner package
curl -O https://githubassets.azureedge.net/runners/<RUNNER_VERSION>/actions-runner-linux-x64-<RUNNER_VERSION>.tar.gz
// Extract the installer
tar xzf ./actions-runner-linux-x64-<RUNNER_VERSION>.tar.gz
```
## Linux arm64 (Pre-release)
``` bash
# Create a folder
// Create a folder
mkdir actions-runner && cd actions-runner
# Download the latest runner package
curl -O -L https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-linux-arm64-<RUNNER_VERSION>.tar.gz
# Extract the installer
// Download the latest runner package
curl -O https://githubassets.azureedge.net/runners/<RUNNER_VERSION>/actions-runner-linux-arm64-<RUNNER_VERSION>.tar.gz
// Extract the installer
tar xzf ./actions-runner-linux-arm64-<RUNNER_VERSION>.tar.gz
```
## Linux arm (Pre-release)
``` bash
# Create a folder
// Create a folder
mkdir actions-runner && cd actions-runner
# Download the latest runner package
curl -O -L https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-linux-arm-<RUNNER_VERSION>.tar.gz
# Extract the installer
// Download the latest runner package
curl -O https://githubassets.azureedge.net/runners/<RUNNER_VERSION>/actions-runner-linux-arm-<RUNNER_VERSION>.tar.gz
// Extract the installer
tar xzf ./actions-runner-linux-arm-<RUNNER_VERSION>.tar.gz
```

View File

@@ -1 +0,0 @@
<Update to ./src/runnerversion when creating release>

View File

@@ -1,4 +0,0 @@
# Sample scripts for self-hosted runners
Here are some examples to work from if you'd like to automate your use of self-hosted runners.
See the docs [here](../docs/automate.md).

View File

@@ -1,147 +0,0 @@
#!/bin/bash
set -e
#
# Downloads latest releases (not pre-release) runner
# Configures as a service
#
# Examples:
# RUNNER_CFG_PAT=<yourPAT> ./create-latest-svc.sh myuser/myrepo my.ghe.deployment.net
# RUNNER_CFG_PAT=<yourPAT> ./create-latest-svc.sh myorg my.ghe.deployment.net
#
# Usage:
# export RUNNER_CFG_PAT=<yourPAT>
# ./create-latest-svc scope [ghe_domain] [name] [user]
#
# scope required repo (:owner/:repo) or org (:organization)
# ghe_domain optional the fully qualified domain name of your GitHub Enterprise Server deployment
# name optional defaults to hostname
# user optional user svc will run as. defaults to current
#
# Notes:
# PATS over envvars are more secure
# Should be used on VMs and not containers
# Works on OSX and Linux
# Assumes x64 arch
#
runner_scope=${1}
ghe_hostname=${2}
runner_name=${3:-$(hostname)}
svc_user=${4:-$USER}
echo "Configuring runner @ ${runner_scope}"
sudo echo
#---------------------------------------
# Validate Environment
#---------------------------------------
runner_plat=linux
[ ! -z "$(which sw_vers)" ] && runner_plat=osx;
function fatal()
{
echo "error: $1" >&2
exit 1
}
if [ -z "${runner_scope}" ]; then fatal "supply scope as argument 1"; fi
if [ -z "${RUNNER_CFG_PAT}" ]; then fatal "RUNNER_CFG_PAT must be set before calling"; fi
which curl || fatal "curl required. Please install in PATH with apt-get, brew, etc"
which jq || fatal "jq required. Please install in PATH with apt-get, brew, etc"
# bail early if there's already a runner there. also sudo early
if [ -d ./runner ]; then
fatal "Runner already exists. Use a different directory or delete ./runner"
fi
sudo -u ${svc_user} mkdir runner
# TODO: validate not in a container
# TODO: validate systemd or osx svc installer
#--------------------------------------
# Get a config token
#--------------------------------------
echo
echo "Generating a registration token..."
base_api_url="https://api.github.com"
if [ -n "${ghe_hostname}" ]; then
base_api_url="https://${ghe_hostname}/api/v3"
fi
# if the scope has a slash, it's a repo runner
orgs_or_repos="orgs"
if [[ "$runner_scope" == *\/* ]]; then
orgs_or_repos="repos"
fi
export RUNNER_TOKEN=$(curl -s -X POST ${base_api_url}/${orgs_or_repos}/${runner_scope}/actions/runners/registration-token -H "accept: application/vnd.github.everest-preview+json" -H "authorization: token ${RUNNER_CFG_PAT}" | jq -r '.token')
if [ "null" == "$RUNNER_TOKEN" -o -z "$RUNNER_TOKEN" ]; then fatal "Failed to get a token"; fi
#---------------------------------------
# Download latest released and extract
#---------------------------------------
echo
echo "Downloading latest runner ..."
# For the GHES Alpha, download the runner from github.com
latest_version_label=$(curl -s -X GET 'https://api.github.com/repos/actions/runner/releases/latest' | jq -r '.tag_name')
latest_version=$(echo ${latest_version_label:1})
runner_file="actions-runner-${runner_plat}-x64-${latest_version}.tar.gz"
if [ -f "${runner_file}" ]; then
echo "${runner_file} exists. skipping download."
else
runner_url="https://github.com/actions/runner/releases/download/${latest_version_label}/${runner_file}"
echo "Downloading ${latest_version_label} for ${runner_plat} ..."
echo $runner_url
curl -O -L ${runner_url}
fi
ls -la *.tar.gz
#---------------------------------------------------
# extract to runner directory in this directory
#---------------------------------------------------
echo
echo "Extracting ${runner_file} to ./runner"
tar xzf "./${runner_file}" -C runner
# export of pass
sudo chown -R $svc_user ./runner
pushd ./runner
#---------------------------------------
# Unattend config
#---------------------------------------
runner_url="https://github.com/${runner_scope}"
if [ -n "${ghe_hostname}" ]; then
runner_url="https://${ghe_hostname}/${runner_scope}"
fi
echo
echo "Configuring ${runner_name} @ $runner_url"
echo "./config.sh --unattended --url $runner_url --token *** --name $runner_name"
sudo -E -u ${svc_user} ./config.sh --unattended --url $runner_url --token $RUNNER_TOKEN --name $runner_name
#---------------------------------------
# Configuring as a service
#---------------------------------------
echo
echo "Configuring as a service ..."
prefix=""
if [ "${runner_plat}" == "linux" ]; then
prefix="sudo "
fi
${prefix}./svc.sh install ${svc_user}
${prefix}./svc.sh start

View File

@@ -1,83 +0,0 @@
#!/bin/bash
set -e
#
# Force deletes a runner from the service
# The caller should have already ensured the runner is gone and/or stopped
#
# Examples:
# RUNNER_CFG_PAT=<yourPAT> ./delete.sh myuser/myrepo myname
# RUNNER_CFG_PAT=<yourPAT> ./delete.sh myorg
#
# Usage:
# export RUNNER_CFG_PAT=<yourPAT>
# ./delete.sh scope name
#
# scope required repo (:owner/:repo) or org (:organization)
# name optional defaults to hostname. name to delete
#
# Notes:
# PATS over envvars are more secure
# Works on OSX and Linux
# Assumes x64 arch
#
runner_scope=${1}
runner_name=${2}
echo "Deleting runner ${runner_name} @ ${runner_scope}"
function fatal()
{
echo "error: $1" >&2
exit 1
}
if [ -z "${runner_scope}" ]; then fatal "supply scope as argument 1"; fi
if [ -z "${runner_name}" ]; then fatal "supply name as argument 2"; fi
if [ -z "${RUNNER_CFG_PAT}" ]; then fatal "RUNNER_CFG_PAT must be set before calling"; fi
which curl || fatal "curl required. Please install in PATH with apt-get, brew, etc"
which jq || fatal "jq required. Please install in PATH with apt-get, brew, etc"
base_api_url="https://api.github.com/orgs"
if [[ "$runner_scope" == *\/* ]]; then
base_api_url="https://api.github.com/repos"
fi
#--------------------------------------
# Ensure offline
#--------------------------------------
runner_status=$(curl -s -X GET ${base_api_url}/${runner_scope}/actions/runners?per_page=100 -H "accept: application/vnd.github.everest-preview+json" -H "authorization: token ${RUNNER_CFG_PAT}" \
| jq -M -j ".runners | .[] | [select(.name == \"${runner_name}\")] | .[0].status")
if [ -z "${runner_status}" ]; then
fatal "Could not find runner with name ${runner_name}"
fi
echo "Status: ${runner_status}"
if [ "${runner_status}" != "offline" ]; then
fatal "Runner should be offline before removing"
fi
#--------------------------------------
# Get id of runner to remove
#--------------------------------------
runner_id=$(curl -s -X GET ${base_api_url}/${runner_scope}/actions/runners?per_page=100 -H "accept: application/vnd.github.everest-preview+json" -H "authorization: token ${RUNNER_CFG_PAT}" \
| jq -M -j ".runners | .[] | [select(.name == \"${runner_name}\")] | .[0].id")
if [ -z "${runner_id}" ]; then
fatal "Could not find runner with name ${runner_name}"
fi
echo "Removing id ${runner_id}"
#--------------------------------------
# Remove the runner
#--------------------------------------
curl -s -X DELETE ${base_api_url}/${runner_scope}/actions/runners/${runner_id} -H "authorization: token ${RUNNER_CFG_PAT}"
echo "Done."

View File

@@ -1,76 +0,0 @@
#!/bin/bash
set -e
#
# Removes a runner running as a service
# Must be run on the machine where the service is run
#
# Examples:
# RUNNER_CFG_PAT=<yourPAT> ./remove-svc.sh myuser/myrepo
# RUNNER_CFG_PAT=<yourPAT> ./remove-svc.sh myorg
#
# Usage:
# export RUNNER_CFG_PAT=<yourPAT>
# ./remove-svc scope name
#
# scope required repo (:owner/:repo) or org (:organization)
# name optional defaults to hostname. name to uninstall and remove
#
# Notes:
# PATS over envvars are more secure
# Should be used on VMs and not containers
# Works on OSX and Linux
# Assumes x64 arch
#
runner_scope=${1}
runner_name=${2:-$(hostname)}
echo "Uninstalling runner ${runner_name} @ ${runner_scope}"
sudo echo
function fatal()
{
echo "error: $1" >&2
exit 1
}
if [ -z "${runner_scope}" ]; then fatal "supply scope as argument 1"; fi
if [ -z "${RUNNER_CFG_PAT}" ]; then fatal "RUNNER_CFG_PAT must be set before calling"; fi
which curl || fatal "curl required. Please install in PATH with apt-get, brew, etc"
which jq || fatal "jq required. Please install in PATH with apt-get, brew, etc"
runner_plat=linux
[ ! -z "$(which sw_vers)" ] && runner_plat=osx;
#--------------------------------------
# Get a remove token
#--------------------------------------
echo
echo "Generating a removal token..."
# if the scope has a slash, it's a repo runner
base_api_url="https://api.github.com/orgs"
if [[ "$runner_scope" == *\/* ]]; then
base_api_url="https://api.github.com/repos"
fi
export REMOVE_TOKEN=$(curl -s -X POST ${base_api_url}/${runner_scope}/actions/runners/remove-token -H "accept: application/vnd.github.everest-preview+json" -H "authorization: token ${RUNNER_CFG_PAT}" | jq -r '.token')
if [ -z "$REMOVE_TOKEN" ]; then fatal "Failed to get a token"; fi
#---------------------------------------
# Stop and uninstall the service
#---------------------------------------
echo
echo "Uninstall the service ..."
pushd ./runner
prefix=""
if [ "${runner_plat}" == "linux" ]; then
prefix="sudo "
fi
${prefix}./svc.sh stop
${prefix}./svc.sh uninstall
${prefix}./config.sh remove --token $REMOVE_TOKEN

View File

@@ -46,9 +46,4 @@
<PropertyGroup Condition="'$(Configuration)' == 'Debug'">
<DefineConstants>$(DefineConstants);DEBUG</DefineConstants>
</PropertyGroup>
<!-- Set treat warnings as errors -->
<PropertyGroup>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
</Project>

View File

@@ -172,7 +172,7 @@ get_current_os_name() {
return 0
elif [ "$uname" = "FreeBSD" ]; then
echo "freebsd"
return 0
return 0
elif [ "$uname" = "Linux" ]; then
local linux_platform_name
linux_platform_name="$(get_linux_platform_name)" || { echo "linux" && return 0 ; }
@@ -728,12 +728,11 @@ downloadcurl() {
# Append feed_credential as late as possible before calling curl to avoid logging feed_credential
remote_path="${remote_path}${feed_credential}"
local curl_options="--retry 20 --retry-delay 2 --connect-timeout 15 -sSL -f --create-dirs "
local failed=false
if [ -z "$out_path" ]; then
curl $curl_options "$remote_path" || failed=true
curl --retry 10 -sSL -f --create-dirs "$remote_path" || failed=true
else
curl $curl_options -o "$out_path" "$remote_path" || failed=true
curl --retry 10 -sSL -f --create-dirs -o "$out_path" "$remote_path" || failed=true
fi
if [ "$failed" = true ]; then
say_verbose "Curl download failed"
@@ -749,12 +748,12 @@ downloadwget() {
# Append feed_credential as late as possible before calling wget to avoid logging feed_credential
remote_path="${remote_path}${feed_credential}"
local wget_options="--tries 20 --waitretry 2 --connect-timeout 15 "
local failed=false
if [ -z "$out_path" ]; then
wget -q $wget_options -O - "$remote_path" || failed=true
wget -q --tries 10 -O - "$remote_path" || failed=true
else
wget $wget_options -O "$out_path" "$remote_path" || failed=true
wget --tries 10 -O "$out_path" "$remote_path" || failed=true
fi
if [ "$failed" = true ]; then
say_verbose "Wget download failed"

View File

@@ -1,3 +0,0 @@
dist/
lib/
node_modules/

View File

@@ -1,59 +0,0 @@
{
"plugins": ["jest", "@typescript-eslint"],
"extends": ["plugin:github/es6"],
"parser": "@typescript-eslint/parser",
"parserOptions": {
"ecmaVersion": 9,
"sourceType": "module",
"project": "./tsconfig.json"
},
"rules": {
"eslint-comments/no-use": "off",
"import/no-namespace": "off",
"no-console": "off",
"no-unused-vars": "off",
"@typescript-eslint/no-unused-vars": "error",
"@typescript-eslint/explicit-member-accessibility": ["error", {"accessibility": "no-public"}],
"@typescript-eslint/no-require-imports": "error",
"@typescript-eslint/array-type": "error",
"@typescript-eslint/await-thenable": "error",
"@typescript-eslint/ban-ts-ignore": "error",
"camelcase": "off",
"@typescript-eslint/camelcase": "error",
"@typescript-eslint/class-name-casing": "error",
"@typescript-eslint/explicit-function-return-type": ["error", {"allowExpressions": true}],
"@typescript-eslint/func-call-spacing": ["error", "never"],
"@typescript-eslint/generic-type-naming": ["error", "^[A-Z][A-Za-z]*$"],
"@typescript-eslint/no-array-constructor": "error",
"@typescript-eslint/no-empty-interface": "error",
"@typescript-eslint/no-explicit-any": "error",
"@typescript-eslint/no-extraneous-class": "error",
"@typescript-eslint/no-for-in-array": "error",
"@typescript-eslint/no-inferrable-types": "error",
"@typescript-eslint/no-misused-new": "error",
"@typescript-eslint/no-namespace": "error",
"@typescript-eslint/no-non-null-assertion": "warn",
"@typescript-eslint/no-object-literal-type-assertion": "error",
"@typescript-eslint/no-unnecessary-qualifier": "error",
"@typescript-eslint/no-unnecessary-type-assertion": "error",
"@typescript-eslint/no-useless-constructor": "error",
"@typescript-eslint/no-var-requires": "error",
"@typescript-eslint/prefer-for-of": "warn",
"@typescript-eslint/prefer-function-type": "warn",
"@typescript-eslint/prefer-includes": "error",
"@typescript-eslint/prefer-interface": "error",
"@typescript-eslint/prefer-string-starts-ends-with": "error",
"@typescript-eslint/promise-function-async": "error",
"@typescript-eslint/require-array-sort-compare": "error",
"@typescript-eslint/restrict-plus-operands": "error",
"semi": "off",
"@typescript-eslint/semi": ["error", "never"],
"@typescript-eslint/type-annotation-spacing": "error",
"@typescript-eslint/unbound-method": "error"
},
"env": {
"node": true,
"es6": true,
"jest/globals": true
}
}

View File

@@ -1,3 +0,0 @@
dist/
lib/
node_modules/

View File

@@ -1,11 +0,0 @@
{
"printWidth": 80,
"tabWidth": 2,
"useTabs": false,
"semi": false,
"singleQuote": true,
"trailingComma": "none",
"bracketSpacing": false,
"arrowParens": "avoid",
"parser": "typescript"
}

View File

@@ -1 +0,0 @@
To update hashFiles under `Misc/layoutbin` run `npm install && npm run all`

File diff suppressed because it is too large

View File

@@ -1,35 +0,0 @@
{
"name": "hashFiles",
"version": "1.0.0",
"description": "GitHub Actions HashFiles() expression function",
"main": "lib/hashFiles.js",
"scripts": {
"build": "tsc",
"format": "prettier --write **/*.ts",
"format-check": "prettier --check **/*.ts",
"lint": "eslint src/**/*.ts",
"pack": "ncc build -o ../../layoutbin/hashFiles",
"all": "npm run build && npm run format && npm run lint && npm run pack"
},
"repository": {
"type": "git",
"url": "git+https://github.com/actions/runner.git"
},
"keywords": [
"actions"
],
"author": "GitHub Actions",
"license": "MIT",
"dependencies": {
"@actions/glob": "^0.1.0"
},
"devDependencies": {
"@types/node": "^12.7.12",
"@typescript-eslint/parser": "^2.8.0",
"@zeit/ncc": "^0.20.5",
"eslint": "^6.8.0",
"eslint-plugin-github": "^2.0.0",
"prettier": "^1.19.1",
"typescript": "^3.6.4"
}
}

View File

@@ -1,55 +0,0 @@
import * as glob from '@actions/glob'
import * as crypto from 'crypto'
import * as fs from 'fs'
import * as stream from 'stream'
import * as util from 'util'
import * as path from 'path'
async function run(): Promise<void> {
// arg0 -> node
// arg1 -> hashFiles.js
// env[followSymbolicLinks] = true/null
// env[patterns] -> glob patterns
let followSymbolicLinks = false
const matchPatterns = process.env.patterns || ''
if (process.env.followSymbolicLinks === 'true') {
console.log('Follow symbolic links')
followSymbolicLinks = true
}
console.log(`Match Pattern: ${matchPatterns}`)
let hasMatch = false
const githubWorkspace = process.cwd()
const result = crypto.createHash('sha256')
let count = 0
const globber = await glob.create(matchPatterns, {followSymbolicLinks})
for await (const file of globber.globGenerator()) {
console.log(file)
if (!file.startsWith(`${githubWorkspace}${path.sep}`)) {
console.log(`Ignore '${file}' since it is not under GITHUB_WORKSPACE.`)
continue
}
if (fs.statSync(file).isDirectory()) {
console.log(`Skip directory '${file}'.`)
continue
}
const hash = crypto.createHash('sha256')
const pipeline = util.promisify(stream.pipeline)
await pipeline(fs.createReadStream(file), hash)
result.write(hash.digest())
count++
if (!hasMatch) {
hasMatch = true
}
}
result.end()
if (hasMatch) {
console.log(`Find ${count} files to hash.`)
console.error(`__OUTPUT__${result.digest('hex')}__OUTPUT__`)
} else {
console.error(`__OUTPUT____OUTPUT__`)
}
}
run()

View File

@@ -1,12 +0,0 @@
{
"compilerOptions": {
"target": "es6", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019' or 'ESNEXT'. */
"module": "commonjs", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', or 'ESNext'. */
"outDir": "./lib", /* Redirect output structure to the directory. */
"rootDir": "./src", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */
"strict": true, /* Enable all strict type-checking options. */
"noImplicitAny": true, /* Raise error on expressions and declarations with an implied 'any' type. */
"esModuleInterop": true /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */
},
"exclude": ["node_modules", "**/*.test.ts"]
}

View File

@@ -1,7 +1,6 @@
#!/bin/bash
SVC_NAME="{{SvcNameVar}}"
SVC_NAME=${SVC_NAME// /_}
SVC_DESCRIPTION="{{SvcDescription}}"
user_id=`id -u`

File diff suppressed because it is too large

View File

@@ -9,7 +9,7 @@ fi
# Determine OS type
# Debian based OS (Debian, Ubuntu, Linux Mint) has /etc/debian_version
# Fedora based OS (Fedora, Red Hat Enterprise Linux, CentOS, Oracle Linux 7) has /etc/redhat-release
# Fedora based OS (Fedora, Redhat, Centos, Oracle Linux 7) has /etc/redhat-release
# SUSE based OS (OpenSUSE, SUSE Enterprise) has ID_LIKE=suse in /etc/os-release
function print_errormessage()
@@ -116,12 +116,12 @@ then
elif [ -e /etc/redhat-release ]
then
echo "The current OS is Fedora based"
echo "--Fedora/RHEL/CentOS Version--"
echo "--------Redhat Version--------"
cat /etc/redhat-release
echo "------------------------------"
# use dnf on fedora
# use yum on centos and rhel
# use yum on centos and redhat
if [ -e /etc/fedora-release ]
then
command -v dnf
@@ -191,7 +191,7 @@ then
redhatRelease=$(</etc/redhat-release)
if [[ $redhatRelease == "CentOS release 6."* || $redhatRelease == "Red Hat Enterprise Linux Server release 6."* ]]
then
echo "The current OS is Red Hat Enterprise Linux 6 or CentOS 6"
echo "The current OS is Red Hat Enterprise Linux 6 or Centos 6"
# Install known dependencies, as a best effort.
# The remaining dependencies are covered by the GitHub doc that will be shown by `print_rhel6message`

View File

@@ -1,13 +0,0 @@
const { spawn } = require('child_process');
// argv[0] = node
// argv[1] = macos-run-invoker.js
var shell = process.argv[2];
var args = process.argv.slice(3);
console.log(`::debug::macos-run-invoker: ${shell}`);
console.log(`::debug::macos-run-invoker: ${JSON.stringify(args)}`);
var launch = spawn(shell, args, { stdio: 'inherit' });
launch.on('exit', function (code) {
if (code !== 0) {
process.exit(code);
}
});

View File

@@ -1,7 +1,6 @@
#!/bin/bash
SVC_NAME="{{SvcNameVar}}"
SVC_NAME=${SVC_NAME// /_}
SVC_DESCRIPTION="{{SvcDescription}}"
SVC_CMD=$1

View File

@@ -3,7 +3,7 @@
user_id=`id -u`
# we want to snapshot the environment of the config user
if [ $user_id -eq 0 -a -z "$RUNNER_ALLOW_RUNASROOT" ]; then
if [ $user_id -eq 0 -a -z "$AGENT_ALLOW_RUNASROOT" ]; then
echo "Must not run with sudo"
exit 1
fi

View File

@@ -2,7 +2,7 @@
# Validate not sudo
user_id=`id -u`
if [ $user_id -eq 0 -a -z "$RUNNER_ALLOW_RUNASROOT" ]; then
if [ $user_id -eq 0 -a -z "$AGENT_ALLOW_RUNASROOT" ]; then
echo "Must not run interactively with sudo"
exit 1
fi
@@ -26,8 +26,8 @@ if [[ "$1" == "localRun" ]]; then
else
"$DIR"/bin/Runner.Listener run $*
# Return code 4 means the run once runner received an update message.
# Sleep 5 seconds to wait for the update process finish and run the runner again.
# Return code 4 means the run once agent received an update message.
# Sleep 5 seconds to wait for the update process finish and run the agent again.
returnCode=$?
if [[ $returnCode == 4 ]]; then
if [ ! -x "$(command -v sleep)" ]; then

View File

@@ -3,6 +3,8 @@
<packageSources>
<!--To inherit the global NuGet package sources remove the <clear/> line below -->
<clear />
<add key="dotnet-core" value="https://www.myget.org/F/dotnet-core/api/v3/index.json" />
<add key="dotnet-buildtools" value="https://www.myget.org/F/dotnet-buildtools/api/v3/index.json" />
<add key="api.nuget.org" value="https://api.nuget.org/v3/index.json" />
</packageSources>
</configuration>

View File

@@ -9,27 +9,26 @@ namespace GitHub.Runner.Common
{
private static readonly EscapeMapping[] _escapeMappings = new[]
{
new EscapeMapping(token: "%", replacement: "%25"),
new EscapeMapping(token: ";", replacement: "%3B"),
new EscapeMapping(token: "\r", replacement: "%0D"),
new EscapeMapping(token: "\n", replacement: "%0A"),
new EscapeMapping(token: "]", replacement: "%5D"),
new EscapeMapping(token: "%", replacement: "%25"),
};
private static readonly EscapeMapping[] _escapeDataMappings = new[]
{
new EscapeMapping(token: "\r", replacement: "%0D"),
new EscapeMapping(token: "\n", replacement: "%0A"),
new EscapeMapping(token: "%", replacement: "%25"),
};
private static readonly EscapeMapping[] _escapePropertyMappings = new[]
{
new EscapeMapping(token: "%", replacement: "%25"),
new EscapeMapping(token: "\r", replacement: "%0D"),
new EscapeMapping(token: "\n", replacement: "%0A"),
new EscapeMapping(token: ":", replacement: "%3A"),
new EscapeMapping(token: ",", replacement: "%2C"),
new EscapeMapping(token: "%", replacement: "%25"),
};
private readonly Dictionary<string, string> _properties = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);

View File

@@ -0,0 +1,33 @@
using System.Threading;
using System.Threading.Tasks;
namespace GitHub.Runner.Common
{
//Stephen Toub: http://blogs.msdn.com/b/pfxteam/archive/2012/02/11/10266920.aspx
public class AsyncManualResetEvent
{
private volatile TaskCompletionSource<bool> m_tcs = new TaskCompletionSource<bool>();
public Task WaitAsync() { return m_tcs.Task; }
public void Set()
{
var tcs = m_tcs;
Task.Factory.StartNew(s => ((TaskCompletionSource<bool>)s).TrySetResult(true),
tcs, CancellationToken.None, TaskCreationOptions.PreferFairness, TaskScheduler.Default);
tcs.Task.Wait();
}
public void Reset()
{
while (true)
{
var tcs = m_tcs;
if (!tcs.Task.IsCompleted ||
Interlocked.CompareExchange(ref m_tcs, new TaskCompletionSource<bool>(), tcs) == tcs)
return;
}
}
}
}
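A hypothetical usage sketch (not part of the diff) showing the semantics of the class above: WaitAsync completes once Set is called, and Reset swaps in a fresh TaskCompletionSource so later waiters block again. Assumes a surrounding async method.
var gate = new AsyncManualResetEvent();
var waiter = Task.Run(async () =>
{
    await gate.WaitAsync();   // completes after gate.Set()
    Console.WriteLine("released");
});
gate.Set();                   // releases all current (and future) WaitAsync callers
await waiter;
gate.Reset();                 // new waiters block until the next Set()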

View File

@@ -15,9 +15,6 @@ namespace GitHub.Runner.Common
[DataContract]
public sealed class RunnerSettings
{
[DataMember(Name = "IsHostedServer", EmitDefaultValue = false)]
private bool? _isHostedServer;
[DataMember(EmitDefaultValue = false)]
public int AgentId { get; set; }
@@ -45,21 +42,6 @@ namespace GitHub.Runner.Common
[DataMember(EmitDefaultValue = false)]
public string MonitorSocketAddress { get; set; }
[IgnoreDataMember]
public bool IsHostedServer
{
get
{
// Old runners do not have this property. Hosted runners likely don't have this property either.
return _isHostedServer ?? true;
}
set
{
_isHostedServer = value;
}
}
/// <summary>
// Computed property for convenience. Can either return:
// 1. If runner was configured at the repo level, returns something like: "myorg/myrepo"
@@ -87,15 +69,15 @@ namespace GitHub.Runner.Common
return repoOrOrgName;
}
}
}
[OnSerializing]
private void OnSerializing(StreamingContext context)
{
if (_isHostedServer.HasValue && _isHostedServer.Value)
{
_isHostedServer = null;
}
}
[DataContract]
public sealed class RunnerRuntimeOptions
{
#if OS_WINDOWS
[DataMember(EmitDefaultValue = false)]
public bool GitUseSecureChannel { get; set; }
#endif
}
[ServiceLocator(Default = typeof(ConfigurationStore))]
@@ -105,13 +87,14 @@ namespace GitHub.Runner.Common
bool IsServiceConfigured();
bool HasCredentials();
CredentialData GetCredentials();
CredentialData GetMigratedCredentials();
RunnerSettings GetSettings();
void SaveCredential(CredentialData credential);
void SaveMigratedCredential(CredentialData credential);
void SaveSettings(RunnerSettings settings);
void DeleteCredential();
void DeleteSettings();
RunnerRuntimeOptions GetRunnerRuntimeOptions();
void SaveRunnerRuntimeOptions(RunnerRuntimeOptions options);
void DeleteRunnerRuntimeOptions();
}
public sealed class ConfigurationStore : RunnerService, IConfigurationStore
@@ -119,12 +102,12 @@ namespace GitHub.Runner.Common
private string _binPath;
private string _configFilePath;
private string _credFilePath;
private string _migratedCredFilePath;
private string _serviceConfigFilePath;
private string _runtimeOptionsFilePath;
private CredentialData _creds;
private CredentialData _migratedCreds;
private RunnerSettings _settings;
private RunnerRuntimeOptions _runtimeOptions;
public override void Initialize(IHostContext hostContext)
{
@@ -145,11 +128,11 @@ namespace GitHub.Runner.Common
_credFilePath = hostContext.GetConfigFile(WellKnownConfigFile.Credentials);
Trace.Info("CredFilePath: {0}", _credFilePath);
_migratedCredFilePath = hostContext.GetConfigFile(WellKnownConfigFile.MigratedCredentials);
Trace.Info("MigratedCredFilePath: {0}", _migratedCredFilePath);
_serviceConfigFilePath = hostContext.GetConfigFile(WellKnownConfigFile.Service);
Trace.Info("ServiceConfigFilePath: {0}", _serviceConfigFilePath);
_runtimeOptionsFilePath = hostContext.GetConfigFile(WellKnownConfigFile.Options);
Trace.Info("RuntimeOptionsFilePath: {0}", _runtimeOptionsFilePath);
}
public string RootFolder { get; private set; }
@@ -157,7 +140,7 @@ namespace GitHub.Runner.Common
public bool HasCredentials()
{
Trace.Info("HasCredentials()");
bool credsStored = (new FileInfo(_credFilePath)).Exists || (new FileInfo(_migratedCredFilePath)).Exists;
bool credsStored = (new FileInfo(_credFilePath)).Exists;
Trace.Info("stored {0}", credsStored);
return credsStored;
}
@@ -188,16 +171,6 @@ namespace GitHub.Runner.Common
return _creds;
}
public CredentialData GetMigratedCredentials()
{
if (_migratedCreds == null && File.Exists(_migratedCredFilePath))
{
_migratedCreds = IOUtil.LoadObject<CredentialData>(_migratedCredFilePath);
}
return _migratedCreds;
}
public RunnerSettings GetSettings()
{
if (_settings == null)
@@ -232,21 +205,6 @@ namespace GitHub.Runner.Common
File.SetAttributes(_credFilePath, File.GetAttributes(_credFilePath) | FileAttributes.Hidden);
}
public void SaveMigratedCredential(CredentialData credential)
{
Trace.Info("Saving {0} migrated credential @ {1}", credential.Scheme, _migratedCredFilePath);
if (File.Exists(_migratedCredFilePath))
{
// Delete the existing credential file first, since the file is hidden and cannot be overwritten.
Trace.Info("Delete existing runner migrated credential file.");
IOUtil.DeleteFile(_migratedCredFilePath);
}
IOUtil.SaveObject(credential, _migratedCredFilePath);
Trace.Info("Migrated Credentials Saved.");
File.SetAttributes(_migratedCredFilePath, File.GetAttributes(_migratedCredFilePath) | FileAttributes.Hidden);
}
public void SaveSettings(RunnerSettings settings)
{
Trace.Info("Saving runner settings.");
@@ -265,12 +223,41 @@ namespace GitHub.Runner.Common
public void DeleteCredential()
{
IOUtil.Delete(_credFilePath, default(CancellationToken));
IOUtil.Delete(_migratedCredFilePath, default(CancellationToken));
}
public void DeleteSettings()
{
IOUtil.Delete(_configFilePath, default(CancellationToken));
}
public RunnerRuntimeOptions GetRunnerRuntimeOptions()
{
if (_runtimeOptions == null && File.Exists(_runtimeOptionsFilePath))
{
_runtimeOptions = IOUtil.LoadObject<RunnerRuntimeOptions>(_runtimeOptionsFilePath);
}
return _runtimeOptions;
}
public void SaveRunnerRuntimeOptions(RunnerRuntimeOptions options)
{
Trace.Info("Saving runtime options.");
if (File.Exists(_runtimeOptionsFilePath))
{
// Delete the existing runtime options file first, since the file is hidden and cannot be overwritten.
Trace.Info("Delete existing runtime options file.");
IOUtil.DeleteFile(_runtimeOptionsFilePath);
}
IOUtil.SaveObject(options, _runtimeOptionsFilePath);
Trace.Info("Options Saved.");
File.SetAttributes(_runtimeOptionsFilePath, File.GetAttributes(_runtimeOptionsFilePath) | FileAttributes.Hidden);
}
public void DeleteRunnerRuntimeOptions()
{
IOUtil.Delete(_runtimeOptionsFilePath, default(CancellationToken));
}
}
}

View File

@@ -19,13 +19,11 @@ namespace GitHub.Runner.Common
{
Runner,
Credentials,
MigratedCredentials,
RSACredentials,
Service,
CredentialStore,
Certificates,
Options,
SetupInfo,
}
public static class Constants
@@ -87,10 +85,13 @@ namespace GitHub.Runner.Common
public static class Args
{
public static readonly string Auth = "auth";
public static readonly string Labels = "labels";
public static readonly string MonitorSocketAddress = "monitorsocketaddress";
public static readonly string Name = "name";
public static readonly string Pool = "pool";
public static readonly string SslCACert = "sslcacert";
public static readonly string SslClientCert = "sslclientcert";
public static readonly string SslClientCertKey = "sslclientcertkey";
public static readonly string SslClientCertArchive = "sslclientcertarchive";
public static readonly string StartupType = "startuptype";
public static readonly string Url = "url";
public static readonly string UserName = "username";
@@ -98,10 +99,14 @@ namespace GitHub.Runner.Common
public static readonly string Work = "work";
// Secret args. Must be added to the "Secrets" getter as well.
public static readonly string Password = "password";
public static readonly string SslClientCertPassword = "sslclientcertpassword";
public static readonly string Token = "token";
public static readonly string WindowsLogonPassword = "windowslogonpassword";
public static string[] Secrets => new[]
{
Password,
SslClientCertPassword,
Token,
WindowsLogonPassword,
};
@@ -120,10 +125,13 @@ namespace GitHub.Runner.Common
public static class Flags
{
public static readonly string Commit = "commit";
public static readonly string GitUseSChannel = "gituseschannel";
public static readonly string Help = "help";
public static readonly string Replace = "replace";
public static readonly string LaunchBrowser = "launchbrowser";
public static readonly string Once = "once";
public static readonly string RunAsService = "runasservice";
public static readonly string SslSkipCertValidation = "sslskipcertvalidation";
public static readonly string Unattended = "unattended";
public static readonly string Version = "version";
}
@@ -137,15 +145,6 @@ namespace GitHub.Runner.Common
public const int RunnerUpdating = 3;
public const int RunOnceRunnerUpdating = 4;
}
public static readonly string InternalTelemetryIssueDataKey = "_internal_telemetry";
public static readonly string WorkerCrash = "WORKER_CRASH";
}
public static class RunnerEvent
{
public static readonly string Register = "register";
public static readonly string Remove = "remove";
}
public static class Pipeline
@@ -174,8 +173,7 @@ namespace GitHub.Runner.Common
public static class Path
{
public static readonly string ActionsDirectory = "_actions";
public static readonly string ActionManifestYmlFile = "action.yml";
public static readonly string ActionManifestYamlFile = "action.yaml";
public static readonly string ActionManifestFile = "action.yml";
public static readonly string BinDirectory = "bin";
public static readonly string DiagDirectory = "_diag";
public static readonly string ExternalsDirectory = "externals";
@@ -202,11 +200,6 @@ namespace GitHub.Runner.Common
public static readonly string StepDebug = "ACTIONS_STEP_DEBUG";
}
public static class Agent
{
public static readonly string ToolsDirectory = "agent.ToolsDirectory";
}
public static class System
{
//

View File

@@ -1,18 +1,19 @@
using System;
using GitHub.Runner.Common.Util;
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Diagnostics;
using System.Diagnostics.Tracing;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Reflection;
using System.Runtime.Loader;
using System.Threading;
using System.Threading.Tasks;
using System.Diagnostics;
using System.Net.Http;
using System.Diagnostics.Tracing;
using GitHub.DistributedTask.Logging;
using System.Net.Http.Headers;
using GitHub.Runner.Sdk;
namespace GitHub.Runner.Common
@@ -23,7 +24,7 @@ namespace GitHub.Runner.Common
CancellationToken RunnerShutdownToken { get; }
ShutdownReason RunnerShutdownReason { get; }
ISecretMasker SecretMasker { get; }
List<ProductInfoHeaderValue> UserAgents { get; }
ProductInfoHeaderValue UserAgent { get; }
RunnerWebProxy WebProxy { get; }
string GetDirectory(WellKnownDirectory directory);
string GetConfigFile(WellKnownConfigFile configFile);
@@ -53,12 +54,12 @@ namespace GitHub.Runner.Common
private readonly ConcurrentDictionary<Type, object> _serviceInstances = new ConcurrentDictionary<Type, object>();
private readonly ConcurrentDictionary<Type, Type> _serviceTypes = new ConcurrentDictionary<Type, Type>();
private readonly ISecretMasker _secretMasker = new SecretMasker();
private readonly List<ProductInfoHeaderValue> _userAgents = new List<ProductInfoHeaderValue>() { new ProductInfoHeaderValue($"GitHubActionsRunner-{BuildConstants.RunnerPackage.PackageName}", BuildConstants.RunnerPackage.Version) };
private readonly ProductInfoHeaderValue _userAgent = new ProductInfoHeaderValue($"GitHubActionsRunner-{BuildConstants.RunnerPackage.PackageName}", BuildConstants.RunnerPackage.Version);
private CancellationTokenSource _runnerShutdownTokenSource = new CancellationTokenSource();
private object _perfLock = new object();
private Tracing _trace;
private Tracing _actionsHttpTrace;
private Tracing _netcoreHttpTrace;
private Tracing _vssTrace;
private Tracing _httpTrace;
private ITraceManager _traceManager;
private AssemblyLoadContext _loadContext;
private IDisposable _httpTraceSubscription;
@@ -71,7 +72,7 @@ namespace GitHub.Runner.Common
public CancellationToken RunnerShutdownToken => _runnerShutdownTokenSource.Token;
public ShutdownReason RunnerShutdownReason { get; private set; }
public ISecretMasker SecretMasker => _secretMasker;
public List<ProductInfoHeaderValue> UserAgents => _userAgents;
public ProductInfoHeaderValue UserAgent => _userAgent;
public RunnerWebProxy WebProxy => _webProxy;
public HostContext(string hostType, string logFile = null)
{
@@ -88,7 +89,6 @@ namespace GitHub.Runner.Common
this.SecretMasker.AddValueEncoder(ValueEncoders.JsonStringEscape);
this.SecretMasker.AddValueEncoder(ValueEncoders.UriDataEscape);
this.SecretMasker.AddValueEncoder(ValueEncoders.XmlDataEscape);
this.SecretMasker.AddValueEncoder(ValueEncoders.TrimDoubleQuotes);
// Create the trace manager.
if (string.IsNullOrEmpty(logFile))
@@ -117,7 +117,8 @@ namespace GitHub.Runner.Common
}
_trace = GetTrace(nameof(HostContext));
_actionsHttpTrace = GetTrace("GitHubActionsService");
_vssTrace = GetTrace("GitHubActionsRunner"); // VisualStudioService
// Enable Http trace
bool enableHttpTrace;
if (bool.TryParse(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_HTTPTRACE"), out enableHttpTrace) && enableHttpTrace)
@@ -129,7 +130,7 @@ namespace GitHub.Runner.Common
_trace.Warning("** **");
_trace.Warning("*****************************************************************************************");
_netcoreHttpTrace = GetTrace("HttpTrace");
_httpTrace = GetTrace("HttpTrace");
_diagListenerSubscription = DiagnosticListener.AllListeners.Subscribe(this);
}
@@ -189,17 +190,6 @@ namespace GitHub.Runner.Common
{
_trace.Info($"No proxy settings were found based on environmental variables (http_proxy/https_proxy/HTTP_PROXY/HTTPS_PROXY)");
}
var credFile = GetConfigFile(WellKnownConfigFile.Credentials);
if (File.Exists(credFile))
{
var credData = IOUtil.LoadObject<CredentialData>(credFile);
if (credData != null &&
credData.Data.TryGetValue("clientId", out var clientId))
{
_userAgents.Add(new ProductInfoHeaderValue($"RunnerId", clientId));
}
}
}
public string GetDirectory(WellKnownDirectory directory)
@@ -240,9 +230,8 @@ namespace GitHub.Runner.Common
break;
case WellKnownDirectory.Tools:
// TODO: Coalesce to just check RUNNER_TOOL_CACHE when images stabilize
path = Environment.GetEnvironmentVariable("RUNNER_TOOL_CACHE") ?? Environment.GetEnvironmentVariable("RUNNER_TOOLSDIRECTORY") ?? Environment.GetEnvironmentVariable("AGENT_TOOLSDIRECTORY") ?? Environment.GetEnvironmentVariable(Constants.Variables.Agent.ToolsDirectory);
path = Environment.GetEnvironmentVariable("RUNNER_TOOL_CACHE");
if (string.IsNullOrEmpty(path))
{
path = Path.Combine(
@@ -292,12 +281,6 @@ namespace GitHub.Runner.Common
".credentials");
break;
case WellKnownConfigFile.MigratedCredentials:
path = Path.Combine(
GetDirectory(WellKnownDirectory.Root),
".credentials_migrated");
break;
case WellKnownConfigFile.RSACredentials:
path = Path.Combine(
GetDirectory(WellKnownDirectory.Root),
@@ -333,13 +316,6 @@ namespace GitHub.Runner.Common
GetDirectory(WellKnownDirectory.Root),
".options");
break;
case WellKnownConfigFile.SetupInfo:
path = Path.Combine(
GetDirectory(WellKnownDirectory.Root),
".setup_info");
break;
default:
throw new NotSupportedException($"Unexpected well known config file: '{configFile}'");
}
@@ -502,12 +478,12 @@ namespace GitHub.Runner.Common
void IObserver<DiagnosticListener>.OnCompleted()
{
_netcoreHttpTrace.Info("DiagListeners finished transmitting data.");
_httpTrace.Info("DiagListeners finished transmitting data.");
}
void IObserver<DiagnosticListener>.OnError(Exception error)
{
_netcoreHttpTrace.Error(error);
_httpTrace.Error(error);
}
void IObserver<DiagnosticListener>.OnNext(DiagnosticListener listener)
@@ -520,22 +496,22 @@ namespace GitHub.Runner.Common
void IObserver<KeyValuePair<string, object>>.OnCompleted()
{
_netcoreHttpTrace.Info("HttpHandlerDiagnosticListener finished transmitting data.");
_httpTrace.Info("HttpHandlerDiagnosticListener finished transmitting data.");
}
void IObserver<KeyValuePair<string, object>>.OnError(Exception error)
{
_netcoreHttpTrace.Error(error);
_httpTrace.Error(error);
}
void IObserver<KeyValuePair<string, object>>.OnNext(KeyValuePair<string, object> value)
{
_netcoreHttpTrace.Info($"Trace {value.Key} event:{Environment.NewLine}{value.Value.ToString()}");
_httpTrace.Info($"Trace {value.Key} event:{Environment.NewLine}{value.Value.ToString()}");
}
protected override void OnEventSourceCreated(EventSource source)
{
if (source.Name.Equals("GitHub-Actions-Http"))
if (source.Name.Equals("Microsoft-VSS-Http"))
{
EnableEvents(source, EventLevel.Verbose);
}
@@ -575,24 +551,24 @@ namespace GitHub.Runner.Common
{
case EventLevel.Critical:
case EventLevel.Error:
_actionsHttpTrace.Error(message);
_vssTrace.Error(message);
break;
case EventLevel.Warning:
_actionsHttpTrace.Warning(message);
_vssTrace.Warning(message);
break;
case EventLevel.Informational:
_actionsHttpTrace.Info(message);
_vssTrace.Info(message);
break;
default:
_actionsHttpTrace.Verbose(message);
_vssTrace.Verbose(message);
break;
}
}
catch (Exception ex)
{
_actionsHttpTrace.Error(ex);
_actionsHttpTrace.Info(eventData.Message);
_actionsHttpTrace.Info(string.Join(", ", eventData.Payload?.ToArray() ?? new string[0]));
_vssTrace.Error(ex);
_vssTrace.Info(eventData.Message);
_vssTrace.Info(string.Join(", ", eventData.Payload?.ToArray() ?? new string[0]));
}
}
@@ -614,8 +590,9 @@ namespace GitHub.Runner.Common
{
public static HttpClientHandler CreateHttpClientHandler(this IHostContext context)
{
var handlerFactory = context.GetService<IHttpClientHandlerFactory>();
return handlerFactory.CreateClientHandler(context.WebProxy);
HttpClientHandler clientHandler = new HttpClientHandler();
clientHandler.Proxy = context.WebProxy;
return clientHandler;
}
}
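A hypothetical usage sketch of the extension method above (the hostContext variable is assumed to be an IHostContext); every HttpClient built through this helper picks up the runner's configured web proxy.
using (HttpClientHandler handler = hostContext.CreateHttpClientHandler())
using (var client = new HttpClient(handler))
{
    // Requests sent through 'client' honor hostContext.WebProxy.
}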

View File

@@ -1,19 +0,0 @@
using System.Net.Http;
using GitHub.Runner.Sdk;
namespace GitHub.Runner.Common
{
[ServiceLocator(Default = typeof(HttpClientHandlerFactory))]
public interface IHttpClientHandlerFactory : IRunnerService
{
HttpClientHandler CreateClientHandler(RunnerWebProxy webProxy);
}
public class HttpClientHandlerFactory : RunnerService, IHttpClientHandlerFactory
{
public HttpClientHandler CreateClientHandler(RunnerWebProxy webProxy)
{
return new HttpClientHandler() { Proxy = webProxy };
}
}
}

View File

@@ -24,6 +24,7 @@ namespace GitHub.Runner.Common
private Guid _timelineId;
private Guid _timelineRecordId;
private string _pageId;
private FileStream _pageData;
private StreamWriter _pageWriter;
private int _byteCount;
@@ -39,6 +40,7 @@ namespace GitHub.Runner.Common
{
base.Initialize(hostContext);
_totalLines = 0;
_pageId = Guid.NewGuid().ToString();
_pagesFolder = Path.Combine(hostContext.GetDirectory(WellKnownDirectory.Diag), PagingFolder);
_jobServerQueue = HostContext.GetService<IJobServerQueue>();
Directory.CreateDirectory(_pagesFolder);
@@ -100,7 +102,7 @@ namespace GitHub.Runner.Common
{
EndPage();
_byteCount = 0;
_dataFileName = Path.Combine(_pagesFolder, $"{_timelineId}_{_timelineRecordId}_{++_pageCount}.log");
_dataFileName = Path.Combine(_pagesFolder, $"{_pageId}_{++_pageCount}.log");
_pageData = new FileStream(_dataFileName, FileMode.CreateNew);
_pageWriter = new StreamWriter(_pageData, System.Text.Encoding.UTF8);
}

View File

@@ -1,7 +1,7 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>netcoreapp3.1</TargetFramework>
<TargetFramework>netcoreapp3.0</TargetFramework>
<OutputType>Library</OutputType>
<RuntimeIdentifiers>win-x64;win-x86;linux-x64;linux-arm64;linux-arm;osx-x64</RuntimeIdentifiers>
<TargetLatestRuntimePatch>true</TargetLatestRuntimePatch>

View File

@@ -0,0 +1,231 @@
using System;
using GitHub.Runner.Common.Util;
using System.IO;
using System.Runtime.Serialization;
using GitHub.Services.Common;
using System.Security.Cryptography.X509Certificates;
using System.Net;
using System.Net.Security;
using System.Net.Http;
using GitHub.Services.WebApi;
using GitHub.Runner.Sdk;
namespace GitHub.Runner.Common
{
[ServiceLocator(Default = typeof(RunnerCertificateManager))]
public interface IRunnerCertificateManager : IRunnerService
{
bool SkipServerCertificateValidation { get; }
string CACertificateFile { get; }
string ClientCertificateFile { get; }
string ClientCertificatePrivateKeyFile { get; }
string ClientCertificateArchiveFile { get; }
string ClientCertificatePassword { get; }
IVssClientCertificateManager VssClientCertificateManager { get; }
}
public class RunnerCertificateManager : RunnerService, IRunnerCertificateManager
{
private RunnerClientCertificateManager _runnerClientCertificateManager = new RunnerClientCertificateManager();
public bool SkipServerCertificateValidation { private set; get; }
public string CACertificateFile { private set; get; }
public string ClientCertificateFile { private set; get; }
public string ClientCertificatePrivateKeyFile { private set; get; }
public string ClientCertificateArchiveFile { private set; get; }
public string ClientCertificatePassword { private set; get; }
public IVssClientCertificateManager VssClientCertificateManager => _runnerClientCertificateManager;
public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);
LoadCertificateSettings();
}
// This should only be called from config
public void SetupCertificate(bool skipCertValidation, string caCert, string clientCert, string clientCertPrivateKey, string clientCertArchive, string clientCertPassword)
{
Trace.Info("Setup runner certificate setting base on configuration inputs.");
if (skipCertValidation)
{
Trace.Info("Ignore SSL server certificate validation error");
SkipServerCertificateValidation = true;
VssClientHttpRequestSettings.Default.ServerCertificateValidationCallback = HttpClientHandler.DangerousAcceptAnyServerCertificateValidator;
}
if (!string.IsNullOrEmpty(caCert))
{
ArgUtil.File(caCert, nameof(caCert));
Trace.Info($"Self-Signed CA '{caCert}'");
}
if (!string.IsNullOrEmpty(clientCert))
{
ArgUtil.File(clientCert, nameof(clientCert));
ArgUtil.File(clientCertPrivateKey, nameof(clientCertPrivateKey));
ArgUtil.File(clientCertArchive, nameof(clientCertArchive));
Trace.Info($"Client cert '{clientCert}'");
Trace.Info($"Client cert private key '{clientCertPrivateKey}'");
Trace.Info($"Client cert archive '{clientCertArchive}'");
}
CACertificateFile = caCert;
ClientCertificateFile = clientCert;
ClientCertificatePrivateKeyFile = clientCertPrivateKey;
ClientCertificateArchiveFile = clientCertArchive;
ClientCertificatePassword = clientCertPassword;
_runnerClientCertificateManager.AddClientCertificate(ClientCertificateArchiveFile, ClientCertificatePassword);
}
// This should only be called from config
public void SaveCertificateSetting()
{
string certSettingFile = HostContext.GetConfigFile(WellKnownConfigFile.Certificates);
IOUtil.DeleteFile(certSettingFile);
var setting = new RunnerCertificateSetting();
if (SkipServerCertificateValidation)
{
Trace.Info($"Store Skip ServerCertificateValidation setting to '{certSettingFile}'");
setting.SkipServerCertValidation = true;
}
if (!string.IsNullOrEmpty(CACertificateFile))
{
Trace.Info($"Store CA cert setting to '{certSettingFile}'");
setting.CACert = CACertificateFile;
}
if (!string.IsNullOrEmpty(ClientCertificateFile) &&
!string.IsNullOrEmpty(ClientCertificatePrivateKeyFile) &&
!string.IsNullOrEmpty(ClientCertificateArchiveFile))
{
Trace.Info($"Store client cert settings to '{certSettingFile}'");
setting.ClientCert = ClientCertificateFile;
setting.ClientCertPrivatekey = ClientCertificatePrivateKeyFile;
setting.ClientCertArchive = ClientCertificateArchiveFile;
if (!string.IsNullOrEmpty(ClientCertificatePassword))
{
string lookupKey = Guid.NewGuid().ToString("D").ToUpperInvariant();
Trace.Info($"Store client cert private key password with lookup key {lookupKey}");
var credStore = HostContext.GetService<IRunnerCredentialStore>();
credStore.Write($"GITHUB_ACTIONS_RUNNER_CLIENT_CERT_PASSWORD_{lookupKey}", "GitHub", ClientCertificatePassword);
setting.ClientCertPasswordLookupKey = lookupKey;
}
}
if (SkipServerCertificateValidation ||
!string.IsNullOrEmpty(CACertificateFile) ||
!string.IsNullOrEmpty(ClientCertificateFile))
{
IOUtil.SaveObject(setting, certSettingFile);
File.SetAttributes(certSettingFile, File.GetAttributes(certSettingFile) | FileAttributes.Hidden);
}
}
// This should only be called from unconfig
public void DeleteCertificateSetting()
{
string certSettingFile = HostContext.GetConfigFile(WellKnownConfigFile.Certificates);
if (File.Exists(certSettingFile))
{
Trace.Info($"Load runner certificate setting from '{certSettingFile}'");
var certSetting = IOUtil.LoadObject<RunnerCertificateSetting>(certSettingFile);
if (certSetting != null && !string.IsNullOrEmpty(certSetting.ClientCertPasswordLookupKey))
{
Trace.Info("Delete client cert private key password from credential store.");
var credStore = HostContext.GetService<IRunnerCredentialStore>();
credStore.Delete($"GITHUB_ACTIONS_RUNNER_CLIENT_CERT_PASSWORD_{certSetting.ClientCertPasswordLookupKey}");
}
Trace.Info($"Delete cert setting file: {certSettingFile}");
IOUtil.DeleteFile(certSettingFile);
}
}
public void LoadCertificateSettings()
{
string certSettingFile = HostContext.GetConfigFile(WellKnownConfigFile.Certificates);
if (File.Exists(certSettingFile))
{
Trace.Info($"Load runner certificate setting from '{certSettingFile}'");
var certSetting = IOUtil.LoadObject<RunnerCertificateSetting>(certSettingFile);
ArgUtil.NotNull(certSetting, nameof(RunnerCertificateSetting));
if (certSetting.SkipServerCertValidation)
{
Trace.Info("Ignore SSL server certificate validation error");
SkipServerCertificateValidation = true;
VssClientHttpRequestSettings.Default.ServerCertificateValidationCallback = HttpClientHandler.DangerousAcceptAnyServerCertificateValidator;
}
if (!string.IsNullOrEmpty(certSetting.CACert))
{
// make sure all settings files exist
ArgUtil.File(certSetting.CACert, nameof(certSetting.CACert));
Trace.Info($"CA '{certSetting.CACert}'");
CACertificateFile = certSetting.CACert;
}
if (!string.IsNullOrEmpty(certSetting.ClientCert))
{
// make sure all settings files exist
ArgUtil.File(certSetting.ClientCert, nameof(certSetting.ClientCert));
ArgUtil.File(certSetting.ClientCertPrivatekey, nameof(certSetting.ClientCertPrivatekey));
ArgUtil.File(certSetting.ClientCertArchive, nameof(certSetting.ClientCertArchive));
Trace.Info($"Client cert '{certSetting.ClientCert}'");
Trace.Info($"Client cert private key '{certSetting.ClientCertPrivatekey}'");
Trace.Info($"Client cert archive '{certSetting.ClientCertArchive}'");
ClientCertificateFile = certSetting.ClientCert;
ClientCertificatePrivateKeyFile = certSetting.ClientCertPrivatekey;
ClientCertificateArchiveFile = certSetting.ClientCertArchive;
if (!string.IsNullOrEmpty(certSetting.ClientCertPasswordLookupKey))
{
var credStore = HostContext.GetService<IRunnerCredentialStore>();
ClientCertificatePassword = credStore.Read($"GITHUB_ACTIONS_RUNNER_CLIENT_CERT_PASSWORD_{certSetting.ClientCertPasswordLookupKey}").Password;
HostContext.SecretMasker.AddValue(ClientCertificatePassword);
}
_runnerClientCertificateManager.AddClientCertificate(ClientCertificateArchiveFile, ClientCertificatePassword);
}
}
else
{
Trace.Info("No certificate setting found.");
}
}
}
[DataContract]
internal class RunnerCertificateSetting
{
[DataMember]
public bool SkipServerCertValidation { get; set; }
[DataMember]
public string CACert { get; set; }
[DataMember]
public string ClientCert { get; set; }
[DataMember]
public string ClientCertPrivatekey { get; set; }
[DataMember]
public string ClientCertArchive { get; set; }
[DataMember]
public string ClientCertPasswordLookupKey { get; set; }
}
}

View File

@@ -0,0 +1,948 @@
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.ComponentModel;
using System.Linq;
using System.Net;
using System.Runtime.InteropServices;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading;
using GitHub.Runner.Common.Util;
using Newtonsoft.Json;
using System.IO;
using System.Runtime.Serialization;
using System.Security.Cryptography;
using GitHub.Runner.Sdk;
namespace GitHub.Runner.Common
{
// The purpose of this class is to store the user's credential during runner configuration and retrieve it at runtime.
#if OS_WINDOWS
[ServiceLocator(Default = typeof(WindowsRunnerCredentialStore))]
#elif OS_OSX
[ServiceLocator(Default = typeof(MacOSRunnerCredentialStore))]
#else
[ServiceLocator(Default = typeof(LinuxRunnerCredentialStore))]
#endif
public interface IRunnerCredentialStore : IRunnerService
{
NetworkCredential Write(string target, string username, string password);
// throw exception when target not found from cred store
NetworkCredential Read(string target);
// throw exception when target not found from cred store
void Delete(string target);
}
#if OS_WINDOWS
// Windows credential store is per user.
// This is a limitation when the user configures the runner to run as a Windows service and the current login account differs from the service's run-as account.
// Ex: I log in to the box as domain\admin and configure the runner as a Windows service running as domain\buildserver;
// domain\buildserver won't read the stored credential from domain\admin's windows credential store.
// To workaround this limitation.
// Anytime we try to save a credential:
// 1. store it into current user's windows credential store
// 2. use DP-API do a machine level encrypt and store the encrypted content on disk.
// At the first time we try to read the credential:
// 1. read from the current user's windows credential store; delete the DP-API encrypted backup content on disk if the windows credential store read succeeds.
// 2. if the credential is not found in the current user's windows credential store, read from the DP-API encrypted backup content on disk,
// write the credential back to the current user's windows credential store and delete the backup on disk.
public sealed class WindowsRunnerCredentialStore : RunnerService, IRunnerCredentialStore
{
private string _credStoreFile;
private Dictionary<string, string> _credStore;
public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);
_credStoreFile = hostContext.GetConfigFile(WellKnownConfigFile.CredentialStore);
if (File.Exists(_credStoreFile))
{
_credStore = IOUtil.LoadObject<Dictionary<string, string>>(_credStoreFile);
}
else
{
_credStore = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
}
}
public NetworkCredential Write(string target, string username, string password)
{
Trace.Entering();
ArgUtil.NotNullOrEmpty(target, nameof(target));
ArgUtil.NotNullOrEmpty(username, nameof(username));
ArgUtil.NotNullOrEmpty(password, nameof(password));
// save to .credential_store file first, then Windows credential store
string usernameBase64 = Convert.ToBase64String(Encoding.UTF8.GetBytes(username));
string passwordBase64 = Convert.ToBase64String(Encoding.UTF8.GetBytes(password));
// Base64Username:Base64Password -> DP-API machine level encrypt -> Base64Encoding
string encryptedUsernamePassword = Convert.ToBase64String(ProtectedData.Protect(Encoding.UTF8.GetBytes($"{usernameBase64}:{passwordBase64}"), null, DataProtectionScope.LocalMachine));
Trace.Info($"Credentials for '{target}' written to credential store file.");
_credStore[target] = encryptedUsernamePassword;
// save to .credential_store file
SyncCredentialStoreFile();
// save to Windows Credential Store
return WriteInternal(target, username, password);
}
public NetworkCredential Read(string target)
{
Trace.Entering();
ArgUtil.NotNullOrEmpty(target, nameof(target));
IntPtr credPtr = IntPtr.Zero;
try
{
if (CredRead(target, CredentialType.Generic, 0, out credPtr))
{
Credential credStruct = (Credential)Marshal.PtrToStructure(credPtr, typeof(Credential));
int passwordLength = (int)credStruct.CredentialBlobSize;
string password = passwordLength > 0 ? Marshal.PtrToStringUni(credStruct.CredentialBlob, passwordLength / sizeof(char)) : String.Empty;
string username = Marshal.PtrToStringUni(credStruct.UserName);
Trace.Info($"Credentials for '{target}' read from windows credential store.");
// delete from .credential_store file since we are able to read it from windows credential store
if (_credStore.Remove(target))
{
Trace.Info($"Delete credentials for '{target}' from credential store file.");
SyncCredentialStoreFile();
}
return new NetworkCredential(username, password);
}
else
{
// Can't read from Windows Credential Store, fall back to .credential_store file
if (_credStore.ContainsKey(target) && !string.IsNullOrEmpty(_credStore[target]))
{
Trace.Info($"Credentials for '{target}' read from credential store file.");
// Base64Decode -> DP-API machine level decrypt -> Base64Username:Base64Password -> Base64Decode
string decryptedUsernamePassword = Encoding.UTF8.GetString(ProtectedData.Unprotect(Convert.FromBase64String(_credStore[target]), null, DataProtectionScope.LocalMachine));
string[] credential = decryptedUsernamePassword.Split(':');
if (credential.Length == 2 && !string.IsNullOrEmpty(credential[0]) && !string.IsNullOrEmpty(credential[1]))
{
string username = Encoding.UTF8.GetString(Convert.FromBase64String(credential[0]));
string password = Encoding.UTF8.GetString(Convert.FromBase64String(credential[1]));
// store back to windows credential store for current user
NetworkCredential creds = WriteInternal(target, username, password);
// delete from .credential_store file since we are able to write the credential to windows credential store for current user.
if (_credStore.Remove(target))
{
Trace.Info($"Delete credentials for '{target}' from credential store file.");
SyncCredentialStoreFile();
}
return creds;
}
else
{
throw new ArgumentOutOfRangeException(nameof(decryptedUsernamePassword));
}
}
throw new Win32Exception(Marshal.GetLastWin32Error(), $"CredRead threw an error for '{target}'");
}
}
finally
{
if (credPtr != IntPtr.Zero)
{
CredFree(credPtr);
}
}
}
public void Delete(string target)
{
Trace.Entering();
ArgUtil.NotNullOrEmpty(target, nameof(target));
// remove from .credential_store file
if (_credStore.Remove(target))
{
Trace.Info($"Delete credentials for '{target}' from credential store file.");
SyncCredentialStoreFile();
}
// remove from windows credential store
if (!CredDelete(target, CredentialType.Generic, 0))
{
throw new Win32Exception(Marshal.GetLastWin32Error(), $"Failed to delete credentials for {target}");
}
else
{
Trace.Info($"Credentials for '{target}' deleted from windows credential store.");
}
}
private NetworkCredential WriteInternal(string target, string username, string password)
{
// save to Windows Credential Store
Credential credential = new Credential()
{
Type = CredentialType.Generic,
Persist = (UInt32)CredentialPersist.LocalMachine,
TargetName = Marshal.StringToCoTaskMemUni(target),
UserName = Marshal.StringToCoTaskMemUni(username),
CredentialBlob = Marshal.StringToCoTaskMemUni(password),
CredentialBlobSize = (UInt32)Encoding.Unicode.GetByteCount(password),
AttributeCount = 0,
Comment = IntPtr.Zero,
Attributes = IntPtr.Zero,
TargetAlias = IntPtr.Zero
};
try
{
if (CredWrite(ref credential, 0))
{
Trace.Info($"Credentials for '{target}' written to windows credential store.");
return new NetworkCredential(username, password);
}
else
{
int error = Marshal.GetLastWin32Error();
throw new Win32Exception(error, "Failed to write credentials");
}
}
finally
{
if (credential.CredentialBlob != IntPtr.Zero)
{
Marshal.FreeCoTaskMem(credential.CredentialBlob);
}
if (credential.TargetName != IntPtr.Zero)
{
Marshal.FreeCoTaskMem(credential.TargetName);
}
if (credential.UserName != IntPtr.Zero)
{
Marshal.FreeCoTaskMem(credential.UserName);
}
}
}
private void SyncCredentialStoreFile()
{
Trace.Info("Sync in-memory credential store with credential store file.");
// delete the cred store file first anyway, since it's a readonly file.
IOUtil.DeleteFile(_credStoreFile);
// delete cred store file when all creds gone
if (_credStore.Count == 0)
{
return;
}
else
{
IOUtil.SaveObject(_credStore, _credStoreFile);
File.SetAttributes(_credStoreFile, File.GetAttributes(_credStoreFile) | FileAttributes.Hidden);
}
}
[DllImport("Advapi32.dll", EntryPoint = "CredDeleteW", CharSet = CharSet.Unicode, SetLastError = true)]
internal static extern bool CredDelete(string target, CredentialType type, int reservedFlag);
[DllImport("Advapi32.dll", EntryPoint = "CredReadW", CharSet = CharSet.Unicode, SetLastError = true)]
internal static extern bool CredRead(string target, CredentialType type, int reservedFlag, out IntPtr CredentialPtr);
[DllImport("Advapi32.dll", EntryPoint = "CredWriteW", CharSet = CharSet.Unicode, SetLastError = true)]
internal static extern bool CredWrite([In] ref Credential userCredential, [In] UInt32 flags);
[DllImport("Advapi32.dll", EntryPoint = "CredFree", SetLastError = true)]
internal static extern bool CredFree([In] IntPtr cred);
internal enum CredentialPersist : UInt32
{
Session = 0x01,
LocalMachine = 0x02
}
internal enum CredentialType : uint
{
Generic = 0x01,
DomainPassword = 0x02,
DomainCertificate = 0x03
}
[StructLayout(LayoutKind.Sequential, CharSet = CharSet.Unicode)]
internal struct Credential
{
public UInt32 Flags;
public CredentialType Type;
public IntPtr TargetName;
public IntPtr Comment;
public System.Runtime.InteropServices.ComTypes.FILETIME LastWritten;
public UInt32 CredentialBlobSize;
public IntPtr CredentialBlob;
public UInt32 Persist;
public UInt32 AttributeCount;
public IntPtr Attributes;
public IntPtr TargetAlias;
public IntPtr UserName;
}
}
#elif OS_OSX
public sealed class MacOSRunnerCredentialStore : RunnerService, IRunnerCredentialStore
{
private const string _osxRunnerCredStoreKeyChainName = "_GITHUB_ACTIONS_RUNNER_CREDSTORE_INTERNAL_";
// Keychain requires a password, but this is not intended to add security
private const string _osxRunnerCredStoreKeyChainPassword = "C46F23C36AF94B72B1EAEE32C68670A0";
private string _securityUtil;
private string _runnerCredStoreKeyChain;
public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);
_securityUtil = WhichUtil.Which("security", true, Trace);
_runnerCredStoreKeyChain = hostContext.GetConfigFile(WellKnownConfigFile.CredentialStore);
// Create the OSX keychain if it doesn't exist.
if (!File.Exists(_runnerCredStoreKeyChain))
{
List<string> securityOut = new List<string>();
List<string> securityError = new List<string>();
object outputLock = new object();
using (var p = HostContext.CreateService<IProcessInvoker>())
{
p.OutputDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stdout)
{
if (!string.IsNullOrEmpty(stdout.Data))
{
lock (outputLock)
{
securityOut.Add(stdout.Data);
}
}
};
p.ErrorDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stderr)
{
if (!string.IsNullOrEmpty(stderr.Data))
{
lock (outputLock)
{
securityError.Add(stderr.Data);
}
}
};
// make sure 'security' has access to the key so we won't get prompted at runtime.
int exitCode = p.ExecuteAsync(workingDirectory: HostContext.GetDirectory(WellKnownDirectory.Root),
fileName: _securityUtil,
arguments: $"create-keychain -p {_osxRunnerCredStoreKeyChainPassword} \"{_runnerCredStoreKeyChain}\"",
environment: null,
cancellationToken: CancellationToken.None).GetAwaiter().GetResult();
if (exitCode == 0)
{
Trace.Info($"Successfully create-keychain for {_runnerCredStoreKeyChain}");
}
else
{
if (securityOut.Count > 0)
{
Trace.Error(string.Join(Environment.NewLine, securityOut));
}
if (securityError.Count > 0)
{
Trace.Error(string.Join(Environment.NewLine, securityError));
}
throw new InvalidOperationException($"'security create-keychain' failed with exit code {exitCode}.");
}
}
}
else
{
// Try unlocking and locking the keychain to make sure it's still in a good state
UnlockKeyChain();
LockKeyChain();
}
}
public NetworkCredential Write(string target, string username, string password)
{
Trace.Entering();
ArgUtil.NotNullOrEmpty(target, nameof(target));
ArgUtil.NotNullOrEmpty(username, nameof(username));
ArgUtil.NotNullOrEmpty(password, nameof(password));
try
{
UnlockKeyChain();
// base64encode username + ':' + base64encode password
// OSX keychain requires you to provide -s target and -a username to retrieve a password
// So we treat both the username and password as the 'secret' stored in the keychain
string usernameBase64 = Convert.ToBase64String(Encoding.UTF8.GetBytes(username));
string passwordBase64 = Convert.ToBase64String(Encoding.UTF8.GetBytes(password));
string secretForKeyChain = $"{usernameBase64}:{passwordBase64}";
List<string> securityOut = new List<string>();
List<string> securityError = new List<string>();
object outputLock = new object();
using (var p = HostContext.CreateService<IProcessInvoker>())
{
p.OutputDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stdout)
{
if (!string.IsNullOrEmpty(stdout.Data))
{
lock (outputLock)
{
securityOut.Add(stdout.Data);
}
}
};
p.ErrorDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stderr)
{
if (!string.IsNullOrEmpty(stderr.Data))
{
lock (outputLock)
{
securityError.Add(stderr.Data);
}
}
};
// make sure 'security' has access to the key so we won't get prompted at runtime.
int exitCode = p.ExecuteAsync(workingDirectory: HostContext.GetDirectory(WellKnownDirectory.Root),
fileName: _securityUtil,
arguments: $"add-generic-password -s {target} -a GITHUBACTIONSRUNNER -w {secretForKeyChain} -T \"{_securityUtil}\" \"{_runnerCredStoreKeyChain}\"",
environment: null,
cancellationToken: CancellationToken.None).GetAwaiter().GetResult();
if (exitCode == 0)
{
Trace.Info($"Successfully add-generic-password for {target} (GITHUBACTIONSRUNNER)");
}
else
{
if (securityOut.Count > 0)
{
Trace.Error(string.Join(Environment.NewLine, securityOut));
}
if (securityError.Count > 0)
{
Trace.Error(string.Join(Environment.NewLine, securityError));
}
throw new InvalidOperationException($"'security add-generic-password' failed with exit code {exitCode}.");
}
}
return new NetworkCredential(username, password);
}
finally
{
LockKeyChain();
}
}
public NetworkCredential Read(string target)
{
Trace.Entering();
ArgUtil.NotNullOrEmpty(target, nameof(target));
try
{
UnlockKeyChain();
string username;
string password;
List<string> securityOut = new List<string>();
List<string> securityError = new List<string>();
object outputLock = new object();
using (var p = HostContext.CreateService<IProcessInvoker>())
{
p.OutputDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stdout)
{
if (!string.IsNullOrEmpty(stdout.Data))
{
lock (outputLock)
{
securityOut.Add(stdout.Data);
}
}
};
p.ErrorDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stderr)
{
if (!string.IsNullOrEmpty(stderr.Data))
{
lock (outputLock)
{
securityError.Add(stderr.Data);
}
}
};
int exitCode = p.ExecuteAsync(workingDirectory: HostContext.GetDirectory(WellKnownDirectory.Root),
fileName: _securityUtil,
arguments: $"find-generic-password -s {target} -a GITHUBACTIONSRUNNER -w -g \"{_runnerCredStoreKeyChain}\"",
environment: null,
cancellationToken: CancellationToken.None).GetAwaiter().GetResult();
if (exitCode == 0)
{
string keyChainSecret = securityOut.First();
string[] secrets = keyChainSecret.Split(':');
if (secrets.Length == 2 && !string.IsNullOrEmpty(secrets[0]) && !string.IsNullOrEmpty(secrets[1]))
{
Trace.Info($"Successfully find-generic-password for {target} (GITHUBACTIONSRUNNER)");
username = Encoding.UTF8.GetString(Convert.FromBase64String(secrets[0]));
password = Encoding.UTF8.GetString(Convert.FromBase64String(secrets[1]));
return new NetworkCredential(username, password);
}
else
{
throw new ArgumentOutOfRangeException(nameof(keyChainSecret));
}
}
else
{
if (securityOut.Count > 0)
{
Trace.Error(string.Join(Environment.NewLine, securityOut));
}
if (securityError.Count > 0)
{
Trace.Error(string.Join(Environment.NewLine, securityError));
}
throw new InvalidOperationException($"'security find-generic-password' failed with exit code {exitCode}.");
}
}
}
finally
{
LockKeyChain();
}
}
public void Delete(string target)
{
Trace.Entering();
ArgUtil.NotNullOrEmpty(target, nameof(target));
try
{
UnlockKeyChain();
List<string> securityOut = new List<string>();
List<string> securityError = new List<string>();
object outputLock = new object();
using (var p = HostContext.CreateService<IProcessInvoker>())
{
p.OutputDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stdout)
{
if (!string.IsNullOrEmpty(stdout.Data))
{
lock (outputLock)
{
securityOut.Add(stdout.Data);
}
}
};
p.ErrorDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stderr)
{
if (!string.IsNullOrEmpty(stderr.Data))
{
lock (outputLock)
{
securityError.Add(stderr.Data);
}
}
};
int exitCode = p.ExecuteAsync(workingDirectory: HostContext.GetDirectory(WellKnownDirectory.Root),
fileName: _securityUtil,
arguments: $"delete-generic-password -s {target} -a GITHUBACTIONSRUNNER \"{_runnerCredStoreKeyChain}\"",
environment: null,
cancellationToken: CancellationToken.None).GetAwaiter().GetResult();
if (exitCode == 0)
{
Trace.Info($"Successfully delete-generic-password for {target} (GITHUBACTIONSRUNNER)");
}
else
{
if (securityOut.Count > 0)
{
Trace.Error(string.Join(Environment.NewLine, securityOut));
}
if (securityError.Count > 0)
{
Trace.Error(string.Join(Environment.NewLine, securityError));
}
throw new InvalidOperationException($"'security delete-generic-password' failed with exit code {exitCode}.");
}
}
}
finally
{
LockKeyChain();
}
}
private void UnlockKeyChain()
{
Trace.Entering();
ArgUtil.NotNullOrEmpty(_securityUtil, nameof(_securityUtil));
ArgUtil.NotNullOrEmpty(_runnerCredStoreKeyChain, nameof(_runnerCredStoreKeyChain));
List<string> securityOut = new List<string>();
List<string> securityError = new List<string>();
object outputLock = new object();
using (var p = HostContext.CreateService<IProcessInvoker>())
{
p.OutputDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stdout)
{
if (!string.IsNullOrEmpty(stdout.Data))
{
lock (outputLock)
{
securityOut.Add(stdout.Data);
}
}
};
p.ErrorDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stderr)
{
if (!string.IsNullOrEmpty(stderr.Data))
{
lock (outputLock)
{
securityError.Add(stderr.Data);
}
}
};
// make sure 'security' has access to the key so we won't get prompted at runtime.
int exitCode = p.ExecuteAsync(workingDirectory: HostContext.GetDirectory(WellKnownDirectory.Root),
fileName: _securityUtil,
arguments: $"unlock-keychain -p {_osxRunnerCredStoreKeyChainPassword} \"{_runnerCredStoreKeyChain}\"",
environment: null,
cancellationToken: CancellationToken.None).GetAwaiter().GetResult();
if (exitCode == 0)
{
Trace.Info($"Successfully unlock-keychain for {_runnerCredStoreKeyChain}");
}
else
{
if (securityOut.Count > 0)
{
Trace.Error(string.Join(Environment.NewLine, securityOut));
}
if (securityError.Count > 0)
{
Trace.Error(string.Join(Environment.NewLine, securityError));
}
throw new InvalidOperationException($"'security unlock-keychain' failed with exit code {exitCode}.");
}
}
}
private void LockKeyChain()
{
Trace.Entering();
ArgUtil.NotNullOrEmpty(_securityUtil, nameof(_securityUtil));
ArgUtil.NotNullOrEmpty(_runnerCredStoreKeyChain, nameof(_runnerCredStoreKeyChain));
List<string> securityOut = new List<string>();
List<string> securityError = new List<string>();
object outputLock = new object();
using (var p = HostContext.CreateService<IProcessInvoker>())
{
p.OutputDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stdout)
{
if (!string.IsNullOrEmpty(stdout.Data))
{
lock (outputLock)
{
securityOut.Add(stdout.Data);
}
}
};
p.ErrorDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stderr)
{
if (!string.IsNullOrEmpty(stderr.Data))
{
lock (outputLock)
{
securityError.Add(stderr.Data);
}
}
};
// make sure 'security' has access to the key so we won't get prompted at runtime.
int exitCode = p.ExecuteAsync(workingDirectory: HostContext.GetDirectory(WellKnownDirectory.Root),
fileName: _securityUtil,
arguments: $"lock-keychain \"{_runnerCredStoreKeyChain}\"",
environment: null,
cancellationToken: CancellationToken.None).GetAwaiter().GetResult();
if (exitCode == 0)
{
Trace.Info($"Successfully lock-keychain for {_runnerCredStoreKeyChain}");
}
else
{
if (securityOut.Count > 0)
{
Trace.Error(string.Join(Environment.NewLine, securityOut));
}
if (securityError.Count > 0)
{
Trace.Error(string.Join(Environment.NewLine, securityError));
}
throw new InvalidOperationException($"'security lock-keychain' failed with exit code {exitCode}.");
}
}
}
}
#else
public sealed class LinuxRunnerCredentialStore : RunnerService, IRunnerCredentialStore
{
// 'ghrunner' repeated twice as a 128-bit IV
private readonly byte[] iv = new byte[] { 0x67, 0x68, 0x72, 0x75, 0x6e, 0x6e, 0x65, 0x72, 0x67, 0x68, 0x72, 0x75, 0x6e, 0x6e, 0x65, 0x72 };
// 256-bit key
private byte[] _symmetricKey;
private string _credStoreFile;
private Dictionary<string, Credential> _credStore;
public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);
_credStoreFile = hostContext.GetConfigFile(WellKnownConfigFile.CredentialStore);
if (File.Exists(_credStoreFile))
{
_credStore = IOUtil.LoadObject<Dictionary<string, Credential>>(_credStoreFile);
}
else
{
_credStore = new Dictionary<string, Credential>(StringComparer.OrdinalIgnoreCase);
}
string machineId;
if (File.Exists("/etc/machine-id"))
{
// try to use machine-id as the encryption key
// this helps avoid accidental information disclosure, but isn't intended for true security
machineId = File.ReadAllLines("/etc/machine-id").FirstOrDefault();
Trace.Info($"machine-id length {machineId?.Length ?? 0}.");
// machine-id doesn't exist or machine-id is not 256 bits
if (string.IsNullOrEmpty(machineId) || machineId.Length != 32)
{
Trace.Warning("Cannot get a valid machine id from '/etc/machine-id'.");
machineId = "43e7fe5da07740cf914b90f1dac51c2a";
}
}
else
{
// /etc/machine-id does not exist
Trace.Warning("/etc/machine-id doesn't exist.");
machineId = "43e7fe5da07740cf914b90f1dac51c2a";
}
List<byte> keyBuilder = new List<byte>();
foreach (var c in machineId)
{
keyBuilder.Add(Convert.ToByte(c));
}
_symmetricKey = keyBuilder.ToArray();
}
public NetworkCredential Write(string target, string username, string password)
{
Trace.Entering();
ArgUtil.NotNullOrEmpty(target, nameof(target));
ArgUtil.NotNullOrEmpty(username, nameof(username));
ArgUtil.NotNullOrEmpty(password, nameof(password));
Trace.Info($"Store credential for '{target}' to cred store.");
Credential cred = new Credential(username, Encrypt(password));
_credStore[target] = cred;
SyncCredentialStoreFile();
return new NetworkCredential(username, password);
}
public NetworkCredential Read(string target)
{
Trace.Entering();
ArgUtil.NotNullOrEmpty(target, nameof(target));
Trace.Info($"Read credential for '{target}' from cred store.");
if (_credStore.ContainsKey(target))
{
Credential cred = _credStore[target];
if (!string.IsNullOrEmpty(cred.UserName) && !string.IsNullOrEmpty(cred.Password))
{
Trace.Info($"Return credential for '{target}' from cred store.");
return new NetworkCredential(cred.UserName, Decrypt(cred.Password));
}
}
throw new KeyNotFoundException(target);
}
public void Delete(string target)
{
Trace.Entering();
ArgUtil.NotNullOrEmpty(target, nameof(target));
if (_credStore.ContainsKey(target))
{
Trace.Info($"Delete credential for '{target}' from cred store.");
_credStore.Remove(target);
SyncCredentialStoreFile();
}
else
{
throw new KeyNotFoundException(target);
}
}
private void SyncCredentialStoreFile()
{
Trace.Entering();
Trace.Info("Sync in-memory credential store with credential store file.");
// delete cred store file when all creds gone
if (_credStore.Count == 0)
{
IOUtil.DeleteFile(_credStoreFile);
return;
}
if (!File.Exists(_credStoreFile))
{
CreateCredentialStoreFile();
}
IOUtil.SaveObject(_credStore, _credStoreFile);
}
private string Encrypt(string secret)
{
using (Aes aes = Aes.Create())
{
aes.Key = _symmetricKey;
aes.IV = iv;
// Create an encryptor to perform the stream transform.
ICryptoTransform encryptor = aes.CreateEncryptor();
// Create the streams used for encryption.
using (MemoryStream msEncrypt = new MemoryStream())
{
using (CryptoStream csEncrypt = new CryptoStream(msEncrypt, encryptor, CryptoStreamMode.Write))
{
using (StreamWriter swEncrypt = new StreamWriter(csEncrypt))
{
swEncrypt.Write(secret);
}
return Convert.ToBase64String(msEncrypt.ToArray());
}
}
}
}
private string Decrypt(string encryptedText)
{
using (Aes aes = Aes.Create())
{
aes.Key = _symmetricKey;
aes.IV = iv;
// Create a decryptor to perform the stream transform.
ICryptoTransform decryptor = aes.CreateDecryptor();
// Create the streams used for decryption.
using (MemoryStream msDecrypt = new MemoryStream(Convert.FromBase64String(encryptedText)))
{
using (CryptoStream csDecrypt = new CryptoStream(msDecrypt, decryptor, CryptoStreamMode.Read))
{
using (StreamReader srDecrypt = new StreamReader(csDecrypt))
{
// Read the decrypted bytes from the decrypting stream and place them in a string.
return srDecrypt.ReadToEnd();
}
}
}
}
}
private void CreateCredentialStoreFile()
{
File.WriteAllText(_credStoreFile, "");
File.SetAttributes(_credStoreFile, File.GetAttributes(_credStoreFile) | FileAttributes.Hidden);
// Try to lock down the .credentials_store file to the owner/group
var chmodPath = WhichUtil.Which("chmod", trace: Trace);
if (!String.IsNullOrEmpty(chmodPath))
{
var arguments = $"600 {new FileInfo(_credStoreFile).FullName}";
using (var invoker = HostContext.CreateService<IProcessInvoker>())
{
var exitCode = invoker.ExecuteAsync(HostContext.GetDirectory(WellKnownDirectory.Root), chmodPath, arguments, null, default(CancellationToken)).GetAwaiter().GetResult();
if (exitCode == 0)
{
Trace.Info("Successfully set permissions for credentials store file {0}", _credStoreFile);
}
else
{
Trace.Warning("Unable to successfully set permissions for credentials store file {0}. Received exit code {1} from {2}", _credStoreFile, exitCode, chmodPath);
}
}
}
else
{
Trace.Warning("Unable to locate chmod to set permissions for credentials store file {0}.", _credStoreFile);
}
}
}
[DataContract]
internal class Credential
{
public Credential()
{ }
public Credential(string userName, string password)
{
UserName = userName;
Password = password;
}
[DataMember(IsRequired = true)]
public string UserName { get; set; }
[DataMember(IsRequired = true)]
public string Password { get; set; }
}
#endif
}

View File

@@ -41,7 +41,7 @@ namespace GitHub.Runner.Common
// job request
Task<TaskAgentJobRequest> GetAgentRequestAsync(int poolId, long requestId, CancellationToken cancellationToken);
Task<TaskAgentJobRequest> RenewAgentRequestAsync(int poolId, long requestId, Guid lockToken, string orchestrationId, CancellationToken cancellationToken);
Task<TaskAgentJobRequest> RenewAgentRequestAsync(int poolId, long requestId, Guid lockToken, CancellationToken cancellationToken);
Task<TaskAgentJobRequest> FinishAgentRequestAsync(int poolId, long requestId, Guid lockToken, DateTime finishTime, TaskResult result, CancellationToken cancellationToken);
// agent package
@@ -50,10 +50,6 @@ namespace GitHub.Runner.Common
// agent update
Task<TaskAgent> UpdateAgentUpdateStateAsync(int agentPoolId, int agentId, string currentState);
// runner authorization url
Task<string> GetRunnerAuthUrlAsync(int runnerPoolId, int runnerId);
Task ReportRunnerAuthUrlErrorAsync(int runnerPoolId, int runnerId, string error);
}
public sealed class RunnerServer : RunnerService, IRunnerServer
@@ -300,10 +296,10 @@ namespace GitHub.Runner.Common
// JobRequest
//-----------------------------------------------------------------
public Task<TaskAgentJobRequest> RenewAgentRequestAsync(int poolId, long requestId, Guid lockToken, string orchestrationId = null, CancellationToken cancellationToken = default(CancellationToken))
public Task<TaskAgentJobRequest> RenewAgentRequestAsync(int poolId, long requestId, Guid lockToken, CancellationToken cancellationToken = default(CancellationToken))
{
CheckConnection(RunnerConnectionType.JobRequest);
return _requestTaskAgentClient.RenewAgentRequestAsync(poolId, requestId, lockToken, orchestrationId: orchestrationId, cancellationToken: cancellationToken);
return _requestTaskAgentClient.RenewAgentRequestAsync(poolId, requestId, lockToken, cancellationToken: cancellationToken);
}
public Task<TaskAgentJobRequest> FinishAgentRequestAsync(int poolId, long requestId, Guid lockToken, DateTime finishTime, TaskResult result, CancellationToken cancellationToken = default(CancellationToken))
@@ -338,20 +334,5 @@ namespace GitHub.Runner.Common
CheckConnection(RunnerConnectionType.Generic);
return _genericTaskAgentClient.UpdateAgentUpdateStateAsync(agentPoolId, agentId, currentState);
}
//-----------------------------------------------------------------
// Runner Auth Url
//-----------------------------------------------------------------
public Task<string> GetRunnerAuthUrlAsync(int runnerPoolId, int runnerId)
{
CheckConnection(RunnerConnectionType.MessageQueue);
return _messageTaskAgentClient.GetAgentAuthUrlAsync(runnerPoolId, runnerId);
}
public Task ReportRunnerAuthUrlErrorAsync(int runnerPoolId, int runnerId, string error)
{
CheckConnection(RunnerConnectionType.MessageQueue);
return _messageTaskAgentClient.ReportAgentAuthUrlMigrationErrorAsync(runnerPoolId, runnerId, error);
}
}
}
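
A recurring shape in RunnerServer above is the CheckConnection guard: each call is routed through a connection-type check before the matching typed client is used. A self-contained sketch of that guard (hypothetical ConnectionType names and a stub method, not the real VssConnection clients):

```csharp
using System;
using System.Collections.Generic;
using System.Threading.Tasks;

enum ConnectionType { Generic, JobRequest, MessageQueue }   // hypothetical names for illustration

sealed class ServerFacade
{
    private readonly HashSet<ConnectionType> _connected = new HashSet<ConnectionType>();

    // Establish a connection for one purpose; the real runner keeps a separate client per type.
    public Task ConnectAsync(ConnectionType type)
    {
        _connected.Add(type);
        return Task.CompletedTask;
    }

    private void CheckConnection(ConnectionType type)
    {
        if (!_connected.Contains(type))
        {
            throw new InvalidOperationException($"Not connected for {type}. Call ConnectAsync first.");
        }
    }

    public Task<string> RenewRequestAsync(int poolId, long requestId)
    {
        CheckConnection(ConnectionType.JobRequest);   // guard before touching the typed client
        return Task.FromResult($"renewed request {requestId} in pool {poolId}");
    }
}

static class Demo
{
    static async Task Main()
    {
        var server = new ServerFacade();
        await server.ConnectAsync(ConnectionType.JobRequest);
        Console.WriteLine(await server.RenewRequestAsync(poolId: 1, requestId: 42));
    }
}
```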

View File

@@ -28,10 +28,14 @@ namespace GitHub.Runner.Listener
private readonly string[] validFlags =
{
Constants.Runner.CommandLine.Flags.Commit,
#if OS_WINDOWS
Constants.Runner.CommandLine.Flags.GitUseSChannel,
#endif
Constants.Runner.CommandLine.Flags.Help,
Constants.Runner.CommandLine.Flags.Replace,
Constants.Runner.CommandLine.Flags.RunAsService,
Constants.Runner.CommandLine.Flags.Once,
Constants.Runner.CommandLine.Flags.SslSkipCertValidation,
Constants.Runner.CommandLine.Flags.Unattended,
Constants.Runner.CommandLine.Flags.Version
};
@@ -39,10 +43,15 @@ namespace GitHub.Runner.Listener
private readonly string[] validArgs =
{
Constants.Runner.CommandLine.Args.Auth,
Constants.Runner.CommandLine.Args.Labels,
Constants.Runner.CommandLine.Args.MonitorSocketAddress,
Constants.Runner.CommandLine.Args.Name,
Constants.Runner.CommandLine.Args.Password,
Constants.Runner.CommandLine.Args.Pool,
Constants.Runner.CommandLine.Args.SslCACert,
Constants.Runner.CommandLine.Args.SslClientCert,
Constants.Runner.CommandLine.Args.SslClientCertKey,
Constants.Runner.CommandLine.Args.SslClientCertArchive,
Constants.Runner.CommandLine.Args.SslClientCertPassword,
Constants.Runner.CommandLine.Args.StartupType,
Constants.Runner.CommandLine.Args.Token,
Constants.Runner.CommandLine.Args.Url,
@@ -64,6 +73,9 @@ namespace GitHub.Runner.Listener
public bool Unattended => TestFlag(Constants.Runner.CommandLine.Flags.Unattended);
public bool Version => TestFlag(Constants.Runner.CommandLine.Flags.Version);
#if OS_WINDOWS
public bool GitUseSChannel => TestFlag(Constants.Runner.CommandLine.Flags.GitUseSChannel);
#endif
public bool RunOnce => TestFlag(Constants.Runner.CommandLine.Flags.Once);
// Constructor.
@@ -148,6 +160,13 @@ namespace GitHub.Runner.Listener
defaultValue: false);
}
public bool GetAutoLaunchBrowser()
{
return TestFlagOrPrompt(
name: Constants.Runner.CommandLine.Flags.LaunchBrowser,
description: "Would you like to launch your browser for AAD Device Code Flow? (Y/N)",
defaultValue: true);
}
//
// Args.
//
@@ -160,6 +179,24 @@ namespace GitHub.Runner.Listener
validator: Validators.AuthSchemeValidator);
}
public string GetPassword()
{
return GetArgOrPrompt(
name: Constants.Runner.CommandLine.Args.Password,
description: "What is your GitHub password?",
defaultValue: string.Empty,
validator: Validators.NonEmptyValidator);
}
public string GetPool()
{
return GetArgOrPrompt(
name: Constants.Runner.CommandLine.Args.Pool,
description: "Enter the name of your runner pool:",
defaultValue: "default",
validator: Validators.NonEmptyValidator);
}
public string GetRunnerName()
{
return GetArgOrPrompt(
@@ -173,7 +210,7 @@ namespace GitHub.Runner.Listener
{
return GetArgOrPrompt(
name: Constants.Runner.CommandLine.Args.Token,
description: "What is your pool admin oauth access token?",
description: "Enter your personal access token:",
defaultValue: string.Empty,
validator: Validators.NonEmptyValidator);
}
@@ -182,16 +219,7 @@ namespace GitHub.Runner.Listener
{
return GetArgOrPrompt(
name: Constants.Runner.CommandLine.Args.Token,
description: "What is your runner register token?",
defaultValue: string.Empty,
validator: Validators.NonEmptyValidator);
}
public string GetRunnerDeletionToken()
{
return GetArgOrPrompt(
name: Constants.Runner.CommandLine.Args.Token,
description: "Enter runner remove token:",
description: "Enter runner register token:",
defaultValue: string.Empty,
validator: Validators.NonEmptyValidator);
}
@@ -212,6 +240,15 @@ namespace GitHub.Runner.Listener
validator: Validators.ServerUrlValidator);
}
public string GetUserName()
{
return GetArgOrPrompt(
name: Constants.Runner.CommandLine.Args.UserName,
description: "What is your GitHub username?",
defaultValue: string.Empty,
validator: Validators.NonEmptyValidator);
}
public string GetWindowsLogonAccount(string defaultValue, string descriptionMsg)
{
return GetArgOrPrompt(
@@ -250,22 +287,34 @@ namespace GitHub.Runner.Listener
return GetArg(Constants.Runner.CommandLine.Args.StartupType);
}
public ISet<string> GetLabels()
public bool GetSkipCertificateValidation()
{
var labelSet = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
string labels = GetArgOrPrompt(
name: Constants.Runner.CommandLine.Args.Labels,
description: $"This runner will have the following labels: 'self-hosted', '{VarUtil.OS}', '{VarUtil.OSArchitecture}' \nEnter any additional labels (ex. label-1,label-2):",
defaultValue: string.Empty,
validator: Validators.LabelsValidator,
isOptional: true);
return TestFlag(Constants.Runner.CommandLine.Flags.SslSkipCertValidation);
}
if (!string.IsNullOrEmpty(labels))
{
labelSet = labels.Split(',').Where(x => !string.IsNullOrEmpty(x)).ToHashSet<string>(StringComparer.OrdinalIgnoreCase);
}
public string GetCACertificate()
{
return GetArg(Constants.Runner.CommandLine.Args.SslCACert);
}
return labelSet;
public string GetClientCertificate()
{
return GetArg(Constants.Runner.CommandLine.Args.SslClientCert);
}
public string GetClientCertificatePrivateKey()
{
return GetArg(Constants.Runner.CommandLine.Args.SslClientCertKey);
}
public string GetClientCertificateArchrive()
{
return GetArg(Constants.Runner.CommandLine.Args.SslClientCertArchive);
}
public string GetClientCertificatePassword()
{
return GetArg(Constants.Runner.CommandLine.Args.SslClientCertPassword);
}
//
@@ -299,8 +348,7 @@ namespace GitHub.Runner.Listener
string name,
string description,
string defaultValue,
Func<string, bool> validator,
bool isOptional = false)
Func<string, bool> validator)
{
// Check for the arg in the command line parser.
ArgUtil.NotNull(validator, nameof(validator));
@@ -311,7 +359,7 @@ namespace GitHub.Runner.Listener
if (!string.IsNullOrEmpty(result))
{
// After read the arg from input commandline args, remove it from Arg dictionary,
// This will help if bad arg value passed through CommandLine arg, when ConfigurationManager ask CommandSetting the second time,
// This will help if bad arg value passed through CommandLine arg, when ConfigurationManager ask CommandSetting the second time,
// It will prompt for input instead of continue use the bad input.
_trace.Info($"Remove {name} from Arg dictionary.");
RemoveArg(name);
@@ -331,8 +379,7 @@ namespace GitHub.Runner.Listener
secret: Constants.Runner.CommandLine.Args.Secrets.Any(x => string.Equals(x, name, StringComparison.OrdinalIgnoreCase)),
defaultValue: defaultValue,
validator: validator,
unattended: Unattended,
isOptional: isOptional);
unattended: Unattended);
}
private string GetEnvArg(string name)
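
The GetLabels change above parses the --labels value into a case-insensitive set, dropping empty entries, on top of the implicit 'self-hosted', OS, and architecture labels. A tiny standalone sketch of just the parsing step, with a hard-coded sample input:

```csharp
using System;
using System.Collections.Generic;
using System.Linq;

static class LabelParsing
{
    static void Main()
    {
        // Sample input; the real value comes from the --labels argument or an interactive prompt.
        string labels = "gpu,linux-x64,,GPU";

        var labelSet = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
        if (!string.IsNullOrEmpty(labels))
        {
            // Split on commas, drop empties, and de-duplicate case-insensitively ("gpu" == "GPU").
            labelSet = labels.Split(',')
                             .Where(x => !string.IsNullOrEmpty(x))
                             .ToHashSet(StringComparer.OrdinalIgnoreCase);
        }

        Console.WriteLine(string.Join(", ", labelSet));   // gpu, linux-x64
    }
}
```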

View File

@@ -1,18 +1,19 @@
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using GitHub.Services.Common;
using GitHub.Services.OAuth;
using GitHub.Services.WebApi;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Runtime.InteropServices;
using System.Security.Cryptography;
using System.Threading.Tasks;
using System.Runtime.InteropServices;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using System.Net.Http;
using System.Net.Http.Headers;
namespace GitHub.Runner.Listener.Configuration
{
@@ -84,19 +85,67 @@ namespace GitHub.Runner.Listener.Configuration
throw new InvalidOperationException("Cannot configure the runner because it is already configured. To reconfigure the runner, run 'config.cmd remove' or './config.sh remove' first.");
}
// Populate cert setting from commandline args
var runnerCertManager = HostContext.GetService<IRunnerCertificateManager>();
bool saveCertSetting = false;
bool skipCertValidation = command.GetSkipCertificateValidation();
string caCert = command.GetCACertificate();
string clientCert = command.GetClientCertificate();
string clientCertKey = command.GetClientCertificatePrivateKey();
string clientCertArchive = command.GetClientCertificateArchrive();
string clientCertPassword = command.GetClientCertificatePassword();
// We require all Certificate files are under agent root.
// So we can set ACL correctly when configure as service
if (!string.IsNullOrEmpty(caCert))
{
caCert = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), caCert);
ArgUtil.File(caCert, nameof(caCert));
}
if (!string.IsNullOrEmpty(clientCert) &&
!string.IsNullOrEmpty(clientCertKey) &&
!string.IsNullOrEmpty(clientCertArchive))
{
// Ensure all client cert pieces are there.
clientCert = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), clientCert);
clientCertKey = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), clientCertKey);
clientCertArchive = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), clientCertArchive);
ArgUtil.File(clientCert, nameof(clientCert));
ArgUtil.File(clientCertKey, nameof(clientCertKey));
ArgUtil.File(clientCertArchive, nameof(clientCertArchive));
}
else if (!string.IsNullOrEmpty(clientCert) ||
!string.IsNullOrEmpty(clientCertKey) ||
!string.IsNullOrEmpty(clientCertArchive))
{
// Print out which args are missing.
ArgUtil.NotNullOrEmpty(Constants.Runner.CommandLine.Args.SslClientCert, Constants.Runner.CommandLine.Args.SslClientCert);
ArgUtil.NotNullOrEmpty(Constants.Runner.CommandLine.Args.SslClientCertKey, Constants.Runner.CommandLine.Args.SslClientCertKey);
ArgUtil.NotNullOrEmpty(Constants.Runner.CommandLine.Args.SslClientCertArchive, Constants.Runner.CommandLine.Args.SslClientCertArchive);
}
if (skipCertValidation || !string.IsNullOrEmpty(caCert) || !string.IsNullOrEmpty(clientCert))
{
Trace.Info("Reset runner cert setting base on commandline args.");
(runnerCertManager as RunnerCertificateManager).SetupCertificate(skipCertValidation, caCert, clientCert, clientCertKey, clientCertArchive, clientCertPassword);
saveCertSetting = true;
}
RunnerSettings runnerSettings = new RunnerSettings();
bool isHostedServer = false;
// Loop getting url and creds until you can connect
ICredentialProvider credProvider = null;
VssCredentials creds = null;
_term.WriteSection("Authentication");
while (true)
{
// When testing against a dev deployment of Actions Service, set this environment variable
var useDevActionsServiceUrl = Environment.GetEnvironmentVariable("USE_DEV_ACTIONS_SERVICE_URL");
// Get the URL
var inputUrl = command.GetUrl();
if (inputUrl.Contains("codedev.ms", StringComparison.OrdinalIgnoreCase)
|| useDevActionsServiceUrl != null)
if (!inputUrl.Contains("github.com", StringComparison.OrdinalIgnoreCase) &&
!inputUrl.Contains("github.localhost", StringComparison.OrdinalIgnoreCase))
{
runnerSettings.ServerUrl = inputUrl;
// Get the credentials
@@ -108,7 +157,7 @@ namespace GitHub.Runner.Listener.Configuration
{
runnerSettings.GitHubUrl = inputUrl;
var githubToken = command.GetRunnerRegisterToken();
GitHubAuthResult authResult = await GetTenantCredential(inputUrl, githubToken, Constants.RunnerEvent.Register);
GitHubAuthResult authResult = await GetTenantCredential(inputUrl, githubToken);
runnerSettings.ServerUrl = authResult.TenantUrl;
creds = authResult.ToVssCredentials();
Trace.Info("cred retrieved via GitHub auth");
@@ -117,7 +166,7 @@ namespace GitHub.Runner.Listener.Configuration
try
{
// Determine the service deployment type based on connection data. (Hosted/OnPremises)
runnerSettings.IsHostedServer = runnerSettings.GitHubUrl == null || IsHostedServer(new UriBuilder(runnerSettings.GitHubUrl));
isHostedServer = await IsHostedServer(runnerSettings.ServerUrl, creds);
// Validate can connect.
await _runnerServer.ConnectAsync(new Uri(runnerSettings.ServerUrl), creds);
@@ -168,9 +217,6 @@ namespace GitHub.Runner.Listener.Configuration
_term.WriteLine();
var userLabels = command.GetLabels();
_term.WriteLine();
var agents = await _runnerServer.GetAgentsAsync(runnerSettings.PoolId, runnerSettings.AgentName);
Trace.Verbose("Returns {0} agents", agents.Count);
agent = agents.FirstOrDefault();
@@ -180,7 +226,7 @@ namespace GitHub.Runner.Listener.Configuration
if (command.GetReplace())
{
// Update existing agent with new PublicKey, agent version.
agent = UpdateExistingAgent(agent, publicKey, userLabels);
agent = UpdateExistingAgent(agent, publicKey);
try
{
@@ -202,8 +248,8 @@ namespace GitHub.Runner.Listener.Configuration
}
else
{
// Create a new agent.
agent = CreateNewAgent(runnerSettings.AgentName, publicKey, userLabels);
// Create a new agent.
agent = CreateNewAgent(runnerSettings.AgentName, publicKey);
try
{
@@ -251,6 +297,14 @@ namespace GitHub.Runner.Listener.Configuration
{
UriBuilder configServerUrl = new UriBuilder(runnerSettings.ServerUrl);
UriBuilder oauthEndpointUrlBuilder = new UriBuilder(agent.Authorization.AuthorizationUrl);
if (!isHostedServer && Uri.Compare(configServerUrl.Uri, oauthEndpointUrlBuilder.Uri, UriComponents.SchemeAndServer, UriFormat.Unescaped, StringComparison.OrdinalIgnoreCase) != 0)
{
oauthEndpointUrlBuilder.Scheme = configServerUrl.Scheme;
oauthEndpointUrlBuilder.Host = configServerUrl.Host;
oauthEndpointUrlBuilder.Port = configServerUrl.Port;
Trace.Info($"Set oauth endpoint url's scheme://host:port component to match runner configure url's scheme://host:port: '{oauthEndpointUrlBuilder.Uri.AbsoluteUri}'.");
}
var credentialData = new CredentialData
{
Scheme = Constants.Configuration.OAuth,
@@ -271,22 +325,19 @@ namespace GitHub.Runner.Listener.Configuration
throw new NotSupportedException("Message queue listen OAuth token.");
}
// Testing agent connection, detect any potential connection issue, like local clock skew that cause OAuth token expired.
// Testing agent connection, detect any protential connection issue, like local clock skew that cause OAuth token expired.
var credMgr = HostContext.GetService<ICredentialManager>();
VssCredentials credential = credMgr.LoadCredentials();
try
{
await _runnerServer.ConnectAsync(new Uri(runnerSettings.ServerUrl), credential);
// ConnectAsync() hits _apis/connectionData which is an anonymous endpoint
// Need to hit an authenticate endpoint to trigger OAuth token exchange.
await _runnerServer.GetAgentPoolsAsync();
_term.WriteSuccessMessage("Runner connection is good");
}
catch (VssOAuthTokenRequestException ex) when (ex.Message.Contains("Current server time is"))
{
// there are two exception messages server send that indicate clock skew.
// 1. The bearer token expired on {jwt.ValidTo}. Current server time is {DateTime.UtcNow}.
// 2. The bearer token is not valid until {jwt.ValidFrom}. Current server time is {DateTime.UtcNow}.
// 2. The bearer token is not valid until {jwt.ValidFrom}. Current server time is {DateTime.UtcNow}.
Trace.Error("Catch exception during test agent connection.");
Trace.Error(ex);
throw new Exception("The local machine's clock may be out of sync with the server time by more than five minutes. Please sync your clock with your domain or internet time and try again.");
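
The catch block above turns the service's "Current server time is ..." OAuth error into a clock-skew hint. As a rough way to eyeball local clock drift, here is a heuristic sketch that compares the local UTC clock against an arbitrary HTTPS endpoint's Date response header; it is not the runner's own mechanism:

```csharp
using System;
using System.Net.Http;
using System.Threading.Tasks;

static class ClockSkewCheck
{
    static async Task Main()
    {
        using var http = new HttpClient();
        // Any reliable HTTPS endpoint works; the Date header is stamped by the remote server.
        using var response = await http.GetAsync("https://api.github.com",
                                                 HttpCompletionOption.ResponseHeadersRead);

        if (response.Headers.Date is DateTimeOffset serverTime)
        {
            TimeSpan skew = DateTimeOffset.UtcNow - serverTime.ToUniversalTime();
            Console.WriteLine($"Approximate local clock skew: {skew.TotalSeconds:F1}s");

            // OAuth token validation commonly tolerates roughly five minutes of skew.
            if (Math.Abs(skew.TotalMinutes) > 5)
            {
                Console.WriteLine("Clock appears out of sync; token validation is likely to fail.");
            }
        }
    }
}
```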
@@ -301,10 +352,31 @@ namespace GitHub.Runner.Listener.Configuration
_store.SaveSettings(runnerSettings);
if (saveCertSetting)
{
Trace.Info("Save agent cert setting to disk.");
(runnerCertManager as RunnerCertificateManager).SaveCertificateSetting();
}
_term.WriteLine();
_term.WriteSuccessMessage("Settings Saved.");
_term.WriteLine();
bool saveRuntimeOptions = false;
var runtimeOptions = new RunnerRuntimeOptions();
#if OS_WINDOWS
if (command.GitUseSChannel)
{
saveRuntimeOptions = true;
runtimeOptions.GitUseSecureChannel = true;
}
#endif
if (saveRuntimeOptions)
{
Trace.Info("Save agent runtime options to disk.");
_store.SaveRunnerRuntimeOptions(runtimeOptions);
}
#if OS_WINDOWS
// config windows service
bool runAsService = command.GetRunAsService();
@@ -369,13 +441,14 @@ namespace GitHub.Runner.Listener.Configuration
}
else
{
var githubToken = command.GetRunnerDeletionToken();
GitHubAuthResult authResult = await GetTenantCredential(settings.GitHubUrl, githubToken, Constants.RunnerEvent.Remove);
var githubToken = command.GetToken();
GitHubAuthResult authResult = await GetTenantCredential(settings.GitHubUrl, githubToken);
creds = authResult.ToVssCredentials();
Trace.Info("cred retrieved via GitHub auth");
}
// Determine the service deployment type based on connection data. (Hosted/OnPremises)
bool isHostedServer = await IsHostedServer(settings.ServerUrl, creds);
await _runnerServer.ConnectAsync(new Uri(settings.ServerUrl), creds);
var agents = await _runnerServer.GetAgentsAsync(settings.PoolId, settings.AgentName);
@@ -398,7 +471,7 @@ namespace GitHub.Runner.Listener.Configuration
_term.WriteLine("Cannot connect to server, because config files are missing. Skipping removing runner from the server.");
}
//delete credential config files
//delete credential config files
currentAction = "Removing .credentials";
if (hasCredentials)
{
@@ -412,10 +485,17 @@ namespace GitHub.Runner.Listener.Configuration
_term.WriteLine("Does not exist. Skipping " + currentAction);
}
//delete settings config file
//delete settings config file
currentAction = "Removing .runner";
if (isConfigured)
{
// delete agent cert setting
(HostContext.GetService<IRunnerCertificateManager>() as RunnerCertificateManager).DeleteCertificateSetting();
// delete agent runtime option
_store.DeleteRunnerRuntimeOptions();
_store.DeleteSettings();
_term.WriteSuccessMessage("Removed .runner");
}
@@ -453,7 +533,7 @@ namespace GitHub.Runner.Listener.Configuration
}
private TaskAgent UpdateExistingAgent(TaskAgent agent, RSAParameters publicKey, ISet<string> userLabels)
private TaskAgent UpdateExistingAgent(TaskAgent agent, RSAParameters publicKey)
{
ArgUtil.NotNull(agent, nameof(agent));
agent.Authorization = new TaskAgentAuthorization
@@ -461,25 +541,18 @@ namespace GitHub.Runner.Listener.Configuration
PublicKey = new TaskAgentPublicKey(publicKey.Exponent, publicKey.Modulus),
};
// update should replace the existing labels
// update - update instead of delete so we don't lose labels etc...
agent.Version = BuildConstants.RunnerPackage.Version;
agent.OSDescription = RuntimeInformation.OSDescription;
agent.Labels.Clear();
agent.Labels.Add(new AgentLabel("self-hosted", LabelType.System));
agent.Labels.Add(new AgentLabel(VarUtil.OS, LabelType.System));
agent.Labels.Add(new AgentLabel(VarUtil.OSArchitecture, LabelType.System));
agent.Labels.Add("self-hosted");
agent.Labels.Add(VarUtil.OS);
agent.Labels.Add(VarUtil.OSArchitecture);
foreach (var userLabel in userLabels)
{
agent.Labels.Add(new AgentLabel(userLabel, LabelType.User));
}
return agent;
}
private TaskAgent CreateNewAgent(string agentName, RSAParameters publicKey, ISet<string> userLabels)
private TaskAgent CreateNewAgent(string agentName, RSAParameters publicKey)
{
TaskAgent agent = new TaskAgent(agentName)
{
@@ -492,51 +565,45 @@ namespace GitHub.Runner.Listener.Configuration
OSDescription = RuntimeInformation.OSDescription,
};
agent.Labels.Add(new AgentLabel("self-hosted", LabelType.System));
agent.Labels.Add(new AgentLabel(VarUtil.OS, LabelType.System));
agent.Labels.Add(new AgentLabel(VarUtil.OSArchitecture, LabelType.System));
foreach (var userLabel in userLabels)
{
agent.Labels.Add(new AgentLabel(userLabel, LabelType.User));
}
agent.Labels.Add("self-hosted");
agent.Labels.Add(VarUtil.OS);
agent.Labels.Add(VarUtil.OSArchitecture);
return agent;
}
private bool IsHostedServer(UriBuilder gitHubUrl)
private async Task<bool> IsHostedServer(string serverUrl, VssCredentials credentials)
{
return string.Equals(gitHubUrl.Host, "github.com", StringComparison.OrdinalIgnoreCase) ||
string.Equals(gitHubUrl.Host, "www.github.com", StringComparison.OrdinalIgnoreCase) ||
string.Equals(gitHubUrl.Host, "github.localhost", StringComparison.OrdinalIgnoreCase);
// Determine the service deployment type based on connection data. (Hosted/OnPremises)
var locationServer = HostContext.GetService<ILocationServer>();
VssConnection connection = VssUtil.CreateConnection(new Uri(serverUrl), credentials);
await locationServer.ConnectAsync(connection);
try
{
var connectionData = await locationServer.GetConnectionDataAsync();
Trace.Info($"Server deployment type: {connectionData.DeploymentType}");
return connectionData.DeploymentType.HasFlag(DeploymentFlags.Hosted);
}
catch (Exception ex)
{
// Since the DeploymentType is an Enum, a deserialization exception means a new Enum member has been added.
// It's more likely to be Hosted since OnPremises is always behind and customers can update their agent if they are on-prem

Trace.Error(ex);
return true;
}
}
private async Task<GitHubAuthResult> GetTenantCredential(string githubUrl, string githubToken, string runnerEvent)
private async Task<GitHubAuthResult> GetTenantCredential(string githubUrl, string githubToken)
{
var githubApiUrl = "";
var gitHubUrlBuilder = new UriBuilder(githubUrl);
if (IsHostedServer(gitHubUrlBuilder))
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/actions/runner-registration";
}
else
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/actions/runner-registration";
}
var gitHubUrl = new UriBuilder(githubUrl);
var githubApiUrl = $"https://api.{gitHubUrl.Host}/repos/{gitHubUrl.Path.Trim('/')}/actions-runners/registration";
using (var httpClientHandler = HostContext.CreateHttpClientHandler())
using (var httpClient = new HttpClient(httpClientHandler))
{
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("RemoteAuth", githubToken);
httpClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents);
var bodyObject = new Dictionary<string, string>()
{
{"url", githubUrl},
{"runner_event", runnerEvent}
};
var response = await httpClient.PostAsync(githubApiUrl, new StringContent(StringUtil.ConvertToJson(bodyObject), null, "application/json"));
httpClient.DefaultRequestHeaders.UserAgent.Add(HostContext.UserAgent);
httpClient.DefaultRequestHeaders.Accept.Add(new MediaTypeWithQualityHeaderValue("application/vnd.github.shuri-preview+json"));
var response = await httpClient.PostAsync(githubApiUrl, new StringContent("", null, "application/json"));
if (response.IsSuccessStatusCode)
{
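
GetTenantCredential above exchanges a registration token for tenant credentials by POSTing to the runner-registration endpoint with a RemoteAuth authorization header and, in the removed lines, a JSON body carrying the configuration URL and runner event. A stripped-down sketch of that request (placeholder token and repository URL, a made-up User-Agent string, and no error handling):

```csharp
using System;
using System.Collections.Generic;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Text;
using System.Text.Json;
using System.Threading.Tasks;

static class RegistrationExchange
{
    static async Task Main()
    {
        // Placeholders: a real registration token comes from the repository or organization settings.
        string githubUrl = "https://github.com/octo-org/octo-repo";
        string githubToken = "<registration-token>";
        string apiUrl = "https://api.github.com/actions/runner-registration";   // hosted-server form shown in the diff

        using var http = new HttpClient();
        http.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("RemoteAuth", githubToken);
        http.DefaultRequestHeaders.UserAgent.ParseAdd("runner-registration-sketch");   // hypothetical UA

        var body = new Dictionary<string, string>
        {
            ["url"] = githubUrl,
            ["runner_event"] = "register"
        };

        using var content = new StringContent(JsonSerializer.Serialize(body), Encoding.UTF8, "application/json");
        using var response = await http.PostAsync(apiUrl, content);

        Console.WriteLine($"HTTP {(int)response.StatusCode}");
        Console.WriteLine(await response.Content.ReadAsStringAsync());
    }
}
```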

View File

@@ -13,7 +13,7 @@ namespace GitHub.Runner.Listener.Configuration
public interface ICredentialManager : IRunnerService
{
ICredentialProvider GetCredentialProvider(string credType);
VssCredentials LoadCredentials(bool preferMigrated = true);
VssCredentials LoadCredentials();
}
public class CredentialManager : RunnerService, ICredentialManager
@@ -40,7 +40,7 @@ namespace GitHub.Runner.Listener.Configuration
return creds;
}
public VssCredentials LoadCredentials(bool preferMigrated = true)
public VssCredentials LoadCredentials()
{
IConfigurationStore store = HostContext.GetService<IConfigurationStore>();
@@ -50,16 +50,6 @@ namespace GitHub.Runner.Listener.Configuration
}
CredentialData credData = store.GetCredentials();
if (preferMigrated)
{
var migratedCred = store.GetMigratedCredentials();
if (migratedCred != null)
{
credData = migratedCred;
}
}
ICredentialProvider credProv = GetCredentialProvider(credData.Scheme);
credProv.CredentialData = credData;
@@ -88,7 +78,7 @@ namespace GitHub.Runner.Listener.Configuration
if (string.Equals(TokenSchema, "OAuthAccessToken", StringComparison.OrdinalIgnoreCase))
{
return new VssCredentials(new VssOAuthAccessTokenCredential(Token), CredentialPromptType.DoNotPrompt);
return new VssCredentials(null, new VssOAuthAccessTokenCredential(Token), CredentialPromptType.DoNotPrompt);
}
else
{
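
The removed preferMigrated logic in LoadCredentials uses a migrated credential file when one exists and otherwise falls back to the original .credentials data. A toy sketch of that fallback, with hypothetical in-memory stand-ins for CredentialData and IConfigurationStore:

```csharp
using System;

// Hypothetical stand-ins for the runner's CredentialData / IConfigurationStore types.
sealed class CredentialData
{
    public string Scheme { get; set; }
}

sealed class ConfigStore
{
    public CredentialData GetCredentials() => new CredentialData { Scheme = "OAuth" };
    public CredentialData GetMigratedCredentials() => null;   // pretend no migrated file exists
}

static class CredentialLoading
{
    static CredentialData Load(ConfigStore store, bool preferMigrated = true)
    {
        CredentialData credData = store.GetCredentials();
        if (preferMigrated)
        {
            // Use the migrated credential only when it is actually present.
            var migrated = store.GetMigratedCredentials();
            if (migrated != null)
            {
                credData = migrated;
            }
        }
        return credData;
    }

    static void Main() => Console.WriteLine(Load(new ConfigStore()).Scheme);   // OAuth
}
```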

View File

@@ -48,7 +48,7 @@ namespace GitHub.Runner.Listener.Configuration
ArgUtil.NotNullOrEmpty(token, nameof(token));
trace.Info("token retrieved: {0} chars", token.Length);
VssCredentials creds = new VssCredentials(new VssOAuthAccessTokenCredential(token), CredentialPromptType.DoNotPrompt);
VssCredentials creds = new VssCredentials(null, new VssOAuthAccessTokenCredential(token), CredentialPromptType.DoNotPrompt);
trace.Info("cred created");
return creds;

View File

@@ -6,7 +6,7 @@ using GitHub.Runner.Common;
namespace GitHub.Runner.Listener.Configuration
{
/// <summary>
/// Manages an RSA key for the runner using the most appropriate store for the target platform.
/// Manages an RSA key for the agent using the most appropriate store for the target platform.
/// </summary>
#if OS_WINDOWS
[ServiceLocator(Default = typeof(RSAEncryptedFileKeyManager))]
@@ -16,10 +16,10 @@ namespace GitHub.Runner.Listener.Configuration
public interface IRSAKeyManager : IRunnerService
{
/// <summary>
/// Creates a new <c>RSACryptoServiceProvider</c> instance for the current runner. If a key file is found then the current
/// Creates a new <c>RSACryptoServiceProvider</c> instance for the current agent. If a key file is found then the current
/// key is returned to the caller.
/// </summary>
/// <returns>An <c>RSACryptoServiceProvider</c> instance representing the key for the runner</returns>
/// <returns>An <c>RSACryptoServiceProvider</c> instance representing the key for the agent</returns>
RSACryptoServiceProvider CreateKey();
/// <summary>
@@ -30,7 +30,7 @@ namespace GitHub.Runner.Listener.Configuration
/// <summary>
/// Gets the <c>RSACryptoServiceProvider</c> instance currently stored by the key manager.
/// </summary>
/// <returns>An <c>RSACryptoServiceProvider</c> instance representing the key for the runner</returns>
/// <returns>An <c>RSACryptoServiceProvider</c> instance representing the key for the agent</returns>
/// <exception cref="CryptographicException">No key exists in the store</exception>
RSACryptoServiceProvider GetKey();
}
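
IRSAKeyManager hands back one RSA key per runner: create it on first use, then return the same key from the store on later calls. A simplified file-backed sketch using the standard RSA APIs (key material written unprotected to a temp file, unlike the Windows RSAEncryptedFileKeyManager referenced above; assumes .NET Core 3.0 or later):

```csharp
using System;
using System.IO;
using System.Security.Cryptography;

static class SimpleRsaKeyStore
{
    // Creates the key file on first use; later calls reload the same key material.
    static RSA GetOrCreateKey(string keyFile)
    {
        var rsa = RSA.Create(2048);
        if (File.Exists(keyFile))
        {
            rsa.ImportRSAPrivateKey(File.ReadAllBytes(keyFile), out _);
        }
        else
        {
            File.WriteAllBytes(keyFile, rsa.ExportRSAPrivateKey());
        }
        return rsa;
    }

    static void Main()
    {
        string keyFile = Path.Combine(Path.GetTempPath(), "runner-sketch.key");   // placeholder path
        using RSA key = GetOrCreateKey(keyFile);
        Console.WriteLine($"Key size: {key.KeySize} bits");
    }
}
```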

View File

@@ -447,7 +447,7 @@ namespace GitHub.Runner.Listener.Configuration
{
Trace.Entering();
string runnerServiceExecutable = "\"" + Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Bin), WindowsServiceControlManager.WindowsServiceControllerName) + "\"";
string agentServiceExecutable = "\"" + Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Bin), WindowsServiceControlManager.WindowsServiceControllerName) + "\"";
IntPtr scmHndl = IntPtr.Zero;
IntPtr svcHndl = IntPtr.Zero;
IntPtr tmpBuf = IntPtr.Zero;
@@ -468,7 +468,7 @@ namespace GitHub.Runner.Listener.Configuration
};
processInvoker.ExecuteAsync(workingDirectory: string.Empty,
fileName: runnerServiceExecutable,
fileName: agentServiceExecutable,
arguments: "init",
environment: null,
requireExitCodeZero: true,
@@ -490,7 +490,7 @@ namespace GitHub.Runner.Listener.Configuration
SERVICE_WIN32_OWN_PROCESS,
ServiceBootFlag.AutoStart,
ServiceError.Normal,
runnerServiceExecutable,
agentServiceExecutable,
null,
IntPtr.Zero,
null,
@@ -678,17 +678,6 @@ namespace GitHub.Runner.Listener.Configuration
if (service != null)
{
service.Start();
try
{
_term.WriteLine("Waiting for service to start...");
service.WaitForStatus(ServiceControllerStatus.Running, TimeSpan.FromSeconds(60));
}
catch (System.ServiceProcess.TimeoutException)
{
throw new InvalidOperationException($"Cannot start the service {serviceName} in a timely fashion.");
}
_term.WriteLine($"Service {serviceName} started successfully");
}
else
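
The removed block above waited up to 60 seconds for the newly installed service to reach the Running state before declaring success. A standalone sketch of that wait using System.ServiceProcess.ServiceController (Windows-only; the service name below is a placeholder):

```csharp
using System;
using System.ServiceProcess;   // package: System.ServiceProcess.ServiceController

static class StartServiceSketch
{
    static void Main()
    {
        string serviceName = "actions.runner.example";   // placeholder, not a real installed service

        using var service = new ServiceController(serviceName);
        service.Start();
        try
        {
            Console.WriteLine("Waiting for service to start...");
            service.WaitForStatus(ServiceControllerStatus.Running, TimeSpan.FromSeconds(60));
            Console.WriteLine($"Service {serviceName} started successfully");
        }
        catch (System.ServiceProcess.TimeoutException)
        {
            throw new InvalidOperationException($"Cannot start the service {serviceName} in a timely fashion.");
        }
    }
}
```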

View File

@@ -1,5 +1,6 @@
using System;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using GitHub.Services.Common;
using GitHub.Services.OAuth;
@@ -28,7 +29,7 @@ namespace GitHub.Runner.Listener.Configuration
var authorizationUrl = this.CredentialData.Data.GetValueOrDefault("authorizationUrl", null);
// For back compat with .credential files that don't have an 'oauthEndpointUrl' section
var oauthEndpointUrl = this.CredentialData.Data.GetValueOrDefault("oauthEndpointUrl", authorizationUrl);
var oathEndpointUrl = this.CredentialData.Data.GetValueOrDefault("oauthEndpointUrl", authorizationUrl);
ArgUtil.NotNullOrEmpty(clientId, nameof(clientId));
ArgUtil.NotNullOrEmpty(authorizationUrl, nameof(authorizationUrl));
@@ -38,11 +39,11 @@ namespace GitHub.Runner.Listener.Configuration
var keyManager = context.GetService<IRSAKeyManager>();
var signingCredentials = VssSigningCredentials.Create(() => keyManager.GetKey());
var clientCredential = new VssOAuthJwtBearerClientCredential(clientId, authorizationUrl, signingCredentials);
var agentCredential = new VssOAuthCredential(new Uri(oauthEndpointUrl, UriKind.Absolute), VssOAuthGrant.ClientCredentials, clientCredential);
var agentCredential = new VssOAuthCredential(new Uri(oathEndpointUrl, UriKind.Absolute), VssOAuthGrant.ClientCredentials, clientCredential);
// Construct a credentials cache with a single OAuth credential for communication. The windows credential
// is explicitly set to null to ensure we never do that negotiation.
return new VssCredentials(agentCredential, CredentialPromptType.DoNotPrompt);
return new VssCredentials(null, agentCredential, CredentialPromptType.DoNotPrompt);
}
}
}

View File

@@ -20,8 +20,7 @@ namespace GitHub.Runner.Listener.Configuration
bool secret,
string defaultValue,
Func<String, bool> validator,
bool unattended,
bool isOptional = false);
bool unattended);
}
public sealed class PromptManager : RunnerService, IPromptManager
@@ -57,8 +56,7 @@ namespace GitHub.Runner.Listener.Configuration
bool secret,
string defaultValue,
Func<string, bool> validator,
bool unattended,
bool isOptional = false)
bool unattended)
{
Trace.Info(nameof(ReadValue));
ArgUtil.NotNull(validator, nameof(validator));
@@ -72,10 +70,6 @@ namespace GitHub.Runner.Listener.Configuration
{
return defaultValue;
}
else if (isOptional)
{
return string.Empty;
}
// Otherwise throw.
throw new Exception($"Invalid configuration provided for {argName}. Terminating unattended configuration.");
@@ -91,28 +85,18 @@ namespace GitHub.Runner.Listener.Configuration
{
_terminal.Write($"[press Enter for {defaultValue}] ");
}
else if (isOptional){
_terminal.Write($"[press Enter to skip] ");
}
// Read and trim the value.
value = secret ? _terminal.ReadSecret() : _terminal.ReadLine();
value = value?.Trim() ?? string.Empty;
// Return the default if not specified.
if (string.IsNullOrEmpty(value))
if (string.IsNullOrEmpty(value) && !string.IsNullOrEmpty(defaultValue))
{
if (!string.IsNullOrEmpty(defaultValue))
{
Trace.Info($"Falling back to the default: '{defaultValue}'");
return defaultValue;
}
else if (isOptional)
{
return string.Empty;
}
Trace.Info($"Falling back to the default: '{defaultValue}'");
return defaultValue;
}
// Return the value if it is not empty and it is valid.
// Otherwise try the loop again.
if (!string.IsNullOrEmpty(value))
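
ReadValue in PromptManager loops until input passes the validator, substituting the default for empty input and failing fast in unattended mode. A console-only sketch of that loop with a hypothetical prompt and validator:

```csharp
using System;

static class PromptSketch
{
    static string ReadValue(string description, string defaultValue, Func<string, bool> validator, bool unattended)
    {
        if (unattended)
        {
            // No interactive input available: accept a valid default or give up.
            if (validator(defaultValue)) return defaultValue;
            throw new Exception($"Invalid configuration provided for {description}. Terminating unattended configuration.");
        }

        while (true)
        {
            Console.Write($"{description} ");
            if (!string.IsNullOrEmpty(defaultValue))
            {
                Console.Write($"[press Enter for {defaultValue}] ");
            }

            string value = (Console.ReadLine() ?? string.Empty).Trim();

            // Fall back to the default when nothing was typed.
            if (string.IsNullOrEmpty(value) && !string.IsNullOrEmpty(defaultValue))
            {
                return defaultValue;
            }

            // Accept the value only if it passes validation; otherwise prompt again.
            if (!string.IsNullOrEmpty(value) && validator(value))
            {
                return value;
            }
        }
    }

    static void Main()
    {
        string name = ReadValue("Enter the name of the runner:", Environment.MachineName,
                                v => !string.IsNullOrEmpty(v), unattended: false);
        Console.WriteLine($"Runner name: {name}");
    }
}
```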

View File

@@ -1,7 +1,6 @@
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using System;
using System.Linq;
using System.IO;
using System.Security.Principal;
@@ -39,21 +38,17 @@ namespace GitHub.Runner.Listener.Configuration
return CredentialManager.CredentialTypes.ContainsKey(value);
}
public static bool BoolValidator(string value)
public static bool FilePathValidator(string value)
{
return string.Equals(value, "true", StringComparison.OrdinalIgnoreCase) ||
string.Equals(value, "false", StringComparison.OrdinalIgnoreCase) ||
string.Equals(value, "Y", StringComparison.CurrentCultureIgnoreCase) ||
string.Equals(value, "N", StringComparison.CurrentCultureIgnoreCase);
}
var directoryInfo = new DirectoryInfo(value);
public static bool LabelsValidator(string labels)
{
if (!string.IsNullOrEmpty(labels))
if (!directoryInfo.Exists)
{
var labelSet = labels.Split(',').Where(x => !string.IsNullOrEmpty(x)).ToHashSet<string>(StringComparer.OrdinalIgnoreCase);
if (labelSet.Any(x => x.Length > 256))
try
{
Directory.CreateDirectory(value);
}
catch (Exception)
{
return false;
}
@@ -62,6 +57,14 @@ namespace GitHub.Runner.Listener.Configuration
return true;
}
public static bool BoolValidator(string value)
{
return string.Equals(value, "true", StringComparison.OrdinalIgnoreCase) ||
string.Equals(value, "false", StringComparison.OrdinalIgnoreCase) ||
string.Equals(value, "Y", StringComparison.CurrentCultureIgnoreCase) ||
string.Equals(value, "N", StringComparison.CurrentCultureIgnoreCase);
}
public static bool NonEmptyValidator(string value)
{
return !string.IsNullOrEmpty(value);
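
FilePathValidator in the restored code accepts a path when the directory already exists or can be created on demand. A self-contained sketch of that check alongside the trivial non-empty validator:

```csharp
using System;
using System.IO;

static class ValidatorSketch
{
    static bool NonEmptyValidator(string value) => !string.IsNullOrEmpty(value);

    static bool FilePathValidator(string value)
    {
        if (string.IsNullOrEmpty(value)) return false;

        var directoryInfo = new DirectoryInfo(value);
        if (!directoryInfo.Exists)
        {
            try
            {
                // Valid if the directory can be created on demand.
                Directory.CreateDirectory(value);
            }
            catch (Exception)
            {
                return false;
            }
        }
        return true;
    }

    static void Main()
    {
        string candidate = Path.Combine(Path.GetTempPath(), "runner-work-sketch");   // placeholder path
        Console.WriteLine($"NonEmpty: {NonEmptyValidator(candidate)}");
        Console.WriteLine($"FilePath: {FilePathValidator(candidate)}");
    }
}
```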

View File

@@ -12,14 +12,12 @@ using System.Linq;
using GitHub.Services.Common;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using GitHub.Services.WebApi.Jwt;
namespace GitHub.Runner.Listener
{
[ServiceLocator(Default = typeof(JobDispatcher))]
public interface IJobDispatcher : IRunnerService
{
bool Busy { get; }
TaskCompletionSource<bool> RunOnceJobCompleted { get; }
void Run(Pipelines.AgentJobRequestMessage message, bool runOnce = false);
bool Cancel(JobCancelMessage message);
@@ -71,8 +69,6 @@ namespace GitHub.Runner.Listener
public TaskCompletionSource<bool> RunOnceJobCompleted => _runOnceJobCompleted;
public bool Busy { get; private set; }
public void Run(Pipelines.AgentJobRequestMessage jobRequestMessage, bool runOnce = false)
{
Trace.Info($"Job request {jobRequestMessage.RequestId} for plan {jobRequestMessage.Plan.PlanId} job {jobRequestMessage.JobId} received.");
@@ -87,30 +83,15 @@ namespace GitHub.Runner.Listener
}
}
var orchestrationId = string.Empty;
var systemConnection = jobRequestMessage.Resources.Endpoints.SingleOrDefault(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
if (systemConnection?.Authorization != null &&
systemConnection.Authorization.Parameters.TryGetValue("AccessToken", out var accessToken) &&
!string.IsNullOrEmpty(accessToken))
{
var jwt = JsonWebToken.Create(accessToken);
var claims = jwt.ExtractClaims();
orchestrationId = claims.FirstOrDefault(x => string.Equals(x.Type, "orchid", StringComparison.OrdinalIgnoreCase))?.Value;
if (!string.IsNullOrEmpty(orchestrationId))
{
Trace.Info($"Pull OrchestrationId {orchestrationId} from JWT claims");
}
}
WorkerDispatcher newDispatch = new WorkerDispatcher(jobRequestMessage.JobId, jobRequestMessage.RequestId);
if (runOnce)
{
Trace.Info("Start dispatcher for one time used runner.");
newDispatch.WorkerDispatch = RunOnceAsync(jobRequestMessage, orchestrationId, currentDispatch, newDispatch.WorkerCancellationTokenSource.Token, newDispatch.WorkerCancelTimeoutKillTokenSource.Token);
newDispatch.WorkerDispatch = RunOnceAsync(jobRequestMessage, currentDispatch, newDispatch.WorkerCancellationTokenSource.Token, newDispatch.WorkerCancelTimeoutKillTokenSource.Token);
}
else
{
newDispatch.WorkerDispatch = RunAsync(jobRequestMessage, orchestrationId, currentDispatch, newDispatch.WorkerCancellationTokenSource.Token, newDispatch.WorkerCancelTimeoutKillTokenSource.Token);
newDispatch.WorkerDispatch = RunAsync(jobRequestMessage, currentDispatch, newDispatch.WorkerCancellationTokenSource.Token, newDispatch.WorkerCancelTimeoutKillTokenSource.Token);
}
_jobInfos.TryAdd(newDispatch.JobId, newDispatch);
@@ -266,7 +247,7 @@ namespace GitHub.Runner.Listener
Task completedTask = await Task.WhenAny(jobDispatch.WorkerDispatch, Task.Delay(TimeSpan.FromSeconds(45)));
if (completedTask != jobDispatch.WorkerDispatch)
{
// at this point, the job execution might encounter some dead lock and even not able to be cancelled.
// at this point, the job exectuion might encounter some dead lock and even not able to be canclled.
// no need to localize the exception string should never happen.
throw new InvalidOperationException($"Job dispatch process for {jobDispatch.JobId} has encountered unexpected error, the dispatch task is not able to be canceled within 45 seconds.");
}
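
EnsureDispatchFinished above gives the previous worker dispatch 45 seconds before treating it as stuck. The timeout is the familiar Task.WhenAny-plus-Task.Delay pattern, sketched here with a placeholder task standing in for the real dispatch:

```csharp
using System;
using System.Threading.Tasks;

static class WaitWithTimeout
{
    static async Task Main()
    {
        // Placeholder for the real worker-dispatch task.
        Task workerDispatch = Task.Delay(TimeSpan.FromSeconds(2));

        Task completedTask = await Task.WhenAny(workerDispatch, Task.Delay(TimeSpan.FromSeconds(45)));
        if (completedTask != workerDispatch)
        {
            // The dispatch did not finish in time; the runner treats this as an unexpected, fatal state.
            throw new InvalidOperationException("Job dispatch did not complete within 45 seconds.");
        }

        await workerDispatch;   // observe the result (or exception) of the finished task
        Console.WriteLine("Previous dispatch finished in time.");
    }
}
```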
@@ -300,11 +281,11 @@ namespace GitHub.Runner.Listener
}
}
private async Task RunOnceAsync(Pipelines.AgentJobRequestMessage message, string orchestrationId, WorkerDispatcher previousJobDispatch, CancellationToken jobRequestCancellationToken, CancellationToken workerCancelTimeoutKillToken)
private async Task RunOnceAsync(Pipelines.AgentJobRequestMessage message, WorkerDispatcher previousJobDispatch, CancellationToken jobRequestCancellationToken, CancellationToken workerCancelTimeoutKillToken)
{
try
{
await RunAsync(message, orchestrationId, previousJobDispatch, jobRequestCancellationToken, workerCancelTimeoutKillToken);
await RunAsync(message, previousJobDispatch, jobRequestCancellationToken, workerCancelTimeoutKillToken);
}
finally
{
@@ -313,145 +294,252 @@ namespace GitHub.Runner.Listener
}
}
private async Task RunAsync(Pipelines.AgentJobRequestMessage message, string orchestrationId, WorkerDispatcher previousJobDispatch, CancellationToken jobRequestCancellationToken, CancellationToken workerCancelTimeoutKillToken)
private async Task RunAsync(Pipelines.AgentJobRequestMessage message, WorkerDispatcher previousJobDispatch, CancellationToken jobRequestCancellationToken, CancellationToken workerCancelTimeoutKillToken)
{
Busy = true;
try
if (previousJobDispatch != null)
{
if (previousJobDispatch != null)
Trace.Verbose($"Make sure the previous job request {previousJobDispatch.JobId} has successfully finished on worker.");
await EnsureDispatchFinished(previousJobDispatch);
}
else
{
Trace.Verbose($"This is the first job request.");
}
var term = HostContext.GetService<ITerminal>();
term.WriteLine($"{DateTime.UtcNow:u}: Running job: {message.JobDisplayName}");
// first job request renew succeed.
TaskCompletionSource<int> firstJobRequestRenewed = new TaskCompletionSource<int>();
var notification = HostContext.GetService<IJobNotification>();
// lock renew cancellation token.
using (var lockRenewalTokenSource = new CancellationTokenSource())
using (var workerProcessCancelTokenSource = new CancellationTokenSource())
{
long requestId = message.RequestId;
Guid lockToken = Guid.Empty; // lockToken has never been used, keep this here for compat
// start renew job request
Trace.Info($"Start renew job request {requestId} for job {message.JobId}.");
Task renewJobRequest = RenewJobRequestAsync(_poolId, requestId, lockToken, firstJobRequestRenewed, lockRenewalTokenSource.Token);
// wait till first renew succeed or job request is canceled
// not even start worker if the first renew fail
await Task.WhenAny(firstJobRequestRenewed.Task, renewJobRequest, Task.Delay(-1, jobRequestCancellationToken));
if (renewJobRequest.IsCompleted)
{
Trace.Verbose($"Make sure the previous job request {previousJobDispatch.JobId} has successfully finished on worker.");
await EnsureDispatchFinished(previousJobDispatch);
}
else
{
Trace.Verbose($"This is the first job request.");
// renew job request task complete means we run out of retry for the first job request renew.
Trace.Info($"Unable to renew job request for job {message.JobId} for the first time, stop dispatching job to worker.");
return;
}
var term = HostContext.GetService<ITerminal>();
term.WriteLine($"{DateTime.UtcNow:u}: Running job: {message.JobDisplayName}");
// first job request renew succeed.
TaskCompletionSource<int> firstJobRequestRenewed = new TaskCompletionSource<int>();
var notification = HostContext.GetService<IJobNotification>();
// lock renew cancellation token.
using (var lockRenewalTokenSource = new CancellationTokenSource())
using (var workerProcessCancelTokenSource = new CancellationTokenSource())
if (jobRequestCancellationToken.IsCancellationRequested)
{
long requestId = message.RequestId;
Guid lockToken = Guid.Empty; // lockToken has never been used, keep this here for compat
Trace.Info($"Stop renew job request for job {message.JobId}.");
// stop renew lock
lockRenewalTokenSource.Cancel();
// renew job request should never blows up.
await renewJobRequest;
// start renew job request
Trace.Info($"Start renew job request {requestId} for job {message.JobId}.");
Task renewJobRequest = RenewJobRequestAsync(_poolId, requestId, lockToken, orchestrationId, firstJobRequestRenewed, lockRenewalTokenSource.Token);
// complete job request with result Cancelled
await CompleteJobRequestAsync(_poolId, message, lockToken, TaskResult.Canceled);
return;
}
// wait till first renew succeed or job request is canceled
// not even start worker if the first renew fail
await Task.WhenAny(firstJobRequestRenewed.Task, renewJobRequest, Task.Delay(-1, jobRequestCancellationToken));
HostContext.WritePerfCounter($"JobRequestRenewed_{requestId.ToString()}");
if (renewJobRequest.IsCompleted)
Task<int> workerProcessTask = null;
object _outputLock = new object();
List<string> workerOutput = new List<string>();
using (var processChannel = HostContext.CreateService<IProcessChannel>())
using (var processInvoker = HostContext.CreateService<IProcessInvoker>())
{
// Start the process channel.
// It's OK if StartServer bubbles an exception after the worker process has already started.
// The worker will shutdown after 30 seconds if it hasn't received the job message.
processChannel.StartServer(
// Delegate to start the child process.
startProcess: (string pipeHandleOut, string pipeHandleIn) =>
{
// Validate args.
ArgUtil.NotNullOrEmpty(pipeHandleOut, nameof(pipeHandleOut));
ArgUtil.NotNullOrEmpty(pipeHandleIn, nameof(pipeHandleIn));
// Save STDOUT from worker, worker will use STDOUT to report unhandled exceptions.
processInvoker.OutputDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stdout)
{
if (!string.IsNullOrEmpty(stdout.Data))
{
lock (_outputLock)
{
workerOutput.Add(stdout.Data);
}
}
};
// Save STDERR from worker, worker will use STDERR on crash.
processInvoker.ErrorDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stderr)
{
if (!string.IsNullOrEmpty(stderr.Data))
{
lock (_outputLock)
{
workerOutput.Add(stderr.Data);
}
}
};
// Start the child process.
HostContext.WritePerfCounter("StartingWorkerProcess");
var assemblyDirectory = HostContext.GetDirectory(WellKnownDirectory.Bin);
string workerFileName = Path.Combine(assemblyDirectory, _workerProcessName);
workerProcessTask = processInvoker.ExecuteAsync(
workingDirectory: assemblyDirectory,
fileName: workerFileName,
arguments: "spawnclient " + pipeHandleOut + " " + pipeHandleIn,
environment: null,
requireExitCodeZero: false,
outputEncoding: null,
killProcessOnCancel: true,
redirectStandardIn: null,
inheritConsoleHandler: false,
keepStandardInOpen: false,
highPriorityProcess: true,
cancellationToken: workerProcessCancelTokenSource.Token);
});
// Send the job request message.
// Kill the worker process if sending the job message times out. The worker
// process may have successfully received the job message.
try
{
// renew job request task complete means we run out of retry for the first job request renew.
Trace.Info($"Unable to renew job request for job {message.JobId} for the first time, stop dispatching job to worker.");
return;
Trace.Info($"Send job request message to worker for job {message.JobId}.");
HostContext.WritePerfCounter($"RunnerSendingJobToWorker_{message.JobId}");
using (var csSendJobRequest = new CancellationTokenSource(_channelTimeout))
{
await processChannel.SendAsync(
messageType: MessageType.NewJobRequest,
body: JsonUtility.ToString(message),
cancellationToken: csSendJobRequest.Token);
}
}
if (jobRequestCancellationToken.IsCancellationRequested)
catch (OperationCanceledException)
{
// message send been cancelled.
// timeout 30 sec. kill worker.
Trace.Info($"Job request message sending for job {message.JobId} been cancelled, kill running worker.");
workerProcessCancelTokenSource.Cancel();
try
{
await workerProcessTask;
}
catch (OperationCanceledException)
{
Trace.Info("worker process has been killed.");
}
Trace.Info($"Stop renew job request for job {message.JobId}.");
// stop renew lock
lockRenewalTokenSource.Cancel();
// renew job request should never blows up.
await renewJobRequest;
// complete job request with result Cancelled
await CompleteJobRequestAsync(_poolId, message, lockToken, TaskResult.Canceled);
// not finish the job request since the job haven't run on worker at all, we will not going to set a result to server.
return;
}
HostContext.WritePerfCounter($"JobRequestRenewed_{requestId.ToString()}");
// we get first jobrequest renew succeed and start the worker process with the job message.
// send notification to machine provisioner.
var systemConnection = message.Resources.Endpoints.SingleOrDefault(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
var accessToken = systemConnection?.Authorization?.Parameters["AccessToken"];
notification.JobStarted(message.JobId, accessToken, systemConnection.Url);
Task<int> workerProcessTask = null;
object _outputLock = new object();
List<string> workerOutput = new List<string>();
using (var processChannel = HostContext.CreateService<IProcessChannel>())
using (var processInvoker = HostContext.CreateService<IProcessInvoker>())
HostContext.WritePerfCounter($"SentJobToWorker_{requestId.ToString()}");
try
{
// Start the process channel.
// It's OK if StartServer bubbles an exception after the worker process has already started.
// The worker will shutdown after 30 seconds if it hasn't received the job message.
processChannel.StartServer(
// Delegate to start the child process.
startProcess: (string pipeHandleOut, string pipeHandleIn) =>
TaskResult resultOnAbandonOrCancel = TaskResult.Succeeded;
// wait for renewlock, worker process or cancellation token been fired.
var completedTask = await Task.WhenAny(renewJobRequest, workerProcessTask, Task.Delay(-1, jobRequestCancellationToken));
if (completedTask == workerProcessTask)
{
// worker finished successfully, complete job request with result, attach unhandled exception reported by worker, stop renew lock, job has finished.
int returnCode = await workerProcessTask;
Trace.Info($"Worker finished for job {message.JobId}. Code: " + returnCode);
string detailInfo = null;
if (!TaskResultUtil.IsValidReturnCode(returnCode))
{
// Validate args.
ArgUtil.NotNullOrEmpty(pipeHandleOut, nameof(pipeHandleOut));
ArgUtil.NotNullOrEmpty(pipeHandleIn, nameof(pipeHandleIn));
detailInfo = string.Join(Environment.NewLine, workerOutput);
Trace.Info($"Return code {returnCode} indicate worker encounter an unhandled exception or app crash, attach worker stdout/stderr to JobRequest result.");
await LogWorkerProcessUnhandledException(message, detailInfo);
}
// Save STDOUT from worker, worker will use STDOUT to report unhandled exceptions.
processInvoker.OutputDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stdout)
{
if (!string.IsNullOrEmpty(stdout.Data))
{
lock (_outputLock)
{
workerOutput.Add(stdout.Data);
}
}
};
TaskResult result = TaskResultUtil.TranslateFromReturnCode(returnCode);
Trace.Info($"finish job request for job {message.JobId} with result: {result}");
term.WriteLine($"{DateTime.UtcNow:u}: Job {message.JobDisplayName} completed with result: {result}");
// Save STDERR from worker, worker will use STDERR on crash.
processInvoker.ErrorDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stderr)
{
if (!string.IsNullOrEmpty(stderr.Data))
{
lock (_outputLock)
{
workerOutput.Add(stderr.Data);
}
}
};
Trace.Info($"Stop renew job request for job {message.JobId}.");
// stop renew lock
lockRenewalTokenSource.Cancel();
// renew job request should never blows up.
await renewJobRequest;
// Start the child process.
HostContext.WritePerfCounter("StartingWorkerProcess");
var assemblyDirectory = HostContext.GetDirectory(WellKnownDirectory.Bin);
string workerFileName = Path.Combine(assemblyDirectory, _workerProcessName);
workerProcessTask = processInvoker.ExecuteAsync(
workingDirectory: assemblyDirectory,
fileName: workerFileName,
arguments: "spawnclient " + pipeHandleOut + " " + pipeHandleIn,
environment: null,
requireExitCodeZero: false,
outputEncoding: null,
killProcessOnCancel: true,
redirectStandardIn: null,
inheritConsoleHandler: false,
keepStandardInOpen: false,
highPriorityProcess: true,
cancellationToken: workerProcessCancelTokenSource.Token);
});
// complete job request
await CompleteJobRequestAsync(_poolId, message, lockToken, result, detailInfo);
// Send the job request message.
// Kill the worker process if sending the job message times out. The worker
// process may have successfully received the job message.
// print out unhandled exception happened in worker after we complete job request.
// when we run out of disk space, report back to server has higher priority.
if (!string.IsNullOrEmpty(detailInfo))
{
Trace.Error("Unhandled exception happened in worker:");
Trace.Error(detailInfo);
}
return;
}
else if (completedTask == renewJobRequest)
{
resultOnAbandonOrCancel = TaskResult.Abandoned;
}
else
{
resultOnAbandonOrCancel = TaskResult.Canceled;
}
// renew job request completed or job request cancellation token been fired for RunAsync(jobrequestmessage)
// cancel worker gracefully first, then kill it after worker cancel timeout
try
{
Trace.Info($"Send job request message to worker for job {message.JobId}.");
HostContext.WritePerfCounter($"RunnerSendingJobToWorker_{message.JobId}");
using (var csSendJobRequest = new CancellationTokenSource(_channelTimeout))
Trace.Info($"Send job cancellation message to worker for job {message.JobId}.");
using (var csSendCancel = new CancellationTokenSource(_channelTimeout))
{
var messageType = MessageType.CancelRequest;
if (HostContext.RunnerShutdownToken.IsCancellationRequested)
{
switch (HostContext.RunnerShutdownReason)
{
case ShutdownReason.UserCancelled:
messageType = MessageType.RunnerShutdown;
break;
case ShutdownReason.OperatingSystemShutdown:
messageType = MessageType.OperatingSystemShutdown;
break;
}
}
await processChannel.SendAsync(
messageType: MessageType.NewJobRequest,
body: JsonUtility.ToString(message),
cancellationToken: csSendJobRequest.Token);
messageType: messageType,
body: string.Empty,
cancellationToken: csSendCancel.Token);
}
}
catch (OperationCanceledException)
{
// message send been cancelled.
// timeout 30 sec. kill worker.
Trace.Info($"Job request message sending for job {message.JobId} been cancelled, kill running worker.");
Trace.Info($"Job cancel message sending for job {message.JobId} been cancelled, kill running worker.");
workerProcessCancelTokenSource.Cancel();
try
{
@@ -461,169 +549,50 @@ namespace GitHub.Runner.Listener
{
Trace.Info("worker process has been killed.");
}
Trace.Info($"Stop renew job request for job {message.JobId}.");
// stop renew lock
lockRenewalTokenSource.Cancel();
// renew job request should never blows up.
await renewJobRequest;
// not finish the job request since the job haven't run on worker at all, we will not going to set a result to server.
return;
}
// we get first jobrequest renew succeed and start the worker process with the job message.
// send notification to machine provisioner.
var systemConnection = message.Resources.Endpoints.SingleOrDefault(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
var accessToken = systemConnection?.Authorization?.Parameters["AccessToken"];
notification.JobStarted(message.JobId, accessToken, systemConnection.Url);
// wait worker to exit
// if worker doesn't exit within timeout, then kill worker.
completedTask = await Task.WhenAny(workerProcessTask, Task.Delay(-1, workerCancelTimeoutKillToken));
HostContext.WritePerfCounter($"SentJobToWorker_{requestId.ToString()}");
try
// worker haven't exit within cancellation timeout.
if (completedTask != workerProcessTask)
{
TaskResult resultOnAbandonOrCancel = TaskResult.Succeeded;
// wait for renewlock, worker process or cancellation token been fired.
var completedTask = await Task.WhenAny(renewJobRequest, workerProcessTask, Task.Delay(-1, jobRequestCancellationToken));
if (completedTask == workerProcessTask)
{
// worker finished successfully, complete job request with result, attach unhandled exception reported by worker, stop renew lock, job has finished.
int returnCode = await workerProcessTask;
Trace.Info($"Worker finished for job {message.JobId}. Code: " + returnCode);
string detailInfo = null;
if (!TaskResultUtil.IsValidReturnCode(returnCode))
{
detailInfo = string.Join(Environment.NewLine, workerOutput);
Trace.Info($"Return code {returnCode} indicate worker encounter an unhandled exception or app crash, attach worker stdout/stderr to JobRequest result.");
await LogWorkerProcessUnhandledException(message, detailInfo);
}
TaskResult result = TaskResultUtil.TranslateFromReturnCode(returnCode);
Trace.Info($"finish job request for job {message.JobId} with result: {result}");
term.WriteLine($"{DateTime.UtcNow:u}: Job {message.JobDisplayName} completed with result: {result}");
Trace.Info($"Stop renew job request for job {message.JobId}.");
// stop renew lock
lockRenewalTokenSource.Cancel();
// renew job request should never blows up.
await renewJobRequest;
// complete job request
await CompleteJobRequestAsync(_poolId, message, lockToken, result, detailInfo);
// print out unhandled exception happened in worker after we complete job request.
// when we run out of disk space, report back to server has higher priority.
if (!string.IsNullOrEmpty(detailInfo))
{
Trace.Error("Unhandled exception happened in worker:");
Trace.Error(detailInfo);
}
return;
}
else if (completedTask == renewJobRequest)
{
resultOnAbandonOrCancel = TaskResult.Abandoned;
}
else
{
resultOnAbandonOrCancel = TaskResult.Canceled;
}
// renew job request completed or job request cancellation token been fired for RunAsync(jobrequestmessage)
// cancel worker gracefully first, then kill it after worker cancel timeout
Trace.Info($"worker process for job {message.JobId} haven't exit within cancellation timout, kill running worker.");
workerProcessCancelTokenSource.Cancel();
try
{
Trace.Info($"Send job cancellation message to worker for job {message.JobId}.");
using (var csSendCancel = new CancellationTokenSource(_channelTimeout))
{
var messageType = MessageType.CancelRequest;
if (HostContext.RunnerShutdownToken.IsCancellationRequested)
{
switch (HostContext.RunnerShutdownReason)
{
case ShutdownReason.UserCancelled:
messageType = MessageType.RunnerShutdown;
break;
case ShutdownReason.OperatingSystemShutdown:
messageType = MessageType.OperatingSystemShutdown;
break;
}
}
await processChannel.SendAsync(
messageType: messageType,
body: string.Empty,
cancellationToken: csSendCancel.Token);
}
await workerProcessTask;
}
catch (OperationCanceledException)
{
// message send been cancelled.
Trace.Info($"Job cancel message sending for job {message.JobId} been cancelled, kill running worker.");
workerProcessCancelTokenSource.Cancel();
try
{
await workerProcessTask;
}
catch (OperationCanceledException)
{
Trace.Info("worker process has been killed.");
}
Trace.Info("worker process has been killed.");
}
// wait worker to exit
// if worker doesn't exit within timeout, then kill worker.
completedTask = await Task.WhenAny(workerProcessTask, Task.Delay(-1, workerCancelTimeoutKillToken));
// worker haven't exit within cancellation timeout.
if (completedTask != workerProcessTask)
{
Trace.Info($"worker process for job {message.JobId} haven't exit within cancellation timout, kill running worker.");
workerProcessCancelTokenSource.Cancel();
try
{
await workerProcessTask;
}
catch (OperationCanceledException)
{
Trace.Info("worker process has been killed.");
}
// When worker doesn't exit within cancel timeout, the runner will kill the worker process and worker won't finish upload job logs.
// The runner will try to upload these logs at this time.
await TryUploadUnfinishedLogs(message);
}
Trace.Info($"finish job request for job {message.JobId} with result: {resultOnAbandonOrCancel}");
term.WriteLine($"{DateTime.UtcNow:u}: Job {message.JobDisplayName} completed with result: {resultOnAbandonOrCancel}");
// complete job request with cancel result, stop renew lock, job has finished.
Trace.Info($"Stop renew job request for job {message.JobId}.");
// stop renew lock
lockRenewalTokenSource.Cancel();
// renew job request should never blows up.
await renewJobRequest;
// complete job request
await CompleteJobRequestAsync(_poolId, message, lockToken, resultOnAbandonOrCancel);
}
finally
{
// This should be the last thing to run so we don't notify external parties until actually finished
await notification.JobCompleted(message.JobId);
}
Trace.Info($"finish job request for job {message.JobId} with result: {resultOnAbandonOrCancel}");
term.WriteLine($"{DateTime.UtcNow:u}: Job {message.JobDisplayName} completed with result: {resultOnAbandonOrCancel}");
// complete job request with cancel result, stop renew lock, job has finished.
Trace.Info($"Stop renew job request for job {message.JobId}.");
// stop renew lock
lockRenewalTokenSource.Cancel();
// renew job request should never blows up.
await renewJobRequest;
// complete job request
await CompleteJobRequestAsync(_poolId, message, lockToken, resultOnAbandonOrCancel);
}
finally
{
// This should be the last thing to run so we don't notify external parties until actually finished
await notification.JobCompleted(message.JobId);
}
}
}
finally
{
Busy = false;
}
}
public async Task RenewJobRequestAsync(int poolId, long requestId, Guid lockToken, string orchestrationId, TaskCompletionSource<int> firstJobRequestRenewed, CancellationToken token)
public async Task RenewJobRequestAsync(int poolId, long requestId, Guid lockToken, TaskCompletionSource<int> firstJobRequestRenewed, CancellationToken token)
{
var runnerServer = HostContext.GetService<IRunnerServer>();
TaskAgentJobRequest request = null;
@@ -636,7 +605,7 @@ namespace GitHub.Runner.Listener
{
try
{
request = await runnerServer.RenewAgentRequestAsync(poolId, requestId, lockToken, orchestrationId, token);
request = await runnerServer.RenewAgentRequestAsync(poolId, requestId, lockToken, token);
Trace.Info($"Successfully renew job request {requestId}, job is valid till {request.LockedUntil.Value}");
@@ -743,121 +712,7 @@ namespace GitHub.Runner.Listener
}
}
// Best effort upload any logs for this job.
private async Task TryUploadUnfinishedLogs(Pipelines.AgentJobRequestMessage message)
{
Trace.Entering();
var logFolder = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Diag), PagingLogger.PagingFolder);
if (!Directory.Exists(logFolder))
{
return;
}
var logs = Directory.GetFiles(logFolder);
if (logs.Length == 0)
{
return;
}
try
{
var systemConnection = message.Resources.Endpoints.SingleOrDefault(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection));
ArgUtil.NotNull(systemConnection, nameof(systemConnection));
var jobServer = HostContext.GetService<IJobServer>();
VssCredentials jobServerCredential = VssUtil.GetVssCredential(systemConnection);
VssConnection jobConnection = VssUtil.CreateConnection(systemConnection.Url, jobServerCredential);
await jobServer.ConnectAsync(jobConnection);
var timeline = await jobServer.GetTimelineAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, CancellationToken.None);
var updatedRecords = new List<TimelineRecord>();
var logPages = new Dictionary<Guid, Dictionary<int, string>>();
var logRecords = new Dictionary<Guid, TimelineRecord>();
foreach (var log in logs)
{
var logName = Path.GetFileNameWithoutExtension(log);
var logNameParts = logName.Split('_', StringSplitOptions.RemoveEmptyEntries);
if (logNameParts.Length != 3)
{
Trace.Warning($"log file '{log}' doesn't follow naming convension 'GUID_GUID_INT'.");
continue;
}
var logPageSeparator = logName.IndexOf('_');
var logRecordId = Guid.Empty;
var pageNumber = 0;
if (!Guid.TryParse(logNameParts[0], out Guid timelineId) || timelineId != timeline.Id)
{
Trace.Warning($"log file '{log}' is not belongs to current job");
continue;
}
if (!Guid.TryParse(logNameParts[1], out logRecordId))
{
Trace.Warning($"log file '{log}' doesn't follow naming convension 'GUID_GUID_INT'.");
continue;
}
if (!int.TryParse(logNameParts[2], out pageNumber))
{
Trace.Warning($"log file '{log}' doesn't follow naming convension 'GUID_GUID_INT'.");
continue;
}
var record = timeline.Records.FirstOrDefault(x => x.Id == logRecordId);
if (record != null)
{
if (!logPages.ContainsKey(record.Id))
{
logPages[record.Id] = new Dictionary<int, string>();
logRecords[record.Id] = record;
}
logPages[record.Id][pageNumber] = log;
}
}
foreach (var pages in logPages)
{
var record = logRecords[pages.Key];
if (record.Log == null)
{
// Create the log
record.Log = await jobServer.CreateLogAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, new TaskLog(String.Format(@"logs\{0:D}", record.Id)), default(CancellationToken));
// Need to post timeline record updates to reflect the log creation
updatedRecords.Add(record.Clone());
}
for (var i = 1; i <= pages.Value.Count; i++)
{
var logFile = pages.Value[i];
// Upload the contents
using (FileStream fs = File.Open(logFile, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
{
var logUploaded = await jobServer.AppendLogContentAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, record.Log.Id, fs, default(CancellationToken));
}
Trace.Info($"Uploaded unfinished log '{logFile}' for current job.");
IOUtil.DeleteFile(logFile);
}
}
if (updatedRecords.Count > 0)
{
await jobServer.UpdateTimelineRecordsAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, updatedRecords, CancellationToken.None);
}
}
catch (Exception ex)
{
// Ignore any error during log upload since it's best effort
Trace.Error(ex);
}
}
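TryUploadUnfinishedLogs depends entirely on the paging logger's file naming convention, TIMELINEGUID_RECORDGUID_PAGENUMBER. A small sketch of just that parsing step is shown below; the LogPageInfo type and TryParse helper are hypothetical names added for illustration.

```csharp
using System;
using System.IO;

// Parsed identity of a paged log file named "<timelineId>_<recordId>_<pageNumber>".
sealed class LogPageInfo
{
    public Guid TimelineId { get; set; }
    public Guid RecordId { get; set; }
    public int PageNumber { get; set; }
}

static class PagedLogNameParser
{
    // Returns null when the file name does not follow the GUID_GUID_INT convention.
    public static LogPageInfo TryParse(string logFilePath)
    {
        string name = Path.GetFileNameWithoutExtension(logFilePath);
        string[] parts = name.Split('_', StringSplitOptions.RemoveEmptyEntries);
        if (parts.Length != 3)
        {
            return null;
        }

        if (!Guid.TryParse(parts[0], out Guid timelineId) ||
            !Guid.TryParse(parts[1], out Guid recordId) ||
            !int.TryParse(parts[2], out int pageNumber))
        {
            return null;
        }

        return new LogPageInfo { TimelineId = timelineId, RecordId = recordId, PageNumber = pageNumber };
    }
}
```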
// TODO: We need to send detailInfo back to DT in order to add an issue for the job
private async Task CompleteJobRequestAsync(int poolId, Pipelines.AgentJobRequestMessage message, Guid lockToken, TaskResult result, string detailInfo = null)
{
Trace.Entering();
@@ -951,10 +806,8 @@ namespace GitHub.Runner.Listener
ArgUtil.NotNull(timeline, nameof(timeline));
TimelineRecord jobRecord = timeline.Records.FirstOrDefault(x => x.Id == message.JobId && x.RecordType == "Job");
ArgUtil.NotNull(jobRecord, nameof(jobRecord));
var unhandledExceptionIssue = new Issue() { Type = IssueType.Error, Message = errorMessage };
unhandledExceptionIssue.Data[Constants.Runner.InternalTelemetryIssueDataKey] = Constants.Runner.WorkerCrash;
jobRecord.ErrorCount++;
jobRecord.Issues.Add(unhandledExceptionIssue);
jobRecord.Issues.Add(new Issue() { Type = IssueType.Error, Message = errorMessage });
await jobServer.UpdateTimelineRecordsAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, new TimelineRecord[] { jobRecord }, CancellationToken.None);
}
catch (Exception ex)

View File

@@ -13,10 +13,7 @@ using System.Diagnostics;
using System.Runtime.InteropServices;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using GitHub.Services.WebApi;
using System.Runtime.CompilerServices;
[assembly: InternalsVisibleTo("Test")]
namespace GitHub.Runner.Listener
{
[ServiceLocator(Default = typeof(MessageListener))]
@@ -35,30 +32,18 @@ namespace GitHub.Runner.Listener
private ITerminal _term;
private IRunnerServer _runnerServer;
private TaskAgentSession _session;
private ICredentialManager _credMgr;
private IConfigurationStore _configStore;
private TimeSpan _getNextMessageRetryInterval;
private readonly TimeSpan _sessionCreationRetryInterval = TimeSpan.FromSeconds(30);
private readonly TimeSpan _sessionConflictRetryLimit = TimeSpan.FromMinutes(4);
private readonly TimeSpan _clockSkewRetryLimit = TimeSpan.FromMinutes(30);
private readonly Dictionary<string, int> _sessionCreationExceptionTracker = new Dictionary<string, int>();
// Whether to load credentials from the .credentials_migrated file
internal bool _useMigratedCredentials;
// need to check the auth url if there is only .credentials and the auth scheme is OAuth
internal bool _needToCheckAuthorizationUrlUpdate;
internal Task<VssCredentials> _authorizationUrlMigrationBackgroundTask;
internal Task _authorizationUrlRollbackReattemptDelayBackgroundTask;
public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);
_term = HostContext.GetService<ITerminal>();
_runnerServer = HostContext.GetService<IRunnerServer>();
_credMgr = HostContext.GetService<ICredentialManager>();
_configStore = HostContext.GetService<IConfigurationStore>();
}
public async Task<Boolean> CreateSessionAsync(CancellationToken token)
@@ -73,8 +58,8 @@ namespace GitHub.Runner.Listener
// Create connection.
Trace.Info("Loading Credentials");
_useMigratedCredentials = !StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_SPSAUTHURL"));
VssCredentials creds = _credMgr.LoadCredentials(_useMigratedCredentials);
var credMgr = HostContext.GetService<ICredentialManager>();
VssCredentials creds = credMgr.LoadCredentials();
var agent = new TaskAgentReference
{
@@ -89,27 +74,16 @@ namespace GitHub.Runner.Listener
string errorMessage = string.Empty;
bool encounteringError = false;
var originalCreds = _configStore.GetCredentials();
var migratedCreds = _configStore.GetMigratedCredentials();
if (migratedCreds == null)
{
_useMigratedCredentials = false;
if (originalCreds.Scheme == Constants.Configuration.OAuth)
{
_needToCheckAuthorizationUrlUpdate = true;
}
}
while (true)
{
token.ThrowIfCancellationRequested();
Trace.Info($"Attempt to create session.");
try
{
Trace.Info("Connecting to the Runner Server...");
Trace.Info("Connecting to the Agent Server...");
await _runnerServer.ConnectAsync(new Uri(serverUrl), creds);
Trace.Info("VssConnection created");
_term.WriteLine();
_term.WriteSuccessMessage("Connected to GitHub");
_term.WriteLine();
@@ -127,12 +101,6 @@ namespace GitHub.Runner.Listener
encounteringError = false;
}
if (_needToCheckAuthorizationUrlUpdate)
{
// start a background task to try to get the new authorization url
_authorizationUrlMigrationBackgroundTask = GetNewOAuthAuthorizationSetting(token);
}
return true;
}
catch (OperationCanceledException) when (token.IsCancellationRequested)
@@ -142,7 +110,7 @@ namespace GitHub.Runner.Listener
}
catch (TaskAgentAccessTokenExpiredException)
{
Trace.Info("Runner OAuth token has been revoked. Session creation failed.");
Trace.Info("Agent OAuth token has been revoked. Session creation failed.");
throw;
}
catch (Exception ex)
@@ -150,58 +118,10 @@ namespace GitHub.Runner.Listener
Trace.Error("Catch exception during create session.");
Trace.Error(ex);
if (ex is VssOAuthTokenRequestException && creds.Federated is VssOAuthCredential vssOAuthCred)
{
// Check whether we get 401 because the runner registration has already been removed by the service.
// If the runner registration has been deleted, we can't exchange the oauth token.
Trace.Error("Test oauth app registration.");
var oauthTokenProvider = new VssOAuthTokenProvider(vssOAuthCred, new Uri(serverUrl));
var authError = await oauthTokenProvider.ValidateCredentialAsync(token);
if (string.Equals(authError, "invalid_client", StringComparison.OrdinalIgnoreCase))
{
_term.WriteError("Failed to create a session. The runner registration has been deleted from the server, please re-configure.");
return false;
}
}
if (ex is TaskAgentSessionConflictException)
{
try
{
var newCred = await GetNewOAuthAuthorizationSetting(token, true);
if (newCred != null)
{
await _runnerServer.ConnectAsync(new Uri(_settings.ServerUrl), newCred);
Trace.Info("Updated connection to use migrated credential for next CreateSession call.");
_useMigratedCredentials = true;
_authorizationUrlMigrationBackgroundTask = null;
_needToCheckAuthorizationUrlUpdate = false;
}
}
catch (Exception e)
{
Trace.Error("Fail to refresh connection with new authorization url.");
Trace.Error(e);
}
}
if (!IsSessionCreationExceptionRetriable(ex))
{
if (_useMigratedCredentials && !(ex is TaskAgentSessionConflictException))
{
// migrated credentials might cause a permission loss during the permission check,
// so we force the use of the original credential and try again
_useMigratedCredentials = false;
var reattemptBackoff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromHours(24), TimeSpan.FromHours(36));
_authorizationUrlRollbackReattemptDelayBackgroundTask = HostContext.Delay(reattemptBackoff, token); // retry migrated creds in 24-36 hours.
creds = _credMgr.LoadCredentials(false);
Trace.Error("Fallback to original credentials and try again.");
}
else
{
_term.WriteError($"Failed to create session. {ex.Message}");
return false;
}
_term.WriteError($"Failed to create session. {ex.Message}");
return false;
}
if (!encounteringError) //print the message only on the first error
@@ -262,51 +182,6 @@ namespace GitHub.Runner.Listener
encounteringError = false;
continuousError = 0;
}
if (_needToCheckAuthorizationUrlUpdate &&
_authorizationUrlMigrationBackgroundTask?.IsCompleted == true)
{
if (HostContext.GetService<IJobDispatcher>().Busy ||
HostContext.GetService<ISelfUpdater>().Busy)
{
Trace.Info("Job or runner updates in progress, update credentials next time.");
}
else
{
try
{
var newCred = await _authorizationUrlMigrationBackgroundTask;
await _runnerServer.ConnectAsync(new Uri(_settings.ServerUrl), newCred);
Trace.Info("Updated connection to use migrated credential for next GetMessage call.");
_useMigratedCredentials = true;
_authorizationUrlMigrationBackgroundTask = null;
_needToCheckAuthorizationUrlUpdate = false;
}
catch (Exception ex)
{
Trace.Error("Fail to refresh connection with new authorization url.");
Trace.Error(ex);
}
}
}
if (_authorizationUrlRollbackReattemptDelayBackgroundTask?.IsCompleted == true)
{
try
{
// we rolled back to the original creds about 2 days ago; now it's a good time to try the migrated creds again.
Trace.Info("Re-attempt to use migrated credential");
var migratedCreds = _credMgr.LoadCredentials();
await _runnerServer.ConnectAsync(new Uri(_settings.ServerUrl), migratedCreds);
_useMigratedCredentials = true;
_authorizationUrlRollbackReattemptDelayBackgroundTask = null;
}
catch (Exception ex)
{
Trace.Error("Fail to refresh connection with new authorization url on rollback reattempt.");
Trace.Error(ex);
}
}
}
catch (OperationCanceledException) when (token.IsCancellationRequested)
{
@@ -315,7 +190,7 @@ namespace GitHub.Runner.Listener
}
catch (TaskAgentAccessTokenExpiredException)
{
Trace.Info("Runner OAuth token has been revoked. Unable to pull message.");
Trace.Info("Agent OAuth token has been revoked. Unable to pull message.");
throw;
}
catch (Exception ex)
@@ -330,21 +205,7 @@ namespace GitHub.Runner.Listener
}
else if (!IsGetNextMessageExceptionRetriable(ex))
{
if (_useMigratedCredentials)
{
// migrated credentials might cause a permission loss during the permission check,
// so we force the use of the original credential and try again
_useMigratedCredentials = false;
var reattemptBackoff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromHours(24), TimeSpan.FromHours(36));
_authorizationUrlRollbackReattemptDelayBackgroundTask = HostContext.Delay(reattemptBackoff, token); // retry migrated creds in 24-36 hours.
var originalCreds = _credMgr.LoadCredentials(false);
await _runnerServer.ConnectAsync(new Uri(_settings.ServerUrl), originalCreds);
Trace.Error("Fallback to original credentials and try again.");
}
else
{
throw;
}
throw;
}
else
{
@@ -475,7 +336,7 @@ namespace GitHub.Runner.Listener
{
if (ex is TaskAgentNotFoundException)
{
Trace.Info("The runner no longer exists on the server. Stopping the runner.");
Trace.Info("The agent no longer exists on the server. Stopping the runner.");
_term.WriteError("The runner no longer exists on the server. Please reconfigure the runner.");
return false;
}
@@ -503,7 +364,7 @@ namespace GitHub.Runner.Listener
}
else if (ex is VssOAuthTokenRequestException && ex.Message.Contains("Current server time is"))
{
Trace.Info("Local clock might be skewed.");
Trace.Info("Local clock might skewed.");
_term.WriteError("The local machine's clock may be out of sync with the server time by more than five minutes. Please sync your clock with your domain or internet time and try again.");
if (_sessionCreationExceptionTracker.ContainsKey(nameof(VssOAuthTokenRequestException)))
{
@@ -536,94 +397,5 @@ namespace GitHub.Runner.Listener
return true;
}
}
private async Task<VssCredentials> GetNewOAuthAuthorizationSetting(CancellationToken token, bool adhoc = false)
{
Trace.Info("Start checking oauth authorization url update.");
while (true)
{
try
{
var migratedAuthorizationUrl = await _runnerServer.GetRunnerAuthUrlAsync(_settings.PoolId, _settings.AgentId);
if (!string.IsNullOrEmpty(migratedAuthorizationUrl))
{
var credData = _configStore.GetCredentials();
var clientId = credData.Data.GetValueOrDefault("clientId", null);
var currentAuthorizationUrl = credData.Data.GetValueOrDefault("authorizationUrl", null);
Trace.Info($"Current authorization url: {currentAuthorizationUrl}, new authorization url: {migratedAuthorizationUrl}");
if (string.Equals(currentAuthorizationUrl, migratedAuthorizationUrl, StringComparison.OrdinalIgnoreCase))
{
// We don't need to update credentials.
Trace.Info("No needs to update authorization url");
if (adhoc)
{
return null;
}
else
{
await Task.Delay(TimeSpan.FromMilliseconds(-1), token);
}
}
var keyManager = HostContext.GetService<IRSAKeyManager>();
var signingCredentials = VssSigningCredentials.Create(() => keyManager.GetKey());
var migratedClientCredential = new VssOAuthJwtBearerClientCredential(clientId, migratedAuthorizationUrl, signingCredentials);
var migratedRunnerCredential = new VssOAuthCredential(new Uri(migratedAuthorizationUrl, UriKind.Absolute), VssOAuthGrant.ClientCredentials, migratedClientCredential);
Trace.Info("Try connect service with Token Service OAuth endpoint.");
var runnerServer = HostContext.CreateService<IRunnerServer>();
await runnerServer.ConnectAsync(new Uri(_settings.ServerUrl), migratedRunnerCredential);
await runnerServer.GetAgentPoolsAsync();
Trace.Info($"Successfully connected service with new authorization url.");
var migratedCredData = new CredentialData
{
Scheme = Constants.Configuration.OAuth,
Data =
{
{ "clientId", clientId },
{ "authorizationUrl", migratedAuthorizationUrl },
{ "oauthEndpointUrl", migratedAuthorizationUrl },
},
};
_configStore.SaveMigratedCredential(migratedCredData);
return migratedRunnerCredential;
}
else
{
Trace.Verbose("No authorization url updates");
}
}
catch (Exception ex) when (!token.IsCancellationRequested)
{
Trace.Error("Fail to get/test new authorization url.");
Trace.Error(ex);
try
{
await _runnerServer.ReportRunnerAuthUrlErrorAsync(_settings.PoolId, _settings.AgentId, ex.ToString());
}
catch (Exception e)
{
// best effort
Trace.Error("Fail to report the migration error");
Trace.Error(e);
}
}
if (adhoc)
{
return null;
}
else
{
var backoff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromMinutes(30), TimeSpan.FromMinutes(45));
await HostContext.Delay(backoff, token);
}
}
}
}
}

View File

@@ -4,7 +4,6 @@ using GitHub.Runner.Sdk;
using System;
using System.Globalization;
using System.IO;
using System.Reflection;
using System.Runtime.InteropServices;
using System.Threading;
using System.Threading.Tasks;
@@ -15,9 +14,6 @@ namespace GitHub.Runner.Listener
{
public static int Main(string[] args)
{
// Add environment variables from .env file
LoadAndSetEnv();
using (HostContext context = new HostContext("Runner"))
{
return MainAsync(context, args).GetAwaiter().GetResult();
@@ -29,7 +25,7 @@ namespace GitHub.Runner.Listener
// 1: Terminate failure
// 2: Retriable failure
// 3: Exit for self update
private async static Task<int> MainAsync(IHostContext context, string[] args)
public async static Task<int> MainAsync(IHostContext context, string[] args)
{
Tracing trace = context.GetTrace(nameof(GitHub.Runner.Listener));
trace.Info($"Runner is built for {Constants.Runner.Platform} ({Constants.Runner.PlatformArchitecture}) - {BuildConstants.RunnerPackage.PackageName}.");
@@ -87,6 +83,22 @@ namespace GitHub.Runner.Listener
return Constants.Runner.ReturnCode.TerminatedError;
}
// Add environment variables from .env file
string envFile = Path.Combine(context.GetDirectory(WellKnownDirectory.Root), ".env");
if (File.Exists(envFile))
{
var envContents = File.ReadAllLines(envFile);
foreach (var env in envContents)
{
if (!string.IsNullOrEmpty(env) && env.IndexOf('=') > 0)
{
string envKey = env.Substring(0, env.IndexOf('='));
string envValue = env.Substring(env.IndexOf('=') + 1);
Environment.SetEnvironmentVariable(envKey, envValue);
}
}
}
// Parse the command line args.
var command = new CommandSettings(context, args);
trace.Info("Arguments parsed");
@@ -124,34 +136,5 @@ namespace GitHub.Runner.Listener
return Constants.Runner.ReturnCode.RetryableError;
}
}
private static void LoadAndSetEnv()
{
var binDir = Path.GetDirectoryName(Assembly.GetEntryAssembly().Location);
var rootDir = new DirectoryInfo(binDir).Parent.FullName;
string envFile = Path.Combine(rootDir, ".env");
if (File.Exists(envFile))
{
var envContents = File.ReadAllLines(envFile);
foreach (var env in envContents)
{
if (!string.IsNullOrEmpty(env))
{
var separatorIndex = env.IndexOf('=');
if (separatorIndex > 0)
{
string envKey = env.Substring(0, separatorIndex);
string envValue = null;
if (env.Length > separatorIndex + 1)
{
envValue = env.Substring(separatorIndex + 1);
}
Environment.SetEnvironmentVariable(envKey, envValue);
}
}
}
}
}
}
}
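Both variants of the .env handling above split each line at the first '=' and push the pair into the process environment. A standalone sketch of that behavior, with a hypothetical DotEnvLoader name, looks roughly like this:

```csharp
using System;
using System.IO;

static class DotEnvLoader
{
    // Loads KEY=VALUE pairs from a .env file into the current process environment.
    // Blank lines and lines without '=' (or starting with '=') are ignored.
    public static void Load(string envFile)
    {
        if (!File.Exists(envFile))
        {
            return;
        }

        foreach (string line in File.ReadAllLines(envFile))
        {
            if (string.IsNullOrEmpty(line))
            {
                continue;
            }

            int separator = line.IndexOf('=');
            if (separator <= 0)
            {
                continue;
            }

            string key = line.Substring(0, separator);
            string value = line.Length > separator + 1 ? line.Substring(separator + 1) : string.Empty;
            Environment.SetEnvironmentVariable(key, value);
        }
    }
}
```

A caller would then invoke something like DotEnvLoader.Load(Path.Combine(rootDir, ".env")) during startup, mirroring either placement shown in the diff.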

View File

@@ -1,7 +1,7 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>netcoreapp3.1</TargetFramework>
<TargetFramework>netcoreapp3.0</TargetFramework>
<OutputType>Exe</OutputType>
<RuntimeIdentifiers>win-x64;win-x86;linux-x64;linux-arm64;linux-arm;osx-x64</RuntimeIdentifiers>
<TargetLatestRuntimePatch>true</TargetLatestRuntimePatch>

View File

@@ -37,7 +37,8 @@ namespace GitHub.Runner.Listener
{
try
{
VssUtil.InitializeVssClientSettings(HostContext.UserAgents, HostContext.WebProxy);
var runnerCertManager = HostContext.GetService<IRunnerCertificateManager>();
VssUtil.InitializeVssClientSettings(HostContext.UserAgent, HostContext.WebProxy, runnerCertManager.VssClientCertificateManager);
_inConfigStage = true;
_completedCommand.Reset();
@@ -433,7 +434,7 @@ namespace GitHub.Runner.Listener
}
catch (TaskAgentAccessTokenExpiredException)
{
Trace.Info("Runner OAuth token has been revoked. Shutting down.");
Trace.Info("Agent OAuth token has been revoked. Shutting down.");
}
return Constants.Runner.ReturnCode.Success;
@@ -451,41 +452,16 @@ namespace GitHub.Runner.Listener
ext = "sh";
#endif
_term.WriteLine($@"
Commands:
.{separator}config.{ext} Configures the runner
.{separator}config.{ext} remove Unconfigures the runner
.{separator}run.{ext} Runs the runner interactively. Does not require any options.
Commands:,
.{separator}config.{ext} Configures the runner
.{separator}config.{ext} remove Unconfigures the runner
.{separator}run.{ext} Runs the runner interactively. Does not require any options.
Options:
--help Prints the help for each command
--version Prints the runner version
--commit Prints the runner commit
Config Options:
--unattended Disable interactive prompts for missing arguments. Defaults will be used for missing options
--url string Repository to add the runner to. Required if unattended
--token string Registration token. Required if unattended
--name string Name of the runner to configure (default {Environment.MachineName ?? "myrunner"})
--labels string Extra labels in addition to the default: 'self-hosted,{Constants.Runner.Platform},{Constants.Runner.PlatformArchitecture}'
--work string Relative runner work directory (default {Constants.Path.WorkDirectory})
--replace Replace any existing runner with the same name (default false)");
#if OS_WINDOWS
_term.WriteLine($@" --runasservice Run the runner as a service");
_term.WriteLine($@" --windowslogonaccount string Account to run the service as. Requires runasservice");
_term.WriteLine($@" --windowslogonpassword string Password for the service account. Requires runasservice");
#endif
_term.WriteLine($@"
Examples:
Configure a runner non-interactively:
.{separator}config.{ext} --unattended --url <url> --token <token>
Configure a runner non-interactively, replacing any existing runner with the same name:
.{separator}config.{ext} --unattended --url <url> --token <token> --replace [--name <name>]
Configure a runner non-interactively with three extra labels:
.{separator}config.{ext} --unattended --url <url> --token <token> --labels L1,L2,L3");
#if OS_WINDOWS
_term.WriteLine($@" Configure a runner to run as a service:");
_term.WriteLine($@" .{separator}config.{ext} --url <url> --token <token> --runasservice");
#endif
--help Prints the help for each command
");
}
}
}

View File

@@ -17,7 +17,6 @@ namespace GitHub.Runner.Listener
[ServiceLocator(Default = typeof(SelfUpdater))]
public interface ISelfUpdater : IRunnerService
{
bool Busy { get; }
Task<bool> SelfUpdate(AgentRefreshMessage updateMessage, IJobDispatcher jobDispatcher, bool restartInteractiveRunner, CancellationToken token);
}
@@ -32,8 +31,6 @@ namespace GitHub.Runner.Listener
private int _poolId;
private int _agentId;
public bool Busy { get; private set; }
public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);
@@ -48,60 +45,52 @@ namespace GitHub.Runner.Listener
public async Task<bool> SelfUpdate(AgentRefreshMessage updateMessage, IJobDispatcher jobDispatcher, bool restartInteractiveRunner, CancellationToken token)
{
Busy = true;
try
if (!await UpdateNeeded(updateMessage.TargetVersion, token))
{
if (!await UpdateNeeded(updateMessage.TargetVersion, token))
{
Trace.Info($"Can't find available update package.");
return false;
}
Trace.Info($"Can't find available update package.");
return false;
}
Trace.Info($"An update is available.");
Trace.Info($"An update is available.");
// Print console line that warn user not shutdown runner.
await UpdateRunnerUpdateStateAsync("Runner update in progress, do not shutdown runner.");
await UpdateRunnerUpdateStateAsync($"Downloading {_targetPackage.Version} runner");
// Print console line that warn user not shutdown runner.
await UpdateRunnerUpdateStateAsync("Runner update in progress, do not shutdown runner.");
await UpdateRunnerUpdateStateAsync($"Downloading {_targetPackage.Version} runner");
await DownloadLatestRunner(token);
Trace.Info($"Download latest runner and unzip into runner root.");
await DownloadLatestRunner(token);
Trace.Info($"Download latest runner and unzip into runner root.");
// wait till all running job finish
await UpdateRunnerUpdateStateAsync("Waiting for current job finish running.");
// wait till all running job finish
await UpdateRunnerUpdateStateAsync("Waiting for current job finish running.");
await jobDispatcher.WaitAsync(token);
Trace.Info($"All running job has exited.");
await jobDispatcher.WaitAsync(token);
Trace.Info($"All running job has exited.");
// delete runner backup
DeletePreviousVersionRunnerBackup(token);
Trace.Info($"Delete old version runner backup.");
// delete runner backup
DeletePreviousVersionRunnerBackup(token);
Trace.Info($"Delete old version runner backup.");
// generate update script from template
await UpdateRunnerUpdateStateAsync("Generate and execute update script.");
// generate update script from template
await UpdateRunnerUpdateStateAsync("Generate and execute update script.");
string updateScript = GenerateUpdateScript(restartInteractiveRunner);
Trace.Info($"Generate update script into: {updateScript}");
string updateScript = GenerateUpdateScript(restartInteractiveRunner);
Trace.Info($"Generate update script into: {updateScript}");
// kick off update script
Process invokeScript = new Process();
// kick off update script
Process invokeScript = new Process();
#if OS_WINDOWS
invokeScript.StartInfo.FileName = WhichUtil.Which("cmd.exe", trace: Trace);
invokeScript.StartInfo.Arguments = $"/c \"{updateScript}\"";
#elif (OS_OSX || OS_LINUX)
invokeScript.StartInfo.FileName = WhichUtil.Which("bash", trace: Trace);
invokeScript.StartInfo.Arguments = $"\"{updateScript}\"";
invokeScript.StartInfo.FileName = WhichUtil.Which("bash", trace: Trace);
invokeScript.StartInfo.Arguments = $"\"{updateScript}\"";
#endif
invokeScript.Start();
Trace.Info($"Update script start running");
invokeScript.Start();
Trace.Info($"Update script start running");
await UpdateRunnerUpdateStateAsync("Runner will exit shortly for update, should back online within 10 seconds.");
await UpdateRunnerUpdateStateAsync("Runner will exit shortly for update, should back online within 10 seconds.");
return true;
}
finally
{
Busy = false;
}
return true;
}
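The update hand-off works by launching the generated script in a separate shell and letting the listener process exit so its binaries can be replaced. A reduced sketch of that launch step is below; the interpreter paths are assumptions, since the real code resolves them with WhichUtil.Which and compile-time OS defines.

```csharp
using System.Diagnostics;
using System.Runtime.InteropServices;

static class UpdateScriptLauncher
{
    // Starts the generated update script in a separate shell process and returns
    // immediately; the calling runner process is expected to exit shortly after.
    public static void Launch(string updateScriptPath)
    {
        var invokeScript = new Process();
        if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
        {
            invokeScript.StartInfo.FileName = "cmd.exe";
            invokeScript.StartInfo.Arguments = $"/c \"{updateScriptPath}\"";
        }
        else
        {
            invokeScript.StartInfo.FileName = "/bin/bash";
            invokeScript.StartInfo.Arguments = $"\"{updateScriptPath}\"";
        }
        invokeScript.Start();
    }
}
```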
private async Task<bool> UpdateNeeded(string targetVersion, CancellationToken token)

View File

@@ -1,7 +1,7 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>netcoreapp3.1</TargetFramework>
<TargetFramework>netcoreapp3.0</TargetFramework>
<OutputType>Exe</OutputType>
<RuntimeIdentifiers>win-x64;win-x86;linux-x64;linux-arm64;linux-arm;osx-x64</RuntimeIdentifiers>
<TargetLatestRuntimePatch>true</TargetLatestRuntimePatch>

View File

@@ -0,0 +1,58 @@
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using GitHub.Runner.Sdk;
using GitHub.Services.WebApi;
using GitHub.Build.WebApi;
namespace GitHub.Runner.Plugins.Artifact
{
// A client wrapper interacting with Build's Artifact API
public class BuildServer
{
private readonly BuildHttpClient _buildHttpClient;
public BuildServer(VssConnection connection)
{
ArgUtil.NotNull(connection, nameof(connection));
_buildHttpClient = connection.GetClient<BuildHttpClient>();
}
// Associate the specified artifact with a build, along with custom data.
public async Task<BuildArtifact> AssociateArtifact(
Guid projectId,
int pipelineId,
string jobId,
string name,
string type,
string data,
Dictionary<string, string> propertiesDictionary,
CancellationToken cancellationToken = default(CancellationToken))
{
BuildArtifact artifact = new BuildArtifact()
{
Name = name,
Source = jobId,
Resource = new ArtifactResource()
{
Data = data,
Type = type,
Properties = propertiesDictionary
}
};
return await _buildHttpClient.CreateArtifactAsync(artifact, projectId, pipelineId, cancellationToken: cancellationToken);
}
// Get named artifact from a build
public async Task<BuildArtifact> GetArtifact(
Guid projectId,
int pipelineId,
string name,
CancellationToken cancellationToken)
{
return await _buildHttpClient.GetArtifactAsync(projectId, pipelineId, name, cancellationToken: cancellationToken);
}
}
}
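A hedged usage sketch for this wrapper follows, assuming an already-authenticated VssConnection and the "#/{containerId}/{path}" container data format used later in this diff; the AssociateAsync helper name is an assumption for illustration.

```csharp
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using GitHub.Runner.Plugins.Artifact;
using GitHub.Services.WebApi;

static class BuildServerUsageSketch
{
    // Associates an existing file container with a run as a "Container" artifact.
    public static async Task AssociateAsync(VssConnection connection, Guid projectId, int buildId,
        string jobId, long containerId, string artifactName)
    {
        var buildServer = new BuildServer(connection);

        // "#/{containerId}/{path}" is the container data format that the download side parses.
        string data = $"#/{containerId}/{artifactName}";
        var properties = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);

        var artifact = await buildServer.AssociateArtifact(projectId, buildId, jobId, artifactName,
            "Container", data, properties, CancellationToken.None);
        Console.WriteLine($"Associated artifact {artifactName} ({artifact.Id}) with run #{buildId}");
    }
}
```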

View File

@@ -3,6 +3,7 @@ using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using GitHub.Build.WebApi;
using GitHub.Services.Common;
using GitHub.Runner.Sdk;
@@ -39,31 +40,70 @@ namespace GitHub.Runner.Plugins.Artifact
targetPath = Path.IsPathFullyQualified(targetPath) ? targetPath : Path.GetFullPath(Path.Combine(defaultWorkingDirectory, targetPath));
// Project ID
Guid projectId = new Guid(context.Variables.GetValueOrDefault(BuildVariables.TeamProjectId)?.Value ?? Guid.Empty.ToString());
// Build ID
string buildIdStr = context.Variables.GetValueOrDefault(SdkConstants.Variables.Build.BuildId)?.Value ?? string.Empty;
string buildIdStr = context.Variables.GetValueOrDefault(BuildVariables.BuildId)?.Value ?? string.Empty;
if (!int.TryParse(buildIdStr, out int buildId))
{
throw new ArgumentException($"Run Id is not an Int32: {buildIdStr}");
}
// Determine whether to call Pipelines or Build endpoint to publish artifact based on variable setting
string usePipelinesArtifactEndpointVar = context.Variables.GetValueOrDefault("Runner.UseActionsArtifactsApis")?.Value;
bool.TryParse(usePipelinesArtifactEndpointVar, out bool usePipelinesArtifactEndpoint);
string containerPath;
long containerId;
context.Output($"Downloading artifact '{artifactName}' to: '{targetPath}'");
// Definition ID is a dummy value only used by HTTP client routing purposes
int definitionId = 1;
var pipelinesHelper = new PipelinesServer(context.VssConnection);
var actionsStorageArtifact = await pipelinesHelper.GetActionsStorageArtifact(definitionId, buildId, artifactName, token);
if (actionsStorageArtifact == null)
if (usePipelinesArtifactEndpoint)
{
throw new Exception($"The actions storage artifact for '{artifactName}' could not be found, or is no longer available");
context.Debug("Downloading artifact using v2 endpoint");
// Definition ID is a dummy value only used by HTTP client routing purposes
int definitionId = 1;
var pipelinesHelper = new PipelinesServer(context.VssConnection);
var actionsStorageArtifact = await pipelinesHelper.GetActionsStorageArtifact(definitionId, buildId, artifactName, token);
if (actionsStorageArtifact == null)
{
throw new Exception($"The actions storage artifact for '{artifactName}' could not be found, or is no longer available");
}
containerPath = actionsStorageArtifact.Name; // In actions storage artifacts, name equals the path
containerId = actionsStorageArtifact.ContainerId;
}
else
{
context.Debug("Downloading artifact using v1 endpoint");
BuildServer buildHelper = new BuildServer(context.VssConnection);
BuildArtifact buildArtifact = await buildHelper.GetArtifact(projectId, buildId, artifactName, token);
if (string.Equals(buildArtifact.Resource.Type, "Container", StringComparison.OrdinalIgnoreCase) ||
// Artifact was published by Pipelines endpoint, check new type here to handle rollback scenario
string.Equals(buildArtifact.Resource.Type, "Actions_Storage", StringComparison.OrdinalIgnoreCase))
{
string containerUrl = buildArtifact.Resource.Data;
string[] parts = containerUrl.Split(new[] { '/' }, 3);
if (parts.Length < 3 || !long.TryParse(parts[1], out containerId))
{
throw new ArgumentOutOfRangeException($"Invalid container url '{containerUrl}' for artifact '{buildArtifact.Name}'");
}
containerPath = parts[2];
}
else
{
throw new NotSupportedException($"Invalid artifact type: {buildArtifact.Resource.Type}");
}
}
string containerPath = actionsStorageArtifact.Name; // In actions storage artifacts, name equals the path
long containerId = actionsStorageArtifact.ContainerId;
FileContainerServer fileContainerServer = new FileContainerServer(context.VssConnection, projectId: new Guid(), containerId, containerPath);
FileContainerServer fileContainerServer = new FileContainerServer(context.VssConnection, projectId, containerId, containerPath);
await fileContainerServer.DownloadFromContainerAsync(context, targetPath, token);
context.Output("Artifact download finished.");
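For the v1 (Build endpoint) case, the artifact's resource data is a container URL shaped like "#/{containerId}/{containerPath}", which the code splits into at most three segments. A tiny sketch of just that parsing, with a hypothetical helper name, is:

```csharp
using System;

static class ContainerUrlParser
{
    // Parses "#/{containerId}/{containerPath}" into its container id and path.
    public static (long ContainerId, string ContainerPath) Parse(string containerUrl)
    {
        string[] parts = containerUrl.Split(new[] { '/' }, 3);
        if (parts.Length < 3 || !long.TryParse(parts[1], out long containerId))
        {
            throw new ArgumentOutOfRangeException(nameof(containerUrl), $"Invalid container url '{containerUrl}'.");
        }
        return (containerId, parts[2]);
    }
}
```

For example, Parse("#/12345/my-artifact") would yield (12345, "my-artifact").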

View File

@@ -4,7 +4,9 @@ using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using GitHub.Build.WebApi;
using GitHub.Services.Common;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Sdk;
namespace GitHub.Runner.Plugins.Artifact
@@ -43,8 +45,11 @@ namespace GitHub.Runner.Plugins.Artifact
throw new ArgumentException($"Artifact name is not valid: {artifactName}. It cannot contain '\\', '/', \"', ':', '<', '>', '|', '*', and '?'");
}
// Project ID
Guid projectId = new Guid(context.Variables.GetValueOrDefault(BuildVariables.TeamProjectId)?.Value ?? Guid.Empty.ToString());
// Build ID
string buildIdStr = context.Variables.GetValueOrDefault(SdkConstants.Variables.Build.BuildId)?.Value ?? string.Empty;
string buildIdStr = context.Variables.GetValueOrDefault(BuildVariables.BuildId)?.Value ?? string.Empty;
if (!int.TryParse(buildIdStr, out int buildId))
{
throw new ArgumentException($"Run Id is not an Int32: {buildIdStr}");
@@ -60,7 +65,7 @@ namespace GitHub.Runner.Plugins.Artifact
}
// Container ID
string containerIdStr = context.Variables.GetValueOrDefault(SdkConstants.Variables.Build.ContainerId)?.Value ?? string.Empty;
string containerIdStr = context.Variables.GetValueOrDefault(BuildVariables.ContainerId)?.Value ?? string.Empty;
if (!long.TryParse(containerIdStr, out long containerId))
{
throw new ArgumentException($"Container Id is not an Int64: {containerIdStr}");
@@ -68,7 +73,7 @@ namespace GitHub.Runner.Plugins.Artifact
context.Output($"Uploading artifact '{artifactName}' from '{fullPath}' for run #{buildId}");
FileContainerServer fileContainerHelper = new FileContainerServer(context.VssConnection, projectId: Guid.Empty, containerId, artifactName);
FileContainerServer fileContainerHelper = new FileContainerServer(context.VssConnection, projectId, containerId, artifactName);
var propertiesDictionary = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
long size = 0;
@@ -84,20 +89,38 @@ namespace GitHub.Runner.Plugins.Artifact
// if any of the results were successful, make sure to attach them to the build
finally
{
// Definition ID is a dummy value only used by HTTP client routing purposes
int definitionId = 1;
// Determine whether to call Pipelines or Build endpoint to publish artifact based on variable setting
string usePipelinesArtifactEndpointVar = context.Variables.GetValueOrDefault("Runner.UseActionsArtifactsApis")?.Value;
bool.TryParse(usePipelinesArtifactEndpointVar, out bool usePipelinesArtifactEndpoint);
PipelinesServer pipelinesHelper = new PipelinesServer(context.VssConnection);
if (usePipelinesArtifactEndpoint)
{
// Definition ID is a dummy value only used by HTTP client routing purposes
int definitionId = 1;
var artifact = await pipelinesHelper.AssociateActionsStorageArtifactAsync(
definitionId,
buildId,
containerId,
artifactName,
size,
token);
PipelinesServer pipelinesHelper = new PipelinesServer(context.VssConnection);
context.Output($"Associated artifact {artifactName} ({artifact.ContainerId}) with run #{buildId}");
var artifact = await pipelinesHelper.AssociateActionsStorageArtifactAsync(
definitionId,
buildId,
containerId,
artifactName,
size,
token);
context.Output($"Associated artifact {artifactName} ({artifact.ContainerId}) with run #{buildId}");
context.Debug($"Associated artifact using v2 endpoint");
}
else
{
string fileContainerFullPath = StringUtil.Format($"#/{containerId}/{artifactName}");
BuildServer buildHelper = new BuildServer(context.VssConnection);
string jobId = context.Variables.GetValueOrDefault(WellKnownDistributedTaskVariables.JobId).Value ?? string.Empty;
var artifact = await buildHelper.AssociateArtifact(projectId, buildId, jobId, artifactName, ArtifactResourceTypes.Container, fileContainerFullPath, propertiesDictionary, token);
context.Output($"Associated artifact {artifactName} ({artifact.Id}) with run #{buildId}");
context.Debug($"Associated artifact using v1 endpoint");
}
}
}
}

View File

@@ -79,13 +79,13 @@ namespace GitHub.Runner.Plugins.Repository.v1_0
{
// Validate args.
ArgUtil.NotNull(executionContext, nameof(executionContext));
executionContext.Output($"Syncing repository: {repoFullName}");
bool useSelfSignedCACert = false;
bool useClientCert = false;
string clientCertPrivateKeyAskPassFile = null;
bool acceptUntrustedCerts = false;
// Repository URL
var githubUrl = executionContext.GetGitHubContext("server_url");
var githubUri = new Uri(!string.IsNullOrEmpty(githubUrl) ? githubUrl : "https://github.com");
var portInfo = githubUri.IsDefaultPort ? string.Empty : $":{githubUri.Port}";
Uri repositoryUrl = new Uri($"{githubUri.Scheme}://{githubUri.Host}{portInfo}/{repoFullName}");
executionContext.Output($"Syncing repository: {repoFullName}");
Uri repositoryUrl = new Uri($"https://github.com/{repoFullName}");
if (!repositoryUrl.IsAbsoluteUri)
{
throw new InvalidOperationException("Repository url need to be an absolute uri.");
@@ -112,6 +112,9 @@ namespace GitHub.Runner.Plugins.Repository.v1_0
}
}
var runnerCert = executionContext.GetCertConfiguration();
acceptUntrustedCerts = runnerCert?.SkipServerCertificateValidation ?? false;
executionContext.Debug($"repository url={repositoryUrl}");
executionContext.Debug($"targetPath={targetPath}");
executionContext.Debug($"sourceBranch={sourceBranch}");
@@ -121,6 +124,12 @@ namespace GitHub.Runner.Plugins.Repository.v1_0
executionContext.Debug($"checkoutNestedSubmodules={checkoutNestedSubmodules}");
executionContext.Debug($"fetchDepth={fetchDepth}");
executionContext.Debug($"gitLfsSupport={gitLfsSupport}");
executionContext.Debug($"acceptUntrustedCerts={acceptUntrustedCerts}");
#if OS_WINDOWS
bool schannelSslBackend = StringUtil.ConvertToBoolean(executionContext.GetRunnerContext("gituseschannel"));
executionContext.Debug($"schannelSslBackend={schannelSslBackend}");
#endif
// Initialize git command manager with additional environment variables.
Dictionary<string, string> gitEnv = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
@@ -155,6 +164,54 @@ namespace GitHub.Runner.Plugins.Repository.v1_0
// prepare askpass for the client cert private key, if the repository's endpoint url matches the runner config url
var systemConnection = executionContext.Endpoints.Single(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
if (runnerCert != null && Uri.Compare(repositoryUrl, systemConnection.Url, UriComponents.SchemeAndServer, UriFormat.Unescaped, StringComparison.OrdinalIgnoreCase) == 0)
{
if (!string.IsNullOrEmpty(runnerCert.CACertificateFile))
{
useSelfSignedCACert = true;
}
if (!string.IsNullOrEmpty(runnerCert.ClientCertificateFile) &&
!string.IsNullOrEmpty(runnerCert.ClientCertificatePrivateKeyFile))
{
useClientCert = true;
// prepare askpass for client cert password
if (!string.IsNullOrEmpty(runnerCert.ClientCertificatePassword))
{
clientCertPrivateKeyAskPassFile = Path.Combine(executionContext.GetRunnerContext("temp"), $"{Guid.NewGuid()}.sh");
List<string> askPass = new List<string>();
askPass.Add("#!/bin/sh");
askPass.Add($"echo \"{runnerCert.ClientCertificatePassword}\"");
File.WriteAllLines(clientCertPrivateKeyAskPassFile, askPass);
#if !OS_WINDOWS
string toolPath = WhichUtil.Which("chmod", true);
string argLine = $"775 {clientCertPrivateKeyAskPassFile}";
executionContext.Command($"chmod {argLine}");
var processInvoker = new ProcessInvoker(executionContext);
processInvoker.OutputDataReceived += (object sender, ProcessDataReceivedEventArgs args) =>
{
if (!string.IsNullOrEmpty(args.Data))
{
executionContext.Output(args.Data);
}
};
processInvoker.ErrorDataReceived += (object sender, ProcessDataReceivedEventArgs args) =>
{
if (!string.IsNullOrEmpty(args.Data))
{
executionContext.Output(args.Data);
}
};
string workingDirectory = executionContext.GetRunnerContext("workspace");
await processInvoker.ExecuteAsync(workingDirectory, toolPath, argLine, null, true, CancellationToken.None);
#endif
}
}
}
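When a password-protected client certificate key is configured, the plugin writes a throwaway shell script that echoes the password and hands it to git via core.askpass. A minimal sketch of producing such a script (the temp-directory argument and method name are assumptions) is:

```csharp
using System;
using System.Collections.Generic;
using System.IO;

static class AskPassFileSketch
{
    // Writes a one-shot askpass script that echoes the given password when git asks for it.
    // The caller is responsible for chmod'ing it on non-Windows platforms and deleting it afterwards.
    public static string Write(string tempDirectory, string certificatePassword)
    {
        string askPassFile = Path.Combine(tempDirectory, $"{Guid.NewGuid()}.sh");
        var askPass = new List<string>
        {
            "#!/bin/sh",
            $"echo \"{certificatePassword}\""
        };
        File.WriteAllLines(askPassFile, askPass);
        return askPassFile;
    }
}
```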
// Check the current contents of the root folder to see if there is already a repo
// If there is a repo, see if it matches the one we are expecting to be there based on the remote fetch url
@@ -304,6 +361,46 @@ namespace GitHub.Runner.Plugins.Repository.v1_0
additionalFetchArgs.Add($"-c http.extraheader=\"AUTHORIZATION: {GenerateBasicAuthHeader(executionContext, accessToken)}\"");
}
// Prepare ignore ssl cert error config for fetch.
if (acceptUntrustedCerts)
{
additionalFetchArgs.Add($"-c http.sslVerify=false");
additionalLfsFetchArgs.Add($"-c http.sslVerify=false");
}
// Prepare self-signed CA cert config for fetch from server.
if (useSelfSignedCACert)
{
executionContext.Debug($"Use self-signed certificate '{runnerCert.CACertificateFile}' for git fetch.");
additionalFetchArgs.Add($"-c http.sslcainfo=\"{runnerCert.CACertificateFile}\"");
additionalLfsFetchArgs.Add($"-c http.sslcainfo=\"{runnerCert.CACertificateFile}\"");
}
// Prepare client cert config for fetch from server.
if (useClientCert)
{
executionContext.Debug($"Use client certificate '{runnerCert.ClientCertificateFile}' for git fetch.");
if (!string.IsNullOrEmpty(clientCertPrivateKeyAskPassFile))
{
additionalFetchArgs.Add($"-c http.sslcert=\"{runnerCert.ClientCertificateFile}\" -c http.sslkey=\"{runnerCert.ClientCertificatePrivateKeyFile}\" -c http.sslCertPasswordProtected=true -c core.askpass=\"{clientCertPrivateKeyAskPassFile}\"");
additionalLfsFetchArgs.Add($"-c http.sslcert=\"{runnerCert.ClientCertificateFile}\" -c http.sslkey=\"{runnerCert.ClientCertificatePrivateKeyFile}\" -c http.sslCertPasswordProtected=true -c core.askpass=\"{clientCertPrivateKeyAskPassFile}\"");
}
else
{
additionalFetchArgs.Add($"-c http.sslcert=\"{runnerCert.ClientCertificateFile}\" -c http.sslkey=\"{runnerCert.ClientCertificatePrivateKeyFile}\"");
additionalLfsFetchArgs.Add($"-c http.sslcert=\"{runnerCert.ClientCertificateFile}\" -c http.sslkey=\"{runnerCert.ClientCertificatePrivateKeyFile}\"");
}
}
#if OS_WINDOWS
if (schannelSslBackend)
{
executionContext.Debug("Use SChannel SslBackend for git fetch.");
additionalFetchArgs.Add("-c http.sslbackend=\"schannel\"");
additionalLfsFetchArgs.Add("-c http.sslbackend=\"schannel\"");
}
#endif
// Prepare gitlfs url for fetch and checkout
if (gitLfsSupport)
{
@@ -405,12 +502,55 @@ namespace GitHub.Runner.Plugins.Repository.v1_0
additionalSubmoduleUpdateArgs.Add($"-c http.{authorityUrl}.extraheader=\"AUTHORIZATION: {GenerateBasicAuthHeader(executionContext, accessToken)}\"");
}
// Prepare ignore ssl cert error config for submodule update.
if (acceptUntrustedCerts)
{
additionalSubmoduleUpdateArgs.Add($"-c http.sslVerify=false");
}
// Prepare self-signed CA cert config for submodule update.
if (useSelfSignedCACert)
{
executionContext.Debug($"Use self-signed CA certificate '{runnerCert.CACertificateFile}' for git submodule update.");
string authorityUrl = repositoryUrl.AbsoluteUri.Replace(repositoryUrl.PathAndQuery, string.Empty);
additionalSubmoduleUpdateArgs.Add($"-c http.{authorityUrl}.sslcainfo=\"{runnerCert.CACertificateFile}\"");
}
// Prepare client cert config for submodule update.
if (useClientCert)
{
executionContext.Debug($"Use client certificate '{runnerCert.ClientCertificateFile}' for git submodule update.");
string authorityUrl = repositoryUrl.AbsoluteUri.Replace(repositoryUrl.PathAndQuery, string.Empty);
if (!string.IsNullOrEmpty(clientCertPrivateKeyAskPassFile))
{
additionalSubmoduleUpdateArgs.Add($"-c http.{authorityUrl}.sslcert=\"{runnerCert.ClientCertificateFile}\" -c http.{authorityUrl}.sslkey=\"{runnerCert.ClientCertificatePrivateKeyFile}\" -c http.{authorityUrl}.sslCertPasswordProtected=true -c core.askpass=\"{clientCertPrivateKeyAskPassFile}\"");
}
else
{
additionalSubmoduleUpdateArgs.Add($"-c http.{authorityUrl}.sslcert=\"{runnerCert.ClientCertificateFile}\" -c http.{authorityUrl}.sslkey=\"{runnerCert.ClientCertificatePrivateKeyFile}\"");
}
}
#if OS_WINDOWS
if (schannelSslBackend)
{
executionContext.Debug("Use SChannel SslBackend for git submodule update.");
additionalSubmoduleUpdateArgs.Add("-c http.sslbackend=\"schannel\"");
}
#endif
int exitCode_submoduleUpdate = await gitCommandManager.GitSubmoduleUpdate(executionContext, targetPath, fetchDepth, string.Join(" ", additionalSubmoduleUpdateArgs), checkoutNestedSubmodules, cancellationToken);
if (exitCode_submoduleUpdate != 0)
{
throw new InvalidOperationException($"Git submodule update failed with exit code: {exitCode_submoduleUpdate}");
}
}
if (useClientCert && !string.IsNullOrEmpty(clientCertPrivateKeyAskPassFile))
{
executionContext.Debug("Remove git.sslkey askpass file.");
IOUtil.DeleteFile(clientCertPrivateKeyAskPassFile);
}
}
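The submodule-update arguments above scope each http.* setting to an authority URL derived from the repository URL by stripping its path and query. A one-line sketch of that derivation, under a hypothetical helper name, is:

```csharp
using System;

static class AuthorityUrlSketch
{
    // Reduces a repository URL to its scheme + authority,
    // e.g. "https://github.com/owner/repo" -> "https://github.com".
    public static string GetAuthorityUrl(Uri repositoryUrl)
    {
        return repositoryUrl.AbsoluteUri.Replace(repositoryUrl.PathAndQuery, string.Empty);
    }
}
```

For https://github.com/owner/repo this returns https://github.com, so a setting such as -c http.https://github.com.sslcainfo=... applies only to that host.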
private async Task<bool> IsRepositoryOriginUrlMatch(RunnerActionPluginExecutionContext context, GitCliManager gitCommandManager, string repositoryPath, Uri expectedRepositoryOriginUrl)

View File

@@ -65,6 +65,11 @@ namespace GitHub.Runner.Plugins.Repository.v1_1
// Validate args.
ArgUtil.NotNull(executionContext, nameof(executionContext));
Dictionary<string, string> configModifications = new Dictionary<string, string>();
bool useSelfSignedCACert = false;
bool useClientCert = false;
string clientCertPrivateKeyAskPassFile = null;
bool acceptUntrustedCerts = false;
executionContext.Output($"Syncing repository: {repoFullName}");
Uri repositoryUrl = new Uri($"https://github.com/{repoFullName}");
if (!repositoryUrl.IsAbsoluteUri)
@@ -93,6 +98,9 @@ namespace GitHub.Runner.Plugins.Repository.v1_1
}
}
var runnerCert = executionContext.GetCertConfiguration();
acceptUntrustedCerts = runnerCert?.SkipServerCertificateValidation ?? false;
executionContext.Debug($"repository url={repositoryUrl}");
executionContext.Debug($"targetPath={targetPath}");
executionContext.Debug($"sourceBranch={sourceBranch}");
@@ -102,6 +110,12 @@ namespace GitHub.Runner.Plugins.Repository.v1_1
executionContext.Debug($"checkoutNestedSubmodules={checkoutNestedSubmodules}");
executionContext.Debug($"fetchDepth={fetchDepth}");
executionContext.Debug($"gitLfsSupport={gitLfsSupport}");
executionContext.Debug($"acceptUntrustedCerts={acceptUntrustedCerts}");
#if OS_WINDOWS
bool schannelSslBackend = StringUtil.ConvertToBoolean(executionContext.GetRunnerContext("gituseschannel"));
executionContext.Debug($"schannelSslBackend={schannelSslBackend}");
#endif
// Initialize git command manager with additional environment variables.
Dictionary<string, string> gitEnv = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
@@ -139,6 +153,54 @@ namespace GitHub.Runner.Plugins.Repository.v1_1
// prepare askpass for the client cert private key, if the repository's endpoint url matches the runner config url
var systemConnection = executionContext.Endpoints.Single(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
if (runnerCert != null && Uri.Compare(repositoryUrl, systemConnection.Url, UriComponents.SchemeAndServer, UriFormat.Unescaped, StringComparison.OrdinalIgnoreCase) == 0)
{
if (!string.IsNullOrEmpty(runnerCert.CACertificateFile))
{
useSelfSignedCACert = true;
}
if (!string.IsNullOrEmpty(runnerCert.ClientCertificateFile) &&
!string.IsNullOrEmpty(runnerCert.ClientCertificatePrivateKeyFile))
{
useClientCert = true;
// prepare askpass for client cert password
if (!string.IsNullOrEmpty(runnerCert.ClientCertificatePassword))
{
clientCertPrivateKeyAskPassFile = Path.Combine(executionContext.GetRunnerContext("temp"), $"{Guid.NewGuid()}.sh");
List<string> askPass = new List<string>();
askPass.Add("#!/bin/sh");
askPass.Add($"echo \"{runnerCert.ClientCertificatePassword}\"");
File.WriteAllLines(clientCertPrivateKeyAskPassFile, askPass);
#if !OS_WINDOWS
string toolPath = WhichUtil.Which("chmod", true);
string argLine = $"775 {clientCertPrivateKeyAskPassFile}";
executionContext.Command($"chmod {argLine}");
var processInvoker = new ProcessInvoker(executionContext);
processInvoker.OutputDataReceived += (object sender, ProcessDataReceivedEventArgs args) =>
{
if (!string.IsNullOrEmpty(args.Data))
{
executionContext.Output(args.Data);
}
};
processInvoker.ErrorDataReceived += (object sender, ProcessDataReceivedEventArgs args) =>
{
if (!string.IsNullOrEmpty(args.Data))
{
executionContext.Output(args.Data);
}
};
string workingDirectory = executionContext.GetRunnerContext("workspace");
await processInvoker.ExecuteAsync(workingDirectory, toolPath, argLine, null, true, CancellationToken.None);
#endif
}
}
}
// Check the current contents of the root folder to see if there is already a repo
// If there is a repo, see if it matches the one we are expecting to be there based on the remote fetch url
@@ -293,6 +355,46 @@ namespace GitHub.Runner.Plugins.Repository.v1_1
throw new InvalidOperationException($"Git config failed with exit code: {exitCode_config}");
}
// Prepare ignore ssl cert error config for fetch.
if (acceptUntrustedCerts)
{
additionalFetchArgs.Add($"-c http.sslVerify=false");
additionalLfsFetchArgs.Add($"-c http.sslVerify=false");
}
// Prepare self-signed CA cert config for fetch from server.
if (useSelfSignedCACert)
{
executionContext.Debug($"Use self-signed certificate '{runnerCert.CACertificateFile}' for git fetch.");
additionalFetchArgs.Add($"-c http.sslcainfo=\"{runnerCert.CACertificateFile}\"");
additionalLfsFetchArgs.Add($"-c http.sslcainfo=\"{runnerCert.CACertificateFile}\"");
}
// Prepare client cert config for fetch from server.
if (useClientCert)
{
executionContext.Debug($"Use client certificate '{runnerCert.ClientCertificateFile}' for git fetch.");
if (!string.IsNullOrEmpty(clientCertPrivateKeyAskPassFile))
{
additionalFetchArgs.Add($"-c http.sslcert=\"{runnerCert.ClientCertificateFile}\" -c http.sslkey=\"{runnerCert.ClientCertificatePrivateKeyFile}\" -c http.sslCertPasswordProtected=true -c core.askpass=\"{clientCertPrivateKeyAskPassFile}\"");
additionalLfsFetchArgs.Add($"-c http.sslcert=\"{runnerCert.ClientCertificateFile}\" -c http.sslkey=\"{runnerCert.ClientCertificatePrivateKeyFile}\" -c http.sslCertPasswordProtected=true -c core.askpass=\"{clientCertPrivateKeyAskPassFile}\"");
}
else
{
additionalFetchArgs.Add($"-c http.sslcert=\"{runnerCert.ClientCertificateFile}\" -c http.sslkey=\"{runnerCert.ClientCertificatePrivateKeyFile}\"");
additionalLfsFetchArgs.Add($"-c http.sslcert=\"{runnerCert.ClientCertificateFile}\" -c http.sslkey=\"{runnerCert.ClientCertificatePrivateKeyFile}\"");
}
}
#if OS_WINDOWS
if (schannelSslBackend)
{
executionContext.Debug("Use SChannel SslBackend for git fetch.");
additionalFetchArgs.Add("-c http.sslbackend=\"schannel\"");
additionalLfsFetchArgs.Add("-c http.sslbackend=\"schannel\"");
}
#endif
// Prepare gitlfs url for fetch and checkout
if (gitLfsSupport)
{
@@ -382,6 +484,43 @@ namespace GitHub.Runner.Plugins.Repository.v1_1
List<string> additionalSubmoduleUpdateArgs = new List<string>();
// Prepare ignore ssl cert error config for submodule update.
if (acceptUntrustedCerts)
{
additionalSubmoduleUpdateArgs.Add($"-c http.sslVerify=false");
}
// Prepare self-signed CA cert config for submodule update.
if (useSelfSignedCACert)
{
executionContext.Debug($"Use self-signed CA certificate '{runnerCert.CACertificateFile}' for git submodule update.");
string authorityUrl = repositoryUrl.AbsoluteUri.Replace(repositoryUrl.PathAndQuery, string.Empty);
additionalSubmoduleUpdateArgs.Add($"-c http.{authorityUrl}.sslcainfo=\"{runnerCert.CACertificateFile}\"");
}
// Prepare client cert config for submodule update.
if (useClientCert)
{
executionContext.Debug($"Use client certificate '{runnerCert.ClientCertificateFile}' for git submodule update.");
string authorityUrl = repositoryUrl.AbsoluteUri.Replace(repositoryUrl.PathAndQuery, string.Empty);
if (!string.IsNullOrEmpty(clientCertPrivateKeyAskPassFile))
{
additionalSubmoduleUpdateArgs.Add($"-c http.{authorityUrl}.sslcert=\"{runnerCert.ClientCertificateFile}\" -c http.{authorityUrl}.sslkey=\"{runnerCert.ClientCertificatePrivateKeyFile}\" -c http.{authorityUrl}.sslCertPasswordProtected=true -c core.askpass=\"{clientCertPrivateKeyAskPassFile}\"");
}
else
{
additionalSubmoduleUpdateArgs.Add($"-c http.{authorityUrl}.sslcert=\"{runnerCert.ClientCertificateFile}\" -c http.{authorityUrl}.sslkey=\"{runnerCert.ClientCertificatePrivateKeyFile}\"");
}
}
#if OS_WINDOWS
if (schannelSslBackend)
{
executionContext.Debug("Use SChannel SslBackend for git submodule update.");
additionalSubmoduleUpdateArgs.Add("-c http.sslbackend=\"schannel\"");
}
#endif
int exitCode_submoduleUpdate = await gitCommandManager.GitSubmoduleUpdate(executionContext, targetPath, fetchDepth, string.Join(" ", additionalSubmoduleUpdateArgs), checkoutNestedSubmodules, cancellationToken);
if (exitCode_submoduleUpdate != 0)
{
@@ -389,6 +528,12 @@ namespace GitHub.Runner.Plugins.Repository.v1_1
}
}
if (useClientCert && !string.IsNullOrEmpty(clientCertPrivateKeyAskPassFile))
{
executionContext.Debug("Remove git.sslkey askpass file.");
IOUtil.DeleteFile(clientCertPrivateKeyAskPassFile);
}
// Set intra-task variable for post job cleanup
executionContext.SetIntraActionState("repositoryPath", targetPath);
executionContext.SetIntraActionState("modifiedgitconfig", JsonUtility.ToString(configModifications.Keys));

View File

@@ -1,7 +1,7 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>netcoreapp3.1</TargetFramework>
<TargetFramework>netcoreapp3.0</TargetFramework>
<OutputType>Library</OutputType>
<RuntimeIdentifiers>win-x64;win-x86;linux-x64;linux-arm64;linux-arm;osx-x64</RuntimeIdentifiers>
<TargetLatestRuntimePatch>true</TargetLatestRuntimePatch>

View File

@@ -83,6 +83,21 @@ namespace GitHub.Runner.Sdk
}
VssClientHttpRequestSettings.Default.UserAgent = headerValues;
var certSetting = GetCertConfiguration();
if (certSetting != null)
{
if (!string.IsNullOrEmpty(certSetting.ClientCertificateArchiveFile))
{
VssClientHttpRequestSettings.Default.ClientCertificateManager = new RunnerClientCertificateManager(certSetting.ClientCertificateArchiveFile, certSetting.ClientCertificatePassword);
}
if (certSetting.SkipServerCertificateValidation)
{
VssClientHttpRequestSettings.Default.ServerCertificateValidationCallback = HttpClientHandler.DangerousAcceptAnyServerCertificateValidator;
}
}
VssHttpMessageHandler.DefaultWebProxy = this.WebProxy;
ServiceEndpoint systemConnection = this.Endpoints.FirstOrDefault(e => string.Equals(e.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
ArgUtil.NotNull(systemConnection, nameof(systemConnection));
@@ -212,6 +227,40 @@ namespace GitHub.Runner.Sdk
}
}
public RunnerCertificateSettings GetCertConfiguration()
{
bool skipCertValidation = StringUtil.ConvertToBoolean(GetRunnerContext("SkipCertValidation"));
string caFile = GetRunnerContext("CAInfo");
string clientCertFile = GetRunnerContext("ClientCert");
if (!string.IsNullOrEmpty(caFile) || !string.IsNullOrEmpty(clientCertFile) || skipCertValidation)
{
var certConfig = new RunnerCertificateSettings();
certConfig.SkipServerCertificateValidation = skipCertValidation;
certConfig.CACertificateFile = caFile;
if (!string.IsNullOrEmpty(clientCertFile))
{
certConfig.ClientCertificateFile = clientCertFile;
string clientCertKey = GetRunnerContext("ClientCertKey");
string clientCertArchive = GetRunnerContext("ClientCertArchive");
string clientCertPassword = GetRunnerContext("ClientCertPassword");
certConfig.ClientCertificatePrivateKeyFile = clientCertKey;
certConfig.ClientCertificateArchiveFile = clientCertArchive;
certConfig.ClientCertificatePassword = clientCertPassword;
certConfig.VssClientCertificateManager = new RunnerClientCertificateManager(clientCertArchive, clientCertPassword);
}
return certConfig;
}
else
{
return null;
}
}
private string Escape(string input)
{
foreach (var mapping in _commandEscapeMappings)

View File

@@ -271,14 +271,6 @@ namespace GitHub.Runner.Sdk
// Indicate GitHub Actions process.
_proc.StartInfo.Environment["GITHUB_ACTIONS"] = "true";
// Set CI=true when nothing else has already set it.
// CI=true is commonly set by most CI providers, and GitHub does the same.
if (!_proc.StartInfo.Environment.ContainsKey("CI") &&
Environment.GetEnvironmentVariable("CI") == null)
{
_proc.StartInfo.Environment["CI"] = "true";
}
// Hook up the events.
_proc.EnableRaisingEvents = true;
_proc.Exited += ProcessExitedHandler;

View File

@@ -1,7 +1,7 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>netcoreapp3.1</TargetFramework>
<TargetFramework>netcoreapp3.0</TargetFramework>
<OutputType>Library</OutputType>
<RuntimeIdentifiers>win-x64;win-x86;linux-x64;linux-arm64;linux-arm;osx-x64</RuntimeIdentifiers>
<TargetLatestRuntimePatch>true</TargetLatestRuntimePatch>

View File

@@ -0,0 +1,40 @@

using System.Security.Cryptography.X509Certificates;
using GitHub.Services.Common;
namespace GitHub.Runner.Sdk
{
public class RunnerCertificateSettings
{
public bool SkipServerCertificateValidation { get; set; }
public string CACertificateFile { get; set; }
public string ClientCertificateFile { get; set; }
public string ClientCertificatePrivateKeyFile { get; set; }
public string ClientCertificateArchiveFile { get; set; }
public string ClientCertificatePassword { get; set; }
public IVssClientCertificateManager VssClientCertificateManager { get; set; }
}
public class RunnerClientCertificateManager : IVssClientCertificateManager
{
private readonly X509Certificate2Collection _clientCertificates = new X509Certificate2Collection();
public X509Certificate2Collection ClientCertificates => _clientCertificates;
public RunnerClientCertificateManager()
{
}
public RunnerClientCertificateManager(string clientCertificateArchiveFile, string clientCertificatePassword)
{
AddClientCertificate(clientCertificateArchiveFile, clientCertificatePassword);
}
public void AddClientCertificate(string clientCertificateArchiveFile, string clientCertificatePassword)
{
if (!string.IsNullOrEmpty(clientCertificateArchiveFile))
{
_clientCertificates.Add(new X509Certificate2(clientCertificateArchiveFile, clientCertificatePassword));
}
}
}
}
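Tying this back to the HostContext changes earlier in the diff: the certificate manager above is what gets attached to the shared VSS request settings, alongside the option to skip server certificate validation. A hedged sketch of that wiring, assuming VssClientHttpRequestSettings lives under the GitHub.Services.Common namespace used elsewhere in this repository, is:

```csharp
using System.Net.Http;
using GitHub.Runner.Sdk;
using GitHub.Services.Common;

static class CertificateWiringSketch
{
    // Mirrors the plugin-side setup shown earlier in this diff: attach a client
    // certificate manager and optionally disable server certificate validation.
    public static void Apply(RunnerCertificateSettings certSetting)
    {
        if (certSetting == null)
        {
            return;
        }

        if (!string.IsNullOrEmpty(certSetting.ClientCertificateArchiveFile))
        {
            VssClientHttpRequestSettings.Default.ClientCertificateManager =
                new RunnerClientCertificateManager(certSetting.ClientCertificateArchiveFile, certSetting.ClientCertificatePassword);
        }

        if (certSetting.SkipServerCertificateValidation)
        {
            VssClientHttpRequestSettings.Default.ServerCertificateValidationCallback =
                HttpClientHandler.DangerousAcceptAnyServerCertificateValidator;
        }
    }
}
```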

Some files were not shown because too many files have changed in this diff.