Compare commits


3 Commits

Author        SHA1        Message                       Date
TingluoHuang  edfad1a198  c                             2020-05-11 14:28:43 -04:00
TingluoHuang  10bce3549c  windows                       2020-05-11 14:23:15 -04:00
TingluoHuang  68b180a229  run codeql on all platforms.  2020-05-11 14:09:58 -04:00
52 changed files with 1966 additions and 3498 deletions

View File

@@ -9,11 +9,11 @@ jobs:
CodeQL-Build:
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, windows-latest, macos-latest]
# CodeQL runs on ubuntu-latest, windows-latest, and macos-latest
runs-on: ubuntu-latest
runs-on: ${{ matrix.os }}
steps:
- name: Checkout repository
@@ -22,13 +22,17 @@ jobs:
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v1
# Override language selection by uncommenting this and choosing your languages
# with:
# languages: go, javascript, csharp, python, cpp, java
- name: Manual build
run : |
./dev.sh layout Release linux-x64
- name: Manual build (Windows)
if: runner.os == 'windows'
run : |
dev.cmd layout Release
working-directory: src
- name: Manual build (Non-Windows)
if: runner.os != 'windows'
run : |
./dev.sh layout Release
working-directory: src
- name: Perform CodeQL Analysis

View File

@@ -23,7 +23,7 @@ An ADR is an Architectural Decision Record. This allows consensus on the direct
### Required Dev Dependencies
![Win](res/win_sm.png) ![*nix](res/linux_sm.png) Git for Windows and Linux [Install Here](https://git-scm.com/downloads) (needed for dev sh script)
![Win](res/win_sm.png) Git for Windows [Install Here](https://git-scm.com/downloads) (needed for dev sh script)
### To Build, Test, Layout
@@ -43,7 +43,6 @@ Sample developer flow:
```bash
git clone https://github.com/actions/runner
cd runner
cd ./src
./dev.(sh/cmd) layout # the runner that built from source is in {root}/_layout
<make code changes>
@@ -51,23 +50,10 @@ cd ./src
./dev.(sh/cmd) test # run all unit tests before git commit/push
```
View logs:
```bash
cd runner/_layout/_diag
ls
cat (Runner/Worker)_TIMESTAMP.log # view your log file
```
Run Runner:
```bash
cd runner/_layout
./run.sh # run your custom runner
```
### Editors
[Using Visual Studio Code](https://code.visualstudio.com/)
[Using Visual Studio](https://code.visualstudio.com/docs)
[Using Visual Studio 2019](https://www.visualstudio.com/vs/)
### Styling

View File

@@ -1,18 +1,11 @@
## Features
- Resolve action download info from server (#508, #515, #550)
- Print runner and machine name to log. (#539)
- Runner support for GHES Alpha (#381 #386 #390 #393 #401)
- Allow secrets context in Container.env (#388)
## Bugs
- Reduce input validation warnings (#506)
- Fix null ref exception in SecretMasker caused by `hashfiles` timeout. (#516)
- Add libicu66 to `./installDependencies.sh` for Ubuntu 20.04 (#535)
- Fix DataContract with Token service (#532)
- Skip search $PATH on command with fully qualified path (#526)
- Restore SELinux context on service file when SELinux is enabled (#525)
- Raise warning when volume mount root. (#413)
- Fix typo (#394)
## Misc
- Remove SPS/Token migration code. Remove GHES url manipulate code. (#513)
- Add sub-step for developer flow for clarity (#523)
- Update Links and Language to Git + VSCode (#522)
- Update runner configuration exception message (#540)
- N/A
## Windows x64
We recommend configuring the runner in a root folder of the Windows drive (e.g. "C:\actions-runner"). This will help avoid issues related to service identity folder permissions and long file path restrictions on Windows.

View File

@@ -1 +1 @@
<Update to ./src/runnerversion when creating release>
2.168.0

View File

@@ -81,7 +81,7 @@ fi
export RUNNER_TOKEN=$(curl -s -X POST ${base_api_url}/${orgs_or_repos}/${runner_scope}/actions/runners/registration-token -H "accept: application/vnd.github.everest-preview+json" -H "authorization: token ${RUNNER_CFG_PAT}" | jq -r '.token')
if [ "null" == "$RUNNER_TOKEN" -o -z "$RUNNER_TOKEN" ]; then fatal "Failed to get a token"; fi
if [ -z "$RUNNER_TOKEN" ]; then fatal "Failed to get a token"; fi
#---------------------------------------
# Download latest released and extract
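
A note on the check above: `jq -r '.token'` prints the literal string `null` when the API response has no `token` field, so testing only for an empty variable can miss a failed request; the updated guard checks for both. Below is a minimal, self-contained sketch of the same idea (the organization name is a placeholder, and `RUNNER_CFG_PAT` is assumed to already hold a personal access token):

```bash
#!/bin/bash
# Minimal sketch: request a runner registration token and fail fast when the
# API does not return one. Assumes RUNNER_CFG_PAT holds a personal access token;
# the organization name below is a placeholder.
set -euo pipefail

base_api_url="https://api.github.com"
runner_scope="my-org"   # placeholder organization

RUNNER_TOKEN=$(curl -s -X POST \
  "${base_api_url}/orgs/${runner_scope}/actions/runners/registration-token" \
  -H "accept: application/vnd.github.everest-preview+json" \
  -H "authorization: token ${RUNNER_CFG_PAT}" | jq -r '.token')

# jq emits the string "null" for a missing field, so guard against both cases.
if [ "null" == "${RUNNER_TOKEN}" ] || [ -z "${RUNNER_TOKEN}" ]; then
  echo "Failed to get a registration token" >&2
  exit 1
fi

echo "Got a registration token (${#RUNNER_TOKEN} characters)"
```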

View File

@@ -154,16 +154,7 @@ function Invoke-With-Retry([ScriptBlock]$ScriptBlock, [int]$MaxAttempts = 3, [in
function Get-Machine-Architecture() {
Say-Invocation $MyInvocation
# On PS x86, PROCESSOR_ARCHITECTURE reports x86 even on x64 systems.
# To get the correct architecture, we need to use PROCESSOR_ARCHITEW6432.
# PS x64 doesn't define this, so we fall back to PROCESSOR_ARCHITECTURE.
# Possible values: amd64, x64, x86, arm64, arm
if( $ENV:PROCESSOR_ARCHITEW6432 -ne $null )
{
return $ENV:PROCESSOR_ARCHITEW6432
}
# possible values: amd64, x64, x86, arm64, arm
return $ENV:PROCESSOR_ARCHITECTURE
}
@@ -693,196 +684,3 @@ Prepend-Sdk-InstallRoot-To-Path -InstallRoot $InstallRoot -BinFolderRelativePath
Say "Installation finished"
exit 0
# SIG # Begin signature block
# [Authenticode signature payload removed by this diff: roughly 190 lines of base64-encoded signature data, omitted here for brevity]
# SIG # End signature block

View File

@@ -1,7 +1,6 @@
#!/bin/bash
SVC_NAME="{{SvcNameVar}}"
SVC_NAME=${SVC_NAME// /_}
SVC_DESCRIPTION="{{SvcDescription}}"
user_id=`id -u`
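
For readers unfamiliar with the idiom, `${SVC_NAME// /_}` above is bash parameter expansion that replaces every space in the templated service name with an underscore, so the generated service file gets a safe name. A tiny sketch with a made-up value:

```bash
#!/bin/bash
# Sketch of the substitution used above: bash parameter expansion that replaces
# every space with an underscore. The example value is made up.
SVC_NAME="actions runner my repo"
SVC_NAME=${SVC_NAME// /_}
echo "${SVC_NAME}"   # prints: actions_runner_my_repo
```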

View File

@@ -70,8 +70,8 @@ then
exit 1
fi
# libicu version prefer: libicu66 -> libicu63 -> libicu60 -> libicu57 -> libicu55 -> libicu52
apt install -y libicu66 || apt install -y libicu63 || apt install -y libicu60 || apt install -y libicu57 || apt install -y libicu55 || apt install -y libicu52
# libicu version prefer: libicu63 -> libicu60 -> libicu57 -> libicu55 -> libicu52
apt install -y libicu63 || apt install -y libicu60 || apt install -y libicu57 || apt install -y libicu55 || apt install -y libicu52
if [ $? -ne 0 ]
then
echo "'apt' failed with exit code '$?'"
@@ -99,8 +99,8 @@ then
exit 1
fi
# libicu version prefer: libicu66 -> libicu63 -> libicu60 -> libicu57 -> libicu55 -> libicu52
apt-get install -y libicu66 || apt-get install -y libicu63 || apt-get install -y libicu60 || apt install -y libicu57 || apt install -y libicu55 || apt install -y libicu52
# libicu version prefer: libicu63 -> libicu60 -> libicu57 -> libicu55 -> libicu52
apt-get install -y libicu63 || apt-get install -y libicu60 || apt install -y libicu57 || apt install -y libicu55 || apt install -y libicu52
if [ $? -ne 0 ]
then
echo "'apt-get' failed with exit code '$?'"
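
The two hunks above simply prepend `libicu66` to the existing preference chain so Ubuntu 20.04 is covered. The same fallback order can be written as a loop; this is a sketch of the idea using the same package list, not the script's actual structure:

```bash
#!/bin/bash
# Sketch: try progressively older libicu packages until one installs.
# Mirrors the preference order in the hunks above; not the script's actual code.
for pkg in libicu66 libicu63 libicu60 libicu57 libicu55 libicu52; do
    if apt-get install -y "${pkg}"; then
        echo "Installed ${pkg}"
        exit 0
    fi
done
echo "Unable to install a supported libicu package" >&2
exit 1
```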

View File

@@ -1,7 +1,6 @@
#!/bin/bash
SVC_NAME="{{SvcNameVar}}"
SVC_NAME=${SVC_NAME// /_}
SVC_DESCRIPTION="{{SvcDescription}}"
SVC_CMD=$1
@@ -63,25 +62,12 @@ function install()
sed "s/{{User}}/${run_as_user}/g; s/{{Description}}/$(echo ${SVC_DESCRIPTION} | sed -e 's/[\/&]/\\&/g')/g; s/{{RunnerRoot}}/$(echo ${RUNNER_ROOT} | sed -e 's/[\/&]/\\&/g')/g;" "${TEMPLATE_PATH}" > "${TEMP_PATH}" || failed "failed to create replacement temp file"
mv "${TEMP_PATH}" "${UNIT_PATH}" || failed "failed to copy unit file"
# Recent Fedora based Linux (CentOS/Redhat) has SELinux enabled by default
# We need to restore security context on the unit file we added otherwise SystemD have no access to it.
command -v getenforce > /dev/null
if [ $? -eq 0 ]
then
selinuxEnabled=$(getenforce)
if [[ $selinuxEnabled == "Enforcing" ]]
then
# SELinux is enabled, we will need to Restore SELinux Context for the service file
restorecon -r -v "${UNIT_PATH}" || failed "failed to restore SELinux context on ${UNIT_PATH}"
fi
fi
# unit file should not be executable and world writable
chmod 664 "${UNIT_PATH}" || failed "failed to set permissions on ${UNIT_PATH}"
chmod 664 ${UNIT_PATH} || failed "failed to set permissions on ${UNIT_PATH}"
systemctl daemon-reload || failed "failed to reload daemons"
# Since we started with sudo, runsvc.sh will be owned by root. Change this to current login user.
# Since we started with sudo, runsvc.sh will be owned by root. Change this to current login user.
cp ./bin/runsvc.sh ./runsvc.sh || failed "failed to copy runsvc.sh"
chown ${run_as_uid}:${run_as_gid} ./runsvc.sh || failed "failed to set owner for runsvc.sh"
chmod 755 ./runsvc.sh || failed "failed to set permission for runsvc.sh"

View File

@@ -108,9 +108,9 @@ namespace GitHub.Runner.Common
CredentialData GetMigratedCredentials();
RunnerSettings GetSettings();
void SaveCredential(CredentialData credential);
void SaveMigratedCredential(CredentialData credential);
void SaveSettings(RunnerSettings settings);
void DeleteCredential();
void DeleteMigratedCredential();
void DeleteSettings();
}
@@ -232,6 +232,21 @@ namespace GitHub.Runner.Common
File.SetAttributes(_credFilePath, File.GetAttributes(_credFilePath) | FileAttributes.Hidden);
}
public void SaveMigratedCredential(CredentialData credential)
{
Trace.Info("Saving {0} migrated credential @ {1}", credential.Scheme, _migratedCredFilePath);
if (File.Exists(_migratedCredFilePath))
{
// Delete existing credential file first, since the file is hidden and not able to overwrite.
Trace.Info("Delete exist runner migrated credential file.");
IOUtil.DeleteFile(_migratedCredFilePath);
}
IOUtil.SaveObject(credential, _migratedCredFilePath);
Trace.Info("Migrated Credentials Saved.");
File.SetAttributes(_migratedCredFilePath, File.GetAttributes(_migratedCredFilePath) | FileAttributes.Hidden);
}
public void SaveSettings(RunnerSettings settings)
{
Trace.Info("Saving runner settings.");
@@ -253,11 +268,6 @@ namespace GitHub.Runner.Common
IOUtil.Delete(_migratedCredFilePath, default(CancellationToken));
}
public void DeleteMigratedCredential()
{
IOUtil.Delete(_migratedCredFilePath, default(CancellationToken));
}
public void DeleteSettings()
{
IOUtil.Delete(_configFilePath, default(CancellationToken));

View File

@@ -22,7 +22,6 @@ namespace GitHub.Runner.Common
Task<List<TimelineRecord>> UpdateTimelineRecordsAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, IEnumerable<TimelineRecord> records, CancellationToken cancellationToken);
Task RaisePlanEventAsync<T>(Guid scopeIdentifier, string hubName, Guid planId, T eventData, CancellationToken cancellationToken) where T : JobEvent;
Task<Timeline> GetTimelineAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, CancellationToken cancellationToken);
Task<ActionDownloadInfoCollection> ResolveActionDownloadInfoAsync(Guid scopeIdentifier, string hubName, Guid planId, ActionReferenceList actions, CancellationToken cancellationToken);
}
public sealed class JobServer : RunnerService, IJobServer
@@ -114,14 +113,5 @@ namespace GitHub.Runner.Common
CheckConnection();
return _taskClient.GetTimelineAsync(scopeIdentifier, hubName, planId, timelineId, includeRecords: true, cancellationToken: cancellationToken);
}
//-----------------------------------------------------------------
// Action download info
//-----------------------------------------------------------------
public Task<ActionDownloadInfoCollection> ResolveActionDownloadInfoAsync(Guid scopeIdentifier, string hubName, Guid planId, ActionReferenceList actions, CancellationToken cancellationToken)
{
CheckConnection();
return _taskClient.ResolveActionDownloadInfoAsync(scopeIdentifier, hubName, planId, actions, cancellationToken: cancellationToken);
}
}
}

View File

@@ -50,6 +50,10 @@ namespace GitHub.Runner.Common
// agent update
Task<TaskAgent> UpdateAgentUpdateStateAsync(int agentPoolId, int agentId, string currentState);
// runner authorization url
Task<string> GetRunnerAuthUrlAsync(int runnerPoolId, int runnerId);
Task ReportRunnerAuthUrlErrorAsync(int runnerPoolId, int runnerId, string error);
}
public sealed class RunnerServer : RunnerService, IRunnerServer

View File

@@ -96,14 +96,13 @@ namespace GitHub.Runner.Common
Trace.Info($"WRITE: {message}");
if (!Silent)
{
if (colorCode != null)
if(colorCode != null)
{
Console.ForegroundColor = colorCode.Value;
Console.Write(message);
Console.ResetColor();
}
else
{
else {
Console.Write(message);
}
}
@@ -121,14 +120,13 @@ namespace GitHub.Runner.Common
Trace.Info($"WRITE LINE: {line}");
if (!Silent)
{
if (colorCode != null)
if(colorCode != null)
{
Console.ForegroundColor = colorCode.Value;
Console.WriteLine(line);
Console.ResetColor();
}
else
{
else {
Console.WriteLine(line);
}
}

View File

@@ -92,11 +92,9 @@ namespace GitHub.Runner.Listener.Configuration
_term.WriteSection("Authentication");
while (true)
{
// When testing against a dev deployment of Actions Service, set this environment variable
var useDevActionsServiceUrl = Environment.GetEnvironmentVariable("USE_DEV_ACTIONS_SERVICE_URL");
// Get the URL
var inputUrl = command.GetUrl();
if (inputUrl.Contains("codedev.ms", StringComparison.OrdinalIgnoreCase)
|| useDevActionsServiceUrl != null)
if (inputUrl.Contains("codedev.ms", StringComparison.OrdinalIgnoreCase))
{
runnerSettings.ServerUrl = inputUrl;
// Get the credentials
@@ -119,19 +117,6 @@ namespace GitHub.Runner.Listener.Configuration
// Determine the service deployment type based on connection data. (Hosted/OnPremises)
runnerSettings.IsHostedServer = runnerSettings.GitHubUrl == null || IsHostedServer(new UriBuilder(runnerSettings.GitHubUrl));
// Warn if the Actions server url and GHES server url has different Host
if (!runnerSettings.IsHostedServer)
{
// Example actionsServerUrl is https://my-ghes/_services/pipelines/[...]
// Example githubServerUrl is https://my-ghes
var actionsServerUrl = new Uri(runnerSettings.ServerUrl);
var githubServerUrl = new Uri(runnerSettings.GitHubUrl);
if (!string.Equals(actionsServerUrl.Authority, githubServerUrl.Authority, StringComparison.OrdinalIgnoreCase))
{
throw new InvalidOperationException($"GitHub Actions is not properly configured in GHES. GHES url: {runnerSettings.GitHubUrl}, Actions url: {runnerSettings.ServerUrl}.");
}
}
// Validate can connect.
await _runnerServer.ConnectAsync(new Uri(runnerSettings.ServerUrl), creds);
@@ -210,7 +195,7 @@ namespace GitHub.Runner.Listener.Configuration
else if (command.Unattended)
{
// if not replace and it is unattended config.
throw new TaskAgentExistsException($"A runner exists with the same name {runnerSettings.AgentName}.");
throw new TaskAgentExistsException($"Pool {runnerSettings.PoolId} already contains a runner with name {runnerSettings.AgentName}.");
}
}
else
@@ -234,11 +219,36 @@ namespace GitHub.Runner.Listener.Configuration
// Add Agent Id to settings
runnerSettings.AgentId = agent.Id;
// respect the serverUrl resolve by server.
// in case of agent configured using collection url instead of account url.
string agentServerUrl;
if (agent.Properties.TryGetValidatedValue<string>("ServerUrl", out agentServerUrl) &&
!string.IsNullOrEmpty(agentServerUrl))
{
Trace.Info($"Agent server url resolve by server: '{agentServerUrl}'.");
// we need make sure the Schema/Host/Port component of the url remain the same.
UriBuilder inputServerUrl = new UriBuilder(runnerSettings.ServerUrl);
UriBuilder serverReturnedServerUrl = new UriBuilder(agentServerUrl);
if (Uri.Compare(inputServerUrl.Uri, serverReturnedServerUrl.Uri, UriComponents.SchemeAndServer, UriFormat.Unescaped, StringComparison.OrdinalIgnoreCase) != 0)
{
inputServerUrl.Path = serverReturnedServerUrl.Path;
Trace.Info($"Replace server returned url's scheme://host:port component with user input server url's scheme://host:port: '{inputServerUrl.Uri.AbsoluteUri}'.");
runnerSettings.ServerUrl = inputServerUrl.Uri.AbsoluteUri;
}
else
{
runnerSettings.ServerUrl = agentServerUrl;
}
}
// See if the server supports our OAuth key exchange for credentials
if (agent.Authorization != null &&
agent.Authorization.ClientId != Guid.Empty &&
agent.Authorization.AuthorizationUrl != null)
{
UriBuilder configServerUrl = new UriBuilder(runnerSettings.ServerUrl);
UriBuilder oauthEndpointUrlBuilder = new UriBuilder(agent.Authorization.AuthorizationUrl);
var credentialData = new CredentialData
{
Scheme = Constants.Configuration.OAuth,
@@ -246,6 +256,7 @@ namespace GitHub.Runner.Listener.Configuration
{
{ "clientId", agent.Authorization.ClientId.ToString("D") },
{ "authorizationUrl", agent.Authorization.AuthorizationUrl.AbsoluteUri },
{ "oauthEndpointUrl", oauthEndpointUrlBuilder.Uri.AbsoluteUri },
},
};
@@ -451,7 +462,7 @@ namespace GitHub.Runner.Listener.Configuration
// update should replace the existing labels
agent.Version = BuildConstants.RunnerPackage.Version;
agent.OSDescription = RuntimeInformation.OSDescription;
agent.Labels.Clear();
agent.Labels.Add(new AgentLabel("self-hosted", LabelType.System));
@@ -462,7 +473,7 @@ namespace GitHub.Runner.Listener.Configuration
{
agent.Labels.Add(new AgentLabel(userLabel, LabelType.User));
}
return agent;
}

View File

@@ -13,7 +13,7 @@ namespace GitHub.Runner.Listener.Configuration
public interface ICredentialManager : IRunnerService
{
ICredentialProvider GetCredentialProvider(string credType);
VssCredentials LoadCredentials();
VssCredentials LoadCredentials(bool preferMigrated = true);
}
public class CredentialManager : RunnerService, ICredentialManager
@@ -40,7 +40,7 @@ namespace GitHub.Runner.Listener.Configuration
return creds;
}
public VssCredentials LoadCredentials()
public VssCredentials LoadCredentials(bool preferMigrated = true)
{
IConfigurationStore store = HostContext.GetService<IConfigurationStore>();
@@ -50,16 +50,14 @@ namespace GitHub.Runner.Listener.Configuration
}
CredentialData credData = store.GetCredentials();
var migratedCred = store.GetMigratedCredentials();
if (migratedCred != null)
if (preferMigrated)
{
credData = migratedCred;
// Re-write .credentials with Token URL
store.SaveCredential(credData);
// Delete .credentials_migrated
store.DeleteMigratedCredential();
var migratedCred = store.GetMigratedCredentials();
if (migratedCred != null)
{
credData = migratedCred;
}
}
ICredentialProvider credProv = GetCredentialProvider(credData.Scheme);

View File

@@ -13,7 +13,10 @@ using System.Diagnostics;
using System.Runtime.InteropServices;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using GitHub.Services.WebApi;
using System.Runtime.CompilerServices;
[assembly: InternalsVisibleTo("Test")]
namespace GitHub.Runner.Listener
{
[ServiceLocator(Default = typeof(MessageListener))]
@@ -32,18 +35,30 @@ namespace GitHub.Runner.Listener
private ITerminal _term;
private IRunnerServer _runnerServer;
private TaskAgentSession _session;
private ICredentialManager _credMgr;
private IConfigurationStore _configStore;
private TimeSpan _getNextMessageRetryInterval;
private readonly TimeSpan _sessionCreationRetryInterval = TimeSpan.FromSeconds(30);
private readonly TimeSpan _sessionConflictRetryLimit = TimeSpan.FromMinutes(4);
private readonly TimeSpan _clockSkewRetryLimit = TimeSpan.FromMinutes(30);
private readonly Dictionary<string, int> _sessionCreationExceptionTracker = new Dictionary<string, int>();
// Whether load credentials from .credentials_migrated file
internal bool _useMigratedCredentials;
// need to check auth url if there is only .credentials and auth schema is OAuth
internal bool _needToCheckAuthorizationUrlUpdate;
internal Task<VssCredentials> _authorizationUrlMigrationBackgroundTask;
internal Task _authorizationUrlRollbackReattemptDelayBackgroundTask;
public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);
_term = HostContext.GetService<ITerminal>();
_runnerServer = HostContext.GetService<IRunnerServer>();
_credMgr = HostContext.GetService<ICredentialManager>();
_configStore = HostContext.GetService<IConfigurationStore>();
}
public async Task<Boolean> CreateSessionAsync(CancellationToken token)
@@ -58,8 +73,8 @@ namespace GitHub.Runner.Listener
// Create connection.
Trace.Info("Loading Credentials");
var credMgr = HostContext.GetService<ICredentialManager>();
VssCredentials creds = credMgr.LoadCredentials();
_useMigratedCredentials = !StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_SPSAUTHURL"));
VssCredentials creds = _credMgr.LoadCredentials(_useMigratedCredentials);
var agent = new TaskAgentReference
{
@@ -74,6 +89,17 @@ namespace GitHub.Runner.Listener
string errorMessage = string.Empty;
bool encounteringError = false;
var originalCreds = _configStore.GetCredentials();
var migratedCreds = _configStore.GetMigratedCredentials();
if (migratedCreds == null)
{
_useMigratedCredentials = false;
if (originalCreds.Scheme == Constants.Configuration.OAuth)
{
_needToCheckAuthorizationUrlUpdate = true;
}
}
while (true)
{
token.ThrowIfCancellationRequested();
@@ -101,6 +127,12 @@ namespace GitHub.Runner.Listener
encounteringError = false;
}
if (_needToCheckAuthorizationUrlUpdate)
{
// start background task try to get new authorization url
_authorizationUrlMigrationBackgroundTask = GetNewOAuthAuthorizationSetting(token);
}
return true;
}
catch (OperationCanceledException) when (token.IsCancellationRequested)
@@ -132,10 +164,44 @@ namespace GitHub.Runner.Listener
}
}
if (ex is TaskAgentSessionConflictException)
{
try
{
var newCred = await GetNewOAuthAuthorizationSetting(token, true);
if (newCred != null)
{
await _runnerServer.ConnectAsync(new Uri(_settings.ServerUrl), newCred);
Trace.Info("Updated connection to use migrated credential for next CreateSession call.");
_useMigratedCredentials = true;
_authorizationUrlMigrationBackgroundTask = null;
_needToCheckAuthorizationUrlUpdate = false;
}
}
catch (Exception e)
{
Trace.Error("Fail to refresh connection with new authorization url.");
Trace.Error(e);
}
}
if (!IsSessionCreationExceptionRetriable(ex))
{
_term.WriteError($"Failed to create session. {ex.Message}");
return false;
if (_useMigratedCredentials && !(ex is TaskAgentSessionConflictException))
{
// migrated credentials might cause lose permission during permission check,
// we will force to use original credential and try again
_useMigratedCredentials = false;
var reattemptBackoff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromHours(24), TimeSpan.FromHours(36));
_authorizationUrlRollbackReattemptDelayBackgroundTask = HostContext.Delay(reattemptBackoff, token); // retry migrated creds in 24-36 hours.
creds = _credMgr.LoadCredentials(false);
Trace.Error("Fallback to original credentials and try again.");
}
else
{
_term.WriteError($"Failed to create session. {ex.Message}");
return false;
}
}
if (!encounteringError) //print the message only on the first error
@@ -196,6 +262,51 @@ namespace GitHub.Runner.Listener
encounteringError = false;
continuousError = 0;
}
if (_needToCheckAuthorizationUrlUpdate &&
_authorizationUrlMigrationBackgroundTask?.IsCompleted == true)
{
if (HostContext.GetService<IJobDispatcher>().Busy ||
HostContext.GetService<ISelfUpdater>().Busy)
{
Trace.Info("Job or runner updates in progress, update credentials next time.");
}
else
{
try
{
var newCred = await _authorizationUrlMigrationBackgroundTask;
await _runnerServer.ConnectAsync(new Uri(_settings.ServerUrl), newCred);
Trace.Info("Updated connection to use migrated credential for next GetMessage call.");
_useMigratedCredentials = true;
_authorizationUrlMigrationBackgroundTask = null;
_needToCheckAuthorizationUrlUpdate = false;
}
catch (Exception ex)
{
Trace.Error("Fail to refresh connection with new authorization url.");
Trace.Error(ex);
}
}
}
if (_authorizationUrlRollbackReattemptDelayBackgroundTask?.IsCompleted == true)
{
try
{
// we rolled back to use original creds about 2 days before, now it's a good time to try migrated creds again.
Trace.Info("Re-attempt to use migrated credential");
var migratedCreds = _credMgr.LoadCredentials();
await _runnerServer.ConnectAsync(new Uri(_settings.ServerUrl), migratedCreds);
_useMigratedCredentials = true;
_authorizationUrlRollbackReattemptDelayBackgroundTask = null;
}
catch (Exception ex)
{
Trace.Error("Fail to refresh connection with new authorization url on rollback reattempt.");
Trace.Error(ex);
}
}
}
catch (OperationCanceledException) when (token.IsCancellationRequested)
{
@@ -219,7 +330,21 @@ namespace GitHub.Runner.Listener
}
else if (!IsGetNextMessageExceptionRetriable(ex))
{
throw;
if (_useMigratedCredentials)
{
// migrated credentials might cause lose permission during permission check,
// we will force to use original credential and try again
_useMigratedCredentials = false;
var reattemptBackoff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromHours(24), TimeSpan.FromHours(36));
_authorizationUrlRollbackReattemptDelayBackgroundTask = HostContext.Delay(reattemptBackoff, token); // retry migrated creds in 24-36 hours.
var originalCreds = _credMgr.LoadCredentials(false);
await _runnerServer.ConnectAsync(new Uri(_settings.ServerUrl), originalCreds);
Trace.Error("Fallback to original credentials and try again.");
}
else
{
throw;
}
}
else
{
@@ -411,5 +536,94 @@ namespace GitHub.Runner.Listener
return true;
}
}
private async Task<VssCredentials> GetNewOAuthAuthorizationSetting(CancellationToken token, bool adhoc = false)
{
Trace.Info("Start checking oauth authorization url update.");
while (true)
{
try
{
var migratedAuthorizationUrl = await _runnerServer.GetRunnerAuthUrlAsync(_settings.PoolId, _settings.AgentId);
if (!string.IsNullOrEmpty(migratedAuthorizationUrl))
{
var credData = _configStore.GetCredentials();
var clientId = credData.Data.GetValueOrDefault("clientId", null);
var currentAuthorizationUrl = credData.Data.GetValueOrDefault("authorizationUrl", null);
Trace.Info($"Current authorization url: {currentAuthorizationUrl}, new authorization url: {migratedAuthorizationUrl}");
if (string.Equals(currentAuthorizationUrl, migratedAuthorizationUrl, StringComparison.OrdinalIgnoreCase))
{
// We don't need to update credentials.
Trace.Info("No needs to update authorization url");
if (adhoc)
{
return null;
}
else
{
await Task.Delay(TimeSpan.FromMilliseconds(-1), token);
}
}
var keyManager = HostContext.GetService<IRSAKeyManager>();
var signingCredentials = VssSigningCredentials.Create(() => keyManager.GetKey());
var migratedClientCredential = new VssOAuthJwtBearerClientCredential(clientId, migratedAuthorizationUrl, signingCredentials);
var migratedRunnerCredential = new VssOAuthCredential(new Uri(migratedAuthorizationUrl, UriKind.Absolute), VssOAuthGrant.ClientCredentials, migratedClientCredential);
Trace.Info("Try connect service with Token Service OAuth endpoint.");
var runnerServer = HostContext.CreateService<IRunnerServer>();
await runnerServer.ConnectAsync(new Uri(_settings.ServerUrl), migratedRunnerCredential);
await runnerServer.GetAgentPoolsAsync();
Trace.Info($"Successfully connected service with new authorization url.");
var migratedCredData = new CredentialData
{
Scheme = Constants.Configuration.OAuth,
Data =
{
{ "clientId", clientId },
{ "authorizationUrl", migratedAuthorizationUrl },
{ "oauthEndpointUrl", migratedAuthorizationUrl },
},
};
_configStore.SaveMigratedCredential(migratedCredData);
return migratedRunnerCredential;
}
else
{
Trace.Verbose("No authorization url updates");
}
}
catch (Exception ex) when (!token.IsCancellationRequested)
{
Trace.Error("Fail to get/test new authorization url.");
Trace.Error(ex);
try
{
await _runnerServer.ReportRunnerAuthUrlErrorAsync(_settings.PoolId, _settings.AgentId, ex.ToString());
}
catch (Exception e)
{
// best effort
Trace.Error("Fail to report the migration error");
Trace.Error(e);
}
}
if (adhoc)
{
return null;
}
else
{
var backoff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromMinutes(30), TimeSpan.FromMinutes(45));
await HostContext.Delay(backoff, token);
}
}
}
}
}

View File

@@ -102,9 +102,7 @@ namespace GitHub.Runner.Listener
IRunner runner = context.GetService<IRunner>();
try
{
var returnCode = await runner.ExecuteCommand(command);
trace.Info($"Runner execution has finished with return code {returnCode}");
return returnCode;
return await runner.ExecuteCommand(command);
}
catch (OperationCanceledException) when (context.RunnerShutdownToken.IsCancellationRequested)
{

View File

@@ -82,7 +82,7 @@ namespace GitHub.Runner.Plugins.Repository.v1_0
executionContext.Output($"Syncing repository: {repoFullName}");
// Repository URL
var githubUrl = executionContext.GetGitHubContext("server_url");
var githubUrl = executionContext.GetGitHubContext("url");
var githubUri = new Uri(!string.IsNullOrEmpty(githubUrl) ? githubUrl : "https://github.com");
var portInfo = githubUri.IsDefaultPort ? string.Empty : $":{githubUri.Port}";
Uri repositoryUrl = new Uri($"{githubUri.Scheme}://{githubUri.Host}{portInfo}/{repoFullName}");

View File

@@ -318,12 +318,7 @@ namespace GitHub.Runner.Sdk
}
}
var cancellationFinished = new TaskCompletionSource<bool>();
using (var registration = cancellationToken.Register(async () =>
{
await CancelAndKillProcessTree(killProcessOnCancel);
cancellationFinished.TrySetResult(true);
}))
using (var registration = cancellationToken.Register(async () => await CancelAndKillProcessTree(killProcessOnCancel)))
{
Trace.Info($"Process started with process id {_proc.Id}, waiting for process exit.");
while (true)
@@ -346,13 +341,6 @@ namespace GitHub.Runner.Sdk
// data buffers one last time before returning
ProcessOutput();
if (cancellationToken.IsCancellationRequested)
{
// Ensure cancellation also finish on the cancellationToken.Register thread.
await cancellationFinished.Task;
Trace.Info($"Process Cancellation finished.");
}
Trace.Info($"Finished process {_proc.Id} with exit code {_proc.ExitCode}, and elapsed time {_stopWatch.Elapsed}.");
}

View File

@@ -11,11 +11,6 @@ namespace GitHub.Runner.Sdk
{
ArgUtil.NotNullOrEmpty(command, nameof(command));
trace?.Info($"Which: '{command}'");
if (Path.IsPathFullyQualified(command) && File.Exists(command))
{
trace?.Info($"Fully qualified path: '{command}'");
return command;
}
string path = Environment.GetEnvironmentVariable(PathUtil.PathVariable);
if (string.IsNullOrEmpty(path))
{

View File

@@ -14,7 +14,6 @@ using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using GitHub.Runner.Worker.Container;
using GitHub.Services.Common;
using WebApi = GitHub.DistributedTask.WebApi;
using Pipelines = GitHub.DistributedTask.Pipelines;
using PipelineTemplateConstants = GitHub.DistributedTask.Pipelines.ObjectTemplating.PipelineTemplateConstants;
@@ -74,11 +73,6 @@ namespace GitHub.Runner.Worker
// Clear the cache (for self-hosted runners)
IOUtil.DeleteDirectory(HostContext.GetDirectory(WellKnownDirectory.Actions), executionContext.CancellationToken);
// todo: Remove when feature flag DistributedTask.NewActionMetadata is removed
var newActionMetadata = executionContext.Variables.GetBoolean("DistributedTask.NewActionMetadata") ?? false;
var repositoryActions = new List<Pipelines.ActionStep>();
foreach (var action in actions)
{
if (action.Reference.Type == Pipelines.ActionSourceType.ContainerRegistry)
@@ -96,8 +90,7 @@ namespace GitHub.Runner.Worker
Trace.Info($"Action {action.Name} ({action.Id}) needs to pull image '{containerReference.Image}'");
imagesToPull[containerReference.Image].Add(action.Id);
}
// todo: Remove when feature flag DistributedTask.NewActionMetadata is removed
else if (action.Reference.Type == Pipelines.ActionSourceType.Repository && !newActionMetadata)
else if (action.Reference.Type == Pipelines.ActionSourceType.Repository)
{
// only download the repository archive
await DownloadRepositoryActionAsync(executionContext, action);
@@ -131,81 +124,6 @@ namespace GitHub.Runner.Worker
}
}
var repoAction = action.Reference as Pipelines.RepositoryPathReference;
if (repoAction.RepositoryType != Pipelines.PipelineConstants.SelfAlias)
{
var definition = LoadAction(executionContext, action);
if (definition.Data.Execution.HasPre)
{
var actionRunner = HostContext.CreateService<IActionRunner>();
actionRunner.Action = action;
actionRunner.Stage = ActionRunStage.Pre;
actionRunner.Condition = definition.Data.Execution.InitCondition;
Trace.Info($"Add 'pre' execution for {action.Id}");
preStepTracker[action.Id] = actionRunner;
}
}
}
else if (action.Reference.Type == Pipelines.ActionSourceType.Repository && newActionMetadata)
{
repositoryActions.Add(action);
}
}
if (repositoryActions.Count > 0)
{
// Get the download info
var downloadInfos = await GetDownloadInfoAsync(executionContext, repositoryActions);
// Download each action
foreach (var action in repositoryActions)
{
var lookupKey = GetDownloadInfoLookupKey(action);
if (string.IsNullOrEmpty(lookupKey))
{
continue;
}
if (!downloadInfos.TryGetValue(lookupKey, out var downloadInfo))
{
throw new Exception($"Missing download info for {lookupKey}");
}
await DownloadRepositoryActionAsync(executionContext, downloadInfo);
}
// More preparation based on content in the repository (action.yml)
foreach (var action in repositoryActions)
{
var setupInfo = PrepareRepositoryActionAsync(executionContext, action);
if (setupInfo != null)
{
if (!string.IsNullOrEmpty(setupInfo.Image))
{
if (!imagesToPull.ContainsKey(setupInfo.Image))
{
imagesToPull[setupInfo.Image] = new List<Guid>();
}
Trace.Info($"Action {action.Name} ({action.Id}) from repository '{setupInfo.ActionRepository}' needs to pull image '{setupInfo.Image}'");
imagesToPull[setupInfo.Image].Add(action.Id);
}
else
{
ArgUtil.NotNullOrEmpty(setupInfo.ActionRepository, nameof(setupInfo.ActionRepository));
if (!imagesToBuild.ContainsKey(setupInfo.ActionRepository))
{
imagesToBuild[setupInfo.ActionRepository] = new List<Guid>();
}
Trace.Info($"Action {action.Name} ({action.Id}) from repository '{setupInfo.ActionRepository}' needs to build image '{setupInfo.Dockerfile}'");
imagesToBuild[setupInfo.ActionRepository].Add(action.Id);
imagesToBuildInfo[setupInfo.ActionRepository] = setupInfo;
}
}
var repoAction = action.Reference as Pipelines.RepositoryPathReference;
if (repoAction.RepositoryType != Pipelines.PipelineConstants.SelfAlias)
{
@@ -395,12 +313,6 @@ namespace GitHub.Runner.Worker
Trace.Info($"Action cleanup plugin: {plugin.PluginTypeName}.");
}
}
else if (definition.Data.Execution.ExecutionType == ActionExecutionType.Composite && !string.IsNullOrEmpty(Environment.GetEnvironmentVariable("TESTING_COMPOSITE_ACTIONS_ALPHA")))
{
var compositeAction = definition.Data.Execution as CompositeActionExecutionData;
Trace.Info($"Load {compositeAction.Steps.Count} action steps.");
Trace.Verbose($"Details: {StringUtil.ConvertToJson(compositeAction.Steps)}");
}
else
{
throw new NotSupportedException(definition.Data.Execution.ExecutionType.ToString());
@@ -552,80 +464,6 @@ namespace GitHub.Runner.Worker
}
}
// This implementation is temporary and will be replaced with a REST API call to the service to resolve
private async Task<IDictionary<string, WebApi.ActionDownloadInfo>> GetDownloadInfoAsync(IExecutionContext executionContext, List<Pipelines.ActionStep> actions)
{
executionContext.Output("Getting action download info");
// Convert to action reference
var actionReferences = actions
.GroupBy(x => GetDownloadInfoLookupKey(x))
.Where(x => !string.IsNullOrEmpty(x.Key))
.Select(x =>
{
var action = x.First();
var repositoryReference = action.Reference as Pipelines.RepositoryPathReference;
ArgUtil.NotNull(repositoryReference, nameof(repositoryReference));
return new WebApi.ActionReference
{
NameWithOwner = repositoryReference.Name,
Ref = repositoryReference.Ref,
};
})
.ToList();
// Nothing to resolve?
if (actionReferences.Count == 0)
{
return new Dictionary<string, WebApi.ActionDownloadInfo>();
}
// Resolve download info
var jobServer = HostContext.GetService<IJobServer>();
var actionDownloadInfos = default(WebApi.ActionDownloadInfoCollection);
for (var attempt = 1; attempt <= 3; attempt++)
{
try
{
actionDownloadInfos = await jobServer.ResolveActionDownloadInfoAsync(executionContext.Plan.ScopeIdentifier, executionContext.Plan.PlanType, executionContext.Plan.PlanId, new WebApi.ActionReferenceList { Actions = actionReferences }, executionContext.CancellationToken);
break;
}
catch (Exception ex) when (attempt < 3)
{
executionContext.Output($"Failed to resolve action download info. Error: {ex.Message}");
executionContext.Debug(ex.ToString());
if (String.IsNullOrEmpty(Environment.GetEnvironmentVariable("_GITHUB_ACTION_DOWNLOAD_NO_BACKOFF")))
{
var backoff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(10), TimeSpan.FromSeconds(30));
executionContext.Output($"Retrying in {backoff.TotalSeconds} seconds");
await Task.Delay(backoff);
}
}
}
ArgUtil.NotNull(actionDownloadInfos, nameof(actionDownloadInfos));
ArgUtil.NotNull(actionDownloadInfos.Actions, nameof(actionDownloadInfos.Actions));
var apiUrl = GetApiUrl(executionContext);
var defaultAccessToken = executionContext.GetGitHubContext("token");
var configurationStore = HostContext.GetService<IConfigurationStore>();
var runnerSettings = configurationStore.GetSettings();
foreach (var actionDownloadInfo in actionDownloadInfos.Actions.Values)
{
// Add secret
HostContext.SecretMasker.AddValue(actionDownloadInfo.Authentication?.Token);
// Default auth token
if (string.IsNullOrEmpty(actionDownloadInfo.Authentication?.Token))
{
actionDownloadInfo.Authentication = new WebApi.ActionDownloadAuthentication { Token = defaultAccessToken };
}
}
return actionDownloadInfos.Actions;
}
// todo: Remove when feature flag DistributedTask.NewActionMetadata is removed
private async Task DownloadRepositoryActionAsync(IExecutionContext executionContext, Pipelines.ActionStep repositoryAction)
{
Trace.Entering();
@@ -669,8 +507,9 @@ namespace GitHub.Runner.Worker
{
string apiUrl = GetApiUrl(executionContext);
string archiveLink = BuildLinkToActionArchive(apiUrl, repositoryReference.Name, repositoryReference.Ref);
Trace.Info($"Download archive '{archiveLink}' to '{destDirectory}'.");
var downloadDetails = new ActionDownloadDetails(archiveLink, ConfigureAuthorizationFromContext);
await DownloadRepositoryActionAsync(executionContext, downloadDetails, null, destDirectory);
await DownloadRepositoryActionAsync(executionContext, downloadDetails, destDirectory);
return;
}
else
@@ -694,9 +533,10 @@ namespace GitHub.Runner.Worker
foreach (var downloadAttempt in downloadAttempts)
{
Trace.Info($"Download archive '{downloadAttempt.ArchiveLink}' to '{destDirectory}'.");
try
{
await DownloadRepositoryActionAsync(executionContext, downloadAttempt, null, destDirectory);
await DownloadRepositoryActionAsync(executionContext, downloadAttempt, destDirectory);
return;
}
catch (ActionNotFoundException)
@@ -709,32 +549,6 @@ namespace GitHub.Runner.Worker
}
}
private async Task DownloadRepositoryActionAsync(IExecutionContext executionContext, WebApi.ActionDownloadInfo downloadInfo)
{
Trace.Entering();
ArgUtil.NotNull(executionContext, nameof(executionContext));
ArgUtil.NotNull(downloadInfo, nameof(downloadInfo));
ArgUtil.NotNullOrEmpty(downloadInfo.NameWithOwner, nameof(downloadInfo.NameWithOwner));
ArgUtil.NotNullOrEmpty(downloadInfo.Ref, nameof(downloadInfo.Ref));
string destDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Actions), downloadInfo.NameWithOwner.Replace(Path.AltDirectorySeparatorChar, Path.DirectorySeparatorChar), downloadInfo.Ref);
string watermarkFile = GetWatermarkFilePath(destDirectory);
if (File.Exists(watermarkFile))
{
executionContext.Debug($"Action '{downloadInfo.NameWithOwner}@{downloadInfo.Ref}' already downloaded at '{destDirectory}'.");
return;
}
else
{
// make sure we get a clean folder ready to use.
IOUtil.DeleteDirectory(destDirectory, executionContext.CancellationToken);
Directory.CreateDirectory(destDirectory);
executionContext.Output($"Download action repository '{downloadInfo.NameWithOwner}@{downloadInfo.Ref}'");
}
await DownloadRepositoryActionAsync(executionContext, null, downloadInfo, destDirectory);
}
private string GetApiUrl(IExecutionContext executionContext)
{
string apiUrl = executionContext.GetGitHubContext("api_url");
@@ -755,8 +569,7 @@ namespace GitHub.Runner.Worker
#endif
}
// todo: Remove the parameter "actionDownloadDetails" when feature flag DistributedTask.NewActionMetadata is removed
private async Task DownloadRepositoryActionAsync(IExecutionContext executionContext, ActionDownloadDetails actionDownloadDetails, WebApi.ActionDownloadInfo downloadInfo, string destDirectory)
private async Task DownloadRepositoryActionAsync(IExecutionContext executionContext, ActionDownloadDetails actionDownloadDetails, string destDirectory)
{
//download and extract action in a temp folder and rename it on success
string tempDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Actions), "_temp_" + Guid.NewGuid());
@@ -764,12 +577,11 @@ namespace GitHub.Runner.Worker
#if OS_WINDOWS
string archiveFile = Path.Combine(tempDirectory, $"{Guid.NewGuid()}.zip");
string link = downloadInfo?.ZipballUrl ?? actionDownloadDetails.ArchiveLink;
#else
string archiveFile = Path.Combine(tempDirectory, $"{Guid.NewGuid()}.tar.gz");
string link = downloadInfo?.TarballUrl ?? actionDownloadDetails.ArchiveLink;
#endif
string link = actionDownloadDetails.ArchiveLink;
Trace.Info($"Save archive '{link}' into {archiveFile}.");
try
{
@@ -789,16 +601,7 @@ namespace GitHub.Runner.Worker
using (var httpClientHandler = HostContext.CreateHttpClientHandler())
using (var httpClient = new HttpClient(httpClientHandler))
{
// Legacy
if (downloadInfo == null)
{
actionDownloadDetails.ConfigureAuthorization(executionContext, httpClient);
}
// FF DistributedTask.NewActionMetadata
else
{
httpClient.DefaultRequestHeaders.Authorization = CreateAuthHeader(downloadInfo.Authentication?.Token);
}
actionDownloadDetails.ConfigureAuthorization(executionContext, httpClient);
httpClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents);
using (var response = await httpClient.GetAsync(link))
@@ -938,7 +741,6 @@ namespace GitHub.Runner.Worker
}
}
// todo: Remove when feature flag DistributedTask.NewActionMetadata is removed
private void ConfigureAuthorizationFromContext(IExecutionContext executionContext, HttpClient httpClient)
{
var authToken = Environment.GetEnvironmentVariable("_GITHUB_ACTION_TOKEN");
@@ -1044,11 +846,6 @@ namespace GitHub.Runner.Worker
Trace.Info($"Action plugin: {(actionDefinitionData.Execution as PluginActionExecutionData).Plugin}, no more preparation.");
return null;
}
else if (actionDefinitionData.Execution.ExecutionType == ActionExecutionType.Composite && !string.IsNullOrEmpty(Environment.GetEnvironmentVariable("TESTING_COMPOSITE_ACTIONS_ALPHA")))
{
Trace.Info($"Action composite: {(actionDefinitionData.Execution as CompositeActionExecutionData).Steps}, no more preparation.");
return null;
}
else
{
throw new NotSupportedException(actionDefinitionData.Execution.ExecutionType.ToString());
@@ -1075,51 +872,6 @@ namespace GitHub.Runner.Worker
}
}
private static string GetDownloadInfoLookupKey(Pipelines.ActionStep action)
{
if (action.Reference.Type != Pipelines.ActionSourceType.Repository)
{
return null;
}
var repositoryReference = action.Reference as Pipelines.RepositoryPathReference;
ArgUtil.NotNull(repositoryReference, nameof(repositoryReference));
if (string.Equals(repositoryReference.RepositoryType, Pipelines.PipelineConstants.SelfAlias, StringComparison.OrdinalIgnoreCase))
{
return null;
}
if (!string.Equals(repositoryReference.RepositoryType, Pipelines.RepositoryTypes.GitHub, StringComparison.OrdinalIgnoreCase))
{
throw new NotSupportedException(repositoryReference.RepositoryType);
}
ArgUtil.NotNullOrEmpty(repositoryReference.Name, nameof(repositoryReference.Name));
ArgUtil.NotNullOrEmpty(repositoryReference.Ref, nameof(repositoryReference.Ref));
return $"{repositoryReference.Name}@{repositoryReference.Ref}";
}
private static string GetDownloadInfoLookupKey(WebApi.ActionDownloadInfo info)
{
ArgUtil.NotNullOrEmpty(info.NameWithOwner, nameof(info.NameWithOwner));
ArgUtil.NotNullOrEmpty(info.Ref, nameof(info.Ref));
return $"{info.NameWithOwner}@{info.Ref}";
}
private AuthenticationHeaderValue CreateAuthHeader(string token)
{
if (string.IsNullOrEmpty(token))
{
return null;
}
var base64EncodingToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"x-access-token:{token}"));
HostContext.SecretMasker.AddValue(base64EncodingToken);
return new AuthenticationHeaderValue("Basic", base64EncodingToken);
}
// todo: Remove when feature flag DistributedTask.NewActionMetadata is removed
private class ActionDownloadDetails
{
public string ArchiveLink { get; }
@@ -1159,7 +911,6 @@ namespace GitHub.Runner.Worker
NodeJS,
Plugin,
Script,
Composite,
}
public sealed class ContainerActionExecutionData : ActionExecutionData
@@ -1216,14 +967,6 @@ namespace GitHub.Runner.Worker
public override bool HasPost => false;
}
public sealed class CompositeActionExecutionData : ActionExecutionData
{
public override ActionExecutionType ExecutionType => ActionExecutionType.Composite;
public override bool HasPre => false;
public override bool HasPost => false;
public List<Pipelines.ActionStep> Steps { get; set; }
}
public abstract class ActionExecutionData
{
private string _initCondition = $"{Constants.Expressions.Always}()";
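For reference, a minimal standalone sketch of the Basic auth pattern used by the CreateAuthHeader helper in the hunks above, minus the secret-masking step; the class and token value here are illustrative, not part of the runner codebase.

```csharp
using System;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Text;

class AuthHeaderSketch
{
    // Mirrors the pattern above: "x-access-token:<token>" is Base64-encoded
    // and sent as an HTTP Basic credential.
    static AuthenticationHeaderValue CreateBasicAuthHeader(string token)
    {
        if (string.IsNullOrEmpty(token))
        {
            return null;
        }

        var base64Token = Convert.ToBase64String(Encoding.UTF8.GetBytes($"x-access-token:{token}"));
        return new AuthenticationHeaderValue("Basic", base64Token);
    }

    static void Main()
    {
        using (var httpClient = new HttpClient())
        {
            httpClient.DefaultRequestHeaders.Authorization = CreateBasicAuthHeader("example-token");
            Console.WriteLine(httpClient.DefaultRequestHeaders.Authorization);
        }
    }
}
```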

View File

@@ -14,7 +14,6 @@ using YamlDotNet.Core;
using YamlDotNet.Core.Events;
using System.Globalization;
using System.Linq;
using Pipelines = GitHub.DistributedTask.Pipelines;
namespace GitHub.Runner.Worker
{
@@ -33,6 +32,8 @@ namespace GitHub.Runner.Worker
public sealed class ActionManifestManager : RunnerService, IActionManifestManager
{
private TemplateSchema _actionManifestSchema;
private IReadOnlyList<String> _fileTable;
public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);
@@ -53,39 +54,22 @@ namespace GitHub.Runner.Worker
public ActionDefinitionData Load(IExecutionContext executionContext, string manifestFile)
{
var templateContext = CreateContext(executionContext);
var context = CreateContext(executionContext);
ActionDefinitionData actionDefinition = new ActionDefinitionData();
// Clean up file name real quick
// Instead of using Regex which can be computationally expensive,
// we can just remove the # of characters from the fileName according to the length of the basePath
string basePath = HostContext.GetDirectory(WellKnownDirectory.Actions);
string fileRelativePath = manifestFile;
if (manifestFile.Contains(basePath))
{
fileRelativePath = manifestFile.Remove(0, basePath.Length + 1);
}
try
{
var token = default(TemplateToken);
// Get the file ID
var fileId = templateContext.GetFileId(fileRelativePath);
// Add this file to the FileTable in executionContext if it hasn't been added already
// we use > since fileID is 1 indexed
if (fileId > executionContext.FileTable.Count)
{
executionContext.FileTable.Add(fileRelativePath);
}
var fileId = context.GetFileId(manifestFile);
_fileTable = context.GetFileTable();
// Read the file
var fileContent = File.ReadAllText(manifestFile);
using (var stringReader = new StringReader(fileContent))
{
var yamlObjectReader = new YamlObjectReader(fileId, stringReader);
token = TemplateReader.Read(templateContext, "action-root", yamlObjectReader, fileId, out _);
var yamlObjectReader = new YamlObjectReader(null, stringReader);
token = TemplateReader.Read(context, "action-root", yamlObjectReader, fileId, out _);
}
var actionMapping = token.AssertMapping("action manifest root");
@@ -104,11 +88,11 @@ namespace GitHub.Runner.Worker
break;
case "inputs":
ConvertInputs(templateContext, actionPair.Value, actionDefinition);
ConvertInputs(context, actionPair.Value, actionDefinition);
break;
case "runs":
actionDefinition.Execution = ConvertRuns(executionContext, templateContext, actionPair.Value);
actionDefinition.Execution = ConvertRuns(context, actionPair.Value);
break;
default:
Trace.Info($"Ignore action property {propertyName}.");
@@ -119,24 +103,24 @@ namespace GitHub.Runner.Worker
catch (Exception ex)
{
Trace.Error(ex);
templateContext.Errors.Add(ex);
context.Errors.Add(ex);
}
if (templateContext.Errors.Count > 0)
if (context.Errors.Count > 0)
{
foreach (var error in templateContext.Errors)
foreach (var error in context.Errors)
{
Trace.Error($"Action.yml load error: {error.Message}");
executionContext.Error(error.Message);
}
throw new ArgumentException($"Fail to load {fileRelativePath}");
throw new ArgumentException($"Fail to load {manifestFile}");
}
if (actionDefinition.Execution == null)
{
executionContext.Debug($"Loaded action.yml file: {StringUtil.ConvertToJson(actionDefinition)}");
throw new ArgumentException($"Top level 'runs:' section is required for {fileRelativePath}");
throw new ArgumentException($"Top level 'runs:' section is required for {manifestFile}");
}
else
{
@@ -297,17 +281,19 @@ namespace GitHub.Runner.Worker
result.ExpressionFunctions.Add(item);
}
// Add the file table from the Execution Context
for (var i = 0; i < executionContext.FileTable.Count; i++)
// Add the file table
if (_fileTable?.Count > 0)
{
result.GetFileId(executionContext.FileTable[i]);
for (var i = 0 ; i < _fileTable.Count ; i++)
{
result.GetFileId(_fileTable[i]);
}
}
return result;
}
private ActionExecutionData ConvertRuns(
IExecutionContext executionContext,
TemplateContext context,
TemplateToken inputsToken)
{
@@ -325,8 +311,6 @@ namespace GitHub.Runner.Worker
var postToken = default(StringToken);
var postEntrypointToken = default(StringToken);
var postIfToken = default(StringToken);
var stepsLoaded = default(List<Pipelines.ActionStep>);
foreach (var run in runsMapping)
{
var runsKey = run.Key.AssertString("runs key").Value;
@@ -371,15 +355,6 @@ namespace GitHub.Runner.Worker
case "pre-if":
preIfToken = run.Value.AssertString("pre-if");
break;
case "steps":
if (!string.IsNullOrEmpty(Environment.GetEnvironmentVariable("TESTING_COMPOSITE_ACTIONS_ALPHA")))
{
var steps = run.Value.AssertSequence("steps");
var evaluator = executionContext.ToPipelineTemplateEvaluator();
stepsLoaded = evaluator.LoadCompositeSteps(steps);
break;
}
throw new Exception("You aren't supposed to be using Composite Actions yet!");
default:
Trace.Info($"Ignore run property {runsKey}.");
break;
@@ -427,21 +402,6 @@ namespace GitHub.Runner.Worker
};
}
}
else if (string.Equals(usingToken.Value, "composite", StringComparison.OrdinalIgnoreCase) && !string.IsNullOrEmpty(Environment.GetEnvironmentVariable("TESTING_COMPOSITE_ACTIONS_ALPHA")))
{
if (stepsLoaded == null)
{
// TODO: Add a more helpful error message + including file name, etc. to show user that it's because of their yaml file
throw new ArgumentNullException($"No steps provided.");
}
else
{
return new CompositeActionExecutionData()
{
Steps = stepsLoaded,
};
}
}
else
{
throw new ArgumentOutOfRangeException($"'using: {usingToken.Value}' is not supported, use 'docker' or 'node12' instead.");

View File

@@ -94,13 +94,6 @@ namespace GitHub.Runner.Worker
if (handlerData.HasPost && (Stage == ActionRunStage.Pre || Stage == ActionRunStage.Main))
{
string postDisplayName = $"Post {this.DisplayName}";
if (Stage == ActionRunStage.Pre &&
this.DisplayName.StartsWith("Pre ", StringComparison.OrdinalIgnoreCase))
{
// Trim the leading `Pre ` from the display name.
// Otherwise, we will get `Post Pre xxx` as DisplayName for the Post step.
postDisplayName = $"Post {this.DisplayName.Substring("Pre ".Length)}";
}
var repositoryReference = Action.Reference as RepositoryPathReference;
var pathString = string.IsNullOrEmpty(repositoryReference.Path) ? string.Empty : $"/{repositoryReference.Path}";
var repoString = string.IsNullOrEmpty(repositoryReference.Ref) ? $"{repositoryReference.Name}{pathString}" :
@@ -162,13 +155,6 @@ namespace GitHub.Runner.Worker
}
var validInputs = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
if (handlerData.ExecutionType == ActionExecutionType.Container)
{
// container action always accept 'entryPoint' and 'args' as inputs
// https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions#jobsjob_idstepswithargs
validInputs.Add("entryPoint");
validInputs.Add("args");
}
// Merge the default inputs from the definition
if (definition.Data?.Inputs != null)
{
@@ -184,21 +170,11 @@ namespace GitHub.Runner.Worker
}
}
// Validate inputs only for actions with action.yml
if (Action.Reference.Type == Pipelines.ActionSourceType.Repository)
foreach (var input in userInputs)
{
var unexpectedInputs = new List<string>();
foreach (var input in userInputs)
if (!validInputs.Contains(input))
{
if (!validInputs.Contains(input))
{
unexpectedInputs.Add(input);
}
}
if (unexpectedInputs.Count > 0)
{
ExecutionContext.Warning($"Unexpected input(s) '{string.Join("', '", unexpectedInputs)}', valid inputs are ['{string.Join("', '", validInputs)}']");
ExecutionContext.Warning($"Unexpected input '{input}', valid inputs are ['{string.Join("', '", validInputs)}']");
}
}
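A small sketch of the input-validation pattern in the hunk above: unexpected inputs are collected and reported in one aggregated warning rather than one warning per input. The input names and values here are sample data.

```csharp
using System;
using System.Collections.Generic;

class InputValidationSketch
{
    static void Main()
    {
        var validInputs = new HashSet<string>(StringComparer.OrdinalIgnoreCase) { "path", "token" };
        var userInputs = new Dictionary<string, string> { ["path"] = ".", ["invalid1"] = "x", ["invalid2"] = "y" };

        // Collect every unknown input first...
        var unexpectedInputs = new List<string>();
        foreach (var input in userInputs.Keys)
        {
            if (!validInputs.Contains(input))
            {
                unexpectedInputs.Add(input);
            }
        }

        // ...then emit a single warning using the message format from the hunk above.
        if (unexpectedInputs.Count > 0)
        {
            Console.WriteLine($"Unexpected input(s) '{string.Join("', '", unexpectedInputs)}', valid inputs are ['{string.Join("', '", validInputs)}']");
        }
    }
}
```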

View File

@@ -44,7 +44,6 @@ namespace GitHub.Runner.Worker
TaskResult? CommandResult { get; set; }
CancellationToken CancellationToken { get; }
List<ServiceEndpoint> Endpoints { get; }
TaskOrchestrationPlanReference Plan { get; }
PlanFeatures Features { get; }
Variables Variables { get; }
@@ -63,7 +62,7 @@ namespace GitHub.Runner.Worker
JobContext JobContext { get; }
// Only job level ExecutionContext has JobSteps
List<IStep> JobSteps { get; }
Queue<IStep> JobSteps { get; }
// Only job level ExecutionContext has PostJobSteps
Stack<IStep> PostJobSteps { get; }
@@ -105,13 +104,11 @@ namespace GitHub.Runner.Worker
// others
void ForceTaskComplete();
void RegisterPostJobStep(IStep step);
void RegisterNestedStep(IStep step, DictionaryContextData inputsData, int location, Dictionary<string, string> envData);
}
public sealed class ExecutionContext : RunnerService, IExecutionContext
{
private const int _maxIssueCount = 10;
private const int _throttlingDelayReportThreshold = 10 * 1000; // Don't report throttling with less than 10 seconds delay
private readonly TimelineRecord _record = new TimelineRecord();
private readonly Dictionary<Guid, TimelineRecord> _detailRecords = new Dictionary<Guid, TimelineRecord>();
@@ -143,7 +140,6 @@ namespace GitHub.Runner.Worker
public Task ForceCompleted => _forceCompleted.Task;
public CancellationToken CancellationToken => _cancellationTokenSource.Token;
public List<ServiceEndpoint> Endpoints { get; private set; }
public TaskOrchestrationPlanReference Plan { get; private set; }
public Variables Variables { get; private set; }
public Dictionary<string, string> IntraActionState { get; private set; }
public IDictionary<String, IDictionary<String, String>> JobDefaults { get; private set; }
@@ -160,7 +156,7 @@ namespace GitHub.Runner.Worker
public List<ContainerInfo> ServiceContainers { get; private set; }
// Only job level ExecutionContext has JobSteps
public List<IStep> JobSteps { get; private set; }
public Queue<IStep> JobSteps { get; private set; }
// Only job level ExecutionContext has PostJobSteps
public Stack<IStep> PostJobSteps { get; private set; }
@@ -170,6 +166,7 @@ namespace GitHub.Runner.Worker
public bool EchoOnActionCommand { get; set; }
public TaskResult? Result
{
get
@@ -266,35 +263,6 @@ namespace GitHub.Runner.Worker
Root.PostJobSteps.Push(step);
}
/// <summary>
/// Helper function used in CompositeActionHandler::RunAsync to
/// add a child node, aka a step, to the current job to the Root.JobSteps based on the location.
/// </summary>
public void RegisterNestedStep(IStep step, DictionaryContextData inputsData, int location, Dictionary<string, string> envData)
{
// TODO: For UI purposes, look at figuring out how to condense steps in one node => maybe use the same previous GUID
var newGuid = Guid.NewGuid();
step.ExecutionContext = Root.CreateChild(newGuid, step.DisplayName, newGuid.ToString("N"), null, null);
step.ExecutionContext.ExpressionValues["inputs"] = inputsData;
// Add the composite action environment variables to each step.
// If the key already exists, we override it since the composite action env variables will have higher precedence
// Note that for each composite action step, it's environment variables will be set in the StepRunner automatically
// step.ExecutionContext.SetEnvironmentVariables(envData);
#if OS_WINDOWS
var envContext = new DictionaryContextData();
#else
var envContext = new CaseSensitiveDictionaryContextData();
#endif
foreach (var pair in envData)
{
envContext[pair.Key] = new StringContextData(pair.Value ?? string.Empty);
}
step.ExecutionContext.ExpressionValues["env"] = envContext;
Root.JobSteps.Insert(location, step);
}
public IExecutionContext CreateChild(Guid recordId, string displayName, string refName, string scopeName, string contextName, Dictionary<string, string> intraActionState = null, int? recordOrder = null)
{
Trace.Entering();
@@ -306,7 +274,6 @@ namespace GitHub.Runner.Worker
child.Features = Features;
child.Variables = Variables;
child.Endpoints = Endpoints;
child.Plan = Plan;
if (intraActionState == null)
{
child.IntraActionState = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
@@ -368,7 +335,7 @@ namespace GitHub.Runner.Worker
}
// report total delay caused by server throttling.
if (_totalThrottlingDelayInMilliseconds > _throttlingDelayReportThreshold)
if (_totalThrottlingDelayInMilliseconds > 0)
{
this.Warning($"The job has experienced {TimeSpan.FromMilliseconds(_totalThrottlingDelayInMilliseconds).TotalSeconds} seconds total delay caused by server throttling.");
}
@@ -396,11 +363,7 @@ namespace GitHub.Runner.Worker
}
}
if (Root != this)
{
// only dispose TokenSource for step level ExecutionContext
_cancellationTokenSource?.Dispose();
}
_cancellationTokenSource?.Dispose();
_logger.End();
@@ -608,8 +571,7 @@ namespace GitHub.Runner.Worker
_cancellationTokenSource = CancellationTokenSource.CreateLinkedTokenSource(token);
// Plan
Plan = message.Plan;
// Features
Features = PlanUtil.GetFeatures(message.Plan);
// Endpoints
@@ -689,7 +651,7 @@ namespace GitHub.Runner.Worker
PrependPath = new List<string>();
// JobSteps for job ExecutionContext
JobSteps = new List<IStep>();
JobSteps = new Queue<IStep>();
// PostJobSteps for job ExecutionContext
PostJobSteps = new Stack<IStep>();
@@ -889,8 +851,7 @@ namespace GitHub.Runner.Worker
{
Interlocked.Add(ref _totalThrottlingDelayInMilliseconds, Convert.ToInt64(data.Delay.TotalMilliseconds));
if (!_throttlingReported &&
_totalThrottlingDelayInMilliseconds > _throttlingDelayReportThreshold)
if (!_throttlingReported)
{
this.Warning(string.Format("The job is currently being throttled by the server. You may experience delays in console line output, job status reporting, and action log uploads."));
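The JobSteps change in the hunks above swaps between Queue&lt;IStep&gt; and List&lt;IStep&gt;; the list form exists so RegisterNestedStep can splice nested composite steps in at a given index, which a FIFO queue cannot do. A minimal illustration with plain strings standing in for steps:

```csharp
using System;
using System.Collections.Generic;

class JobStepsSketch
{
    static void Main()
    {
        // Queue: strictly FIFO, steps can only be appended at the back.
        var queue = new Queue<string>(new[] { "step-a", "step-b" });
        queue.Enqueue("post-step");
        Console.WriteLine(string.Join(", ", queue)); // step-a, step-b, post-step

        // List: nested steps can be inserted ahead of the remaining steps,
        // the way RegisterNestedStep uses JobSteps.Insert(location, step).
        var list = new List<string> { "step-a", "step-b" };
        list.Insert(0, "nested-step-1");
        list.Insert(1, "nested-step-2");
        Console.WriteLine(string.Join(", ", list)); // nested-step-1, nested-step-2, step-a, step-b
    }
}
```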

View File

@@ -12,8 +12,6 @@ namespace GitHub.Runner.Worker.Expressions
{
public sealed class HashFilesFunction : Function
{
private const int _hashFileTimeoutSeconds = 120;
protected sealed override Object EvaluateCore(
EvaluationContext context,
out ResultMemory resultMemory)
@@ -91,29 +89,19 @@ namespace GitHub.Runner.Worker.Expressions
}
env["patterns"] = string.Join(Environment.NewLine, patterns);
using (var tokenSource = new CancellationTokenSource(TimeSpan.FromSeconds(_hashFileTimeoutSeconds)))
int exitCode = p.ExecuteAsync(workingDirectory: githubWorkspace,
fileName: node,
arguments: $"\"{hashFilesScript.Replace("\"", "\\\"")}\"",
environment: env,
requireExitCodeZero: false,
cancellationToken: new CancellationTokenSource(TimeSpan.FromSeconds(120)).Token).GetAwaiter().GetResult();
if (exitCode != 0)
{
try
{
int exitCode = p.ExecuteAsync(workingDirectory: githubWorkspace,
fileName: node,
arguments: $"\"{hashFilesScript.Replace("\"", "\\\"")}\"",
environment: env,
requireExitCodeZero: false,
cancellationToken: tokenSource.Token).GetAwaiter().GetResult();
if (exitCode != 0)
{
throw new InvalidOperationException($"hashFiles('{ExpressionUtility.StringEscape(string.Join(", ", patterns))}') failed. Fail to hash files under directory '{githubWorkspace}'");
}
}
catch (OperationCanceledException) when (tokenSource.IsCancellationRequested)
{
throw new TimeoutException($"hashFiles('{ExpressionUtility.StringEscape(string.Join(", ", patterns))}') couldn't finish within {_hashFileTimeoutSeconds} seconds.");
}
return hashResult;
throw new InvalidOperationException($"hashFiles('{ExpressionUtility.StringEscape(string.Join(", ", patterns))}') failed. Fail to hash files under directory '{githubWorkspace}'");
}
return hashResult;
}
private sealed class HashFilesTrace : ITraceWriter
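A compact sketch of the timeout pattern in the hashFiles hunk above: a CancellationTokenSource bounds the call, and an OperationCanceledException caused by that token is translated into a TimeoutException. The awaited work here is a stand-in for the real ExecuteAsync call.

```csharp
using System;
using System.Threading;
using System.Threading.Tasks;

class HashFilesTimeoutSketch
{
    private const int TimeoutSeconds = 120;

    static async Task<int> RunWithTimeoutAsync(Func<CancellationToken, Task<int>> work)
    {
        using (var tokenSource = new CancellationTokenSource(TimeSpan.FromSeconds(TimeoutSeconds)))
        {
            try
            {
                return await work(tokenSource.Token);
            }
            catch (OperationCanceledException) when (tokenSource.IsCancellationRequested)
            {
                // Surface the timeout explicitly, as the hunk above does.
                throw new TimeoutException($"hashFiles couldn't finish within {TimeoutSeconds} seconds.");
            }
        }
    }

    static async Task Main()
    {
        // Stand-in workload: completes quickly, so no timeout is hit here.
        var exitCode = await RunWithTimeoutAsync(async token =>
        {
            await Task.Delay(100, token);
            return 0;
        });
        Console.WriteLine($"exit code: {exitCode}");
    }
}
```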

View File

@@ -22,8 +22,8 @@ namespace GitHub.Runner.Worker
"repository_owner",
"run_id",
"run_number",
"server_url",
"sha",
"url",
"workflow",
"workspace",
};

View File

@@ -1,96 +0,0 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using Pipelines = GitHub.DistributedTask.Pipelines;
namespace GitHub.Runner.Worker.Handlers
{
[ServiceLocator(Default = typeof(CompositeActionHandler))]
public interface ICompositeActionHandler : IHandler
{
CompositeActionExecutionData Data { get; set; }
}
public sealed class CompositeActionHandler : Handler, ICompositeActionHandler
{
public CompositeActionExecutionData Data { get; set; }
public Task RunAsync(ActionRunStage stage)
{
// Validate args.
Trace.Entering();
ArgUtil.NotNull(ExecutionContext, nameof(ExecutionContext));
ArgUtil.NotNull(Inputs, nameof(Inputs));
var githubContext = ExecutionContext.ExpressionValues["github"] as GitHubContext;
ArgUtil.NotNull(githubContext, nameof(githubContext));
var tempDirectory = HostContext.GetDirectory(WellKnownDirectory.Temp);
// Resolve action steps
var actionSteps = Data.Steps;
// Create Context Data to reuse for each composite action step
var inputsData = new DictionaryContextData();
foreach (var i in Inputs)
{
inputsData[i.Key] = new StringContextData(i.Value);
}
// Add each composite action step to the front of the queue
int location = 0;
foreach (Pipelines.ActionStep aStep in actionSteps)
{
// Ex:
// runs:
// using: "composite"
// steps:
// - uses: example/test-composite@v2 (a)
// - run echo hello world (b)
// - run echo hello world 2 (c)
//
// ethanchewy/test-composite/action.yaml
// runs:
// using: "composite"
// steps:
// - run echo hello world 3 (d)
// - run echo hello world 4 (e)
//
// Steps processed as follow:
// | a |
// | a | => | d |
// (Run step d)
// | a |
// | a | => | e |
// (Run step e)
// | a |
// (Run step a)
// | b |
// (Run step b)
// | c |
// (Run step c)
// Done.
var actionRunner = HostContext.CreateService<IActionRunner>();
actionRunner.Action = aStep;
actionRunner.Stage = stage;
actionRunner.Condition = aStep.Condition;
actionRunner.DisplayName = aStep.DisplayName;
ExecutionContext.RegisterNestedStep(actionRunner, inputsData, location, Environment);
location++;
}
return Task.CompletedTask;
}
}
}

View File

@@ -66,11 +66,6 @@ namespace GitHub.Runner.Worker.Handlers
handler = HostContext.CreateService<IRunnerPluginHandler>();
(handler as IRunnerPluginHandler).Data = data as PluginActionExecutionData;
}
else if (data.ExecutionType == ActionExecutionType.Composite)
{
handler = HostContext.CreateService<ICompositeActionHandler>();
(handler as ICompositeActionHandler).Data = data as CompositeActionExecutionData;
}
else
{
// This should never happen.

View File

@@ -352,24 +352,15 @@ namespace GitHub.Runner.Worker.Handlers
if (File.Exists(gitConfigPath))
{
// Check if the config contains the workflow repository url
var serverUrl = _executionContext.GetGitHubContext("server_url");
serverUrl = !string.IsNullOrEmpty(serverUrl) ? serverUrl : "https://github.com";
var host = new Uri(serverUrl, UriKind.Absolute).Host;
var nameWithOwner = _executionContext.GetGitHubContext("repository");
var patterns = new[] {
$"url = {serverUrl}/{nameWithOwner}",
$"url = git@{host}:{nameWithOwner}.git",
};
var qualifiedRepository = _executionContext.GetGitHubContext("repository");
var configMatch = $"url = https://github.com/{qualifiedRepository}";
var content = File.ReadAllText(gitConfigPath);
foreach (var line in content.Split("\n").Select(x => x.Trim()))
{
foreach (var pattern in patterns)
if (String.Equals(line, configMatch, StringComparison.OrdinalIgnoreCase))
{
if (String.Equals(line, pattern, StringComparison.OrdinalIgnoreCase))
{
repositoryPath = directoryPath;
break;
}
repositoryPath = directoryPath;
break;
}
}
}
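A small sketch of the remote-URL matching shown in the hunk above, which accepts both the HTTPS and SSH forms of the workflow repository remote. The server URL, repository name, and sample config line are illustrative values.

```csharp
using System;

class GitConfigMatchSketch
{
    static void Main()
    {
        var serverUrl = "https://github.com";                // falls back to github.com when unset
        var host = new Uri(serverUrl, UriKind.Absolute).Host;
        var nameWithOwner = "my-org/workflow-repo";

        var patterns = new[]
        {
            $"url = {serverUrl}/{nameWithOwner}",            // HTTPS remote
            $"url = git@{host}:{nameWithOwner}.git",         // SSH remote
        };

        var line = "url = git@github.com:my-org/workflow-repo.git";
        foreach (var pattern in patterns)
        {
            if (string.Equals(line.Trim(), pattern, StringComparison.OrdinalIgnoreCase))
            {
                Console.WriteLine($"matched: {pattern}");
            }
        }
    }
}
```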

View File

@@ -64,20 +64,6 @@ namespace GitHub.Runner.Worker
context.Debug($"Starting: Set up job");
context.Output($"Current runner version: '{BuildConstants.RunnerPackage.Version}'");
var setting = HostContext.GetService<IConfigurationStore>().GetSettings();
var credFile = HostContext.GetConfigFile(WellKnownConfigFile.Credentials);
if (File.Exists(credFile))
{
var credData = IOUtil.LoadObject<CredentialData>(credFile);
if (credData != null &&
credData.Data.TryGetValue("clientId", out var clientId))
{
// print out HostName for self-hosted runner
context.Output($"Runner name: '{setting.AgentName}'");
context.Output($"Machine name: '{Environment.MachineName}'");
}
}
var setupInfoFile = HostContext.GetConfigFile(WellKnownConfigFile.SetupInfo);
if (File.Exists(setupInfoFile))
{
@@ -145,13 +131,12 @@ namespace GitHub.Runner.Worker
// Temporary hack for GHES alpha
var configurationStore = HostContext.GetService<IConfigurationStore>();
var runnerSettings = configurationStore.GetSettings();
if (string.IsNullOrEmpty(context.GetGitHubContext("server_url")) && !runnerSettings.IsHostedServer && !string.IsNullOrEmpty(runnerSettings.GitHubUrl))
if (string.IsNullOrEmpty(context.GetGitHubContext("url")) && !runnerSettings.IsHostedServer && !string.IsNullOrEmpty(runnerSettings.GitHubUrl))
{
var url = new Uri(runnerSettings.GitHubUrl);
var portInfo = url.IsDefaultPort ? string.Empty : $":{url.Port.ToString(CultureInfo.InvariantCulture)}";
context.SetGitHubContext("server_url", $"{url.Scheme}://{url.Host}{portInfo}");
context.SetGitHubContext("url", $"{url.Scheme}://{url.Host}{portInfo}");
context.SetGitHubContext("api_url", $"{url.Scheme}://{url.Host}{portInfo}/api/v3");
context.SetGitHubContext("graphql_url", $"{url.Scheme}://{url.Host}{portInfo}/api/graphql");
}
// Evaluate the job-level environment variables
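A standalone sketch of the GHES URL derivation in the hunk above: the configured GitHub URL is reduced to scheme://host[:port] and used to build the server, API, and GraphQL URLs placed in the github context. The input URL is a sample value.

```csharp
using System;
using System.Globalization;

class GhesUrlSketch
{
    static void Main()
    {
        var url = new Uri("https://ghes.example.com:8443");

        // Keep the port only when it is not the scheme default.
        var portInfo = url.IsDefaultPort ? string.Empty : $":{url.Port.ToString(CultureInfo.InvariantCulture)}";
        var serverUrl = $"{url.Scheme}://{url.Host}{portInfo}";

        Console.WriteLine($"server_url:  {serverUrl}");
        Console.WriteLine($"api_url:     {serverUrl}/api/v3");
        Console.WriteLine($"graphql_url: {serverUrl}/api/graphql");
    }
}
```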

View File

@@ -5,13 +5,21 @@ using GitHub.Services.Common;
using GitHub.Services.WebApi;
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using System.Net.Http;
using System.Text;
using System.IO.Compression;
using System.Diagnostics;
using Newtonsoft.Json.Linq;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.ObjectTemplating;
namespace GitHub.Runner.Worker
{
@@ -114,6 +122,13 @@ namespace GitHub.Runner.Worker
_tempDirectoryManager = HostContext.GetService<ITempDirectoryManager>();
_tempDirectoryManager.InitializeTempDirectory(jobContext);
// // Expand container properties
// jobContext.Container?.ExpandProperties(jobContext.Variables);
// foreach (var sidecar in jobContext.SidecarContainers)
// {
// sidecar.ExpandProperties(jobContext.Variables);
// }
// Get the job extension.
Trace.Info("Getting job extension.");
IJobExtension jobExtension = HostContext.CreateService<IJobExtension>();
@@ -152,7 +167,7 @@ namespace GitHub.Runner.Worker
{
foreach (var step in jobSteps)
{
jobContext.JobSteps.Add(step);
jobContext.JobSteps.Enqueue(step);
}
await stepsRunner.RunAsync(jobContext);
@@ -239,12 +254,6 @@ namespace GitHub.Runner.Worker
Trace.Error(ex);
return TaskResult.Failed;
}
catch (TaskOrchestrationPlanTerminatedException ex)
{
Trace.Error($"TaskOrchestrationPlanTerminatedException received, while attempting to raise JobCompletedEvent for job {message.JobId}.");
Trace.Error(ex);
return TaskResult.Failed;
}
catch (Exception ex)
{
Trace.Error($"Catch exception while attempting to raise JobCompletedEvent for job {message.JobId}, job request {message.RequestId}.");

View File

@@ -59,15 +59,14 @@ namespace GitHub.Runner.Worker
checkPostJobActions = true;
while (jobContext.PostJobSteps.TryPop(out var postStep))
{
jobContext.JobSteps.Add(postStep);
jobContext.JobSteps.Enqueue(postStep);
}
continue;
}
var step = jobContext.JobSteps[0];
jobContext.JobSteps.RemoveAt(0);
var nextStep = jobContext.JobSteps.Count > 0 ? jobContext.JobSteps[0] : null;
var step = jobContext.JobSteps.Dequeue();
var nextStep = jobContext.JobSteps.Count > 0 ? jobContext.JobSteps.Peek() : null;
Trace.Info($"Processing step: DisplayName='{step.DisplayName}'");
ArgUtil.NotNull(step.ExecutionContext, nameof(step.ExecutionContext));
@@ -93,161 +92,130 @@ namespace GitHub.Runner.Worker
#else
var envContext = new CaseSensitiveDictionaryContextData();
#endif
// Global env
step.ExecutionContext.ExpressionValues["env"] = envContext;
foreach (var pair in step.ExecutionContext.EnvironmentVariables)
{
envContext[pair.Key] = new StringContextData(pair.Value ?? string.Empty);
}
// Stomps over with outside step env
if (step.ExecutionContext.ExpressionValues.TryGetValue("env", out var envContextData))
{
#if OS_WINDOWS
var dict = envContextData as DictionaryContextData;
#else
var dict = envContextData as CaseSensitiveDictionaryContextData;
#endif
foreach (var pair in dict)
{
envContext[pair.Key] = pair.Value;
}
}
step.ExecutionContext.ExpressionValues["env"] = envContext;
bool evaluateStepEnvFailed = false;
if (step is IActionRunner actionStep)
{
// Set GITHUB_ACTION
step.ExecutionContext.SetGitHubContext("action", actionStep.Action.Name);
try
// Evaluate and merge action's env block to env context
var templateEvaluator = step.ExecutionContext.ToPipelineTemplateEvaluator();
var actionEnvironment = templateEvaluator.EvaluateStepEnvironment(actionStep.Action.Environment, step.ExecutionContext.ExpressionValues, step.ExecutionContext.ExpressionFunctions, VarUtil.EnvironmentVariableKeyComparer);
foreach (var env in actionEnvironment)
{
// Evaluate and merge action's env block to env context
var templateEvaluator = step.ExecutionContext.ToPipelineTemplateEvaluator();
var actionEnvironment = templateEvaluator.EvaluateStepEnvironment(actionStep.Action.Environment, step.ExecutionContext.ExpressionValues, step.ExecutionContext.ExpressionFunctions, VarUtil.EnvironmentVariableKeyComparer);
foreach (var env in actionEnvironment)
{
envContext[env.Key] = new StringContextData(env.Value ?? string.Empty);
}
}
catch (Exception ex)
{
// fail the step since there is an evaluate error.
Trace.Info("Caught exception from expression for step.env");
evaluateStepEnvFailed = true;
step.ExecutionContext.Error(ex);
CompleteStep(step, nextStep, TaskResult.Failed);
envContext[env.Key] = new StringContextData(env.Value ?? string.Empty);
}
}
if (!evaluateStepEnvFailed)
try
{
try
// Register job cancellation call back only if job cancellation token not been fire before each step run
if (!jobContext.CancellationToken.IsCancellationRequested)
{
// Register job cancellation call back only if job cancellation token not been fire before each step run
if (!jobContext.CancellationToken.IsCancellationRequested)
// Test the condition again. The job was canceled after the condition was originally evaluated.
jobCancelRegister = jobContext.CancellationToken.Register(() =>
{
// Test the condition again. The job was canceled after the condition was originally evaluated.
jobCancelRegister = jobContext.CancellationToken.Register(() =>
{
// mark job as cancelled
jobContext.Result = TaskResult.Canceled;
jobContext.JobContext.Status = jobContext.Result?.ToActionResult();
// mark job as cancelled
jobContext.Result = TaskResult.Canceled;
jobContext.JobContext.Status = jobContext.Result?.ToActionResult();
step.ExecutionContext.Debug($"Re-evaluate condition on job cancellation for step: '{step.DisplayName}'.");
var conditionReTestTraceWriter = new ConditionTraceWriter(Trace, null); // host tracing only
var conditionReTestResult = false;
if (HostContext.RunnerShutdownToken.IsCancellationRequested)
{
step.ExecutionContext.Debug($"Skip Re-evaluate condition on runner shutdown.");
}
else
{
try
{
var templateEvaluator = step.ExecutionContext.ToPipelineTemplateEvaluator(conditionReTestTraceWriter);
var condition = new BasicExpressionToken(null, null, null, step.Condition);
conditionReTestResult = templateEvaluator.EvaluateStepIf(condition, step.ExecutionContext.ExpressionValues, step.ExecutionContext.ExpressionFunctions, step.ExecutionContext.ToExpressionState());
}
catch (Exception ex)
{
// Cancel the step since we get exception while re-evaluate step condition.
Trace.Info("Caught exception from expression when re-test condition on job cancellation.");
step.ExecutionContext.Error(ex);
}
}
if (!conditionReTestResult)
{
// Cancel the step.
Trace.Info("Cancel current running step.");
step.ExecutionContext.CancelToken();
}
});
}
else
{
if (jobContext.Result != TaskResult.Canceled)
step.ExecutionContext.Debug($"Re-evaluate condition on job cancellation for step: '{step.DisplayName}'.");
var conditionReTestTraceWriter = new ConditionTraceWriter(Trace, null); // host tracing only
var conditionReTestResult = false;
if (HostContext.RunnerShutdownToken.IsCancellationRequested)
{
// mark job as cancelled
jobContext.Result = TaskResult.Canceled;
jobContext.JobContext.Status = jobContext.Result?.ToActionResult();
step.ExecutionContext.Debug($"Skip Re-evaluate condition on runner shutdown.");
}
}
// Evaluate condition.
step.ExecutionContext.Debug($"Evaluating condition for step: '{step.DisplayName}'");
var conditionTraceWriter = new ConditionTraceWriter(Trace, step.ExecutionContext);
var conditionResult = false;
var conditionEvaluateError = default(Exception);
if (HostContext.RunnerShutdownToken.IsCancellationRequested)
{
step.ExecutionContext.Debug($"Skip evaluate condition on runner shutdown.");
}
else
{
try
else
{
var templateEvaluator = step.ExecutionContext.ToPipelineTemplateEvaluator(conditionTraceWriter);
var condition = new BasicExpressionToken(null, null, null, step.Condition);
conditionResult = templateEvaluator.EvaluateStepIf(condition, step.ExecutionContext.ExpressionValues, step.ExecutionContext.ExpressionFunctions, step.ExecutionContext.ToExpressionState());
try
{
var templateEvaluator = step.ExecutionContext.ToPipelineTemplateEvaluator(conditionReTestTraceWriter);
var condition = new BasicExpressionToken(null, null, null, step.Condition);
conditionReTestResult = templateEvaluator.EvaluateStepIf(condition, step.ExecutionContext.ExpressionValues, step.ExecutionContext.ExpressionFunctions, step.ExecutionContext.ToExpressionState());
}
catch (Exception ex)
{
// Cancel the step since we get exception while re-evaluate step condition.
Trace.Info("Caught exception from expression when re-test condition on job cancellation.");
step.ExecutionContext.Error(ex);
}
}
catch (Exception ex)
{
Trace.Info("Caught exception from expression.");
Trace.Error(ex);
conditionEvaluateError = ex;
}
}
// no evaluate error but condition is false
if (!conditionResult && conditionEvaluateError == null)
if (!conditionReTestResult)
{
// Cancel the step.
Trace.Info("Cancel current running step.");
step.ExecutionContext.CancelToken();
}
});
}
else
{
if (jobContext.Result != TaskResult.Canceled)
{
// Condition == false
Trace.Info("Skipping step due to condition evaluation.");
CompleteStep(step, nextStep, TaskResult.Skipped, resultCode: conditionTraceWriter.Trace);
}
else if (conditionEvaluateError != null)
{
// fail the step since there is an evaluate error.
step.ExecutionContext.Error(conditionEvaluateError);
CompleteStep(step, nextStep, TaskResult.Failed);
}
else
{
// Run the step.
await RunStepAsync(step, jobContext.CancellationToken);
CompleteStep(step, nextStep);
// mark job as cancelled
jobContext.Result = TaskResult.Canceled;
jobContext.JobContext.Status = jobContext.Result?.ToActionResult();
}
}
finally
// Evaluate condition.
step.ExecutionContext.Debug($"Evaluating condition for step: '{step.DisplayName}'");
var conditionTraceWriter = new ConditionTraceWriter(Trace, step.ExecutionContext);
var conditionResult = false;
var conditionEvaluateError = default(Exception);
if (HostContext.RunnerShutdownToken.IsCancellationRequested)
{
if (jobCancelRegister != null)
step.ExecutionContext.Debug($"Skip evaluate condition on runner shutdown.");
}
else
{
try
{
jobCancelRegister?.Dispose();
jobCancelRegister = null;
var templateEvaluator = step.ExecutionContext.ToPipelineTemplateEvaluator(conditionTraceWriter);
var condition = new BasicExpressionToken(null, null, null, step.Condition);
conditionResult = templateEvaluator.EvaluateStepIf(condition, step.ExecutionContext.ExpressionValues, step.ExecutionContext.ExpressionFunctions, step.ExecutionContext.ToExpressionState());
}
catch (Exception ex)
{
Trace.Info("Caught exception from expression.");
Trace.Error(ex);
conditionEvaluateError = ex;
}
}
// no evaluate error but condition is false
if (!conditionResult && conditionEvaluateError == null)
{
// Condition == false
Trace.Info("Skipping step due to condition evaluation.");
CompleteStep(step, nextStep, TaskResult.Skipped, resultCode: conditionTraceWriter.Trace);
}
else if (conditionEvaluateError != null)
{
// fail the step since there is an evaluate error.
step.ExecutionContext.Error(conditionEvaluateError);
CompleteStep(step, nextStep, TaskResult.Failed);
}
else
{
// Run the step.
await RunStepAsync(step, jobContext.CancellationToken);
CompleteStep(step, nextStep);
}
}
finally
{
if (jobCancelRegister != null)
{
jobCancelRegister?.Dispose();
jobCancelRegister = null;
}
}
}
@@ -426,11 +394,7 @@ namespace GitHub.Runner.Worker
scope = scopesToInitialize.Pop();
executionContext.Debug($"Initializing scope '{scope.Name}'");
executionContext.ExpressionValues["steps"] = stepsContext.GetScope(scope.ParentName);
// TODO: Fix this temporary workaround for Composite Actions
if (!executionContext.ExpressionValues.ContainsKey("inputs") && !string.IsNullOrEmpty(Environment.GetEnvironmentVariable("TESTING_COMPOSITE_ACTIONS_ALPHA")))
{
executionContext.ExpressionValues["inputs"] = !String.IsNullOrEmpty(scope.ParentName) ? scopeInputs[scope.ParentName] : null;
}
executionContext.ExpressionValues["inputs"] = !String.IsNullOrEmpty(scope.ParentName) ? scopeInputs[scope.ParentName] : null;
var templateEvaluator = executionContext.ToPipelineTemplateEvaluator();
var inputs = default(DictionaryContextData);
try
@@ -453,11 +417,7 @@ namespace GitHub.Runner.Worker
// Setup expression values
var scopeName = executionContext.ScopeName;
executionContext.ExpressionValues["steps"] = stepsContext.GetScope(scopeName);
// TODO: Fix this temporary workaround for Composite Actions
if (!executionContext.ExpressionValues.ContainsKey("inputs") && !string.IsNullOrEmpty(Environment.GetEnvironmentVariable("TESTING_COMPOSITE_ACTIONS_ALPHA")))
{
executionContext.ExpressionValues["inputs"] = string.IsNullOrEmpty(scopeName) ? null : scopeInputs[scopeName];
}
executionContext.ExpressionValues["inputs"] = string.IsNullOrEmpty(scopeName) ? null : scopeInputs[scopeName];
return true;
}

View File

@@ -32,8 +32,7 @@
"one-of": [
"container-runs",
"node12-runs",
"plugin-runs",
"composite-runs"
"plugin-runs"
]
},
"container-runs": {
@@ -84,36 +83,6 @@
}
}
},
"composite-runs": {
"mapping": {
"properties": {
"using": "non-empty-string",
"steps": "composite-steps"
}
}
},
"composite-steps": {
"context": [
"github",
"needs",
"strategy",
"matrix",
"secrets",
"steps",
"inputs",
"job",
"runner",
"env",
"always(0,0)",
"failure(0,0)",
"cancelled(0,0)",
"success(0,0)",
"hashFiles(1,255)"
],
"sequence": {
"item-type": "any"
}
},
"container-runs-context": {
"context": [
"inputs"

View File

@@ -317,37 +317,5 @@ namespace GitHub.DistributedTask.WebApi
userState: userState,
cancellationToken: cancellationToken);
}
/// <summary>
/// [Preview API] Resolves information required to download actions (URL, token) defined in an orchestration.
/// </summary>
/// <param name="scopeIdentifier">The project GUID to scope the request</param>
/// <param name="hubName">The name of the server hub: "build" for the Build server or "rm" for the Release Management server</param>
/// <param name="planId"></param>
/// <param name="actionReferenceList"></param>
/// <param name="userState"></param>
/// <param name="cancellationToken">The cancellation token to cancel operation.</param>
public virtual Task<ActionDownloadInfoCollection> ResolveActionDownloadInfoAsync(
Guid scopeIdentifier,
string hubName,
Guid planId,
ActionReferenceList actionReferenceList,
object userState = null,
CancellationToken cancellationToken = default)
{
HttpMethod httpMethod = new HttpMethod("POST");
Guid locationId = new Guid("27d7f831-88c1-4719-8ca1-6a061dad90eb");
object routeValues = new { scopeIdentifier = scopeIdentifier, hubName = hubName, planId = planId };
HttpContent content = new ObjectContent<ActionReferenceList>(actionReferenceList, new VssJsonMediaTypeFormatter(true));
return SendAsync<ActionDownloadInfoCollection>(
httpMethod,
locationId,
routeValues: routeValues,
version: new ApiResourceVersion(6.0, 1),
userState: userState,
cancellationToken: cancellationToken,
content: content);
}
}
}

View File

@@ -42,12 +42,7 @@ namespace GitHub.DistributedTask.Pipelines.ContextData
var floored = Math.Floor(m_value);
if (m_value == floored && m_value <= (Double)Int32.MaxValue && m_value >= (Double)Int32.MinValue)
{
var flooredInt = (Int32)floored;
return (JToken)flooredInt;
}
else if (m_value == floored && m_value <= (Double)Int64.MaxValue && m_value >= (Double)Int64.MinValue)
{
var flooredInt = (Int64)floored;
Int32 flooredInt = (Int32)floored;
return (JToken)flooredInt;
}
else
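A brief sketch of the numeric narrowing in the hunk above, minus the JToken wrapper: a double equal to its floor becomes an Int32 when it fits, otherwise (in the wider variant) an Int64, otherwise it stays a double.

```csharp
using System;

class NumberNarrowingSketch
{
    // Mirrors the branching in the hunk above, returning a plain object
    // instead of a JToken to keep the sample dependency-free.
    static object Narrow(double value)
    {
        var floored = Math.Floor(value);
        if (value == floored && value <= (double)int.MaxValue && value >= (double)int.MinValue)
        {
            return (int)floored;
        }
        else if (value == floored && value <= (double)long.MaxValue && value >= (double)long.MinValue)
        {
            return (long)floored;
        }
        return value;
    }

    static void Main()
    {
        Console.WriteLine(Narrow(42.0).GetType().Name);            // Int32
        Console.WriteLine(Narrow(5_000_000_000.0).GetType().Name); // Int64
        Console.WriteLine(Narrow(3.14).GetType().Name);            // Double
    }
}
```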

View File

@@ -65,7 +65,6 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
public const String StepEnv = "step-env";
public const String StepIfResult = "step-if-result";
public const String Steps = "steps";
public const String StepsInTemplate = "steps-in-template";
public const String StepsScopeInputs = "steps-scope-inputs";
public const String StepsScopeOutputs = "steps-scope-outputs";
public const String StepsTemplateRoot = "steps-template-root";

View File

@@ -29,6 +29,7 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
var evaluationResult = EvaluationResult.CreateIntermediateResult(null, ifResult);
return evaluationResult.IsTruthy;
}
internal static Boolean? ConvertToStepContinueOnError(
TemplateContext context,
TemplateToken token,
@@ -263,351 +264,5 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
return result;
}
//Note: originally was List<Step> but we need to change to List<ActionStep> to use the "Inputs" attribute
internal static List<ActionStep> ConvertToSteps(
TemplateContext context,
TemplateToken steps)
{
var stepsSequence = steps.AssertSequence($"job {PipelineTemplateConstants.Steps}");
var result = new List<ActionStep>();
foreach (var stepsItem in stepsSequence)
{
var step = ConvertToStep(context, stepsItem);
if (step != null) // step = null means we are hitting error during step conversion, there should be an error in context.errors
{
if (step.Enabled)
{
result.Add(step);
}
}
}
return result;
}
private static ActionStep ConvertToStep(
TemplateContext context,
TemplateToken stepsItem)
{
var step = stepsItem.AssertMapping($"{PipelineTemplateConstants.Steps} item");
var continueOnError = default(ScalarToken);
var env = default(TemplateToken);
var id = default(StringToken);
var ifCondition = default(String);
var ifToken = default(ScalarToken);
var name = default(ScalarToken);
var run = default(ScalarToken);
var scope = default(StringToken);
var timeoutMinutes = default(ScalarToken);
var uses = default(StringToken);
var with = default(TemplateToken);
var workingDir = default(ScalarToken);
var path = default(ScalarToken);
var clean = default(ScalarToken);
var fetchDepth = default(ScalarToken);
var lfs = default(ScalarToken);
var submodules = default(ScalarToken);
var shell = default(ScalarToken);
foreach (var stepProperty in step)
{
var propertyName = stepProperty.Key.AssertString($"{PipelineTemplateConstants.Steps} item key");
switch (propertyName.Value)
{
case PipelineTemplateConstants.Clean:
clean = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Clean}");
break;
case PipelineTemplateConstants.ContinueOnError:
ConvertToStepContinueOnError(context, stepProperty.Value, allowExpressions: true); // Validate early if possible
continueOnError = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} {PipelineTemplateConstants.ContinueOnError}");
break;
case PipelineTemplateConstants.Env:
ConvertToStepEnvironment(context, stepProperty.Value, StringComparer.Ordinal, allowExpressions: true); // Validate early if possible
env = stepProperty.Value;
break;
case PipelineTemplateConstants.FetchDepth:
fetchDepth = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.FetchDepth}");
break;
case PipelineTemplateConstants.Id:
id = stepProperty.Value.AssertString($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Id}");
if (!NameValidation.IsValid(id.Value, true))
{
context.Error(id, $"Step id {id.Value} is invalid. Ids must start with a letter or '_' and contain only alphanumeric characters, '-', or '_'");
}
break;
case PipelineTemplateConstants.If:
ifToken = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.If}");
break;
case PipelineTemplateConstants.Lfs:
lfs = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Lfs}");
break;
case PipelineTemplateConstants.Name:
name = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Name}");
break;
case PipelineTemplateConstants.Path:
path = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Path}");
break;
case PipelineTemplateConstants.Run:
run = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Run}");
break;
case PipelineTemplateConstants.Shell:
shell = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Shell}");
break;
case PipelineTemplateConstants.Scope:
scope = stepProperty.Value.AssertString($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Scope}");
break;
case PipelineTemplateConstants.Submodules:
submodules = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Submodules}");
break;
case PipelineTemplateConstants.TimeoutMinutes:
ConvertToStepTimeout(context, stepProperty.Value, allowExpressions: true); // Validate early if possible
timeoutMinutes = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.TimeoutMinutes}");
break;
case PipelineTemplateConstants.Uses:
uses = stepProperty.Value.AssertString($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Uses}");
break;
case PipelineTemplateConstants.With:
ConvertToStepInputs(context, stepProperty.Value, allowExpressions: true); // Validate early if possible
with = stepProperty.Value;
break;
case PipelineTemplateConstants.WorkingDirectory:
workingDir = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.WorkingDirectory}");
break;
default:
propertyName.AssertUnexpectedValue($"{PipelineTemplateConstants.Steps} item key"); // throws
break;
}
}
// Fixup the if-condition
var isDefaultScope = String.IsNullOrEmpty(scope?.Value);
ifCondition = ConvertToIfCondition(context, ifToken, false, isDefaultScope);
if (run != null)
{
var result = new ActionStep
{
ScopeName = scope?.Value,
ContextName = id?.Value,
ContinueOnError = continueOnError,
DisplayNameToken = name,
Condition = ifCondition,
TimeoutInMinutes = timeoutMinutes,
Environment = env,
Reference = new ScriptReference(),
};
var inputs = new MappingToken(null, null, null);
inputs.Add(new StringToken(null, null, null, PipelineConstants.ScriptStepInputs.Script), run);
if (workingDir != null)
{
inputs.Add(new StringToken(null, null, null, PipelineConstants.ScriptStepInputs.WorkingDirectory), workingDir);
}
if (shell != null)
{
inputs.Add(new StringToken(null, null, null, PipelineConstants.ScriptStepInputs.Shell), shell);
}
result.Inputs = inputs;
return result;
}
else
{
uses.AssertString($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Uses}");
var result = new ActionStep
{
ScopeName = scope?.Value,
ContextName = id?.Value,
ContinueOnError = continueOnError,
DisplayNameToken = name,
Condition = ifCondition,
TimeoutInMinutes = timeoutMinutes,
Inputs = with,
Environment = env,
};
if (uses.Value.StartsWith("docker://", StringComparison.Ordinal))
{
var image = uses.Value.Substring("docker://".Length);
result.Reference = new ContainerRegistryReference { Image = image };
}
else if (uses.Value.StartsWith("./") || uses.Value.StartsWith(".\\"))
{
result.Reference = new RepositoryPathReference
{
RepositoryType = PipelineConstants.SelfAlias,
Path = uses.Value
};
}
else
{
var usesSegments = uses.Value.Split('@');
var pathSegments = usesSegments[0].Split(new[] { '/', '\\' }, StringSplitOptions.RemoveEmptyEntries);
var gitRef = usesSegments.Length == 2 ? usesSegments[1] : String.Empty;
if (usesSegments.Length != 2 ||
pathSegments.Length < 2 ||
String.IsNullOrEmpty(pathSegments[0]) ||
String.IsNullOrEmpty(pathSegments[1]) ||
String.IsNullOrEmpty(gitRef))
{
// todo: loc
context.Error(uses, $"Expected format {{org}}/{{repo}}[/path]@ref. Actual '{uses.Value}'");
}
else
{
var repositoryName = $"{pathSegments[0]}/{pathSegments[1]}";
var directoryPath = pathSegments.Length > 2 ? String.Join("/", pathSegments.Skip(2)) : String.Empty;
result.Reference = new RepositoryPathReference
{
RepositoryType = RepositoryTypes.GitHub,
Name = repositoryName,
Ref = gitRef,
Path = directoryPath,
};
}
}
return result;
}
}
/// <summary>
/// When empty, default to "success()".
/// When a status function is not referenced, format as "success() &amp;&amp; &lt;CONDITION&gt;".
/// </summary>
private static String ConvertToIfCondition(
TemplateContext context,
TemplateToken token,
Boolean isJob,
Boolean isDefaultScope)
{
String condition;
if (token is null)
{
condition = null;
}
else if (token is BasicExpressionToken expressionToken)
{
condition = expressionToken.Expression;
}
else
{
var stringToken = token.AssertString($"{(isJob ? "job" : "step")} {PipelineTemplateConstants.If}");
condition = stringToken.Value;
}
if (String.IsNullOrWhiteSpace(condition))
{
return $"{PipelineTemplateConstants.Success}()";
}
var expressionParser = new ExpressionParser();
var functions = default(IFunctionInfo[]);
var namedValues = default(INamedValueInfo[]);
if (isJob)
{
namedValues = s_jobIfNamedValues;
// TODO: refactor into seperate functions
// functions = PhaseCondition.FunctionInfo;
}
else
{
namedValues = isDefaultScope ? s_stepNamedValues : s_stepInTemplateNamedValues;
functions = s_stepConditionFunctions;
}
var node = default(ExpressionNode);
try
{
node = expressionParser.CreateTree(condition, null, namedValues, functions) as ExpressionNode;
}
catch (Exception ex)
{
context.Error(token, ex);
return null;
}
if (node == null)
{
return $"{PipelineTemplateConstants.Success}()";
}
var hasStatusFunction = node.Traverse().Any(x =>
{
if (x is Function function)
{
return String.Equals(function.Name, PipelineTemplateConstants.Always, StringComparison.OrdinalIgnoreCase) ||
String.Equals(function.Name, PipelineTemplateConstants.Cancelled, StringComparison.OrdinalIgnoreCase) ||
String.Equals(function.Name, PipelineTemplateConstants.Failure, StringComparison.OrdinalIgnoreCase) ||
String.Equals(function.Name, PipelineTemplateConstants.Success, StringComparison.OrdinalIgnoreCase);
}
return false;
});
return hasStatusFunction ? condition : $"{PipelineTemplateConstants.Success}() && ({condition})";
}
private static readonly INamedValueInfo[] s_jobIfNamedValues = new INamedValueInfo[]
{
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.GitHub),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Needs),
};
private static readonly INamedValueInfo[] s_stepNamedValues = new INamedValueInfo[]
{
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Strategy),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Matrix),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Steps),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.GitHub),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Job),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Runner),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Env),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Needs),
};
private static readonly INamedValueInfo[] s_stepInTemplateNamedValues = new INamedValueInfo[]
{
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Strategy),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Matrix),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Steps),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Inputs),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.GitHub),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Job),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Runner),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Env),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Needs),
};
private static readonly IFunctionInfo[] s_stepConditionFunctions = new IFunctionInfo[]
{
new FunctionInfo<NoOperation>(PipelineTemplateConstants.Always, 0, 0),
new FunctionInfo<NoOperation>(PipelineTemplateConstants.Cancelled, 0, 0),
new FunctionInfo<NoOperation>(PipelineTemplateConstants.Failure, 0, 0),
new FunctionInfo<NoOperation>(PipelineTemplateConstants.Success, 0, 0),
new FunctionInfo<NoOperation>(PipelineTemplateConstants.HashFiles, 1, Byte.MaxValue),
};
}
}
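A condensed sketch of the `uses:` parsing shown in the ConvertToStep code above: `docker://` and local `./` references are special-cased, and everything else must match `{org}/{repo}[/path]@ref`. The sample references are illustrative.

```csharp
using System;
using System.Linq;

class UsesReferenceSketch
{
    static void Describe(string uses)
    {
        if (uses.StartsWith("docker://", StringComparison.Ordinal))
        {
            Console.WriteLine($"container image: {uses.Substring("docker://".Length)}");
            return;
        }

        if (uses.StartsWith("./") || uses.StartsWith(".\\"))
        {
            Console.WriteLine($"path in the workflow repository: {uses}");
            return;
        }

        // Split "{org}/{repo}[/path]@ref" into its repository, path, and ref parts.
        var usesSegments = uses.Split('@');
        var pathSegments = usesSegments[0].Split(new[] { '/', '\\' }, StringSplitOptions.RemoveEmptyEntries);
        var gitRef = usesSegments.Length == 2 ? usesSegments[1] : string.Empty;

        if (usesSegments.Length != 2 || pathSegments.Length < 2 || string.IsNullOrEmpty(gitRef))
        {
            Console.WriteLine($"Expected format {{org}}/{{repo}}[/path]@ref. Actual '{uses}'");
            return;
        }

        var repositoryName = $"{pathSegments[0]}/{pathSegments[1]}";
        var directoryPath = pathSegments.Length > 2 ? string.Join("/", pathSegments.Skip(2)) : string.Empty;
        Console.WriteLine($"repo: {repositoryName}, ref: {gitRef}, path: '{directoryPath}'");
    }

    static void Main()
    {
        Describe("actions/checkout@v2");
        Describe("docker://ubuntu:16.04");
        Describe("./local-action");
        Describe("my-org/monorepo/actions/build@main");
    }
}
```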

View File

@@ -159,31 +159,6 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
return result;
}
public List<ActionStep> LoadCompositeSteps(
TemplateToken token)
{
var result = default(List<ActionStep>);
if (token != null && token.Type != TokenType.Null)
{
var context = CreateContext(null, null, setMissingContext: false);
// TODO: we might want to to have a bool to prevent it from filling in with missing context w/ dummy variables
try
{
token = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.StepsInTemplate, token, 0, null, omitHeader: true);
context.Errors.Check();
result = PipelineTemplateConverter.ConvertToSteps(context, token);
}
catch (Exception ex) when (!(ex is TemplateValidationException))
{
context.Errors.Add(ex);
}
context.Errors.Check();
}
return result;
}
public Dictionary<String, String> EvaluateStepEnvironment(
TemplateToken token,
DictionaryContextData contextData,
@@ -425,8 +400,7 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
private TemplateContext CreateContext(
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions,
IEnumerable<KeyValuePair<String, Object>> expressionState = null,
bool setMissingContext = true)
IEnumerable<KeyValuePair<String, Object>> expressionState = null)
{
var result = new TemplateContext
{
@@ -475,21 +449,18 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
// - Evaluating early when all referenced contexts are available, even though all allowed
// contexts may not yet be available. For example, evaluating step display name can often
// be performed early.
if (setMissingContext)
foreach (var name in s_expressionValueNames)
{
foreach (var name in s_expressionValueNames)
if (!result.ExpressionValues.ContainsKey(name))
{
if (!result.ExpressionValues.ContainsKey(name))
{
result.ExpressionValues[name] = null;
}
result.ExpressionValues[name] = null;
}
foreach (var name in s_expressionFunctionNames)
}
foreach (var name in s_expressionFunctionNames)
{
if (!functionNames.Contains(name))
{
if (!functionNames.Contains(name))
{
result.ExpressionFunctions.Add(new FunctionInfo<NoOperation>(name, 0, Int32.MaxValue));
}
result.ExpressionFunctions.Add(new FunctionInfo<NoOperation>(name, 0, Int32.MaxValue));
}
}

View File

@@ -94,12 +94,5 @@ namespace GitHub.DistributedTask.Pipelines
public static readonly String Resources = "resources";
public static readonly String All = "all";
}
public static class ScriptStepInputs
{
public static readonly String Script = "script";
public static readonly String WorkingDirectory = "workingDirectory";
public static readonly String Shell = "shell";
}
}
}

View File

@@ -1,40 +0,0 @@
using System;
using System.Runtime.Serialization;
namespace GitHub.DistributedTask.WebApi
{
[DataContract]
public class ActionDownloadInfo
{
[DataMember(EmitDefaultValue = false)]
public ActionDownloadAuthentication Authentication { get; set; }
[DataMember(EmitDefaultValue = false)]
public string NameWithOwner { get; set; }
[DataMember(EmitDefaultValue = false)]
public string ResolvedNameWithOwner { get; set; }
[DataMember(EmitDefaultValue = false)]
public string ResolvedSha { get; set; }
[DataMember(EmitDefaultValue = false)]
public string TarballUrl { get; set; }
[DataMember(EmitDefaultValue = false)]
public string Ref { get; set; }
[DataMember(EmitDefaultValue = false)]
public string ZipballUrl { get; set; }
}
[DataContract]
public class ActionDownloadAuthentication
{
[DataMember(EmitDefaultValue = false)]
public DateTime ExpiresAt { get; set; }
[DataMember(EmitDefaultValue = false)]
public string Token { get; set; }
}
}

View File

@@ -1,16 +0,0 @@
using System.Collections.Generic;
using System.Runtime.Serialization;
namespace GitHub.DistributedTask.WebApi
{
[DataContract]
public class ActionDownloadInfoCollection
{
[DataMember]
public IDictionary<string, ActionDownloadInfo> Actions
{
get;
set;
}
}
}

View File

@@ -1,22 +0,0 @@
using System.Runtime.Serialization;
namespace GitHub.DistributedTask.WebApi
{
[DataContract]
public class ActionReference
{
[DataMember]
public string NameWithOwner
{
get;
set;
}
[DataMember]
public string Ref
{
get;
set;
}
}
}

View File

@@ -1,16 +0,0 @@
using System.Collections.Generic;
using System.Runtime.Serialization;
namespace GitHub.DistributedTask.WebApi
{
[DataContract]
public class ActionReferenceList
{
[DataMember]
public IList<ActionReference> Actions
{
get;
set;
}
}
}

View File

@@ -39,7 +39,7 @@ namespace GitHub.Services.OAuth
/// <summary>
/// Gets or sets the error description for the response.
/// </summary>
[DataMember(Name = "error_description", EmitDefaultValue = false)]
[DataMember(Name = "errordescription", EmitDefaultValue = false)]
public String ErrorDescription
{
get;

File diff suppressed because it is too large.

View File

@@ -70,24 +70,5 @@ namespace GitHub.Runner.Common.Tests.Util
}
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public void WhichHandleFullyQualifiedPath()
{
using (TestHostContext hc = new TestHostContext(this))
{
//Arrange
Tracing trace = hc.GetTrace();
// Act.
var gitPath = WhichUtil.Which("git", require: true, trace: trace);
var gitPath2 = WhichUtil.Which(gitPath, require: true, trace: trace);
// Assert.
Assert.Equal(gitPath, gitPath2);
}
}
}
}

File diff suppressed because it is too large.

View File

@@ -759,7 +759,6 @@ namespace GitHub.Runner.Common.Tests.Worker
_ec.Setup(x => x.Variables).Returns(new Variables(_hc, new Dictionary<string, VariableValue>()));
_ec.Setup(x => x.ExpressionValues).Returns(new DictionaryContextData());
_ec.Setup(x => x.ExpressionFunctions).Returns(new List<IFunctionInfo>());
_ec.Setup(x => x.FileTable).Returns(new List<String>());
_ec.Setup(x => x.Write(It.IsAny<string>(), It.IsAny<string>())).Callback((string tag, string message) => { _hc.GetTrace().Info($"{tag}{message}"); });
_ec.Setup(x => x.AddIssue(It.IsAny<Issue>(), It.IsAny<string>())).Callback((Issue issue, string message) => { _hc.GetTrace().Info($"[{issue.Type}]{issue.Message ?? message}"); });
}

View File

@@ -295,10 +295,9 @@ namespace GitHub.Runner.Common.Tests.Worker
{
Name = "action",
Id = actionId,
Reference = new Pipelines.RepositoryPathReference()
Reference = new Pipelines.ContainerRegistryReference()
{
Name = "actions/runner",
Ref = "v1"
Image = "ubuntu:16.04"
},
Inputs = actionInputs
};
@@ -328,7 +327,8 @@ namespace GitHub.Runner.Common.Tests.Worker
Assert.Equal("invalid1", finialInputs["invalid1"]);
Assert.Equal("invalid2", finialInputs["invalid2"]);
_ec.Verify(x => x.AddIssue(It.Is<Issue>(s => s.Message.Contains("Unexpected input(s) 'invalid1', 'invalid2'")), It.IsAny<string>()), Times.Once);
_ec.Verify(x => x.AddIssue(It.Is<Issue>(s => s.Message.Contains("Unexpected input 'invalid1'")), It.IsAny<string>()), Times.Once);
_ec.Verify(x => x.AddIssue(It.Is<Issue>(s => s.Message.Contains("Unexpected input 'invalid2'")), It.IsAny<string>()), Times.Once);
}
private void Setup([CallerMemberName] string name = "")
@@ -379,7 +379,6 @@ namespace GitHub.Runner.Common.Tests.Worker
_ec.Setup(x => x.ExpressionFunctions).Returns(new List<IFunctionInfo>());
_ec.Setup(x => x.IntraActionState).Returns(new Dictionary<string, string>());
_ec.Setup(x => x.EnvironmentVariables).Returns(new Dictionary<string, string>());
_ec.Setup(x => x.FileTable).Returns(new List<String>());
_ec.Setup(x => x.SetGitHubContext(It.IsAny<string>(), It.IsAny<string>()));
_ec.Setup(x => x.GetGitHubContext(It.IsAny<string>())).Returns("{\"foo\":\"bar\"}");
_ec.Setup(x => x.CancellationToken).Returns(_ecTokenSource.Token);

View File

@@ -686,17 +686,14 @@ namespace GitHub.Runner.Common.Tests.Worker
// <WORKSPACE>/workflow-repo/nested-other-repo
// <WORKSPACE>/other-repo
// <WORKSPACE>/other-repo/nested-workflow-repo
// <WORKSPACE>/workflow-repo-using-ssh
var workflowRepository = Path.Combine(workspaceDirectory, "workflow-repo");
var nestedOtherRepository = Path.Combine(workspaceDirectory, "workflow-repo", "nested-other-repo");
var otherRepository = Path.Combine(workspaceDirectory, workflowRepository, "nested-other-repo");
var nestedWorkflowRepository = Path.Combine(workspaceDirectory, "other-repo", "nested-workflow-repo");
var workflowRepositoryUsingSsh = Path.Combine(workspaceDirectory, "workflow-repo-using-ssh");
await CreateRepository(hostContext, workflowRepository, "https://github.com/my-org/workflow-repo");
await CreateRepository(hostContext, nestedOtherRepository, "https://github.com/my-org/other-repo");
await CreateRepository(hostContext, otherRepository, "https://github.com/my-org/other-repo");
await CreateRepository(hostContext, nestedWorkflowRepository, "https://github.com/my-org/workflow-repo");
await CreateRepository(hostContext, workflowRepositoryUsingSsh, "git@github.com:my-org/workflow-repo.git");
// Create test files
var file_noRepository = Path.Combine(workspaceDirectory, "no-repo.txt");
@@ -706,8 +703,7 @@ namespace GitHub.Runner.Common.Tests.Worker
var file_nestedOtherRepository = Path.Combine(nestedOtherRepository, "nested-other-repo");
var file_otherRepository = Path.Combine(otherRepository, "other-repo.txt");
var file_nestedWorkflowRepository = Path.Combine(nestedWorkflowRepository, "nested-workflow-repo.txt");
var file_workflowRepositoryUsingSsh = Path.Combine(workflowRepositoryUsingSsh, "workflow-repo-using-ssh.txt");
foreach (var file in new[] { file_noRepository, file_workflowRepository, file_workflowRepository_nestedDirectory, file_workflowRepository_failsafe, file_nestedOtherRepository, file_otherRepository, file_nestedWorkflowRepository, file_workflowRepositoryUsingSsh })
foreach (var file in new[] { file_noRepository, file_workflowRepository, file_workflowRepository_nestedDirectory, file_workflowRepository_failsafe, file_nestedOtherRepository, file_otherRepository, file_nestedWorkflowRepository })
{
Directory.CreateDirectory(Path.GetDirectoryName(file));
File.WriteAllText(file, "");
@@ -722,9 +718,8 @@ namespace GitHub.Runner.Common.Tests.Worker
Process($"{file_nestedOtherRepository}: some error 6");
Process($"{file_otherRepository}: some error 7");
Process($"{file_nestedWorkflowRepository}: some error 8");
Process($"{file_workflowRepositoryUsingSsh}: some error 9");
Assert.Equal(9, _issues.Count);
Assert.Equal(8, _issues.Count);
Assert.Equal("some error 1", _issues[0].Item1.Message);
Assert.False(_issues[0].Item1.Data.ContainsKey("file"));
@@ -749,9 +744,6 @@ namespace GitHub.Runner.Common.Tests.Worker
Assert.Equal("some error 8", _issues[7].Item1.Message);
Assert.Equal(file_nestedWorkflowRepository.Substring(nestedWorkflowRepository.Length + 1).Replace(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar), _issues[7].Item1.Data["file"]);
Assert.Equal("some error 9", _issues[8].Item1.Message);
Assert.Equal(file_workflowRepositoryUsingSsh.Substring(workflowRepositoryUsingSsh.Length + 1).Replace(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar), _issues[8].Item1.Data["file"]);
}
Environment.SetEnvironmentVariable("RUNNER_TEST_GET_REPOSITORY_PATH_FAILSAFE", "");

View File

@@ -80,7 +80,7 @@ namespace GitHub.Runner.Common.Tests.Worker
{
_ec.Object.Result = null;
_ec.Setup(x => x.JobSteps).Returns(new List<IStep>(variableSet.Select(x => x.Object).ToList()));
_ec.Setup(x => x.JobSteps).Returns(new Queue<IStep>(variableSet.Select(x => x.Object).ToList()));
// Act.
await _stepsRunner.RunAsync(jobContext: _ec.Object);
@@ -115,7 +115,7 @@ namespace GitHub.Runner.Common.Tests.Worker
{
_ec.Object.Result = null;
_ec.Setup(x => x.JobSteps).Returns(new List<IStep>(variableSet.Select(x => x.Object).ToList()));
_ec.Setup(x => x.JobSteps).Returns(new Queue<IStep>(variableSet.Select(x => x.Object).ToList()));
// Act.
await _stepsRunner.RunAsync(jobContext: _ec.Object);
@@ -154,7 +154,7 @@ namespace GitHub.Runner.Common.Tests.Worker
{
_ec.Object.Result = null;
_ec.Setup(x => x.JobSteps).Returns(new List<IStep>(variableSet.Steps.Select(x => x.Object).ToList()));
_ec.Setup(x => x.JobSteps).Returns(new Queue<IStep>(variableSet.Steps.Select(x => x.Object).ToList()));
// Act.
await _stepsRunner.RunAsync(jobContext: _ec.Object);
@@ -208,7 +208,7 @@ namespace GitHub.Runner.Common.Tests.Worker
{
_ec.Object.Result = null;
_ec.Setup(x => x.JobSteps).Returns(new List<IStep>(variableSet.Steps.Select(x => x.Object).ToList()));
_ec.Setup(x => x.JobSteps).Returns(new Queue<IStep>(variableSet.Steps.Select(x => x.Object).ToList()));
// Act.
await _stepsRunner.RunAsync(jobContext: _ec.Object);
@@ -287,7 +287,7 @@ namespace GitHub.Runner.Common.Tests.Worker
{
_ec.Object.Result = null;
_ec.Setup(x => x.JobSteps).Returns(new List<IStep>(variableSet.Steps.Select(x => x.Object).ToList()));
_ec.Setup(x => x.JobSteps).Returns(new Queue<IStep>(variableSet.Steps.Select(x => x.Object).ToList()));
// Act.
await _stepsRunner.RunAsync(jobContext: _ec.Object);
@@ -330,7 +330,7 @@ namespace GitHub.Runner.Common.Tests.Worker
{
_ec.Object.Result = null;
_ec.Setup(x => x.JobSteps).Returns(new List<IStep>(variableSet.Step.Select(x => x.Object).ToList()));
_ec.Setup(x => x.JobSteps).Returns(new Queue<IStep>(variableSet.Step.Select(x => x.Object).ToList()));
// Act.
await _stepsRunner.RunAsync(jobContext: _ec.Object);
@@ -361,7 +361,7 @@ namespace GitHub.Runner.Common.Tests.Worker
{
_ec.Object.Result = null;
_ec.Setup(x => x.JobSteps).Returns(new List<IStep>(variableSet.Select(x => x.Object).ToList()));
_ec.Setup(x => x.JobSteps).Returns(new Queue<IStep>(variableSet.Select(x => x.Object).ToList()));
// Act.
await _stepsRunner.RunAsync(jobContext: _ec.Object);
@@ -391,7 +391,7 @@ namespace GitHub.Runner.Common.Tests.Worker
{
_ec.Object.Result = null;
_ec.Setup(x => x.JobSteps).Returns(new List<IStep>(variableSet.Select(x => x.Object).ToList()));
_ec.Setup(x => x.JobSteps).Returns(new Queue<IStep>(variableSet.Select(x => x.Object).ToList()));
// Act.
await _stepsRunner.RunAsync(jobContext: _ec.Object);
@@ -417,7 +417,7 @@ namespace GitHub.Runner.Common.Tests.Worker
_ec.Object.Result = null;
_ec.Setup(x => x.JobSteps).Returns(new List<IStep>(new[] { step1.Object }));
_ec.Setup(x => x.JobSteps).Returns(new Queue<IStep>(new[] { step1.Object }));
// Act.
await _stepsRunner.RunAsync(jobContext: _ec.Object);
@@ -426,11 +426,11 @@ namespace GitHub.Runner.Common.Tests.Worker
Assert.Equal(TaskResult.Succeeded, _ec.Object.Result ?? TaskResult.Succeeded);
#if OS_WINDOWS
Assert.Equal("100", step1.Object.ExecutionContext.ExpressionValues["env"].AssertDictionary("env")["env1"].AssertString("100"));
Assert.Equal("github_actions", step1.Object.ExecutionContext.ExpressionValues["env"].AssertDictionary("env")["env2"].AssertString("github_actions"));
Assert.Equal("100", _ec.Object.ExpressionValues["env"].AssertDictionary("env")["env1"].AssertString("100"));
Assert.Equal("github_actions", _ec.Object.ExpressionValues["env"].AssertDictionary("env")["env2"].AssertString("github_actions"));
#else
Assert.Equal("100", step1.Object.ExecutionContext.ExpressionValues["env"].AssertCaseSensitiveDictionary("env")["env1"].AssertString("100"));
Assert.Equal("github_actions", step1.Object.ExecutionContext.ExpressionValues["env"].AssertCaseSensitiveDictionary("env")["env2"].AssertString("github_actions"));
Assert.Equal("100", _ec.Object.ExpressionValues["env"].AssertCaseSensitiveDictionary("env")["env1"].AssertString("100"));
Assert.Equal("github_actions", _ec.Object.ExpressionValues["env"].AssertCaseSensitiveDictionary("env")["env2"].AssertString("github_actions"));
#endif
}
}
@@ -455,7 +455,7 @@ namespace GitHub.Runner.Common.Tests.Worker
_ec.Object.Result = null;
_ec.Setup(x => x.JobSteps).Returns(new List<IStep>(new[] { step1.Object, step2.Object }));
_ec.Setup(x => x.JobSteps).Returns(new Queue<IStep>(new[] { step1.Object, step2.Object }));
// Act.
await _stepsRunner.RunAsync(jobContext: _ec.Object);
@@ -463,13 +463,13 @@ namespace GitHub.Runner.Common.Tests.Worker
// Assert.
Assert.Equal(TaskResult.Succeeded, _ec.Object.Result ?? TaskResult.Succeeded);
#if OS_WINDOWS
Assert.Equal("1000", step2.Object.ExecutionContext.ExpressionValues["env"].AssertDictionary("env")["env1"].AssertString("1000"));
Assert.Equal("github_actions", step2.Object.ExecutionContext.ExpressionValues["env"].AssertDictionary("env")["env3"].AssertString("github_actions"));
Assert.False(step2.Object.ExecutionContext.ExpressionValues["env"].AssertDictionary("env").ContainsKey("env2"));
Assert.Equal("1000", _ec.Object.ExpressionValues["env"].AssertDictionary("env")["env1"].AssertString("1000"));
Assert.Equal("github_actions", _ec.Object.ExpressionValues["env"].AssertDictionary("env")["env3"].AssertString("github_actions"));
Assert.False(_ec.Object.ExpressionValues["env"].AssertDictionary("env").ContainsKey("env2"));
#else
Assert.Equal("1000", step2.Object.ExecutionContext.ExpressionValues["env"].AssertCaseSensitiveDictionary("env")["env1"].AssertString("1000"));
Assert.Equal("github_actions", step2.Object.ExecutionContext.ExpressionValues["env"].AssertCaseSensitiveDictionary("env")["env3"].AssertString("github_actions"));
Assert.False(step2.Object.ExecutionContext.ExpressionValues["env"].AssertCaseSensitiveDictionary("env").ContainsKey("env2"));
Assert.Equal("1000", _ec.Object.ExpressionValues["env"].AssertCaseSensitiveDictionary("env")["env1"].AssertString("1000"));
Assert.Equal("github_actions", _ec.Object.ExpressionValues["env"].AssertCaseSensitiveDictionary("env")["env3"].AssertString("github_actions"));
Assert.False(_ec.Object.ExpressionValues["env"].AssertCaseSensitiveDictionary("env").ContainsKey("env2"));
#endif
}
}
@@ -493,7 +493,7 @@ namespace GitHub.Runner.Common.Tests.Worker
_ec.Object.Result = null;
_ec.Setup(x => x.JobSteps).Returns(new List<IStep>(new[] { step1.Object, step2.Object }));
_ec.Setup(x => x.JobSteps).Returns(new Queue<IStep>(new[] { step1.Object, step2.Object }));
// Act.
await _stepsRunner.RunAsync(jobContext: _ec.Object);
@@ -501,11 +501,11 @@ namespace GitHub.Runner.Common.Tests.Worker
// Assert.
Assert.Equal(TaskResult.Succeeded, _ec.Object.Result ?? TaskResult.Succeeded);
#if OS_WINDOWS
Assert.Equal("1000", step2.Object.ExecutionContext.ExpressionValues["env"].AssertDictionary("env")["env1"].AssertString("1000"));
Assert.Equal("something", step2.Object.ExecutionContext.ExpressionValues["env"].AssertDictionary("env")["env2"].AssertString("something"));
Assert.Equal("1000", _ec.Object.ExpressionValues["env"].AssertDictionary("env")["env1"].AssertString("1000"));
Assert.Equal("something", _ec.Object.ExpressionValues["env"].AssertDictionary("env")["env2"].AssertString("something"));
#else
Assert.Equal("1000", step2.Object.ExecutionContext.ExpressionValues["env"].AssertCaseSensitiveDictionary("env")["env1"].AssertString("1000"));
Assert.Equal("something", step2.Object.ExecutionContext.ExpressionValues["env"].AssertCaseSensitiveDictionary("env")["env2"].AssertString("something"));
Assert.Equal("1000", _ec.Object.ExpressionValues["env"].AssertCaseSensitiveDictionary("env")["env1"].AssertString("1000"));
Assert.Equal("something", _ec.Object.ExpressionValues["env"].AssertCaseSensitiveDictionary("env")["env2"].AssertString("something"));
#endif
}
}
@@ -524,7 +524,7 @@ namespace GitHub.Runner.Common.Tests.Worker
_ec.Object.Result = null;
_ec.Setup(x => x.JobSteps).Returns(new List<IStep>(new[] { step1.Object, step2.Object, step3.Object }));
_ec.Setup(x => x.JobSteps).Returns(new Queue<IStep>(new[] { step1.Object, step2.Object, step3.Object }));
// Act.
await _stepsRunner.RunAsync(jobContext: _ec.Object);
@@ -560,7 +560,7 @@ namespace GitHub.Runner.Common.Tests.Worker
_ec.Object.Result = null;
_ec.Setup(x => x.JobSteps).Returns(new List<IStep>(new[] { step1.Object, step2.Object, step3.Object }));
_ec.Setup(x => x.JobSteps).Returns(new Queue<IStep>(new[] { step1.Object, step2.Object, step3.Object }));
// Act.
await _stepsRunner.RunAsync(jobContext: _ec.Object);
@@ -602,7 +602,7 @@ namespace GitHub.Runner.Common.Tests.Worker
stepContext.Setup(x => x.WriteDebug).Returns(true);
stepContext.Setup(x => x.Variables).Returns(_variables);
stepContext.Setup(x => x.EnvironmentVariables).Returns(_env);
stepContext.Setup(x => x.ExpressionValues).Returns(new DictionaryContextData());
stepContext.Setup(x => x.ExpressionValues).Returns(_contexts);
stepContext.Setup(x => x.ExpressionFunctions).Returns(new List<IFunctionInfo>());
stepContext.Setup(x => x.JobContext).Returns(_jobContext);
stepContext.Setup(x => x.StepsContext).Returns(_stepContext);
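
The repeated change in this file swaps `JobSteps` from `List<IStep>` to `Queue<IStep>`. A minimal sketch (my own illustration with simplified stand-in types, not the runner's `StepsRunner`) of why a queue suits a runner loop that consumes steps in FIFO order and may enqueue more steps while running:

```csharp
// Simplified stand-ins: IStep and EchoStep are illustrative only.
using System;
using System.Collections.Generic;
using System.Threading.Tasks;

interface IStep
{
    string DisplayName { get; }
    Task RunAsync();
}

class EchoStep : IStep
{
    public string DisplayName { get; }
    public EchoStep(string name) => DisplayName = name;

    public Task RunAsync()
    {
        Console.WriteLine($"Running {DisplayName}");
        return Task.CompletedTask;
    }
}

class StepsRunnerSketch
{
    static async Task Main()
    {
        var jobSteps = new Queue<IStep>(new IStep[] { new EchoStep("step1"), new EchoStep("step2") });
        while (jobSteps.Count > 0)
        {
            var step = jobSteps.Dequeue();   // FIFO: next step comes off the front
            await step.RunAsync();
            // A step discovered at runtime (for example a post-action) could be
            // enqueued here and would still run in order.
        }
    }
}
```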

View File

@@ -1 +1 @@
2.267.0
2.169.0