Compare commits


77 Commits

Author SHA1 Message Date
TingluoHuang
c8f3726265 c 2020-10-19 14:46:46 -04:00
TingluoHuang
ef72239ff8 c 2020-09-12 23:40:47 -04:00
TingluoHuang
993357df7d 2 versions 2020-09-11 13:46:31 -04:00
TingluoHuang
c62ab23bdd new script 2020-09-11 00:42:28 -04:00
TingluoHuang
9d48d2be87 new script 2020-09-11 00:41:58 -04:00
TingluoHuang
69aa8d8984 fix node 2020-09-03 17:24:59 -04:00
TingluoHuang
7da6739eae dind 2020-09-03 01:12:46 -04:00
TingluoHuang
58afa42109 enterprise 2020-09-01 00:34:58 -04:00
TingluoHuang
3dc52b28af update dockerfile 2020-08-30 00:29:28 -04:00
TingluoHuang
993edc3172 config via pat. 2020-08-29 00:21:17 -04:00
TingluoHuang
6395efe7e0 k8s prototype. 2020-08-14 11:20:12 -04:00
Tingluo Huang
e7b0844772 Add manual trigger 2020-07-23 00:34:36 -04:00
Ethan Chiu
d5a5550649 Fix Timeout-minutes for Whole Composite Action Step (#599)
* Exploring child Linked Cancellation Tokens

* Preliminary Timeout-minutes fix

* Final Solution for resolving cancellation token's timeout vs. cancellation

* Clean up + Fix error handling

* Use linked tokens instead

* Clean up

* one liner

* Remove JobExecutionContext => Replace with public Root accessor

* Move CreateLinkedTokenSource in the CreateCompositeStep Function
2020-07-22 18:01:50 -04:00
Ethan Chiu
3d0147d322 Improve Debugging Messages for Empty Tokens (#609)
* Improve Debugging Messages for Empty Tokens

* fix tests
2020-07-22 17:34:40 -04:00
Ethan Chiu
bd1f245aac Clarify details for defaults, shell, and working-dir (#607) 2020-07-22 16:11:55 -04:00
Steven Maude
005f1c15b1 Fix "propogate" typo in ADR 0549 (#600) 2020-07-22 16:10:57 -04:00
David Kale
da3cb5506f Fold logs for intermediate docker commands (#608) 2020-07-22 14:55:49 -04:00
dependabot[bot]
32d439070b Bump lodash in /src/Misc/expressionFunc/hashFiles (#603)
Bumps [lodash](https://github.com/lodash/lodash) from 4.17.15 to 4.17.19.
- [Release notes](https://github.com/lodash/lodash/releases)
- [Commits](https://github.com/lodash/lodash/compare/4.17.15...4.17.19)

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2020-07-20 10:21:01 -04:00
jeffrey
ec9f8f1682 dbl quotes around variable so CD works if path contains spaces (#602) 2020-07-20 10:19:37 -04:00
eric sciple
0921af735a move shared ExecutionContext properties under .Global (#594) 2020-07-19 19:05:47 -04:00
eric sciple
1cc3c08cf2 Prepare to switch GITHUB_ACTION to use ContextName instead of refname (#593)
This PR changes GITHUB_ACTION to use the step ContextName, instead of refname. The behavior is behind a feature flag. Refname is an otherwise deprecated property.

Primary motivation: For composite actions, we need a distinct GITHUB_ACTION for each nested step. This PR adds code to generate a default context name for nested steps.

For nested steps, GITHUB_ACTION will be set to "{ScopeName}.{ContextName}" to ensure no collisions.

A corresponding change will be made on the server so context name is never empty. Generated context names will start with "__".

A follow-up PR is required to avoid tracking "step" context values (outputs/conclusion/result) for generated context names. Waiting on telemetry from the server to confirm it's safe to assume leading "__" is a generate context name.
2020-07-19 17:19:13 -04:00
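As a hedged illustration of the naming described above (the step id and the generated context name below are hypothetical, not taken from the PR):
```yaml
steps:
  # With the feature flag enabled, GITHUB_ACTION for this step would be its
  # context name (the id "build") rather than the action refname.
  - id: build
    uses: user/composite@v1
    # A nested step inside user/composite would get
    # GITHUB_ACTION="{ScopeName}.{ContextName}", e.g. something like
    # "build.__run1" (generated context names start with "__").
```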
Ethan Chiu
f9dca15c63 Composite Run Steps Refactoring (#591)
* Add basic framework for baby steps runner

* Basic logic for adding steps / invoking composite action steps

* Composite Steps Runner MVP

* Fix null object reference error

* initialize composite

* Comment out code that is handled by stepsrunner

* Add composite clean up step

* Remove previous 'workarounds' from StepsRunner. Clean Up PR

* Remove todo

* Remove todo

* Fix using uninitialized object yikes

* Remove time delay

* Format handler

* Move output handler into action handler

* Add try to evaluate display name

* Remove while loop yikes

* Abstract away the windows encoding check during step running

* Github context set to {ScopeName}.{ContextName} or {ContextName} if ScopeName is null

* Remove setting result to success since result defaults to success

* Fix windows error

* Fix windows

* revert:

* Windows fix

* Fix Windows Error in Abstraction

* Remove Composite Steps Runner => consolidate into Composite Steps Runner

* Remove unn. attribute in ExecutionContext

* Change protection levels, plus change function name to more clear meaning

* Remove location param

* location pt.2 fix

* Remove outputs step

* Remove temp directory

* new line

* Add arguitl not null

* better comment

* Change encoding name

* Check count > 0 for composite steps, import System.Threading

* Change function header encodingutil

* Add TODO

* Add await

* Handle Failed Step

* Move over SetAllCompositeOutputs to the handler

* Remove timeout-minutes setting in steps-level

* Use only ExecutionContext

* Move using to the top

* Remove redundant check

* Change function name

* Remove testing code

* Consolidate error code

* Consolidate code

* Change HandleOutput => ProcessCompositeActionOutputs

* Remove set the timeout comment

* Add Cancelling functionality + Remove unn. parameter
2020-07-17 16:31:48 -04:00
eric sciple
0877d9a533 Update StringUtil.cs 2020-07-16 10:30:42 -04:00
eric sciple
d5e40c6a60 Update 0549-composite-run-steps.md 2020-07-15 20:00:45 -04:00
eric sciple
391bc35bb9 Update 0549-composite-run-steps.md 2020-07-15 19:59:13 -04:00
eric sciple
e4267b8434 Update 0549-composite-run-steps.md 2020-07-15 19:57:22 -04:00
TingluoHuang
2709cbc0ea rename master to main. 2020-07-14 13:37:20 -04:00
Ethan Chiu
5e0cde8649 Composite Actions UI (#578)
* Composite Action Run Steps

* Env Flow => Able to get env variables and overwrite current env variables => but it doesn't 'stick'

* clean up

* Clean up trace messages + add Trace debug in ActionManager

* Add debugging message

* Optimize runtime of code

* Change String to string

* Add comma to Composite

* Change JobSteps to a List, Change Register Step function name

* Add TODO, remove unn. content

* Remove unnecessary code

* Fix unit tests

* Fix env format

* Remove comment

* Remove TODO message for context

* Add verbose trace logs which are only viewable by devs

* Initial Start for FileTable stuff

* Progress towards passing FileTable or FileID or FileName

* Sort usings in Composite Action Handler

* Change 0 to location

* Update context variables in composite action yaml

* Add helpful error message for null steps

* Pass fileID to all children token of root action token

* Change confusing term context => templateContext, Eliminate _fileTable and only use ExecutionContext.FileTable + update this table when need be

* Remove unnecessary FileID attribute from CompositeActionExecutionData

* Clean up file path for error message

* Remove todo

* Initial start/framework for output handling

* Outline different class vs Handler approach

* Remove InitializeScope

* Remove InitializeScope

* Fix Workflow Step Env overriding Parent Env

* First Approach for Attaching ID + Group ID to each Composite Action Step

* Add GroupID to the ActionDefinitionData

* starting foundation for handling clean up outputs step

* Pass outputs data to each composite action step to enable set-output functionality

* Create ScopeName for whole composite action.

This will enable us to add to the StepsContext[ScopeName] for the composite action which will allow us to use all these outputs in the cleanup step

* Hook up composite output step to handler => tmmrw implement composite output handler

* Add post composite action step to cleanup outputs => triggers composite output cleanup handler

* Fix Outputs Token handling start. Add individual step scope names.

* Set up Scope Name and Context Name naming system{

* Figured out how to pass Parent Execution Context to clean up step

* Figured out how to pass Parent Execution Context and scope names to
clean up step

* Add GetOutput function for StepsContext

* Generate child scope name correctly if parent scope name is null

* Simplify InitializeScope()

* Outputs are set correctly and able to get all final outputs in handler

* Parse through Action Outputs

* Fix null ScopeName + ContextName in CompositeOutputHandler

* Shift over handling of Action Outputs to output handler

* First attempt to fix null retrievals for output variables

* Basic Support for Outputs Done.

* Clean up pt.1

* Refactor outputs to avoid using Action Reference

* Clean up code

* Clean up part 2

* Add clarifying comments for the output handler

* Remove TODO

* Remove env in composite action scope

* Clean up

* Revert back

* revert back

* add back envToken

* Remove unnecessary code

* Add file length check

* Clean up

* Fix logging issue

* Figure out how to handle set-env edge cases

* formatting

* fix unit tests

* Fix windows unit test syntax error

* Fix period

* Sanity check for fileTable add + remove unn. code

* revert back

* Add back line break

* Fix null errors

* Address situation if FileTable is null + add sanity check for adding file to fileTable

* add line

* Revert

* Fix unit tests to instantiate a FileTable

* Fix logic for trimming manifestfile path

* Add null check

* Revert

* Revert

* revert

* spacing

* Add filetable to testing file, remove ? since we know filetable should never be non null

* Fix Throw logic

* Clarify template outputs token

* Add another type support for outputs to avoid container unit tests errors

* Add mapping for parity

* Build support for new outputs format

* Build support for new outputs format

* Refactor to avoid duplication of action yaml for workflow yaml

* revert

* revert

* revert

* spacing
2020-07-13 17:55:15 -04:00
Ethan Chiu
cb2b323781 Composite Run Steps Outputs (#568)
* Composite Action Run Steps

* Env Flow => Able to get env variables and overwrite current env variables => but it doesn't 'stick'

* clean up

* Clean up trace messages + add Trace debug in ActionManager

* Add debugging message

* Optimize runtime of code

* Change String to string

* Add comma to Composite

* Change JobSteps to a List, Change Register Step function name

* Add TODO, remove unn. content

* Remove unnecessary code

* Fix unit tests

* Fix env format

* Remove comment

* Remove TODO message for context

* Add verbose trace logs which are only viewable by devs

* Initial Start for FileTable stuff

* Progress towards passing FileTable or FileID or FileName

* Sort usings in Composite Action Handler

* Change 0 to location

* Update context variables in composite action yaml

* Add helpful error message for null steps

* Pass fileID to all children token of root action token

* Change confusing term context => templateContext, Eliminate _fileTable and only use ExecutionContext.FileTable + update this table when need be

* Remove unnecessary FileID attribute from CompositeActionExecutionData

* Clean up file path for error message

* Remove todo

* Initial start/framework for output handling

* Outline different class vs Handler approach

* Remove InitializeScope

* Remove InitializeScope

* Fix Workflow Step Env overriding Parent Env

* First Approach for Attaching ID + Group ID to each Composite Action Step

* Add GroupID to the ActionDefinitionData

* starting foundation for handling clean up outputs step

* Pass outputs data to each composite action step to enable set-output functionality

* Create ScopeName for whole composite action.

This will enable us to add to the StepsContext[ScopeName] for the composite action which will allow us to use all these outputs in the cleanup step

* Hook up composite output step to handler => tmmrw implement composite output handler

* Add post composite action step to cleanup outputs => triggers composite output cleanup handler

* Fix Outputs Token handling start. Add individual step scope names.

* Set up Scope Name and Context Name naming system{

* Figured out how to pass Parent Execution Context to clean up step

* Figured out how to pass Parent Execution Context and scope names to
clean up step

* Add GetOutput function for StepsContext

* Generate child scope name correctly if parent scope name is null

* Simplify InitializeScope()

* Outputs are set correctly and able to get all final outputs in handler

* Parse through Action Outputs

* Fix null ScopeName + ContextName in CompositeOutputHandler

* Shift over handling of Action Outputs to output handler

* First attempt to fix null retrievals for output variables

* Basic Support for Outputs Done.

* Clean up pt.1

* Refactor outputs to avoid using Action Reference

* Clean up code

* Clean up part 2

* Add clarifying comments for the output handler

* Remove TODO

* Remove env in composite action scope

* Clean up

* Revert back

* revert back

* add back envToken

* Remove unnecessary code

* Add file length check

* Clean up

* Figure out how to handle set-env edge cases

* formatting

* fix unit tests

* Fix windows unit test syntax error

* Fix period

* Sanity check for fileTable add + remove unn. code

* revert back

* Add back line break

* Fix null errors

* Address situation if FileTable is null + add sanity check for adding file to fileTable

* add line

* Revert

* Fix unit tests to instantiate a FileTable

* Fix logic for trimming manifestfile path

* Add null check

* Revert

* Revert

* revert

* spacing

* Add filetable to testing file, remove ? since we know filetable should never be non null

* Fix Throw logic

* Clarify template outputs token

* Add another type support for outputs to avoid container unit tests errors

* Add mapping for parity

* Build support for new outputs format

* Refactor to avoid duplication of action yaml for workflow yaml

* Move SDK work in ActionManifestManager, Condense Code

* Defer runs evaluation till after for loop to ensure order doesn't matter

* Fix logic error in setting scope and context names

* Add Regex + Add Child Context name null resolution

* move private function to bottom of class
2020-07-13 17:23:19 -04:00
Ethan Chiu
6c3958f365 Composite Run Steps ADR (#554)
* start

* Inputs + Outputs

* Clarify docs

* Finish Environment

* Add if condition

* Clarify language

* Update 0549-composite-run-steps.md

* timeout-minutes

* Finish

* add relevant example

* Fix syntax

* fix env example

* fix yaml syntax

* Update 0549-composite-run-steps.md

* Update file names, add more relevant example if condition

* Add note to continue-on-error

* Apply changes to If Condition

* bolding

* Update 0549-composite-run-steps.md

* Update 0549-composite-run-steps.md

* Update 0549-composite-run-steps.md

* Update 0549-composite-run-steps.md

* Syntax support + spacing

* Add guiding principles.

* Update 0549-composite-run-steps.md

* Reverse order.

* Update 0549-composite-run-steps.md

* change from job to step

* Update 0549-composite-run-steps.md

* Update 0549-composite-run-steps.md

* Update 0549-composite-run-steps.md

* Add Secrets

* Update 0549-composite-run-steps.md

* Update 0549-composite-run-steps.md

* Fix output example

* Fix output example

* Fix action examples to use using.

* fix output variable name

* update workingDir + env

* Defaults + continue-on-error

* Update Outputs Section

* Eliminate Env

* Secrets

* Update timeout-minutes

* Update 0549-composite-run-steps.md

* Update 0549-composite-run-steps.md

* Fix example.

* Remove TODOs

* Update 0549-composite-run-steps.md

* Update 0549-composite-run-steps.md
2020-07-13 12:30:31 -04:00
Ethan Chiu
9d7bd4706b Improve Error Messaging for Actions by Using ExecutionContext's FileTable as Single Source of Truth and by Passing FileID to All Children Tokens. (#564)
* Composite Action Run Steps

* Env Flow => Able to get env variables and overwrite current env variables => but it doesn't 'stick'

* clean up

* Clean up trace messages + add Trace debug in ActionManager

* Add debugging message

* Optimize runtime of code

* Change String to string

* Add comma to Composite

* Change JobSteps to a List, Change Register Step function name

* Add TODO, remove unn. content

* Remove unnecessary code

* Fix unit tests

* Fix env format

* Remove comment

* Remove TODO message for context

* Add verbose trace logs which are only viewable by devs

* Initial Start for FileTable stuff

* Progress towards passing FileTable or FileID or FileName

* Sort usings in Composite Action Handler

* Change 0 to location

* Update context variables in composite action yaml

* Add helpful error message for null steps

* Pass fileID to all children token of root action token

* Change confusing term context => templateContext, Eliminate _fileTable and only use ExecutionContext.FileTable + update this table when need be

* Remove unnecessary FileID attribute from CompositeActionExecutionData

* Clean up file path for error message

* Remove todo

* Fix Workflow Step Env overriding Parent Env

* Remove env in composite action scope

* Clean up

* Revert back

* revert back

* add back envToken

* Remove unnecessary code

* Add file length check

* Clean up

* Figure out how to handle set-env edge cases

* formatting

* fix unit tests

* Fix windows unit test syntax error

* Fix period

* Sanity check for fileTable add + remove unn. code

* revert back

* Add back line break

* Fix null errors

* Address situation if FileTable is null + add sanity check for adding file to fileTable

* add line

* Revert

* Fix unit tests to instantiate a FileTable

* Fix logic for trimming manifestfile path

* Add null check

* Add filetable to testing file, remove ? since we know filetable should never be non null
2020-07-08 17:15:16 -04:00
Ethan Chiu
5822a38c39 Add bash command for running custom runner (#569) 2020-07-08 11:20:38 -04:00
Ethan Chiu
d42c9da2d7 Composite Actions: Support Env Flow (#557)
* Composite Action Run Steps

* Env Flow => Able to get env variables and overwrite current env variables => but it doesn't 'stick'

* clean up

* Clean up trace messages + add Trace debug in ActionManager

* Add debugging message

* Optimize runtime of code

* Change String to string

* Add comma to Composite

* Change JobSteps to a List, Change Register Step function name

* Add TODO, remove unn. content

* Remove unnecessary code

* Fix unit tests

* Fix env format

* Remove comment

* Remove TODO message for context

* Add verbose trace logs which are only viewable by devs

* Sort usings in Composite Action Handler

* Change 0 to location

* Update context variables in composite action yaml

* Add helpful error message for null steps

* Fix Workflow Step Env overriding Parent Env

* Remove env in composite action scope

* Clean up

* Revert back

* revert back

* add back envToken

* Remove unnecessary code

* Figure out how to handle set-env edge cases

* formatting

* fix unit tests

* Fix windows unit test syntax error
2020-07-08 10:16:51 -04:00
eric sciple
121deedeb5 Fix trailing '.0' for Int64 values (#572) 2020-06-30 17:25:47 -04:00
Ethan Chiu
a0942ed345 Composite Actions Support for Multiple Run Steps (#549)
* Composite Action Run Steps

* Clean up trace messages + add Trace debug in ActionManager

* Change String to string

* Add comma to Composite

* Change JobSteps to a List, Change Register Step function name

* Add TODO, remove unn. content

* Remove unnecessary code

* Fix unit tests

* Add verbose trace logs which are only viewable by devs

* Sort usings in Composite Action Handler

* Change 0 to location

* Update context variables in composite action yaml

* Add helpful error message for null steps
2020-06-23 15:35:32 -04:00
TingluoHuang
7cef9a27ca release 2.267.0 runner. 2020-06-23 14:05:28 -04:00
Tingluo Huang
df7e16954e print runner and machine name to log. (#539) 2020-06-23 13:57:37 -04:00
eric sciple
4e7d27a53c remove temporary logic when resolving action download info (#550) 2020-06-15 13:13:47 -04:00
Lokesh Gopu
89d1418e48 Update exception message (#540) 2020-06-11 17:25:50 -04:00
Tingluo Huang
e728b8594d fix race condition. (#538) 2020-06-11 16:17:24 -04:00
Tingluo Huang
de4490d06d Restore SELinux context on service file when SELinux is enabled (#525) 2020-06-11 15:40:09 -04:00
Tingluo Huang
2e800f857e Skip search $PATH on command with fully qualified path (#526) 2020-06-11 13:52:42 -04:00
Tingluo Huang
312c7668a8 Fix DataContract with Token service (#532) 2020-06-11 12:11:35 -04:00
Tingluo Huang
eaf39bb058 add libicu66 for Ubuntu 20.04 (#535) 2020-06-11 12:11:13 -04:00
eric sciple
5815819f24 Resolve action download info (#515) 2020-06-09 08:53:28 -04:00
Ethan Chiu
1aea046932 Add substep for developer flow for clarity (#523) 2020-06-08 10:47:58 -04:00
Ethan Chiu
eda463601c Update Links and Language to Git + VSCode (#522) 2020-06-08 10:19:17 -04:00
Nick Fields
f994ae0542 Reduce input validation warnings (#506)
* Only raise a single warning for unexpected inputs

* Update invalid input test to raise single warning
2020-06-05 23:09:14 -04:00
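As a hedged sketch of the scenario addressed here (the undeclared input names are made up for illustration): passing inputs that an action does not declare in its `action.yml` now yields a single combined warning instead of one warning per input.
```yaml
steps:
  - uses: actions/setup-node@v1
    with:
      node-version: '12'
      # Undeclared inputs: after this change they trigger one summary warning,
      # not one warning each.
      made-up-input-a: 'x'
      made-up-input-b: 'y'
```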
Tingluo Huang
3c5aef791c Fix null ref exception in SecretMasker caused by hashfiles timeout. (#516) 2020-06-05 23:02:10 -04:00
Tingluo Huang
c4626d0c3a Remove SPS/Token migration code. Remove GHES url manipulate code. (#513)
* Remove SPS/Token migration code. Remove GHES url manipulate code.

* feedback.
2020-06-03 23:24:53 -04:00
eric sciple
416a7ac4b8 prepare to switch to service resolves archive download info (#508) 2020-06-02 17:21:50 -04:00
TingluoHuang
11435857e4 prepare 2.263.0 runner release. 2020-05-21 15:49:10 -04:00
Tingluo Huang
6f260012a3 Fix inputs validation warning, fix post step display name, fix worker crash due to error in step.env (#490) 2020-05-21 11:09:50 -04:00
eric sciple
4fc87ddfc6 fix problem matcher for GHES (#488) 2020-05-19 16:15:03 -04:00
eric sciple
b45c1b9440 switch GITHUB_URL to GITHUB_SERVER_URL (#482) 2020-05-18 13:02:30 -04:00
Quan TRAN
73307c0a30 jq returns "null" if the field does not exist (#478) 2020-05-14 11:09:20 -04:00
Tingluo Huang
cd8e4ddba1 Validate inputs only for repo action, no warning for small delay. (#476)
* validate inputs only for repo action, no warning for small delay.

* l0
2020-05-12 16:09:13 -04:00
Tingluo Huang
abf59bdcb6 Fix configure as service with runner name has space. (#474) 2020-05-12 16:08:50 -04:00
Brian Cristante
09cf59c1e0 Use an env var to point to an Actions Service dev instance (#468)
* Use an environment variable for the testing backdoor

* Make the env var a boolean

We'll still have to pass the URL on the command line

* Empty commit to rerun CI
2020-05-11 17:14:02 -04:00
TingluoHuang
7a65236022 prepare 2.262.0 runner release. 2020-05-11 15:05:59 -04:00
David Kale
462b5117c8 docker build using -f instead of implied default (#471)
* pass -f to docker build

* Wrong place

* build path

* Also pass docker context path

* Tidy up format

* PR Feedback
2020-05-11 13:57:31 -04:00
Tingluo Huang
6922f3cb86 sps/token migration tweak, ActionResult casing. (#462) 2020-05-11 12:36:35 -04:00
Tingluo Huang
911135e66c add help info for '--labels' (#472) 2020-05-11 12:36:16 -04:00
PJ Quirk
01c9a8a8af Remove community actions munging and add dotcom fallback for downloading actions (#469)
* Fallback to dotcom rather than munged community actions

* Encapsulate the link and the auth details

* Rename the method to be clearer

* Remove BOM
2020-05-11 12:23:02 -04:00
eric sciple
33d2d2c328 update checkout@v1 for GHES (#470) 2020-05-11 11:41:16 -04:00
Justin Hutchings
a246b3b29d Add CodeQL Analysis workflow (#459)
* Add CodeQL Analysis workflow

* Fix path

* Add manual build step

Import manual build step from build.yml
2020-05-07 11:17:34 -04:00
Brian Cristante
c7768d4a7b Adapt create-latest-svc.sh to work with GHES (#452)
* Add README

* Adapt create-latest-svc to work with GHES

* Fix inverted conditions
2020-04-27 23:53:53 -04:00
Tingluo Huang
70729fb3c4 Help trace worker crash in Kusto. (#450)
* Help trace worker crash in Kusto.

* more

* feedback.
2020-04-27 23:44:17 -04:00
Tingluo Huang
1470a3b6e2 better error when runner removed from service. (#441) 2020-04-27 23:02:00 -04:00
eric sciple
2fadf430e4 post-alpha fixes for github.url github.api_url and github.graphql_url (#451) 2020-04-24 13:38:59 -04:00
PJ Quirk
f798f5606b Use the API_URL and munge action URLs for GHES (#437)
* First pass at logic for GHES, not all correct

* Need to mock out file downloading

* Allowed for mocking of HTTP responses

* Added test for builtin GHES action download

* More tests

* Don't retry on action 404

* Remove commented out code

* Add a using statement back, because Windows

* Make windows happy again

* Another windows fix

* Always delete the cache since it isn't fully implemented

* Use RunnerService base class

* Add examples, update URL path

* Remove forceDotCom

* Fix a bug

* Remove a test that's no longer relevant

* PR feedback

* Add missing return

* More trace info

* Use the new agreed-upon format

* Use the auth token since we're hitting GHES directly

* Fixing tests on windows

* Fixed one more test
2020-04-23 17:02:13 -04:00
Tingluo Huang
3f7a01af93 add secret masker for trimming double quotes. (#440) 2020-04-21 22:07:55 -04:00
Tingluo Huang
d5c54f9819 Raise warning when action input does not match action.yml. (#429) 2020-04-21 19:42:53 -04:00
Mathias LANG
9f78ad3b34 Make release notes code blocks copy-paste-able (#430)
The release notes used C-style comments (`//`) instead of shell-style (`#`),
causing the code snippet to not be easily copy-paste-able.

Additionally, the code fence for Windows had the extra `powershell` added,
as well as a comment to direct users towards `powershell` over `cmd`.
2020-04-19 22:11:44 -04:00
Jan Pazdziora
97883c8cd5 Fix spelling of RHEL and CentOS. (#436) 2020-04-19 16:53:06 -04:00
Tingluo Huang
c5fa9fb062 Print node version in debug instead of output. (#433) 2020-04-17 11:53:51 -04:00
Bryan MacFarlane
b2dcdc21dc Sample scripts to automate scalable runners (#427) 2020-04-17 11:08:45 -04:00
119 changed files with 6211 additions and 2817 deletions


@@ -1,9 +1,10 @@
name: Runner CI
on:
workflow_dispatch:
push:
branches:
- master
- main
- releases/*
paths-ignore:
- '**.md'
@@ -17,28 +18,12 @@ jobs:
build:
strategy:
matrix:
runtime: [ linux-x64, linux-arm64, linux-arm, win-x64, osx-x64 ]
runtime: [ linux-x64 ]
include:
- runtime: linux-x64
os: ubuntu-latest
devScript: ./dev.sh
- runtime: linux-arm64
os: ubuntu-latest
devScript: ./dev.sh
- runtime: linux-arm
os: ubuntu-latest
devScript: ./dev.sh
- runtime: osx-x64
os: macOS-latest
devScript: ./dev.sh
- runtime: win-x64
os: windows-latest
devScript: ./dev
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v1
@@ -49,13 +34,6 @@ jobs:
${{ matrix.devScript }} layout Release ${{ matrix.runtime }}
working-directory: src
# Run tests
- name: L0
run: |
${{ matrix.devScript }} test
working-directory: src
if: matrix.runtime != 'linux-arm64' && matrix.runtime != 'linux-arm'
# Create runner package tar.gz/zip
- name: Package Release
if: github.event_name != 'pull_request'
@@ -70,3 +48,18 @@ jobs:
with:
name: runner-package-${{ matrix.runtime }}
path: _package
- name: Build old version
run: |
echo 2.270.0 > runnerversion
${{ matrix.devScript }} layout Release ${{ matrix.runtime }}
${{ matrix.devScript }} package Release
working-directory: src
# Upload runner package tar.gz/zip as artifact
- name: Publish Artifact old
if: github.event_name != 'pull_request'
uses: actions/upload-artifact@v1
with:
name: runner-package-${{ matrix.runtime }}-old
path: _package

35
.github/workflows/codeql.yml vendored Normal file

@@ -0,0 +1,35 @@
name: "Code Scanning - Action"
on:
push:
schedule:
- cron: '0 0 * * 0'
jobs:
CodeQL-Build:
strategy:
fail-fast: false
# CodeQL runs on ubuntu-latest, windows-latest, and macos-latest
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v2
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v1
# Override language selection by uncommenting this and choosing your languages
# with:
# languages: go, javascript, csharp, python, cpp, java
- name: Manual build
run : |
./dev.sh layout Release linux-x64
working-directory: src
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v1


@@ -7,7 +7,7 @@ on:
jobs:
check:
if: startsWith(github.ref, 'refs/heads/releases/') || github.ref == 'refs/heads/master'
if: startsWith(github.ref, 'refs/heads/releases/') || github.ref == 'refs/heads/main'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2

57
Dockerfile Normal file

@@ -0,0 +1,57 @@
FROM mcr.microsoft.com/dotnet/core/runtime-deps:3.1-buster-slim
ENV RUNNER_CONFIG_URL=""
ENV GITHUB_PAT=""
ENV RUNNER_NAME=""
ENV RUNNER_GROUP=""
ENV RUNNER_LABELS=""
# ENV GITHUB_RUNNER_SCOPE=""
# ENV GITHUB_SERVER_URL=""
# ENV GITHUB_API_URL=""
# ENV K8S_HOST_IP=""
RUN apt-get update --fix-missing \
&& apt-get install -y --no-install-recommends \
curl \
jq \
apt-utils \
apt-transport-https \
unzip \
net-tools\
gnupg2\
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*
# Install kubectl
RUN curl -s https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key add - && \
echo "deb https://apt.kubernetes.io/ kubernetes-xenial main" | tee -a /etc/apt/sources.list.d/kubernetes.list && \
apt-get update && apt-get -y install --no-install-recommends kubectl
# Install docker
RUN curl -fsSL https://get.docker.com -o get-docker.sh
RUN sh get-docker.sh
# Allow runner to run as root
ENV RUNNER_ALLOW_RUNASROOT=1
# Directory for runner to operate in
RUN mkdir /actions-runner
WORKDIR /actions-runner
COPY ./src/Misc/download-runner.sh /actions-runner/download-runner.sh
COPY ./src/Misc/entrypoint.sh /actions-runner/entrypoint.sh
# COPY ./src/Misc/jobstart.sh /actions-runner/jobstart.sh
# COPY ./src/Misc/jobrunning.sh /actions-runner/jobrunning.sh
# COPY ./src/Misc/jobcomplete.sh /actions-runner/jobcomplete.sh
COPY ./src/Misc/runner_lifecycle.sh /actions-runner/runner_lifecycle.sh
RUN /actions-runner/download-runner.sh
RUN rm -f /actions-runner/download-runner.sh
# ENV _INTERNAL_JOBSTART_NOTIFICATION=/actions-runner/jobstart.sh
# ENV _INTERNAL_JOBRUNNING_NOTIFICATION=/actions-runner/jobrunning.sh
# ENV _INTERNAL_JOBCOMPLETE_NOTIFICATION=/actions-runner/jobcomplete.sh
ENV _INTERNAL_RUNNER_LIFECYCLE_NOTIFICATION=/actions-runner/runner_lifecycle.sh
ENTRYPOINT ["./entrypoint.sh"]

46
Dockerfile.dind Normal file

@@ -0,0 +1,46 @@
FROM docker:19.03
# https://github.com/docker/docker/blob/master/project/PACKAGERS.md#runtime-dependencies
RUN set -eux; \
apk add --no-cache \
btrfs-progs \
e2fsprogs \
e2fsprogs-extra \
iptables \
openssl \
shadow-uidmap \
xfsprogs \
xz \
# pigz: https://github.com/moby/moby/pull/35697 (faster gzip implementation)
pigz \
; \
# only install zfs if it's available for the current architecture
# https://git.alpinelinux.org/cgit/aports/tree/main/zfs/APKBUILD?h=3.6-stable#n9 ("all !armhf !ppc64le" as of 2017-11-01)
# "apk info XYZ" exits with a zero exit code but no output when the package exists but not for this arch
if zfs="$(apk info --no-cache --quiet zfs)" && [ -n "$zfs" ]; then \
apk add --no-cache zfs; \
fi
# TODO aufs-tools
# set up subuid/subgid so that "--userns-remap=default" works out-of-the-box
RUN set -x \
&& addgroup -S dockremap \
&& adduser -S -G dockremap dockremap \
&& echo 'dockremap:165536:65536' >> /etc/subuid \
&& echo 'dockremap:165536:65536' >> /etc/subgid
# https://github.com/docker/docker/tree/master/hack/dind
ENV DIND_COMMIT ed89041433a031cafc0a0f19cfe573c31688d377
RUN set -eux; \
wget -O /usr/local/bin/dind "https://raw.githubusercontent.com/docker/docker/${DIND_COMMIT}/hack/dind"; \
chmod +x /usr/local/bin/dind
COPY dockerd-entrypoint.sh /usr/local/bin/
VOLUME /var/lib/docker
EXPOSE 6788 6789
ENTRYPOINT ["dockerd-entrypoint.sh"]
CMD []

14
autoscalev0.yaml Normal file

@@ -0,0 +1,14 @@
apiVersion: actions.summerwind.dev/v1alpha1
kind: RunnerDeployment
metadata:
name: auto-scale-runners
spec:
replicas: 1
maxRunnerLimit: 5
template:
spec:
configURL: https://github.com/bbq-beets/ting-test
githubTokenSecretKeyRef:
name: githubtoken
key: GITHUB_PAT

63
deployment.yaml Normal file

@@ -0,0 +1,63 @@
apiVersion: v1
kind: Pod
metadata:
name: runner-pod
labels:
name: runner-pod
spec:
containers:
- name: runner-pod
image: huangtingluo/autoscale-runner:v0.0
imagePullPolicy: Always
env:
- name: GITHUB_PAT
value: 62c13e14e947958516c103a9584f66227697c447
- name: GITHUB_RUNNER_SCOPE
value: monalisa/main123
- name: K8S_HOST_IP
value: "192.168.120.1"
# apiVersion: apps/v1
# kind: Deployment
# metadata:
# name: runner-deployment
# spec:
# replicas: 1
# selector:
# matchLabels:
# app: runners
# template:
# metadata:
# labels:
# app: runners
# spec:
# # hostNetwork: true
# # volumes:
# # - name: docker-storage
# # emptyDir: {}
# # containers:
# # - name: docker-host
# # image: docker:18.05-dind
# # imagePullPolicy: Always
# # securityContext:
# # privileged: true
# # volumeMounts:
# # - name: docker-storage
# # mountPath: /var/lib/docker
# # hostNetwork: true
# containers:
# - name: runner
# image: huangtingluo/autoscale-runner:v0.0
# imagePullPolicy: Always
# env:
# - name: GITHUB_PAT
# value: 62c13e14e947958516c103a9584f66227697c447
# - name: GITHUB_RUNNER_SCOPE
# value: monalisa/main123
# - name: K8S_HOST_IP
# value: "192.168.120.1"
# resources:
# limits:
# memory: "128Mi"
# cpu: "500m"

186
dockerd-entrypoint.sh Executable file

@@ -0,0 +1,186 @@
#!/bin/sh
set -eu
_tls_ensure_private() {
local f="$1"; shift
[ -s "$f" ] || openssl genrsa -out "$f" 4096
}
_tls_san() {
{
ip -oneline address | awk '{ gsub(/\/.+$/, "", $4); print "IP:" $4 }'
{
cat /etc/hostname
echo 'docker'
echo 'localhost'
hostname -f
hostname -s
} | sed 's/^/DNS:/'
[ -z "${DOCKER_TLS_SAN:-}" ] || echo "$DOCKER_TLS_SAN"
} | sort -u | xargs printf '%s,' | sed "s/,\$//"
}
_tls_generate_certs() {
local dir="$1"; shift
# if ca/key.pem || !ca/cert.pem, generate CA public if necessary
# if ca/key.pem, generate server public
# if ca/key.pem, generate client public
# (regenerating public certs every startup to account for SAN/IP changes and/or expiration)
# https://github.com/FiloSottile/mkcert/issues/174
local certValidDays='825'
if [ -s "$dir/ca/key.pem" ] || [ ! -s "$dir/ca/cert.pem" ]; then
# if we either have a CA private key or do *not* have a CA public key, then we should create/manage the CA
mkdir -p "$dir/ca"
_tls_ensure_private "$dir/ca/key.pem"
openssl req -new -key "$dir/ca/key.pem" \
-out "$dir/ca/cert.pem" \
-subj '/CN=docker:dind CA' -x509 -days "$certValidDays"
fi
if [ -s "$dir/ca/key.pem" ]; then
# if we have a CA private key, we should create/manage a server key
mkdir -p "$dir/server"
_tls_ensure_private "$dir/server/key.pem"
openssl req -new -key "$dir/server/key.pem" \
-out "$dir/server/csr.pem" \
-subj '/CN=docker:dind server'
cat > "$dir/server/openssl.cnf" <<-EOF
[ x509_exts ]
subjectAltName = $(_tls_san)
EOF
openssl x509 -req \
-in "$dir/server/csr.pem" \
-CA "$dir/ca/cert.pem" \
-CAkey "$dir/ca/key.pem" \
-CAcreateserial \
-out "$dir/server/cert.pem" \
-days "$certValidDays" \
-extfile "$dir/server/openssl.cnf" \
-extensions x509_exts
cp "$dir/ca/cert.pem" "$dir/server/ca.pem"
openssl verify -CAfile "$dir/server/ca.pem" "$dir/server/cert.pem"
fi
if [ -s "$dir/ca/key.pem" ]; then
# if we have a CA private key, we should create/manage a client key
mkdir -p "$dir/client"
_tls_ensure_private "$dir/client/key.pem"
chmod 0644 "$dir/client/key.pem" # openssl defaults to 0600 for the private key, but this one needs to be shared with arbitrary client contexts
openssl req -new \
-key "$dir/client/key.pem" \
-out "$dir/client/csr.pem" \
-subj '/CN=docker:dind client'
cat > "$dir/client/openssl.cnf" <<-'EOF'
[ x509_exts ]
extendedKeyUsage = clientAuth
EOF
openssl x509 -req \
-in "$dir/client/csr.pem" \
-CA "$dir/ca/cert.pem" \
-CAkey "$dir/ca/key.pem" \
-CAcreateserial \
-out "$dir/client/cert.pem" \
-days "$certValidDays" \
-extfile "$dir/client/openssl.cnf" \
-extensions x509_exts
cp "$dir/ca/cert.pem" "$dir/client/ca.pem"
openssl verify -CAfile "$dir/client/ca.pem" "$dir/client/cert.pem"
fi
}
# no arguments passed
# or first arg is `-f` or `--some-option`
if [ "$#" -eq 0 ] || [ "${1#-}" != "$1" ]; then
# set "dockerSocket" to the default "--host" *unix socket* value (for both standard or rootless)
uid="$(id -u)"
if [ "$uid" = '0' ]; then
dockerSocket='unix:///var/run/docker.sock'
else
# if we're not root, we must be trying to run rootless
: "${XDG_RUNTIME_DIR:=/run/user/$uid}"
dockerSocket="unix://$XDG_RUNTIME_DIR/docker.sock"
fi
case "${DOCKER_HOST:-}" in
unix://*)
dockerSocket="$DOCKER_HOST"
;;
esac
# add our default arguments
if [ -n "${DOCKER_TLS_CERTDIR:-}" ] \
&& _tls_generate_certs "$DOCKER_TLS_CERTDIR" \
&& [ -s "$DOCKER_TLS_CERTDIR/server/ca.pem" ] \
&& [ -s "$DOCKER_TLS_CERTDIR/server/cert.pem" ] \
&& [ -s "$DOCKER_TLS_CERTDIR/server/key.pem" ] \
; then
# generate certs and use TLS if requested/possible (default in 19.03+)
set -- dockerd \
--host="$dockerSocket" \
--host=tcp://0.0.0.0:6789 \
--tlsverify \
--tlscacert "$DOCKER_TLS_CERTDIR/server/ca.pem" \
--tlscert "$DOCKER_TLS_CERTDIR/server/cert.pem" \
--tlskey "$DOCKER_TLS_CERTDIR/server/key.pem" \
"$@"
DOCKERD_ROOTLESS_ROOTLESSKIT_FLAGS="${DOCKERD_ROOTLESS_ROOTLESSKIT_FLAGS:-} -p 0.0.0.0:6789:6789/tcp"
else
# TLS disabled (-e DOCKER_TLS_CERTDIR='') or missing certs
set -- dockerd \
--host="$dockerSocket" \
--host=tcp://0.0.0.0:6788 \
"$@"
DOCKERD_ROOTLESS_ROOTLESSKIT_FLAGS="${DOCKERD_ROOTLESS_ROOTLESSKIT_FLAGS:-} -p 0.0.0.0:6788:6788/tcp"
fi
fi
if [ "$1" = 'dockerd' ]; then
# explicitly remove Docker's default PID file to ensure that it can start properly if it was stopped uncleanly (and thus didn't clean up the PID file)
find /run /var/run -iname 'docker*.pid' -delete || :
uid="$(id -u)"
if [ "$uid" != '0' ]; then
# if we're not root, we must be trying to run rootless
if ! command -v rootlesskit > /dev/null; then
echo >&2 "error: attempting to run rootless dockerd but missing 'rootlesskit' (perhaps the 'docker:dind-rootless' image variant is intended?)"
exit 1
fi
user="$(id -un 2>/dev/null || :)"
if ! grep -qE "^($uid${user:+|$user}):" /etc/subuid || ! grep -qE "^($uid${user:+|$user}):" /etc/subgid; then
echo >&2 "error: attempting to run rootless dockerd but missing necessary entries in /etc/subuid and/or /etc/subgid for $uid"
exit 1
fi
: "${XDG_RUNTIME_DIR:=/run/user/$uid}"
export XDG_RUNTIME_DIR
if ! mkdir -p "$XDG_RUNTIME_DIR" || [ ! -w "$XDG_RUNTIME_DIR" ] || ! mkdir -p "$HOME/.local/share/docker" || [ ! -w "$HOME/.local/share/docker" ]; then
echo >&2 "error: attempting to run rootless dockerd but need writable HOME ($HOME) and XDG_RUNTIME_DIR ($XDG_RUNTIME_DIR) for user $uid"
exit 1
fi
if [ -f /proc/sys/kernel/unprivileged_userns_clone ] && unprivClone="$(cat /proc/sys/kernel/unprivileged_userns_clone)" && [ "$unprivClone" != '1' ]; then
echo >&2 "error: attempting to run rootless dockerd but need 'kernel.unprivileged_userns_clone' (/proc/sys/kernel/unprivileged_userns_clone) set to 1"
exit 1
fi
if [ -f /proc/sys/user/max_user_namespaces ] && maxUserns="$(cat /proc/sys/user/max_user_namespaces)" && [ "$maxUserns" = '0' ]; then
echo >&2 "error: attempting to run rootless dockerd but need 'user.max_user_namespaces' (/proc/sys/user/max_user_namespaces) set to a sufficiently large value"
exit 1
fi
# TODO overlay support detection?
exec rootlesskit \
--net="${DOCKERD_ROOTLESS_ROOTLESSKIT_NET:-vpnkit}" \
--mtu="${DOCKERD_ROOTLESS_ROOTLESSKIT_MTU:-1500}" \
--disable-host-loopback \
--port-driver=builtin \
--copy-up=/etc \
--copy-up=/run \
${DOCKERD_ROOTLESS_ROOTLESSKIT_FLAGS:-} \
"$@"
elif [ -x '/usr/local/bin/dind' ]; then
# if we have the (mostly defunct now) Docker-in-Docker wrapper script, use it
set -- '/usr/local/bin/dind' "$@"
fi
else
# if it isn't `dockerd` we're trying to run, pass it through `docker-entrypoint.sh` so it gets `DOCKER_HOST` set appropriately too
set -- docker-entrypoint.sh "$@"
fi
exec "$@"


@@ -0,0 +1,293 @@
# ADR 0549: Composite Run Steps
**Date**: 2020-06-17
**Status**: Accepted
## Context
Customers want to be able to compose actions from actions (ex: https://github.com/actions/runner/issues/438).
An important step towards meeting this goal is to build functionality that lets an action simply execute any number of steps.
### Guiding Principles
We don't want the workflow author to need to know the internal workings of the composite action (for example, `default.shell` and `default.workingDir` should not be inherited from the workflow file into the action file). When deciding how to design composite run steps, we want to think one logical step away from the consumer.
A composite action is treated as **one** individual job step (this is known as encapsulation).
## Decision
**In this ADR, we only support running multiple run steps in an Action.** In doing so, we build in support for mapping and flowing the inputs, outputs, and env variables (ex: all nested steps should have access to their parent's input variables, and nested steps can overwrite those input variables).
### Steps
Example `workflow.yml`
```yaml
jobs:
build:
runs-on: self-hosted
steps:
- id: step1
uses: actions/setup-python@v1
- id: step2
uses: actions/setup-node@v2
- uses: actions/checkout@v2
- uses: user/composite@v1
- name: workflow step 1
run: echo hello world 3
- name: workflow step 2
run: echo hello world 4
```
Example `user/composite/action.yml`
```yaml
runs:
using: "composite"
steps:
- run: pip install -r requirements.txt
- run: npm install
```
Example Output
```yaml
[npm installation output]
[pip requirements output]
echo hello world 3
echo hello world 4
```
We add a token called "composite" which allows our Runner code to process composite actions. By invoking "using: composite", our Runner code then processes the "steps" attribute, converts this template code to a list of steps, and finally runs each run step sequentially. If any step fails and there are no `if` conditions defined, the whole composite action job fails.
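For example, a minimal sketch (not one of the ADR's own examples) of the fail-fast behavior just described:
```yaml
runs:
  using: "composite"
  steps:
    - run: exit 1          # this step fails
    - run: echo "skipped"  # no `if` condition is defined, so this step does not
                           # run and the whole composite action step fails
```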
### Inputs
Example `workflow.yml`:
```yaml
steps:
- id: foo
uses: user/composite@v1
with:
your_name: "Octocat"
```
Example `user/composite/action.yml`:
```yaml
inputs:
your_name:
description: 'Your name'
default: 'Ethan'
runs:
using: "composite"
steps:
- run: echo hello ${{ inputs.your_name }}
```
Example Output:
```
hello Octocat
```
Each input variable in the composite action is only viewable in its own scope.
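To make that scoping concrete, a small hypothetical sketch (not from the ADR itself): the `inputs` context resolves inside `user/composite/action.yml` but not in the calling workflow.
```yaml
# workflow.yml
steps:
  - id: foo
    uses: user/composite@v1
    with:
      your_name: "Octocat"
  # The composite action's `inputs` context is not visible here; an expression
  # like ${{ inputs.your_name }} would not resolve to "Octocat" in this
  # workflow-level step.
  - run: echo "workflow scope"
```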
### Outputs
Example `workflow.yml`:
```yaml
...
steps:
- id: foo
uses: user/composite@v1
- run: echo random-number ${{ steps.foo.outputs.random-number }}
```
Example `user/composite/action.yml`:
```yaml
outputs:
random-number:
description: "Random number"
value: ${{ steps.random-number-generator.outputs.random-id }}
runs:
using: "composite"
steps:
- id: random-number-generator
run: echo "::set-output name=random-number::$(echo $RANDOM)"
```
Example Output:
```
::set-output name=my-output::43243
random-number 43243
```
Each of the output variables from the composite action is viewable from the workflow file that uses the composite action. In other words, each child action's outputs are viewable only by its parent, using dot notation (ex: `steps.foo.outputs.random-number`).
Moreover, output ids are only accessible within the scope where they are defined. Note that in the example above, the `workflow.yml` file does not have access to the step-level output id (i.e. `random-id`). We do this because we don't want to require the workflow author to know the internal workings of the composite action.
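A hypothetical sketch of that restriction (not part of the ADR's examples): the workflow can read the action-level output, but not the internal step's output.
```yaml
# workflow.yml
steps:
  - id: foo
    uses: user/composite@v1
  # Allowed: the output declared in the action's `outputs:` block.
  - run: echo ${{ steps.foo.outputs.random-number }}
  # Not allowed: the internal step id `random-number-generator` and its
  # `random-id` output are scoped to the composite action file.
  # - run: echo ${{ steps.random-number-generator.outputs.random-id }}
```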
### Context
Similar to the workflow file, the composite action has access to the [same context objects](https://help.github.com/en/actions/reference/context-and-expression-syntax-for-github-actions#contexts) (ex: `github`, `env`, `strategy`).
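For instance, a minimal hedged sketch of context usage inside a composite action (not from the ADR itself):
```yaml
runs:
  using: "composite"
  steps:
    # The `github` context resolves here just as it would in a workflow file.
    - run: echo "running for ${{ github.repository }} on ref ${{ github.ref }}"
```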
### Environment
In the Composite Action, you'll only be able to use `::set-env::` to set environment variables just like you could with other actions.
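A small sketch of what that looks like, assuming `::set-env::` behaves as it does for other actions (the variable name is hypothetical):
```yaml
runs:
  using: "composite"
  steps:
    # Export an environment variable for later steps via the set-env command.
    - run: echo "::set-env name=BUILD_MODE::release"
    # Subsequent steps can then read it from the environment.
    - run: echo "building in $BUILD_MODE mode"
```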
### Secrets
**Note** : This feature will be focused on in a future ADR.
We'll pass the secrets from the composite action's parents (ex: the workflow file) to the composite action. Secrets can be created in the composite action with the secrets context. In the actions yaml, we'll automatically mask the secret.
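As a hedged sketch of the intended flow (details are deferred to a future ADR; the secret name and script are hypothetical):
```yaml
runs:
  using: "composite"
  steps:
    # The secret comes from the calling workflow's context; its value would be
    # masked automatically in the logs.
    - run: ./deploy.sh "${{ secrets.DEPLOY_TOKEN }}"
```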
### If Condition
Example `workflow.yml`:
```yaml
steps:
- run: exit 1
- uses: user/composite@v1 # <--- this will run, as it's marked as always running
if: always()
```
Example `user/composite/action.yml`:
```yaml
runs:
using: "composite"
steps:
- run: echo "just succeeding"
- run: echo "I will run, as my current scope is succeeding"
if: success()
- run: exit 1
- run: echo "I will not run, as my current scope is now failing"
```
See the paragraph below for a rudimentary approach (thank you to @cybojenix for the idea, example, and explanation for this approach):
The `if` statement in the parent (in the example above, the `workflow.yml`) determines whether or not we run the composite action. So our composite action will run, since the `if` condition for running the composite action is `always()`.
**Note that the if condition on the parent does not propagate to the rest of its children though.**
The child action (in this example, the `action.yml`) starts with a clean slate (in other words, no imposed if conditions). Following the same logic as above, `echo "I will run, as my current scope is succeeding"` will run, since its `if` condition checks that the previous steps **within this composite action** have not failed. `run: echo "I will not run, as my current scope is now failing"` will not run, since the previous step resulted in an error and, by default, a step's if expression is `success()` when no `if` condition is set.
What if a step has `cancelled()`? We do the opposite of our approach above if `cancelled()` is used for any of our composite run steps. We will cancel any step that has this condition if the workflow is cancelled at all.
### Timeout-minutes
Example `workflow.yml`:
```yaml
steps:
- id: bar
uses: user/test@v1
timeout-minutes: 50
```
Example `user/composite/action.yml`:
```yaml
runs:
using: "composite"
steps:
- id: foo1
run: echo test 1
timeout-minutes: 10
- id: foo2
run: echo test 2
- id: foo3
run: echo test 3
timeout-minutes: 10
```
A composite action in its entirety is a job. You can set `timeout-minutes` for the whole composite action, for its steps, or for both, as long as the sum of the `timeout-minutes` for the composite action steps that have the attribute is less than or equal to the `timeout-minutes` for the composite action. There is no default `timeout-minutes` for each composite action step.
If the time taken by any of the steps, individually or in combination, exceeds the whole composite action's `timeout-minutes` attribute, the whole job will fail (1). If an individual step exceeds its own `timeout-minutes` attribute but the total time used, including this step, is below the overall composite action `timeout-minutes`, the individual step will fail but the rest of the steps will run based on their own `timeout-minutes` attribute (they still abide by condition (1), though).
For reference, in the example above, if the composite step `foo1` takes 11 minutes to run, that step will fail, but the rest of the steps, `foo2` and `foo3`, will proceed as long as their total runtime together with the failed `foo1` step is less than the composite action's `timeout-minutes` (50 minutes). If the composite step `foo2` takes 51 minutes to run, it will cause the whole composite action job to fail.
The rationale behind this is that users can configure their steps with the `if` condition to control how steps rely on each other. Given the additional capabilities offered by combining `timeout-minutes` and/or `if`, we wanted the `timeout-minutes` condition to be as simple as possible and not affect other steps.
[Usage limits still apply](https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions?query=if%28%29#usage-limits)
### Continue-on-error
Example `workflow.yml`:
```yaml
steps:
- run: exit 1
- id: bar
uses: user/test@v1
continue-on-error: false
- id: foo
run: echo "Hello World" <------- This step will not run
```
Example `user/composite/action.yml`:
```yaml
runs:
using: "composite"
steps:
- run: exit 1
continue-on-error: true
- run: echo "Hello World 2" <----- This step will run
```
If any of the steps fail in the composite action and `continue-on-error` is set to `false` for the whole composite action step in the workflow file, then the workflow steps below it will not run. On the flip side, if `continue-on-error` is set to `true` for the whole composite action step in the workflow file, the next job step will run.
For the composite action steps, the same logic applies. In this example, `"Hello World 2"` will be output because the previous step has `continue-on-error` set to `true`, even though that previous step errored.
### Defaults
We will not support "defaults" in a composite action.
### Shell and Working-directory
For each run step in a composite action, the action author can set the `shell` and `working-directory` attributes for that step. These attributes are optional for each run step; by default, `shell` is set to the default value associated with the runner OS (ex: bash => macOS). Moreover, the composite action author can map values from `inputs` into the `shell` and `working-directory` attributes at the step level.
For example,
`action.yml`
```yaml
inputs:
shell_1:
description: 'Your name'
default: 'pwsh'
steps:
- run: echo 1
shell: ${{ inputs.shell_1 }}
```
Note that the workflow file and the action file are treated as separate entities. **So the workflow `defaults` will never change the `shell` and `working-directory` values of the run steps in a composite action.** Note that `defaults` in a workflow only apply to run steps, not "uses" steps (steps that use an action).
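A hypothetical sketch of that separation (not from the ADR itself):
```yaml
# workflow.yml
defaults:
  run:
    shell: pwsh
steps:
  - uses: user/composite@v1  # the `defaults` above do not reach the action's run
                             # steps; they keep the runner OS default shell unless
                             # they set `shell:` themselves
```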
### Visualizing Composite Action in the GitHub Actions UI
We want all the composite action's steps to be condensed into the original composite action node.
Here is a visual representation of the [first example](#Steps):
```yaml
| composite_action_node |
| echo hello world 1 |
| echo hello world 2 |
| echo hello world 3 |
| echo hello world 4 |
```
## Consequences
This ADR lays the framework for eventually supporting nested Composite Actions within Composite Actions. It allows users to run multiple run steps within a GitHub Composite Action, with support for inputs, outputs, environment, and context in any of those steps, as well as the `if`, `timeout-minutes`, and `continue-on-error` attributes for each Composite Action step.


@@ -23,7 +23,7 @@ An ADR is an Architectural Decision Record. This allows consensus on the direct
### Required Dev Dependencies
![Win](res/win_sm.png) Git for Windows [Install Here](https://git-scm.com/downloads) (needed for dev sh script)
![Win](res/win_sm.png) ![*nix](res/linux_sm.png) Git for Windows and Linux [Install Here](https://git-scm.com/downloads) (needed for dev sh script)
### To Build, Test, Layout
@@ -43,6 +43,7 @@ Sample developer flow:
```bash
git clone https://github.com/actions/runner
cd runner
cd ./src
./dev.(sh/cmd) layout # the runner that built from source is in {root}/_layout
<make code changes>
@@ -50,10 +51,23 @@ cd ./src
./dev.(sh/cmd) test # run all unit tests before git commit/push
```
View logs:
```bash
cd runner/_layout/_diag
ls
cat (Runner/Worker)_TIMESTAMP.log # view your log file
```
Run Runner:
```bash
cd runner/_layout
./run.sh # run your custom runner
```
### Editors
[Using Visual Studio Code](https://code.visualstudio.com/)
[Using Visual Studio 2019](https://www.visualstudio.com/vs/)
[Using Visual Studio](https://code.visualstudio.com/docs)
### Styling


@@ -40,7 +40,7 @@ Debian based OS (Debian, Ubuntu, Linux Mint)
- libssl1.1, libssl1.0.2 or libssl1.0.0
- libicu63, libicu60, libicu57 or libicu55
Fedora based OS (Fedora, Redhat, Centos, Oracle Linux 7)
Fedora based OS (Fedora, Red Hat Enterprise Linux, CentOS, Oracle Linux 7)
- lttng-ust
- openssl-libs

48
ephemeralJob.yaml Normal file

@@ -0,0 +1,48 @@
apiVersion: batch.github.actions/v1
kind: CronJob
metadata:
name: cronjob-sample
spec:
schedule: "*/1 * * * *"
jobTemplate:
spec:
template:
spec:
hostNetwork: true
containers:
- name: k8srunner
image: huangtingluo/autoscale-runner:v0.0
imagePullPolicy: Always
env:
- name: GITHUB_PAT
value: 62c13e14e947958516c103a9584f66227697c447
- name: GITHUB_RUNNER_SCOPE
value: monalisa/main123
restartPolicy: Never
# spec:
# containers:
# - name: hello
# image: busybox
# args:
# - /bin/sh
# - -c
# - date; echo Hello from the Kubernetes cluster
# restartPolicy: Never
# jobTemplate:
# spec:
# template:
# spec:
# hostNetwork: true
# containers:
# - name: k8srunner
# image: huangtingluo/autoscale-runner:v0.0
# imagePullPolicy: Always
# env:
# - name: GITHUB_PAT
# value: 62c13e14e947958516c103a9584f66227697c447
# - name: GITHUB_RUNNER_SCOPE
# value: monalisa/main123
# restartPolicy: Never
# backoffLimit: 1
# completions: 0
# parallelism: 3

56
hpa-v2.yaml Normal file

@@ -0,0 +1,56 @@
apiVersion: v1
items:
- apiVersion: autoscaling/v2beta2
kind: HorizontalPodAutoscaler
metadata:
creationTimestamp: "2020-08-05T19:14:04Z"
name: runner-deployment
namespace: default
resourceVersion: "167447"
selfLink: /apis/autoscaling/v2beta2/namespaces/default/horizontalpodautoscalers/runner-deployment
uid: 54d86943-eca9-468c-9698-c843f6b6183a
spec:
maxReplicas: 10
metrics:
- type: Object
object:
metric:
name: test-metric
describedObject:
apiVersion: v1
kind: Service
name: kubernetes
target:
type: Value
value: 300m
- resource:
name: cpu
target:
averageUtilization: 50
type: Utilization
type: Resource
minReplicas: 1
scaleTargetRef:
apiVersion: apps/v1
kind: Deployment
name: runner-deployment
status:
conditions:
- lastTransitionTime: "2020-08-05T19:14:19Z"
message: the HPA controller was able to get the target's current scale
reason: SucceededGetScale
status: "True"
type: AbleToScale
- lastTransitionTime: "2020-08-05T19:14:19Z"
message: 'the HPA was unable to compute the replica count: unable to get metrics
for resource cpu: no metrics returned from resource metrics API'
reason: FailedGetResourceMetric
status: "False"
type: ScalingActive
currentMetrics: null
currentReplicas: 1
desiredReplicas: 0
kind: List
metadata:
resourceVersion: ""
selfLink: ""

92
job.yaml Normal file

@@ -0,0 +1,92 @@
apiVersion: rbac.authorization.k8s.io/v1
kind: Role
metadata:
name: pod-admin
namespace: default
rules:
- apiGroups: [ "" ]
resources: [ "pods", "pods/ephemeralcontainers", "pods/log", "pods/attach", "pods/exec"]
verbs: ["get", "list", "watch", "create", "update", "patch", "delete"]
---
apiVersion: rbac.authorization.k8s.io/v1
kind: RoleBinding
metadata:
name: default-pod-admin
namespace: default
roleRef:
apiGroup: rbac.authorization.k8s.io
kind: Role
name: pod-admin
subjects:
- kind: ServiceAccount
name: default
namespace: default
---
apiVersion: batch/v1
kind: Job
metadata:
namespace: default
name: actions-runners
spec:
template:
spec:
# hostNetwork: true
volumes:
- name: docker-storage
emptyDir: {}
- name: runner-working
emptyDir: {}
containers:
- name: docker-host
image: docker:18.05-dind
imagePullPolicy: Always
securityContext:
privileged: true
volumeMounts:
- name: docker-storage
mountPath: /var/lib/docker
- mountPath: /actions-runner/_work
name: runner-working
- name: k8srunner
image: huangtingluo/autoscale-runner:v0.0
volumeMounts:
- mountPath: /actions-runner/_work
name: runner-working
imagePullPolicy: Always
env:
- name: GITHUB_PAT
value: 62c13e14e947958516c103a9584f66227697c447
- name: GITHUB_RUNNER_SCOPE
value: monalisa/main123
- name: K8S_HOST_IP
value: "192.168.120.1"
- name: DOCKER_HOST
value: tcp://localhost:2375
- name: K8S_NODE_NAME
valueFrom:
fieldRef:
fieldPath: spec.nodeName
- name: K8S_POD_NAME
valueFrom:
fieldRef:
fieldPath: metadata.name
- name: K8S_POD_NAMESPACE
valueFrom:
fieldRef:
fieldPath: metadata.namespace
- name: K8S_POD_IP
valueFrom:
fieldRef:
fieldPath: status.podIP
- name: K8S_POD_SERVICE_ACCOUNT
valueFrom:
fieldRef:
fieldPath: spec.serviceAccountName
restartPolicy: Never
backoffLimit: 1
completions: 20
parallelism: 3

34
prereq.yaml Normal file

@@ -0,0 +1,34 @@
---
apiVersion: rbac.authorization.k8s.io/v1
kind: Role
metadata:
namespace: default
name: pod-patcher
rules:
- apiGroups: [""] # "" indicates the core API group
resources: ["pods"]
verbs: ["get", "watch", "list", "patch"]
---
apiVersion: rbac.authorization.k8s.io/v1
kind: RoleBinding
metadata:
name: default-pod-patcher
namespace: default
roleRef:
apiGroup: rbac.authorization.k8s.io
kind: Role
name: pod-patcher
subjects:
- kind: ServiceAccount
name: default
namespace: default
---
apiVersion: v1
kind: Secret
metadata:
name: githubtoken
type: Opaque
stringData:
GITHUB_PAT: "xxx"


@@ -1,20 +1,29 @@
## Features
- Runner support for GHES Alpha (#381 #386 #390 #393 #401)
- Allow secrets context in Container.env (#388)
- Resolve action download info from server (#508, #515, #550)
- Print runner and machine name to log. (#539)
## Bugs
- Raise warning when volume mount root. (#413)
- Fix typo (#394)
- Reduce input validation warnings (#506)
- Fix null ref exception in SecretMasker caused by `hashfiles` timeout. (#516)
- Add libicu66 to `./installDependencies.sh` for Ubuntu 20.04 (#535)
- Fix DataContract with Token service (#532)
- Skip search $PATH on command with fully qualified path (#526)
- Restore SELinux context on service file when SELinux is enabled (#525)
## Misc
- N/A
- Remove SPS/Token migration code. Remove GHES URL manipulation code. (#513)
- Add sub-step for developer flow for clarity (#523)
- Update Links and Language to Git + VSCode (#522)
- Update runner configuration exception message (#540)
## Windows x64
We recommend configuring the runner in a root folder of the Windows drive (e.g. "C:\actions-runner"). This will help avoid issues related to service identity folder permissions and long file path restrictions on Windows
```
// Create a folder under the drive root
We recommend configuring the runner in a root folder of the Windows drive (e.g. "C:\actions-runner"). This will help avoid issues related to service identity folder permissions and long file path restrictions on Windows.
The following snippet needs to be run in `powershell`:
``` powershell
# Create a folder under the drive root
mkdir \actions-runner ; cd \actions-runner
// Download the latest runner package
# Download the latest runner package
Invoke-WebRequest -Uri https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-win-x64-<RUNNER_VERSION>.zip -OutFile actions-runner-win-x64-<RUNNER_VERSION>.zip
// Extract the installer
# Extract the installer
Add-Type -AssemblyName System.IO.Compression.FileSystem ;
[System.IO.Compression.ZipFile]::ExtractToDirectory("$PWD\actions-runner-win-x64-<RUNNER_VERSION>.zip", "$PWD")
```
@@ -22,44 +31,44 @@ Add-Type -AssemblyName System.IO.Compression.FileSystem ;
## OSX
``` bash
// Create a folder
# Create a folder
mkdir actions-runner && cd actions-runner
// Download the latest runner package
# Download the latest runner package
curl -O -L https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-osx-x64-<RUNNER_VERSION>.tar.gz
// Extract the installer
# Extract the installer
tar xzf ./actions-runner-osx-x64-<RUNNER_VERSION>.tar.gz
```
## Linux x64
``` bash
// Create a folder
# Create a folder
mkdir actions-runner && cd actions-runner
// Download the latest runner package
# Download the latest runner package
curl -O -L https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-linux-x64-<RUNNER_VERSION>.tar.gz
// Extract the installer
# Extract the installer
tar xzf ./actions-runner-linux-x64-<RUNNER_VERSION>.tar.gz
```
## Linux arm64 (Pre-release)
``` bash
// Create a folder
# Create a folder
mkdir actions-runner && cd actions-runner
// Download the latest runner package
# Download the latest runner package
curl -O -L https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-linux-arm64-<RUNNER_VERSION>.tar.gz
// Extract the installer
# Extract the installer
tar xzf ./actions-runner-linux-arm64-<RUNNER_VERSION>.tar.gz
```
## Linux arm (Pre-release)
``` bash
// Create a folder
# Create a folder
mkdir actions-runner && cd actions-runner
// Download the latest runner package
# Download the latest runner package
curl -O -L https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-linux-arm-<RUNNER_VERSION>.tar.gz
// Extract the installer
# Extract the installer
tar xzf ./actions-runner-linux-arm-<RUNNER_VERSION>.tar.gz
```


@@ -1 +1 @@
2.168.0
<Update to ./src/runnerversion when creating release>

12
runner.yaml Normal file

@@ -0,0 +1,12 @@
apiVersion: actions.github.com/v1alpha1
kind: Runner
metadata:
name: auto-scale-runners
spec:
organization: monalisa
group: default
repository: main123
githubAdminToken: 62c13e14e947958516c103a9584f66227697c447
env:
- name: K8S_HOST_IP
value: "192.168.120.1"

46
runners.yaml Normal file

@@ -0,0 +1,46 @@
apiVersion: actions.github.com/v1alpha1
kind: AutoScaleRunner
metadata:
name: auto-scale-runners
spec:
minReplicas: 1
maxReplicas: 5
configURL: https://github.com/TingluoHuang/example-services
githubTokenSecretKeyRef:
name: githubtoken
key: GITHUB_PAT
template:
spec:
setupDockerInDocker: true
imagePullPolicy: Always
runnerUpdateHandler:
containers:
- name: update-image
image: huangtingluo/workflow_dispatch:latest
imagePullPolicy: Always
env:
- name: GITHUB_TOKEN
valueFrom:
secretKeyRef:
name: githubtoken
key: GITHUB_PAT
- name: GITHUB_OWNER
value: tingluohuang
- name: GITHUB_REPO
value: "workflow_dispatch"
- name: GITHUB_EXTRA_CURL_ARG
value: "-v"
- name: GITHUB_WORKFLOW
value: "2539181"
- name: GITHUB_WORKFLOW_INPUTS
value: "{\"test_input\":\"test\"}"
# - name: GITHUB_REPO
# value: "k8s-runner-image"
# - name: GITHUB_EXTRA_CURL_ARG
# value: "-v"
# - name: GITHUB_WORKFLOW
# value: "docker-publish.yml"
# - name: GITHUB_WORKFLOW_INPUTS
# value: "{\"runnerDownloadUrl\":\"https://github.com/TingluoHuang/runner/releases/download/test/actions-runner-linux-x64-2.299.0.tar.gz\"}"

4
scripts/README.md Normal file

@@ -0,0 +1,4 @@
# Sample scripts for self-hosted runners
Here are some examples to work from if you'd like to automate your use of self-hosted runners.
See the docs [here](../docs/automate.md).
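For example, registering a repo-scoped runner with the `create-latest-svc.sh` script (shown further below) might look like the following. This is a minimal sketch based on that script's own usage notes; `<yourPAT>` is a placeholder for a personal access token with admin rights on the target repo or org.
``` bash
# Run on a VM (not in a container); the script downloads the latest runner,
# registers it against the given scope, and installs it as a service.
export RUNNER_CFG_PAT=<yourPAT>
./create-latest-svc.sh myuser/myrepo                    # repo-scoped runner
# ./create-latest-svc.sh myorg                          # org-scoped runner
# ./create-latest-svc.sh myorg my.ghe.deployment.net    # GitHub Enterprise Server
```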


@@ -7,27 +7,29 @@ set -e
# Configures as a service
#
# Examples:
# RUNNER_CFG_PAT=<yourPAT> ./create-latest-svc.sh myuser/myrepo
# RUNNER_CFG_PAT=<yourPAT> ./create-latest-svc.sh myorg
# RUNNER_CFG_PAT=<yourPAT> ./create-latest-svc.sh myuser/myrepo my.ghe.deployment.net
# RUNNER_CFG_PAT=<yourPAT> ./create-latest-svc.sh myorg my.ghe.deployment.net
#
# Usage:
# export RUNNER_CFG_PAT=<yourPAT>
# ./create-latest-svc scope [name] [user]
# ./create-latest-svc scope [ghe_domain] [name] [user]
#
# scope required repo (:owner/:repo) or org (:organization)
# ghe_domain optional the fully qualified domain name of your GitHub Enterprise Server deployment
# name optional defaults to hostname
# user optional user svc will run as. defaults to current
#
# scope required repo (:owner/:repo) or org (:organization)
# name optional defaults to hostname
# user optional user svc will run as. defaults to current
#
# Notes:
# PATS over envvars are more secure
# Should be used on VMs and not containers
# Works on OSX and Linux
# Works on OSX and Linux
# Assumes x64 arch
#
runner_scope=${1}
runner_name=${2:-$(hostname)}
svc_user=${3:-$USER}
ghe_hostname=${2}
runner_name=${3:-$(hostname)}
svc_user=${4:-$USER}
echo "Configuring runner @ ${runner_scope}"
sudo echo
@@ -51,9 +53,9 @@ which curl || fatal "curl required. Please install in PATH with apt-get, brew,
which jq || fatal "jq required. Please install in PATH with apt-get, brew, etc"
# bail early if there's already a runner there. also sudo early
if [ -d ./runner ]; then
if [ -d ./runner ]; then
fatal "Runner already exists. Use a different directory or delete ./runner"
fi
fi
sudo -u ${svc_user} mkdir runner
@@ -66,15 +68,20 @@ sudo -u ${svc_user} mkdir runner
echo
echo "Generating a registration token..."
# if the scope has a slash, it's an repo runner
base_api_url="https://api.github.com/orgs"
if [[ "$runner_scope" == *\/* ]]; then
base_api_url="https://api.github.com/repos"
base_api_url="https://api.github.com"
if [ -n "${ghe_hostname}" ]; then
base_api_url="https://${ghe_hostname}/api/v3"
fi
export RUNNER_TOKEN=$(curl -s -X POST ${base_api_url}/${runner_scope}/actions/runners/registration-token -H "accept: application/vnd.github.everest-preview+json" -H "authorization: token ${RUNNER_CFG_PAT}" | jq -r '.token')
# if the scope has a slash, it's a repo runner
orgs_or_repos="orgs"
if [[ "$runner_scope" == *\/* ]]; then
orgs_or_repos="repos"
fi
if [ -z "$RUNNER_TOKEN" ]; then fatal "Failed to get a token"; fi
export RUNNER_TOKEN=$(curl -s -X POST ${base_api_url}/${orgs_or_repos}/${runner_scope}/actions/runners/registration-token -H "accept: application/vnd.github.everest-preview+json" -H "authorization: token ${RUNNER_CFG_PAT}" | jq -r '.token')
if [ "null" == "$RUNNER_TOKEN" -o -z "$RUNNER_TOKEN" ]; then fatal "Failed to get a token"; fi
#---------------------------------------
# Download latest released and extract
@@ -82,6 +89,7 @@ if [ -z "$RUNNER_TOKEN" ]; then fatal "Failed to get a token"; fi
echo
echo "Downloading latest runner ..."
# For the GHES Alpha, download the runner from github.com
latest_version_label=$(curl -s -X GET 'https://api.github.com/repos/actions/runner/releases/latest' | jq -r '.tag_name')
latest_version=$(echo ${latest_version_label:1})
runner_file="actions-runner-${runner_plat}-x64-${latest_version}.tar.gz"
@@ -116,6 +124,10 @@ pushd ./runner
# Unattend config
#---------------------------------------
runner_url="https://github.com/${runner_scope}"
if [ -n "${ghe_hostname}" ]; then
runner_url="https://${ghe_hostname}/${runner_scope}"
fi
echo
echo "Configuring ${runner_name} @ $runner_url"
echo "./config.sh --unattended --url $runner_url --token *** --name $runner_name"
@@ -127,9 +139,9 @@ sudo -E -u ${svc_user} ./config.sh --unattended --url $runner_url --token $RUNNE
echo
echo "Configuring as a service ..."
prefix=""
if [ "${runner_plat}" == "linux" ]; then
prefix="sudo "
fi
if [ "${runner_plat}" == "linux" ]; then
prefix="sudo "
fi
${prefix}./svc.sh install ${svc_user}
${prefix}./svc.sh start


@@ -154,7 +154,16 @@ function Invoke-With-Retry([ScriptBlock]$ScriptBlock, [int]$MaxAttempts = 3, [in
function Get-Machine-Architecture() {
Say-Invocation $MyInvocation
# possible values: amd64, x64, x86, arm64, arm
# On PS x86, PROCESSOR_ARCHITECTURE reports x86 even on x64 systems.
# To get the correct architecture, we need to use PROCESSOR_ARCHITEW6432.
# PS x64 doesn't define this, so we fall back to PROCESSOR_ARCHITECTURE.
# Possible values: amd64, x64, x86, arm64, arm
if( $ENV:PROCESSOR_ARCHITEW6432 -ne $null )
{
return $ENV:PROCESSOR_ARCHITEW6432
}
return $ENV:PROCESSOR_ARCHITECTURE
}
@@ -684,3 +693,196 @@ Prepend-Sdk-InstallRoot-To-Path -InstallRoot $InstallRoot -BinFolderRelativePath
Say "Installation finished"
exit 0
# SIG # Begin signature block
# MIIjhwYJKoZIhvcNAQcCoIIjeDCCI3QCAQExDzANBglghkgBZQMEAgEFADB5Bgor
# BgEEAYI3AgEEoGswaTA0BgorBgEEAYI3AgEeMCYCAwEAAAQQH8w7YFlLCE63JNLG
# KX7zUQIBAAIBAAIBAAIBAAIBADAxMA0GCWCGSAFlAwQCAQUABCAiKYSY4KtkeThH
# d5M1aXqv1K0/pff07QwfUbYZ/qX5LqCCDYUwggYDMIID66ADAgECAhMzAAABiK9S
# 1rmSbej5AAAAAAGIMA0GCSqGSIb3DQEBCwUAMH4xCzAJBgNVBAYTAlVTMRMwEQYD
# VQQIEwpXYXNoaW5ndG9uMRAwDgYDVQQHEwdSZWRtb25kMR4wHAYDVQQKExVNaWNy
# b3NvZnQgQ29ycG9yYXRpb24xKDAmBgNVBAMTH01pY3Jvc29mdCBDb2RlIFNpZ25p
# bmcgUENBIDIwMTEwHhcNMjAwMzA0MTgzOTQ4WhcNMjEwMzAzMTgzOTQ4WjB0MQsw
# CQYDVQQGEwJVUzETMBEGA1UECBMKV2FzaGluZ3RvbjEQMA4GA1UEBxMHUmVkbW9u
# ZDEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMR4wHAYDVQQDExVNaWNy
# b3NvZnQgQ29ycG9yYXRpb24wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB
# AQCSCNryE+Cewy2m4t/a74wZ7C9YTwv1PyC4BvM/kSWPNs8n0RTe+FvYfU+E9uf0
# t7nYlAzHjK+plif2BhD+NgdhIUQ8sVwWO39tjvQRHjP2//vSvIfmmkRoML1Ihnjs
# 9kQiZQzYRDYYRp9xSQYmRwQjk5hl8/U7RgOiQDitVHaU7BT1MI92lfZRuIIDDYBd
# vXtbclYJMVOwqZtv0O9zQCret6R+fRSGaDNfEEpcILL+D7RV3M4uaJE4Ta6KAOdv
# V+MVaJp1YXFTZPKtpjHO6d9pHQPZiG7NdC6QbnRGmsa48uNQrb6AfmLKDI1Lp31W
# MogTaX5tZf+CZT9PSuvjOCLNAgMBAAGjggGCMIIBfjAfBgNVHSUEGDAWBgorBgEE
# AYI3TAgBBggrBgEFBQcDAzAdBgNVHQ4EFgQUj9RJL9zNrPcL10RZdMQIXZN7MG8w
# VAYDVR0RBE0wS6RJMEcxLTArBgNVBAsTJE1pY3Jvc29mdCBJcmVsYW5kIE9wZXJh
# dGlvbnMgTGltaXRlZDEWMBQGA1UEBRMNMjMwMDEyKzQ1ODM4NjAfBgNVHSMEGDAW
# gBRIbmTlUAXTgqoXNzcitW2oynUClTBUBgNVHR8ETTBLMEmgR6BFhkNodHRwOi8v
# d3d3Lm1pY3Jvc29mdC5jb20vcGtpb3BzL2NybC9NaWNDb2RTaWdQQ0EyMDExXzIw
# MTEtMDctMDguY3JsMGEGCCsGAQUFBwEBBFUwUzBRBggrBgEFBQcwAoZFaHR0cDov
# L3d3dy5taWNyb3NvZnQuY29tL3BraW9wcy9jZXJ0cy9NaWNDb2RTaWdQQ0EyMDEx
# XzIwMTEtMDctMDguY3J0MAwGA1UdEwEB/wQCMAAwDQYJKoZIhvcNAQELBQADggIB
# ACnXo8hjp7FeT+H6iQlV3CcGnkSbFvIpKYafgzYCFo3UHY1VHYJVb5jHEO8oG26Q
# qBELmak6MTI+ra3WKMTGhE1sEIlowTcp4IAs8a5wpCh6Vf4Z/bAtIppP3p3gXk2X
# 8UXTc+WxjQYsDkFiSzo/OBa5hkdW1g4EpO43l9mjToBdqEPtIXsZ7Hi1/6y4gK0P
# mMiwG8LMpSn0n/oSHGjrUNBgHJPxgs63Slf58QGBznuXiRaXmfTUDdrvhRocdxIM
# i8nXQwWACMiQzJSRzBP5S2wUq7nMAqjaTbeXhJqD2SFVHdUYlKruvtPSwbnqSRWT
# GI8s4FEXt+TL3w5JnwVZmZkUFoioQDMMjFyaKurdJ6pnzbr1h6QW0R97fWc8xEIz
# LIOiU2rjwWAtlQqFO8KNiykjYGyEf5LyAJKAO+rJd9fsYR+VBauIEQoYmjnUbTXM
# SY2Lf5KMluWlDOGVh8q6XjmBccpaT+8tCfxpaVYPi1ncnwTwaPQvVq8RjWDRB7Pa
# 8ruHgj2HJFi69+hcq7mWx5nTUtzzFa7RSZfE5a1a5AuBmGNRr7f8cNfa01+tiWjV
# Kk1a+gJUBSP0sIxecFbVSXTZ7bqeal45XSDIisZBkWb+83TbXdTGMDSUFKTAdtC+
# r35GfsN8QVy59Hb5ZYzAXczhgRmk7NyE6jD0Ym5TKiW5MIIHejCCBWKgAwIBAgIK
# YQ6Q0gAAAAAAAzANBgkqhkiG9w0BAQsFADCBiDELMAkGA1UEBhMCVVMxEzARBgNV
# BAgTCldhc2hpbmd0b24xEDAOBgNVBAcTB1JlZG1vbmQxHjAcBgNVBAoTFU1pY3Jv
# c29mdCBDb3Jwb3JhdGlvbjEyMDAGA1UEAxMpTWljcm9zb2Z0IFJvb3QgQ2VydGlm
# aWNhdGUgQXV0aG9yaXR5IDIwMTEwHhcNMTEwNzA4MjA1OTA5WhcNMjYwNzA4MjEw
# OTA5WjB+MQswCQYDVQQGEwJVUzETMBEGA1UECBMKV2FzaGluZ3RvbjEQMA4GA1UE
# BxMHUmVkbW9uZDEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMSgwJgYD
# VQQDEx9NaWNyb3NvZnQgQ29kZSBTaWduaW5nIFBDQSAyMDExMIICIjANBgkqhkiG
# 9w0BAQEFAAOCAg8AMIICCgKCAgEAq/D6chAcLq3YbqqCEE00uvK2WCGfQhsqa+la
# UKq4BjgaBEm6f8MMHt03a8YS2AvwOMKZBrDIOdUBFDFC04kNeWSHfpRgJGyvnkmc
# 6Whe0t+bU7IKLMOv2akrrnoJr9eWWcpgGgXpZnboMlImEi/nqwhQz7NEt13YxC4D
# dato88tt8zpcoRb0RrrgOGSsbmQ1eKagYw8t00CT+OPeBw3VXHmlSSnnDb6gE3e+
# lD3v++MrWhAfTVYoonpy4BI6t0le2O3tQ5GD2Xuye4Yb2T6xjF3oiU+EGvKhL1nk
# kDstrjNYxbc+/jLTswM9sbKvkjh+0p2ALPVOVpEhNSXDOW5kf1O6nA+tGSOEy/S6
# A4aN91/w0FK/jJSHvMAhdCVfGCi2zCcoOCWYOUo2z3yxkq4cI6epZuxhH2rhKEmd
# X4jiJV3TIUs+UsS1Vz8kA/DRelsv1SPjcF0PUUZ3s/gA4bysAoJf28AVs70b1FVL
# 5zmhD+kjSbwYuER8ReTBw3J64HLnJN+/RpnF78IcV9uDjexNSTCnq47f7Fufr/zd
# sGbiwZeBe+3W7UvnSSmnEyimp31ngOaKYnhfsi+E11ecXL93KCjx7W3DKI8sj0A3
# T8HhhUSJxAlMxdSlQy90lfdu+HggWCwTXWCVmj5PM4TasIgX3p5O9JawvEagbJjS
# 4NaIjAsCAwEAAaOCAe0wggHpMBAGCSsGAQQBgjcVAQQDAgEAMB0GA1UdDgQWBBRI
# bmTlUAXTgqoXNzcitW2oynUClTAZBgkrBgEEAYI3FAIEDB4KAFMAdQBiAEMAQTAL
# BgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBRyLToCMZBD
# uRQFTuHqp8cx0SOJNDBaBgNVHR8EUzBRME+gTaBLhklodHRwOi8vY3JsLm1pY3Jv
# c29mdC5jb20vcGtpL2NybC9wcm9kdWN0cy9NaWNSb29DZXJBdXQyMDExXzIwMTFf
# MDNfMjIuY3JsMF4GCCsGAQUFBwEBBFIwUDBOBggrBgEFBQcwAoZCaHR0cDovL3d3
# dy5taWNyb3NvZnQuY29tL3BraS9jZXJ0cy9NaWNSb29DZXJBdXQyMDExXzIwMTFf
# MDNfMjIuY3J0MIGfBgNVHSAEgZcwgZQwgZEGCSsGAQQBgjcuAzCBgzA/BggrBgEF
# BQcCARYzaHR0cDovL3d3dy5taWNyb3NvZnQuY29tL3BraW9wcy9kb2NzL3ByaW1h
# cnljcHMuaHRtMEAGCCsGAQUFBwICMDQeMiAdAEwAZQBnAGEAbABfAHAAbwBsAGkA
# YwB5AF8AcwB0AGEAdABlAG0AZQBuAHQALiAdMA0GCSqGSIb3DQEBCwUAA4ICAQBn
# 8oalmOBUeRou09h0ZyKbC5YR4WOSmUKWfdJ5DJDBZV8uLD74w3LRbYP+vj/oCso7
# v0epo/Np22O/IjWll11lhJB9i0ZQVdgMknzSGksc8zxCi1LQsP1r4z4HLimb5j0b
# pdS1HXeUOeLpZMlEPXh6I/MTfaaQdION9MsmAkYqwooQu6SpBQyb7Wj6aC6VoCo/
# KmtYSWMfCWluWpiW5IP0wI/zRive/DvQvTXvbiWu5a8n7dDd8w6vmSiXmE0OPQvy
# CInWH8MyGOLwxS3OW560STkKxgrCxq2u5bLZ2xWIUUVYODJxJxp/sfQn+N4sOiBp
# mLJZiWhub6e3dMNABQamASooPoI/E01mC8CzTfXhj38cbxV9Rad25UAqZaPDXVJi
# hsMdYzaXht/a8/jyFqGaJ+HNpZfQ7l1jQeNbB5yHPgZ3BtEGsXUfFL5hYbXw3MYb
# BL7fQccOKO7eZS/sl/ahXJbYANahRr1Z85elCUtIEJmAH9AAKcWxm6U/RXceNcbS
# oqKfenoi+kiVH6v7RyOA9Z74v2u3S5fi63V4GuzqN5l5GEv/1rMjaHXmr/r8i+sL
# gOppO6/8MO0ETI7f33VtY5E90Z1WTk+/gFcioXgRMiF670EKsT/7qMykXcGhiJtX
# cVZOSEXAQsmbdlsKgEhr/Xmfwb1tbWrJUnMTDXpQzTGCFVgwghVUAgEBMIGVMH4x
# CzAJBgNVBAYTAlVTMRMwEQYDVQQIEwpXYXNoaW5ndG9uMRAwDgYDVQQHEwdSZWRt
# b25kMR4wHAYDVQQKExVNaWNyb3NvZnQgQ29ycG9yYXRpb24xKDAmBgNVBAMTH01p
# Y3Jvc29mdCBDb2RlIFNpZ25pbmcgUENBIDIwMTECEzMAAAGIr1LWuZJt6PkAAAAA
# AYgwDQYJYIZIAWUDBAIBBQCgga4wGQYJKoZIhvcNAQkDMQwGCisGAQQBgjcCAQQw
# HAYKKwYBBAGCNwIBCzEOMAwGCisGAQQBgjcCARUwLwYJKoZIhvcNAQkEMSIEIFxZ
# Yezh3liQqiGQuXNa+zYfoSIbLqOpdEn2ZKskBkisMEIGCisGAQQBgjcCAQwxNDAy
# oBSAEgBNAGkAYwByAG8AcwBvAGYAdKEagBhodHRwOi8vd3d3Lm1pY3Jvc29mdC5j
# b20wDQYJKoZIhvcNAQEBBQAEggEAjLUrwCXJCPHZulZuKAQSX+MfnIRFAhlN7ru2
# 6H8rudvhkWgqMISkLb9gFDPR5FhR4sqdYgKW4P0ERao9ypCGi1FWDLqygC2XBbHj
# NEQHBxHJs5SMsMAXNSIcYHqVAvhF3nXoseaNBkhOTrkQ1FS/fW7AfDGRbsiiESzv
# lebf92shZylBFKOsKQLAL0mF/B7xrxHJIj5dgQoD1phATRNHOEQj3jgmkidFWowV
# 4r8MzbxRhAEORbnJexlUoDQJQH3YwxuUyXkTvrYMTKSbGJLlwRaZQbrcBU0k4gCH
# y8Sci+p9Rq+aOTzLCoNrZyh9E7OdwVDm1FJAtY30bV50T2WSFKGCEuIwghLeBgor
# BgEEAYI3AwMBMYISzjCCEsoGCSqGSIb3DQEHAqCCErswghK3AgEDMQ8wDQYJYIZI
# AWUDBAIBBQAwggFRBgsqhkiG9w0BCRABBKCCAUAEggE8MIIBOAIBAQYKKwYBBAGE
# WQoDATAxMA0GCWCGSAFlAwQCAQUABCD7JNcBBSfhlKPL1tN3CEKRKJuT/dZ8RO9K
# orYLXJeLTwIGXvN89YD7GBMyMDIwMDcwMTE0MTYyMC40MDVaMASAAgH0oIHQpIHN
# MIHKMQswCQYDVQQGEwJVUzELMAkGA1UECBMCV0ExEDAOBgNVBAcTB1JlZG1vbmQx
# HjAcBgNVBAoTFU1pY3Jvc29mdCBDb3Jwb3JhdGlvbjEtMCsGA1UECxMkTWljcm9z
# b2Z0IElyZWxhbmQgT3BlcmF0aW9ucyBMaW1pdGVkMSYwJAYDVQQLEx1UaGFsZXMg
# VFNTIEVTTjoxNzlFLTRCQjAtODI0NjElMCMGA1UEAxMcTWljcm9zb2Z0IFRpbWUt
# U3RhbXAgU2VydmljZaCCDjkwggTxMIID2aADAgECAhMzAAABDKp4btzMQkzBAAAA
# AAEMMA0GCSqGSIb3DQEBCwUAMHwxCzAJBgNVBAYTAlVTMRMwEQYDVQQIEwpXYXNo
# aW5ndG9uMRAwDgYDVQQHEwdSZWRtb25kMR4wHAYDVQQKExVNaWNyb3NvZnQgQ29y
# cG9yYXRpb24xJjAkBgNVBAMTHU1pY3Jvc29mdCBUaW1lLVN0YW1wIFBDQSAyMDEw
# MB4XDTE5MTAyMzIzMTkxNloXDTIxMDEyMTIzMTkxNlowgcoxCzAJBgNVBAYTAlVT
# MQswCQYDVQQIEwJXQTEQMA4GA1UEBxMHUmVkbW9uZDEeMBwGA1UEChMVTWljcm9z
# b2Z0IENvcnBvcmF0aW9uMS0wKwYDVQQLEyRNaWNyb3NvZnQgSXJlbGFuZCBPcGVy
# YXRpb25zIExpbWl0ZWQxJjAkBgNVBAsTHVRoYWxlcyBUU1MgRVNOOjE3OUUtNEJC
# MC04MjQ2MSUwIwYDVQQDExxNaWNyb3NvZnQgVGltZS1TdGFtcCBTZXJ2aWNlMIIB
# IjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAq5011+XqVJmQKtiw39igeEMv
# CLcZ1forbmxsDkpnCN1SrThKI+n2Pr3zqTzJVgdJFCoKm1ks1gtRJ7HaL6tDkrOw
# 8XJmfJaxyQAluCQ+e40NI+A4w+u59Gy89AVY5lJNrmCva6gozfg1kxw6abV5WWr+
# PjEpNCshO4hxv3UqgMcCKnT2YVSZzF1Gy7APub1fY0P1vNEuOFKrNCEEvWIKRrqs
# eyBB73G8KD2yw6jfz0VKxNSRAdhJV/ghOyrDt5a+L6C3m1rpr8sqiof3iohv3ANI
# gNqw6ex+4+G+B7JMbIHbGpPdebedL6ePbuBCnbgJoDn340k0aw6ij21GvvUnkQID
# AQABo4IBGzCCARcwHQYDVR0OBBYEFAlCOq9DDIa0A0oqgKtM5vjuZeK+MB8GA1Ud
# IwQYMBaAFNVjOlyKMZDzQ3t8RhvFM2hahW1VMFYGA1UdHwRPME0wS6BJoEeGRWh0
# dHA6Ly9jcmwubWljcm9zb2Z0LmNvbS9wa2kvY3JsL3Byb2R1Y3RzL01pY1RpbVN0
# YVBDQV8yMDEwLTA3LTAxLmNybDBaBggrBgEFBQcBAQROMEwwSgYIKwYBBQUHMAKG
# Pmh0dHA6Ly93d3cubWljcm9zb2Z0LmNvbS9wa2kvY2VydHMvTWljVGltU3RhUENB
# XzIwMTAtMDctMDEuY3J0MAwGA1UdEwEB/wQCMAAwEwYDVR0lBAwwCgYIKwYBBQUH
# AwgwDQYJKoZIhvcNAQELBQADggEBAET3xBg/IZ9zdOfwbDGK7cK3qKYt/qUOlbRB
# zgeNjb32K86nGeRGkBee10dVOEGWUw6KtBeWh1LQ70b64/tLtiLcsf9JzaAyDYb1
# sRmMi5fjRZ753TquaT8V7NJ7RfEuYfvZlubfQD0MVbU4tzsdZdYuxE37V2J9pN89
# j7GoFNtAnSnCn1MRxENAILgt9XzeQzTEDhFYW0N2DNphTkRPXGjpDmwi6WtkJ5fv
# 0iTyB4dwEC+/ed0lGbFLcytJoMwfTNMdH6gcnHlMzsniornGFZa5PPiV78XoZ9Fe
# upKo8ZKNGhLLLB5GTtqfHex5no3ioVSq+NthvhX0I/V+iXJsopowggZxMIIEWaAD
# AgECAgphCYEqAAAAAAACMA0GCSqGSIb3DQEBCwUAMIGIMQswCQYDVQQGEwJVUzET
# MBEGA1UECBMKV2FzaGluZ3RvbjEQMA4GA1UEBxMHUmVkbW9uZDEeMBwGA1UEChMV
# TWljcm9zb2Z0IENvcnBvcmF0aW9uMTIwMAYDVQQDEylNaWNyb3NvZnQgUm9vdCBD
# ZXJ0aWZpY2F0ZSBBdXRob3JpdHkgMjAxMDAeFw0xMDA3MDEyMTM2NTVaFw0yNTA3
# MDEyMTQ2NTVaMHwxCzAJBgNVBAYTAlVTMRMwEQYDVQQIEwpXYXNoaW5ndG9uMRAw
# DgYDVQQHEwdSZWRtb25kMR4wHAYDVQQKExVNaWNyb3NvZnQgQ29ycG9yYXRpb24x
# JjAkBgNVBAMTHU1pY3Jvc29mdCBUaW1lLVN0YW1wIFBDQSAyMDEwMIIBIjANBgkq
# hkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAqR0NvHcRijog7PwTl/X6f2mUa3RUENWl
# CgCChfvtfGhLLF/Fw+Vhwna3PmYrW/AVUycEMR9BGxqVHc4JE458YTBZsTBED/Fg
# iIRUQwzXTbg4CLNC3ZOs1nMwVyaCo0UN0Or1R4HNvyRgMlhgRvJYR4YyhB50YWeR
# X4FUsc+TTJLBxKZd0WETbijGGvmGgLvfYfxGwScdJGcSchohiq9LZIlQYrFd/Xcf
# PfBXday9ikJNQFHRD5wGPmd/9WbAA5ZEfu/QS/1u5ZrKsajyeioKMfDaTgaRtogI
# Neh4HLDpmc085y9Euqf03GS9pAHBIAmTeM38vMDJRF1eFpwBBU8iTQIDAQABo4IB
# 5jCCAeIwEAYJKwYBBAGCNxUBBAMCAQAwHQYDVR0OBBYEFNVjOlyKMZDzQ3t8RhvF
# M2hahW1VMBkGCSsGAQQBgjcUAgQMHgoAUwB1AGIAQwBBMAsGA1UdDwQEAwIBhjAP
# BgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNX2VsuP6KJcYmjRPZSQW9fOmhjE
# MFYGA1UdHwRPME0wS6BJoEeGRWh0dHA6Ly9jcmwubWljcm9zb2Z0LmNvbS9wa2kv
# Y3JsL3Byb2R1Y3RzL01pY1Jvb0NlckF1dF8yMDEwLTA2LTIzLmNybDBaBggrBgEF
# BQcBAQROMEwwSgYIKwYBBQUHMAKGPmh0dHA6Ly93d3cubWljcm9zb2Z0LmNvbS9w
# a2kvY2VydHMvTWljUm9vQ2VyQXV0XzIwMTAtMDYtMjMuY3J0MIGgBgNVHSABAf8E
# gZUwgZIwgY8GCSsGAQQBgjcuAzCBgTA9BggrBgEFBQcCARYxaHR0cDovL3d3dy5t
# aWNyb3NvZnQuY29tL1BLSS9kb2NzL0NQUy9kZWZhdWx0Lmh0bTBABggrBgEFBQcC
# AjA0HjIgHQBMAGUAZwBhAGwAXwBQAG8AbABpAGMAeQBfAFMAdABhAHQAZQBtAGUA
# bgB0AC4gHTANBgkqhkiG9w0BAQsFAAOCAgEAB+aIUQ3ixuCYP4FxAz2do6Ehb7Pr
# psz1Mb7PBeKp/vpXbRkws8LFZslq3/Xn8Hi9x6ieJeP5vO1rVFcIK1GCRBL7uVOM
# zPRgEop2zEBAQZvcXBf/XPleFzWYJFZLdO9CEMivv3/Gf/I3fVo/HPKZeUqRUgCv
# OA8X9S95gWXZqbVr5MfO9sp6AG9LMEQkIjzP7QOllo9ZKby2/QThcJ8ySif9Va8v
# /rbljjO7Yl+a21dA6fHOmWaQjP9qYn/dxUoLkSbiOewZSnFjnXshbcOco6I8+n99
# lmqQeKZt0uGc+R38ONiU9MalCpaGpL2eGq4EQoO4tYCbIjggtSXlZOz39L9+Y1kl
# D3ouOVd2onGqBooPiRa6YacRy5rYDkeagMXQzafQ732D8OE7cQnfXXSYIghh2rBQ
# Hm+98eEA3+cxB6STOvdlR3jo+KhIq/fecn5ha293qYHLpwmsObvsxsvYgrRyzR30
# uIUBHoD7G4kqVDmyW9rIDVWZeodzOwjmmC3qjeAzLhIp9cAvVCch98isTtoouLGp
# 25ayp0Kiyc8ZQU3ghvkqmqMRZjDTu3QyS99je/WZii8bxyGvWbWu3EQ8l1Bx16HS
# xVXjad5XwdHeMMD9zOZN+w2/XU/pnR4ZOC+8z1gFLu8NoFA12u8JJxzVs341Hgi6
# 2jbb01+P3nSISRKhggLLMIICNAIBATCB+KGB0KSBzTCByjELMAkGA1UEBhMCVVMx
# CzAJBgNVBAgTAldBMRAwDgYDVQQHEwdSZWRtb25kMR4wHAYDVQQKExVNaWNyb3Nv
# ZnQgQ29ycG9yYXRpb24xLTArBgNVBAsTJE1pY3Jvc29mdCBJcmVsYW5kIE9wZXJh
# dGlvbnMgTGltaXRlZDEmMCQGA1UECxMdVGhhbGVzIFRTUyBFU046MTc5RS00QkIw
# LTgyNDYxJTAjBgNVBAMTHE1pY3Jvc29mdCBUaW1lLVN0YW1wIFNlcnZpY2WiIwoB
# ATAHBgUrDgMCGgMVAMsg9FQ9pgPLXI2Ld5z7xDS0QAZ9oIGDMIGApH4wfDELMAkG
# A1UEBhMCVVMxEzARBgNVBAgTCldhc2hpbmd0b24xEDAOBgNVBAcTB1JlZG1vbmQx
# HjAcBgNVBAoTFU1pY3Jvc29mdCBDb3Jwb3JhdGlvbjEmMCQGA1UEAxMdTWljcm9z
# b2Z0IFRpbWUtU3RhbXAgUENBIDIwMTAwDQYJKoZIhvcNAQEFBQACBQDipo0MMCIY
# DzIwMjAwNzAxMTIxODIwWhgPMjAyMDA3MDIxMjE4MjBaMHQwOgYKKwYBBAGEWQoE
# ATEsMCowCgIFAOKmjQwCAQAwBwIBAAICE70wBwIBAAICEeIwCgIFAOKn3owCAQAw
# NgYKKwYBBAGEWQoEAjEoMCYwDAYKKwYBBAGEWQoDAqAKMAgCAQACAwehIKEKMAgC
# AQACAwGGoDANBgkqhkiG9w0BAQUFAAOBgQCOPjlHOH8nYtgt2XnpKXenxPUR03ED
# xPBm8XR5Z1vIq53RU9jG6yYcYNTdK+q38SGZtu0W/SgagTfKCQhjhRakuv7rGSs2
# dlhx9LGCoc/q1vqmZpRSjkqWVcc/NzmldUWIWnLlV6rmLGoDmfCH5BcsiU6Eo6wU
# iUVwnnXoqsCaBzGCAw0wggMJAgEBMIGTMHwxCzAJBgNVBAYTAlVTMRMwEQYDVQQI
# EwpXYXNoaW5ndG9uMRAwDgYDVQQHEwdSZWRtb25kMR4wHAYDVQQKExVNaWNyb3Nv
# ZnQgQ29ycG9yYXRpb24xJjAkBgNVBAMTHU1pY3Jvc29mdCBUaW1lLVN0YW1wIFBD
# QSAyMDEwAhMzAAABDKp4btzMQkzBAAAAAAEMMA0GCWCGSAFlAwQCAQUAoIIBSjAa
# BgkqhkiG9w0BCQMxDQYLKoZIhvcNAQkQAQQwLwYJKoZIhvcNAQkEMSIEIDpwhjyu
# zgu3Kmxpnpz86ZlthBqEzG5vaEMOkYRyuFCaMIH6BgsqhkiG9w0BCRACLzGB6jCB
# 5zCB5DCBvQQgg5AWKX7M1+m2//+V7qmRvt1K/ww5Muu8XzGJBqygVCkwgZgwgYCk
# fjB8MQswCQYDVQQGEwJVUzETMBEGA1UECBMKV2FzaGluZ3RvbjEQMA4GA1UEBxMH
# UmVkbW9uZDEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMSYwJAYDVQQD
# Ex1NaWNyb3NvZnQgVGltZS1TdGFtcCBQQ0EgMjAxMAITMwAAAQyqeG7czEJMwQAA
# AAABDDAiBCD11urvv5vgo4gFVQ2NMVrzgxT87Yuiq16YdswYbaYeITANBgkqhkiG
# 9w0BAQsFAASCAQAi3q8hwcT2ft4b2EleaiyZxOImV/cKusmth1dtCh5/Jb0GbOld
# f5cSalrjf42MNPodWAtgmWozkYrQF6HxnsOiYiamfRA8E3E7xyRMy7AFfAhjcwMi
# xaW4Iye6E1Ec6LtULANxfDtG/KIdCWdZxKqOezL3nzFNQWmm1mXPV+UnKpnJkA3E
# DsQOUWk8J6ojDurhrP536WI+3arg8PcnppHBLd/xNKYdlsTb+6qndgzKXkDDt1CV
# 4zCyuZ7bO8eyZAmNoSZz22k7vus9UjBz/CDhXylo20N43nr29rWPItUgH4uvOGQn
# t26Y/yjBaQImz32psrfJEMbQ7cl789s8WOx8
# SIG # End signature block

18
src/Misc/download-runner.sh Executable file

@@ -0,0 +1,18 @@
#!/bin/bash
set -e
# if the scope has a slash, it's a repo runner
# orgs_or_repos="orgs"
# if [[ "$GITHUB_RUNNER_SCOPE" == *\/* ]]; then
# orgs_or_repos="repos"
# fi
#RUNNER_DOWNLOAD_URL=$(curl -s -X GET ${GITHUB_API_URL}/${orgs_or_repos}/${GITHUB_RUNNER_SCOPE}/actions/runners/downloads -H "authorization: token $GITHUB_PAT" -H "accept: application/vnd.github.everest-preview+json" | jq -r '.[]|select(.os=="linux" and .architecture=="x64")|.download_url')
# download actions and unzip it
#curl -Ls ${RUNNER_DOWNLOAD_URL} | tar xz \
curl -Ls https://github.com/TingluoHuang/runner/releases/download/test/actions-runner-linux-x64-2.299.0.tar.gz | tar xz
# delete the download tar.gz file
rm -f ${RUNNER_DOWNLOAD_URL##*/}

77
src/Misc/entrypoint.sh Executable file

@@ -0,0 +1,77 @@
#!/bin/bash
set -euo pipefail
function fatal() {
echo "error: $1" >&2
exit 1
}
[ -n "${GITHUB_PAT:-""}" ] || fatal "GITHUB_PAT variable must be set"
[ -n "${RUNNER_CONFIG_URL:-""}" ] || fatal "RUNNER_CONFIG_URL variable must be set"
[ -n "${RUNNER_NAME:-""}" ] || fatal "RUNNER_NAME variable must be set"
# if [ -n "${RUNNER_NAME}" ]; then
# # Use container id to gen unique runner name if name not provide
# CONTAINER_ID=$(cat /proc/self/cgroup | head -n 1 | tr '/' '\n' | tail -1 | cut -c1-12)
# RUNNER_NAME="actions-runner-${CONTAINER_ID}"
# fi
# if the scope has a slash, it's a repo runner
# orgs_or_repos="orgs"
# if [[ "$GITHUB_RUNNER_SCOPE" == *\/* ]]; then
# orgs_or_repos="repos"
# fi
# RUNNER_REG_URL="${GITHUB_SERVER_URL:=https://github.com}/${GITHUB_RUNNER_SCOPE}"
echo "Runner Name : ${RUNNER_NAME}"
echo "Registration URL : ${RUNNER_CONFIG_URL}"
# echo "GitHub API URL : ${GITHUB_API_URL:=https://api.github.com}"
echo "Runner Labels : ${RUNNER_LABELS:=""}"
# TODO: if api url is not default, validate it ends in /api/v3
RUNNER_LABELS_ARG=""
if [ -n "${RUNNER_LABELS}" ]; then
RUNNER_LABELS_ARG="--labels ${RUNNER_LABELS}"
fi
RUNNER_GROUP_ARG=""
if [ -n "${RUNNER_GROUP}" ]; then
RUNNER_GROUP_ARG="--runnergroup ${RUNNER_GROUP}"
fi
# if [ -n "${K8S_HOST_IP}" ]; then
# export http_proxy=http://$K8S_HOST_IP:9090
# fi
# curl -v -s -X POST ${GITHUB_API_URL}/${orgs_or_repos}/${GITHUB_RUNNER_SCOPE}/actions/runners/registration-token -H "authorization: token $GITHUB_PAT" -H "accept: application/vnd.github.everest-preview+json"
# Generate registration token
# RUNNER_REG_TOKEN=$(curl -s -X POST ${GITHUB_API_URL}/${orgs_or_repos}/${GITHUB_RUNNER_SCOPE}/actions/runners/registration-token -H "authorization: token $GITHUB_PAT" -H "accept: application/vnd.github.everest-preview+json" | jq -r '.token')
# Create the runner and configure it
./config.sh --unattended --name $RUNNER_NAME --url $RUNNER_CONFIG_URL --pat $GITHUB_PAT $RUNNER_LABELS_ARG $RUNNER_GROUP_ARG --replace --ephemeral
# while (! docker version ); do
# # Docker takes a few seconds to initialize
# echo "Waiting for Docker to launch..."
# sleep 1
# done
# unset env
unset RUNNER_CONFIG_URL
unset GITHUB_PAT
unset RUNNER_NAME
unset RUNNER_GROUP
unset RUNNER_LABELS
unset RUNNER_LABELS_ARG
unset RUNNER_GROUP_ARG
# Run it
./bin/runsvc.sh interactive
# export http_proxy=""
# dockerdpid=$(kubectl exec $K8S_POD_NAME --container docker-host -- pidof dockerd)
# kubectl exec $K8S_POD_NAME --container docker-host -- kill -SIGINT $dockerdpid


@@ -1683,9 +1683,9 @@
}
},
"lodash": {
"version": "4.17.15",
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz",
"integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==",
"version": "4.17.19",
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.19.tgz",
"integrity": "sha512-JNvd8XER9GQX0v2qJgsaN/mzFCNA5BRe/j8JN9d+tWyGLSodKQHKFicdwNYzWwI3wjRnaKPsGj1XkBjx/F96DQ==",
"dev": true
},
"lodash.unescape": {

25
src/Misc/jobcomplete.sh Executable file

@@ -0,0 +1,25 @@
#!/bin/bash
echo "Test-0"
set -euo pipefail
echo "Test-1"
function fatal() {
echo "error: $1" >&2
exit 1
}
echo "Test-2"
[ -n "${K8S_POD_NAME:-""}" ] || fatal "K8S_POD_NAME variable must be set"
echo "Test-3"
# echo $http_proxy
# unset http_proxy
# unset https_proxy
# export http_proxy=
# export HTTP_PROXY=
echo "Test-4"
kubectl annotate pods $K8S_POD_NAME JOBCOMPLETE=$(date +%s) || fatal "Can't annotate job complete"
echo "Test-5"
exit 0

25
src/Misc/jobrunning.sh Executable file

@@ -0,0 +1,25 @@
#!/bin/bash
echo "Test-0"
set -euo pipefail
echo "Test-1"
function fatal() {
echo "error: $1" >&2
exit 1
}
echo "Test-2"
[ -n "${K8S_POD_NAME:-""}" ] || fatal "K8S_POD_NAME variable must be set"
echo "Test-3"
# echo $http_proxy
# unset http_proxy
# unset https_proxy
# export http_proxy=
# export HTTP_PROXY=
echo "Test-4"
kubectl annotate pods $K8S_POD_NAME JOBRUNNING=$(date +%s) --overwrite || fatal "Can't annotate job running"
echo "Test-5"
exit 0

32
src/Misc/jobstart.sh Executable file

@@ -0,0 +1,32 @@
#!/bin/bash
echo "Test-0"
set -euo pipefail
echo "Test-1"
function fatal() {
echo "error: $1" >&2
exit 1
}
echo "Test-2"
[ -n "${K8S_POD_NAME:-""}" ] || fatal "K8S_POD_NAME variable must be set"
echo "Test-3"
# echo $http_proxy
# # unset http_proxy
# # unset https_proxy
# export http_proxy=
# export HTTP_PROXY=
echo "Test-4"
kubectl -v9 get pod
echo "Test-5"
echo $K8S_POD_NAME
timestamp=$(date +%s)
echo $timestamp
kubectl annotate pods $K8S_POD_NAME JOBSTART=$timestamp
echo "Test-5"


@@ -1,6 +1,7 @@
#!/bin/bash
SVC_NAME="{{SvcNameVar}}"
SVC_NAME=${SVC_NAME// /_}
SVC_DESCRIPTION="{{SvcDescription}}"
user_id=`id -u`


@@ -9,7 +9,7 @@ fi
# Determine OS type
# Debian based OS (Debian, Ubuntu, Linux Mint) has /etc/debian_version
# Fedora based OS (Fedora, Redhat, Centos, Oracle Linux 7) has /etc/redhat-release
# Fedora based OS (Fedora, Red Hat Enterprise Linux, CentOS, Oracle Linux 7) has /etc/redhat-release
# SUSE based OS (OpenSUSE, SUSE Enterprise) has ID_LIKE=suse in /etc/os-release
function print_errormessage()
@@ -70,8 +70,8 @@ then
exit 1
fi
# libicu version prefer: libicu63 -> libicu60 -> libicu57 -> libicu55 -> libicu52
apt install -y libicu63 || apt install -y libicu60 || apt install -y libicu57 || apt install -y libicu55 || apt install -y libicu52
# libicu version prefer: libicu66 -> libicu63 -> libicu60 -> libicu57 -> libicu55 -> libicu52
apt install -y libicu66 || apt install -y libicu63 || apt install -y libicu60 || apt install -y libicu57 || apt install -y libicu55 || apt install -y libicu52
if [ $? -ne 0 ]
then
echo "'apt' failed with exit code '$?'"
@@ -99,8 +99,8 @@ then
exit 1
fi
# libicu version prefer: libicu63 -> libicu60 -> libicu57 -> libicu55 -> libicu52
apt-get install -y libicu63 || apt-get install -y libicu60 || apt install -y libicu57 || apt install -y libicu55 || apt install -y libicu52
# libicu version prefer: libicu66 -> libicu63 -> libicu60 -> libicu57 -> libicu55 -> libicu52
apt-get install -y libicu66 || apt-get install -y libicu63 || apt-get install -y libicu60 || apt install -y libicu57 || apt install -y libicu55 || apt install -y libicu52
if [ $? -ne 0 ]
then
echo "'apt-get' failed with exit code '$?'"
@@ -116,12 +116,12 @@ then
elif [ -e /etc/redhat-release ]
then
echo "The current OS is Fedora based"
echo "--------Redhat Version--------"
echo "--Fedora/RHEL/CentOS Version--"
cat /etc/redhat-release
echo "------------------------------"
# use dnf on fedora
# use yum on centos and redhat
# use yum on centos and rhel
if [ -e /etc/fedora-release ]
then
command -v dnf
@@ -191,7 +191,7 @@ then
redhatRelease=$(</etc/redhat-release)
if [[ $redhatRelease == "CentOS release 6."* || $redhatRelease == "Red Hat Enterprise Linux Server release 6."* ]]
then
echo "The current OS is Red Hat Enterprise Linux 6 or Centos 6"
echo "The current OS is Red Hat Enterprise Linux 6 or CentOS 6"
# Install known dependencies, as a best effort.
# The remaining dependencies are covered by the GitHub doc that will be shown by `print_rhel6message`


@@ -1,6 +1,7 @@
#!/bin/bash
SVC_NAME="{{SvcNameVar}}"
SVC_NAME=${SVC_NAME// /_}
SVC_DESCRIPTION="{{SvcDescription}}"
SVC_CMD=$1
@@ -62,12 +63,25 @@ function install()
sed "s/{{User}}/${run_as_user}/g; s/{{Description}}/$(echo ${SVC_DESCRIPTION} | sed -e 's/[\/&]/\\&/g')/g; s/{{RunnerRoot}}/$(echo ${RUNNER_ROOT} | sed -e 's/[\/&]/\\&/g')/g;" "${TEMPLATE_PATH}" > "${TEMP_PATH}" || failed "failed to create replacement temp file"
mv "${TEMP_PATH}" "${UNIT_PATH}" || failed "failed to copy unit file"
# Recent Fedora based Linux (CentOS/Redhat) has SELinux enabled by default
# We need to restore the security context on the unit file we added, otherwise systemd has no access to it.
command -v getenforce > /dev/null
if [ $? -eq 0 ]
then
selinuxEnabled=$(getenforce)
if [[ $selinuxEnabled == "Enforcing" ]]
then
# SELinux is enabled, so we need to restore the SELinux context on the service file
restorecon -r -v "${UNIT_PATH}" || failed "failed to restore SELinux context on ${UNIT_PATH}"
fi
fi
# unit file should not be executable and world writable
chmod 664 ${UNIT_PATH} || failed "failed to set permissions on ${UNIT_PATH}"
chmod 664 "${UNIT_PATH}" || failed "failed to set permissions on ${UNIT_PATH}"
systemctl daemon-reload || failed "failed to reload daemons"
# Since we started with sudo, runsvc.sh will be owned by root. Change this to current login user.
# Since we started with sudo, runsvc.sh will be owned by root. Change this to current login user.
cp ./bin/runsvc.sh ./runsvc.sh || failed "failed to copy runsvc.sh"
chown ${run_as_uid}:${run_as_gid} ./runsvc.sh || failed "failed to set owner for runsvc.sh"
chmod 755 ./runsvc.sh || failed "failed to set permission for runsvc.sh"


@@ -67,7 +67,7 @@ while [ -h "$SOURCE" ]; do # resolve $SOURCE until the file is no longer a symli
[[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
done
DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
cd $DIR
cd "$DIR"
source ./env.sh

23
src/Misc/runner_lifecycle.sh Executable file

@@ -0,0 +1,23 @@
#!/bin/bash
set -euo pipefail
EVENT=$1
TIMESTAMP=$2
echo $EVENT
echo $TIMESTAMP
function fatal() {
echo "error: $1" >&2
exit 1
}
[ -n "${K8S_POD_NAME:-""}" ] || fatal "K8S_POD_NAME variable must be set"
echo $K8S_POD_NAME
kubectl get pod
kubectl annotate pods $K8S_POD_NAME $EVENT=$TIMESTAMP
echo "DONE"


@@ -33,6 +33,9 @@ namespace GitHub.Runner.Common
[DataMember(EmitDefaultValue = false)]
public string PoolName { get; set; }
[DataMember(EmitDefaultValue = false)]
public bool Ephemeral { get; set; }
[DataMember(EmitDefaultValue = false)]
public string ServerUrl { get; set; }
@@ -108,9 +111,9 @@ namespace GitHub.Runner.Common
CredentialData GetMigratedCredentials();
RunnerSettings GetSettings();
void SaveCredential(CredentialData credential);
void SaveMigratedCredential(CredentialData credential);
void SaveSettings(RunnerSettings settings);
void DeleteCredential();
void DeleteMigratedCredential();
void DeleteSettings();
}
@@ -232,21 +235,6 @@ namespace GitHub.Runner.Common
File.SetAttributes(_credFilePath, File.GetAttributes(_credFilePath) | FileAttributes.Hidden);
}
public void SaveMigratedCredential(CredentialData credential)
{
Trace.Info("Saving {0} migrated credential @ {1}", credential.Scheme, _migratedCredFilePath);
if (File.Exists(_migratedCredFilePath))
{
// Delete existing credential file first, since the file is hidden and not able to overwrite.
Trace.Info("Delete exist runner migrated credential file.");
IOUtil.DeleteFile(_migratedCredFilePath);
}
IOUtil.SaveObject(credential, _migratedCredFilePath);
Trace.Info("Migrated Credentials Saved.");
File.SetAttributes(_migratedCredFilePath, File.GetAttributes(_migratedCredFilePath) | FileAttributes.Hidden);
}
public void SaveSettings(RunnerSettings settings)
{
Trace.Info("Saving runner settings.");
@@ -268,6 +256,11 @@ namespace GitHub.Runner.Common
IOUtil.Delete(_migratedCredFilePath, default(CancellationToken));
}
public void DeleteMigratedCredential()
{
IOUtil.Delete(_migratedCredFilePath, default(CancellationToken));
}
public void DeleteSettings()
{
IOUtil.Delete(_configFilePath, default(CancellationToken));


@@ -99,9 +99,11 @@ namespace GitHub.Runner.Common
// Secret args. Must be added to the "Secrets" getter as well.
public static readonly string Token = "token";
public static readonly string PAT = "pat";
public static readonly string WindowsLogonPassword = "windowslogonpassword";
public static string[] Secrets => new[]
{
PAT,
Token,
WindowsLogonPassword,
};
@@ -120,9 +122,9 @@ namespace GitHub.Runner.Common
public static class Flags
{
public static readonly string Commit = "commit";
public static readonly string Ephemeral = "ephemeral";
public static readonly string Help = "help";
public static readonly string Replace = "replace";
public static readonly string Once = "once";
public static readonly string RunAsService = "runasservice";
public static readonly string Unattended = "unattended";
public static readonly string Version = "version";
@@ -137,6 +139,9 @@ namespace GitHub.Runner.Common
public const int RunnerUpdating = 3;
public const int RunOnceRunnerUpdating = 4;
}
public static readonly string InternalTelemetryIssueDataKey = "_internal_telemetry";
public static readonly string WorkerCrash = "WORKER_CRASH";
}
public static class RunnerEvent


@@ -1,19 +1,18 @@
using GitHub.Runner.Common.Util;
using System;
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Diagnostics;
using System.Diagnostics.Tracing;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Reflection;
using System.Runtime.Loader;
using System.Threading;
using System.Threading.Tasks;
using System.Diagnostics;
using System.Net.Http;
using System.Diagnostics.Tracing;
using GitHub.DistributedTask.Logging;
using System.Net.Http.Headers;
using GitHub.Runner.Sdk;
namespace GitHub.Runner.Common
@@ -89,6 +88,7 @@ namespace GitHub.Runner.Common
this.SecretMasker.AddValueEncoder(ValueEncoders.JsonStringEscape);
this.SecretMasker.AddValueEncoder(ValueEncoders.UriDataEscape);
this.SecretMasker.AddValueEncoder(ValueEncoders.XmlDataEscape);
this.SecretMasker.AddValueEncoder(ValueEncoders.TrimDoubleQuotes);
// Create the trace manager.
if (string.IsNullOrEmpty(logFile))
@@ -614,9 +614,8 @@ namespace GitHub.Runner.Common
{
public static HttpClientHandler CreateHttpClientHandler(this IHostContext context)
{
HttpClientHandler clientHandler = new HttpClientHandler();
clientHandler.Proxy = context.WebProxy;
return clientHandler;
var handlerFactory = context.GetService<IHttpClientHandlerFactory>();
return handlerFactory.CreateClientHandler(context.WebProxy);
}
}


@@ -0,0 +1,19 @@
using System.Net.Http;
using GitHub.Runner.Sdk;
namespace GitHub.Runner.Common
{
[ServiceLocator(Default = typeof(HttpClientHandlerFactory))]
public interface IHttpClientHandlerFactory : IRunnerService
{
HttpClientHandler CreateClientHandler(RunnerWebProxy webProxy);
}
public class HttpClientHandlerFactory : RunnerService, IHttpClientHandlerFactory
{
public HttpClientHandler CreateClientHandler(RunnerWebProxy webProxy)
{
return new HttpClientHandler() { Proxy = webProxy };
}
}
}


@@ -22,6 +22,7 @@ namespace GitHub.Runner.Common
Task<List<TimelineRecord>> UpdateTimelineRecordsAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, IEnumerable<TimelineRecord> records, CancellationToken cancellationToken);
Task RaisePlanEventAsync<T>(Guid scopeIdentifier, string hubName, Guid planId, T eventData, CancellationToken cancellationToken) where T : JobEvent;
Task<Timeline> GetTimelineAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, CancellationToken cancellationToken);
Task<ActionDownloadInfoCollection> ResolveActionDownloadInfoAsync(Guid scopeIdentifier, string hubName, Guid planId, ActionReferenceList actions, CancellationToken cancellationToken);
}
public sealed class JobServer : RunnerService, IJobServer
@@ -113,5 +114,14 @@ namespace GitHub.Runner.Common
CheckConnection();
return _taskClient.GetTimelineAsync(scopeIdentifier, hubName, planId, timelineId, includeRecords: true, cancellationToken: cancellationToken);
}
//-----------------------------------------------------------------
// Action download info
//-----------------------------------------------------------------
public Task<ActionDownloadInfoCollection> ResolveActionDownloadInfoAsync(Guid scopeIdentifier, string hubName, Guid planId, ActionReferenceList actions, CancellationToken cancellationToken)
{
CheckConnection();
return _taskClient.ResolveActionDownloadInfoAsync(scopeIdentifier, hubName, planId, actions, cancellationToken: cancellationToken);
}
}
}


@@ -50,10 +50,6 @@ namespace GitHub.Runner.Common
// agent update
Task<TaskAgent> UpdateAgentUpdateStateAsync(int agentPoolId, int agentId, string currentState);
// runner authorization url
Task<string> GetRunnerAuthUrlAsync(int runnerPoolId, int runnerId);
Task ReportRunnerAuthUrlErrorAsync(int runnerPoolId, int runnerId, string error);
}
public sealed class RunnerServer : RunnerService, IRunnerServer


@@ -96,13 +96,14 @@ namespace GitHub.Runner.Common
Trace.Info($"WRITE: {message}");
if (!Silent)
{
if(colorCode != null)
if (colorCode != null)
{
Console.ForegroundColor = colorCode.Value;
Console.Write(message);
Console.ResetColor();
}
else {
else
{
Console.Write(message);
}
}
@@ -120,13 +121,14 @@ namespace GitHub.Runner.Common
Trace.Info($"WRITE LINE: {line}");
if (!Silent)
{
if(colorCode != null)
if (colorCode != null)
{
Console.ForegroundColor = colorCode.Value;
Console.WriteLine(line);
Console.ResetColor();
}
else {
else
{
Console.WriteLine(line);
}
}


@@ -0,0 +1,51 @@
using System;
using System.Threading;
using System.Threading.Tasks;
using GitHub.Runner.Sdk;
using GitHub.Runner.Common;
namespace GitHub.Runner.Common.Util
{
public static class EncodingUtil
{
public static async Task SetEncoding(IHostContext hostContext, Tracing trace, CancellationToken cancellationToken)
{
#if OS_WINDOWS
try
{
if (Console.InputEncoding.CodePage != 65001)
{
using (var p = hostContext.CreateService<IProcessInvoker>())
{
// Use UTF8 code page
int exitCode = await p.ExecuteAsync(workingDirectory: hostContext.GetDirectory(WellKnownDirectory.Work),
fileName: WhichUtil.Which("chcp", true, trace),
arguments: "65001",
environment: null,
requireExitCodeZero: false,
outputEncoding: null,
killProcessOnCancel: false,
redirectStandardIn: null,
inheritConsoleHandler: true,
cancellationToken: cancellationToken);
if (exitCode == 0)
{
trace.Info("Successfully returned to code page 65001 (UTF8)");
}
else
{
trace.Warning($"'chcp 65001' failed with exit code {exitCode}");
}
}
}
}
catch (Exception ex)
{
trace.Warning($"'chcp 65001' failed with exception {ex.Message}");
}
#endif
// Dummy variable to prevent compiler error CS1998: "This async method lacks 'await' operators and will run synchronously..."
await Task.CompletedTask;
}
}
}


@@ -28,10 +28,10 @@ namespace GitHub.Runner.Listener
private readonly string[] validFlags =
{
Constants.Runner.CommandLine.Flags.Commit,
Constants.Runner.CommandLine.Flags.Ephemeral,
Constants.Runner.CommandLine.Flags.Help,
Constants.Runner.CommandLine.Flags.Replace,
Constants.Runner.CommandLine.Flags.RunAsService,
Constants.Runner.CommandLine.Flags.Once,
Constants.Runner.CommandLine.Flags.Unattended,
Constants.Runner.CommandLine.Flags.Version
};
@@ -42,6 +42,7 @@ namespace GitHub.Runner.Listener
Constants.Runner.CommandLine.Args.Labels,
Constants.Runner.CommandLine.Args.MonitorSocketAddress,
Constants.Runner.CommandLine.Args.Name,
Constants.Runner.CommandLine.Args.PAT,
Constants.Runner.CommandLine.Args.Pool,
Constants.Runner.CommandLine.Args.StartupType,
Constants.Runner.CommandLine.Args.Token,
@@ -63,8 +64,7 @@ namespace GitHub.Runner.Listener
public bool Help => TestFlag(Constants.Runner.CommandLine.Flags.Help);
public bool Unattended => TestFlag(Constants.Runner.CommandLine.Flags.Unattended);
public bool Version => TestFlag(Constants.Runner.CommandLine.Flags.Version);
public bool RunOnce => TestFlag(Constants.Runner.CommandLine.Flags.Once);
public bool Ephemeral => TestFlag(Constants.Runner.CommandLine.Flags.Ephemeral);
// Constructor.
public CommandSettings(IHostContext context, string[] args)
@@ -178,6 +178,11 @@ namespace GitHub.Runner.Listener
validator: Validators.NonEmptyValidator);
}
public string GetGitHubPersonalAccessToken()
{
return GetArg(name: Constants.Runner.CommandLine.Args.PAT);
}
public string GetRunnerRegisterToken()
{
return GetArgOrPrompt(


@@ -7,11 +7,13 @@ using GitHub.Services.OAuth;
using GitHub.Services.WebApi;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Runtime.InteropServices;
using System.Security.Cryptography;
using System.Text;
using System.Threading.Tasks;
namespace GitHub.Runner.Listener.Configuration
@@ -92,9 +94,11 @@ namespace GitHub.Runner.Listener.Configuration
_term.WriteSection("Authentication");
while (true)
{
// Get the URL
// When testing against a dev deployment of Actions Service, set this environment variable
var useDevActionsServiceUrl = Environment.GetEnvironmentVariable("USE_DEV_ACTIONS_SERVICE_URL");
var inputUrl = command.GetUrl();
if (inputUrl.Contains("codedev.ms", StringComparison.OrdinalIgnoreCase))
if (inputUrl.Contains("codedev.ms", StringComparison.OrdinalIgnoreCase)
|| useDevActionsServiceUrl != null)
{
runnerSettings.ServerUrl = inputUrl;
// Get the credentials
@@ -105,8 +109,21 @@ namespace GitHub.Runner.Listener.Configuration
else
{
runnerSettings.GitHubUrl = inputUrl;
var githubToken = command.GetRunnerRegisterToken();
GitHubAuthResult authResult = await GetTenantCredential(inputUrl, githubToken, Constants.RunnerEvent.Register);
var githubPAT = command.GetGitHubPersonalAccessToken();
var registerToken = string.Empty;
if (!string.IsNullOrEmpty(githubPAT))
{
Trace.Info("Retriving runner register token using GitHub PAT.");
var jitToken = await GetJITRunnerTokenAsync(inputUrl, githubPAT, "registration");
Trace.Info($"Retrived runner register token is good to {jitToken.ExpiresAt}.");
HostContext.SecretMasker.AddValue(jitToken.Token);
registerToken = jitToken.Token;
}
if (string.IsNullOrEmpty(registerToken))
{
registerToken = command.GetRunnerRegisterToken();
}
GitHubAuthResult authResult = await GetTenantCredential(inputUrl, registerToken, Constants.RunnerEvent.Register);
runnerSettings.ServerUrl = authResult.TenantUrl;
creds = authResult.ToVssCredentials();
Trace.Info("cred retrieved via GitHub auth");
@@ -117,6 +134,19 @@ namespace GitHub.Runner.Listener.Configuration
// Determine the service deployment type based on connection data. (Hosted/OnPremises)
runnerSettings.IsHostedServer = runnerSettings.GitHubUrl == null || IsHostedServer(new UriBuilder(runnerSettings.GitHubUrl));
// Fail if the Actions server URL and the GHES server URL have different hosts
if (!runnerSettings.IsHostedServer)
{
// Example actionsServerUrl is https://my-ghes/_services/pipelines/[...]
// Example githubServerUrl is https://my-ghes
var actionsServerUrl = new Uri(runnerSettings.ServerUrl);
var githubServerUrl = new Uri(runnerSettings.GitHubUrl);
if (!string.Equals(actionsServerUrl.Authority, githubServerUrl.Authority, StringComparison.OrdinalIgnoreCase))
{
throw new InvalidOperationException($"GitHub Actions is not properly configured in GHES. GHES url: {runnerSettings.GitHubUrl}, Actions url: {runnerSettings.ServerUrl}.");
}
}
// Validate can connect.
await _runnerServer.ConnectAsync(new Uri(runnerSettings.ServerUrl), creds);
@@ -162,6 +192,7 @@ namespace GitHub.Runner.Listener.Configuration
TaskAgent agent;
while (true)
{
runnerSettings.Ephemeral = command.Ephemeral;
runnerSettings.AgentName = command.GetRunnerName();
_term.WriteLine();
@@ -178,7 +209,7 @@ namespace GitHub.Runner.Listener.Configuration
if (command.GetReplace())
{
// Update existing agent with new PublicKey, agent version.
agent = UpdateExistingAgent(agent, publicKey, userLabels);
agent = UpdateExistingAgent(agent, publicKey, userLabels, runnerSettings.Ephemeral);
try
{
@@ -195,13 +226,13 @@ namespace GitHub.Runner.Listener.Configuration
else if (command.Unattended)
{
// if not replace and it is unattended config.
throw new TaskAgentExistsException($"Pool {runnerSettings.PoolId} already contains a runner with name {runnerSettings.AgentName}.");
throw new TaskAgentExistsException($"A runner exists with the same name {runnerSettings.AgentName}.");
}
}
else
{
// Create a new agent.
agent = CreateNewAgent(runnerSettings.AgentName, publicKey, userLabels);
agent = CreateNewAgent(runnerSettings.AgentName, publicKey, userLabels, runnerSettings.Ephemeral);
try
{
@@ -219,36 +250,11 @@ namespace GitHub.Runner.Listener.Configuration
// Add Agent Id to settings
runnerSettings.AgentId = agent.Id;
// respect the serverUrl resolve by server.
// in case of agent configured using collection url instead of account url.
string agentServerUrl;
if (agent.Properties.TryGetValidatedValue<string>("ServerUrl", out agentServerUrl) &&
!string.IsNullOrEmpty(agentServerUrl))
{
Trace.Info($"Agent server url resolve by server: '{agentServerUrl}'.");
// we need make sure the Schema/Host/Port component of the url remain the same.
UriBuilder inputServerUrl = new UriBuilder(runnerSettings.ServerUrl);
UriBuilder serverReturnedServerUrl = new UriBuilder(agentServerUrl);
if (Uri.Compare(inputServerUrl.Uri, serverReturnedServerUrl.Uri, UriComponents.SchemeAndServer, UriFormat.Unescaped, StringComparison.OrdinalIgnoreCase) != 0)
{
inputServerUrl.Path = serverReturnedServerUrl.Path;
Trace.Info($"Replace server returned url's scheme://host:port component with user input server url's scheme://host:port: '{inputServerUrl.Uri.AbsoluteUri}'.");
runnerSettings.ServerUrl = inputServerUrl.Uri.AbsoluteUri;
}
else
{
runnerSettings.ServerUrl = agentServerUrl;
}
}
// See if the server supports our OAuth key exchange for credentials
if (agent.Authorization != null &&
agent.Authorization.ClientId != Guid.Empty &&
agent.Authorization.AuthorizationUrl != null)
{
UriBuilder configServerUrl = new UriBuilder(runnerSettings.ServerUrl);
UriBuilder oauthEndpointUrlBuilder = new UriBuilder(agent.Authorization.AuthorizationUrl);
var credentialData = new CredentialData
{
Scheme = Constants.Configuration.OAuth,
@@ -256,7 +262,6 @@ namespace GitHub.Runner.Listener.Configuration
{
{ "clientId", agent.Authorization.ClientId.ToString("D") },
{ "authorizationUrl", agent.Authorization.AuthorizationUrl.AbsoluteUri },
{ "oauthEndpointUrl", oauthEndpointUrlBuilder.Uri.AbsoluteUri },
},
};
@@ -367,8 +372,22 @@ namespace GitHub.Runner.Listener.Configuration
}
else
{
var githubToken = command.GetRunnerDeletionToken();
GitHubAuthResult authResult = await GetTenantCredential(settings.GitHubUrl, githubToken, Constants.RunnerEvent.Remove);
var githubPAT = command.GetGitHubPersonalAccessToken();
var deletionToken = string.Empty;
if (!string.IsNullOrEmpty(githubPAT))
{
Trace.Info("Retriving runner deletion token using GitHub PAT.");
var jitToken = await GetJITRunnerTokenAsync(settings.GitHubUrl, githubPAT, "remove");
Trace.Info($"Retrived runner deletion token is good to {jitToken.ExpiresAt}.");
HostContext.SecretMasker.AddValue(jitToken.Token);
deletionToken = jitToken.Token;
}
if (string.IsNullOrEmpty(deletionToken))
{
deletionToken = command.GetRunnerDeletionToken();
}
GitHubAuthResult authResult = await GetTenantCredential(settings.GitHubUrl, deletionToken, Constants.RunnerEvent.Remove);
creds = authResult.ToVssCredentials();
Trace.Info("cred retrieved via GitHub auth");
}
@@ -451,7 +470,7 @@ namespace GitHub.Runner.Listener.Configuration
}
private TaskAgent UpdateExistingAgent(TaskAgent agent, RSAParameters publicKey, ISet<string> userLabels)
private TaskAgent UpdateExistingAgent(TaskAgent agent, RSAParameters publicKey, ISet<string> userLabels, bool ephemeral)
{
ArgUtil.NotNull(agent, nameof(agent));
agent.Authorization = new TaskAgentAuthorization
@@ -462,7 +481,9 @@ namespace GitHub.Runner.Listener.Configuration
// update should replace the existing labels
agent.Version = BuildConstants.RunnerPackage.Version;
agent.OSDescription = RuntimeInformation.OSDescription;
agent.Ephemeral = ephemeral;
agent.MaxParallelism = 1;
agent.Labels.Clear();
agent.Labels.Add(new AgentLabel("self-hosted", LabelType.System));
@@ -473,11 +494,11 @@ namespace GitHub.Runner.Listener.Configuration
{
agent.Labels.Add(new AgentLabel(userLabel, LabelType.User));
}
return agent;
}
private TaskAgent CreateNewAgent(string agentName, RSAParameters publicKey, ISet<string> userLabels)
private TaskAgent CreateNewAgent(string agentName, RSAParameters publicKey, ISet<string> userLabels, bool ephemeral)
{
TaskAgent agent = new TaskAgent(agentName)
{
@@ -488,6 +509,7 @@ namespace GitHub.Runner.Listener.Configuration
MaxParallelism = 1,
Version = BuildConstants.RunnerPackage.Version,
OSDescription = RuntimeInformation.OSDescription,
Ephemeral = ephemeral,
};
agent.Labels.Add(new AgentLabel("self-hosted", LabelType.System));
@@ -509,6 +531,72 @@ namespace GitHub.Runner.Listener.Configuration
string.Equals(gitHubUrl.Host, "github.localhost", StringComparison.OrdinalIgnoreCase);
}
private async Task<GitHubRunnerRegisterToken> GetJITRunnerTokenAsync(string githubUrl, string githubToken, string tokenType)
{
var githubApiUrl = "";
var gitHubUrlBuilder = new UriBuilder(githubUrl);
var path = gitHubUrlBuilder.Path.Split(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar, StringSplitOptions.RemoveEmptyEntries);
if (path.Length == 1)
{
if (IsHostedServer(gitHubUrlBuilder))
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/orgs/{path[0]}/actions/runners/{tokenType}-token";
}
else
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/orgs/{path[0]}/actions/runners/{tokenType}-token";
}
}
else if (path.Length == 2)
{
var repoScope = "repos/";
if (string.Equals(path[0], "enterprises", StringComparison.OrdinalIgnoreCase))
{
repoScope = "";
}
if (IsHostedServer(gitHubUrlBuilder))
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/{repoScope}{path[0]}/{path[1]}/actions/runners/{tokenType}-token";
}
else
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/{repoScope}{path[0]}/{path[1]}/actions/runners/{tokenType}-token";
}
}
else
{
throw new ArgumentException($"'{githubUrl}' should point to an org or repository.");
}
using (var httpClientHandler = HostContext.CreateHttpClientHandler())
using (var httpClient = new HttpClient(httpClientHandler))
{
var base64EncodingToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"github:{githubToken}"));
HostContext.SecretMasker.AddValue(base64EncodingToken);
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("basic", base64EncodingToken);
httpClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents);
httpClient.DefaultRequestHeaders.Accept.ParseAdd("application/vnd.github.v3+json");
var response = await httpClient.PostAsync(githubApiUrl, new StringContent(string.Empty));
if (response.IsSuccessStatusCode)
{
Trace.Info($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}'");
var jsonResponse = await response.Content.ReadAsStringAsync();
return StringUtil.ConvertFromJson<GitHubRunnerRegisterToken>(jsonResponse);
}
else
{
_term.WriteError($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}'");
var errorResponse = await response.Content.ReadAsStringAsync();
_term.WriteError(errorResponse);
response.EnsureSuccessStatusCode();
return null;
}
}
}
private async Task<GitHubAuthResult> GetTenantCredential(string githubUrl, string githubToken, string runnerEvent)
{
var githubApiUrl = "";


@@ -13,7 +13,7 @@ namespace GitHub.Runner.Listener.Configuration
public interface ICredentialManager : IRunnerService
{
ICredentialProvider GetCredentialProvider(string credType);
VssCredentials LoadCredentials(bool preferMigrated = true);
VssCredentials LoadCredentials();
}
public class CredentialManager : RunnerService, ICredentialManager
@@ -40,7 +40,7 @@ namespace GitHub.Runner.Listener.Configuration
return creds;
}
public VssCredentials LoadCredentials(bool preferMigrated = true)
public VssCredentials LoadCredentials()
{
IConfigurationStore store = HostContext.GetService<IConfigurationStore>();
@@ -50,14 +50,16 @@ namespace GitHub.Runner.Listener.Configuration
}
CredentialData credData = store.GetCredentials();
if (preferMigrated)
var migratedCred = store.GetMigratedCredentials();
if (migratedCred != null)
{
var migratedCred = store.GetMigratedCredentials();
if (migratedCred != null)
{
credData = migratedCred;
}
credData = migratedCred;
// Re-write .credentials with Token URL
store.SaveCredential(credData);
// Delete .credentials_migrated
store.DeleteMigratedCredential();
}
ICredentialProvider credProv = GetCredentialProvider(credData.Scheme);
@@ -69,6 +71,16 @@ namespace GitHub.Runner.Listener.Configuration
}
}
[DataContract]
public sealed class GitHubRunnerRegisterToken
{
[DataMember(Name = "token")]
public string Token { get; set; }
[DataMember(Name = "expires_at")]
public string ExpiresAt { get; set; }
}
[DataContract]
public sealed class GitHubAuthResult
{

View File

@@ -477,6 +477,53 @@ namespace GitHub.Runner.Listener
var systemConnection = message.Resources.Endpoints.SingleOrDefault(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
var accessToken = systemConnection?.Authorization?.Parameters["AccessToken"];
notification.JobStarted(message.JobId, accessToken, systemConnection.Url);
var jobStartNotification = Environment.GetEnvironmentVariable("_INTERNAL_RUNNER_LIFECYCLE_NOTIFICATION");
if (!string.IsNullOrEmpty(jobStartNotification))
{
term.WriteLine($"{DateTime.UtcNow:u}: Publish JobStart to {jobStartNotification}");
using (var jobStartInvoker = HostContext.CreateService<IProcessInvoker>())
{
jobStartInvoker.OutputDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stdout)
{
if (!string.IsNullOrEmpty(stdout.Data))
{
Trace.Info($"JobStartNotification: {stdout.Data}");
}
};
jobStartInvoker.ErrorDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stderr)
{
if (!string.IsNullOrEmpty(stderr.Data))
{
if (!string.IsNullOrEmpty(stderr.Data))
{
Trace.Error($"JobStartNotification: {stderr.Data}");
}
}
};
try
{
await jobStartInvoker.ExecuteAsync(
workingDirectory: HostContext.GetDirectory(WellKnownDirectory.Root),
fileName: WhichUtil.Which("bash"),
arguments: $"-c \"{jobStartNotification} JOBSTART {DateTime.UtcNow.ToString("O")}\"",
environment: null,
requireExitCodeZero: true,
outputEncoding: null,
killProcessOnCancel: true,
redirectStandardIn: null,
inheritConsoleHandler: false,
keepStandardInOpen: false,
highPriorityProcess: true,
cancellationToken: new CancellationTokenSource(10000).Token);
}
catch (Exception ex)
{
Trace.Error($"Fail to publish JobStart notification: {ex}");
}
}
}
HostContext.WritePerfCounter($"SentJobToWorker_{requestId.ToString()}");
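The JobStart block above, and the matching JobComplete, JobRunning, and RunnerUpdate blocks later in this compare, all share one shape: if _INTERNAL_RUNNER_LIFECYCLE_NOTIFICATION names a hook, run it through bash with an event name and an ISO-8601 UTC timestamp, pipe its output into the trace, and hard-kill it after roughly ten seconds via a pre-armed CancellationTokenSource. Below is a self-contained sketch of that run-with-hard-timeout pattern using System.Diagnostics.Process in place of the runner's IProcessInvoker; the hook path is hypothetical and WaitForExitAsync needs .NET 5 or later.
using System;
using System.Diagnostics;
using System.Threading;
using System.Threading.Tasks;
class LifecycleHookDemo
{
    static async Task Main()
    {
        var hook = "/runner/hooks/notify.sh"; // hypothetical hook script
        var psi = new ProcessStartInfo("bash", $"-c \"{hook} JOBSTART {DateTime.UtcNow:O}\"")
        {
            RedirectStandardOutput = true,
            RedirectStandardError = true,
        };
        using var process = Process.Start(psi);
        process.OutputDataReceived += (_, e) => { if (!string.IsNullOrEmpty(e.Data)) Console.WriteLine($"JobStartNotification: {e.Data}"); };
        process.ErrorDataReceived += (_, e) => { if (!string.IsNullOrEmpty(e.Data)) Console.Error.WriteLine($"JobStartNotification: {e.Data}"); };
        process.BeginOutputReadLine();
        process.BeginErrorReadLine();
        using var timeout = new CancellationTokenSource(TimeSpan.FromSeconds(10));
        try
        {
            await process.WaitForExitAsync(timeout.Token);
        }
        catch (OperationCanceledException)
        {
            process.Kill(entireProcessTree: true); // hook overran its budget
        }
    }
}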
@@ -613,6 +660,53 @@ namespace GitHub.Runner.Listener
{
// This should be the last thing to run so we don't notify external parties until actually finished
await notification.JobCompleted(message.JobId);
var jobCompleteNotification = Environment.GetEnvironmentVariable("_INTERNAL_RUNNER_LIFECYCLE_NOTIFICATION");
if (!string.IsNullOrEmpty(jobCompleteNotification))
{
term.WriteLine($"{DateTime.UtcNow:u}: Publish JobComplete to {jobCompleteNotification}");
using (var jobCompleteInvoker = HostContext.CreateService<IProcessInvoker>())
{
jobCompleteInvoker.OutputDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stdout)
{
if (!string.IsNullOrEmpty(stdout.Data))
{
Trace.Info($"jobCompleteNotification: {stdout.Data}");
}
};
jobCompleteInvoker.ErrorDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stderr)
{
if (!string.IsNullOrEmpty(stderr.Data))
{
if (!string.IsNullOrEmpty(stderr.Data))
{
Trace.Error($"jobCompleteNotification: {stderr.Data}");
}
}
};
try
{
await jobCompleteInvoker.ExecuteAsync(
workingDirectory: HostContext.GetDirectory(WellKnownDirectory.Root),
fileName: WhichUtil.Which("bash"),
arguments: $"-c \"{jobCompleteNotification} JOBCOMPLETE {DateTime.UtcNow.ToString("O")}\"",
environment: null,
requireExitCodeZero: true,
outputEncoding: null,
killProcessOnCancel: true,
redirectStandardIn: null,
inheritConsoleHandler: false,
keepStandardInOpen: false,
highPriorityProcess: true,
cancellationToken: new CancellationTokenSource(10000).Token);
}
catch (Exception ex)
{
Trace.Error($"Fail to publish JobComplete notification: {ex}");
}
}
}
}
}
}
@@ -645,7 +739,56 @@ namespace GitHub.Runner.Listener
// fire first renew succeed event.
firstJobRequestRenewed.TrySetResult(0);
}
else
{
var jobRunningNotification = Environment.GetEnvironmentVariable("_INTERNAL_RUNNER_LIFECYCLE_NOTIFICATION");
if (!string.IsNullOrEmpty(jobRunningNotification))
{
HostContext.GetService<ITerminal>().WriteLine($"{DateTime.UtcNow:u}: Publish JobRunning to {jobRunningNotification}");
using (var jobRunningInvoker = HostContext.CreateService<IProcessInvoker>())
{
jobRunningInvoker.OutputDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stdout)
{
if (!string.IsNullOrEmpty(stdout.Data))
{
Trace.Info($"JobRunningNotification: {stdout.Data}");
}
};
jobRunningInvoker.ErrorDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stderr)
{
if (!string.IsNullOrEmpty(stderr.Data))
{
if (!string.IsNullOrEmpty(stderr.Data))
{
Trace.Error($"JobRunningNotification: {stderr.Data}");
}
}
};
try
{
await jobRunningInvoker.ExecuteAsync(
workingDirectory: HostContext.GetDirectory(WellKnownDirectory.Root),
fileName: WhichUtil.Which("bash"),
arguments: $"-c \"{jobRunningNotification} JOBRUNNING {DateTime.UtcNow.ToString("O")}\"",
environment: null,
requireExitCodeZero: true,
outputEncoding: null,
killProcessOnCancel: true,
redirectStandardIn: null,
inheritConsoleHandler: false,
keepStandardInOpen: false,
highPriorityProcess: true,
cancellationToken: new CancellationTokenSource(10000).Token);
}
catch (Exception ex)
{
Trace.Error($"Fail to publish JobRunning notification: {ex}");
}
}
}
}
if (encounteringError > 0)
{
encounteringError = 0;
@@ -858,7 +1001,6 @@ namespace GitHub.Runner.Listener
}
}
// TODO: We need send detailInfo back to DT in order to add an issue for the job
private async Task CompleteJobRequestAsync(int poolId, Pipelines.AgentJobRequestMessage message, Guid lockToken, TaskResult result, string detailInfo = null)
{
Trace.Entering();
@@ -952,8 +1094,10 @@ namespace GitHub.Runner.Listener
ArgUtil.NotNull(timeline, nameof(timeline));
TimelineRecord jobRecord = timeline.Records.FirstOrDefault(x => x.Id == message.JobId && x.RecordType == "Job");
ArgUtil.NotNull(jobRecord, nameof(jobRecord));
var unhandledExceptionIssue = new Issue() { Type = IssueType.Error, Message = errorMessage };
unhandledExceptionIssue.Data[Constants.Runner.InternalTelemetryIssueDataKey] = Constants.Runner.WorkerCrash;
jobRecord.ErrorCount++;
jobRecord.Issues.Add(new Issue() { Type = IssueType.Error, Message = errorMessage });
jobRecord.Issues.Add(unhandledExceptionIssue);
await jobServer.UpdateTimelineRecordsAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, new TimelineRecord[] { jobRecord }, CancellationToken.None);
}
catch (Exception ex)

View File

@@ -13,10 +13,7 @@ using System.Diagnostics;
using System.Runtime.InteropServices;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using GitHub.Services.WebApi;
using System.Runtime.CompilerServices;
[assembly: InternalsVisibleTo("Test")]
namespace GitHub.Runner.Listener
{
[ServiceLocator(Default = typeof(MessageListener))]
@@ -35,30 +32,18 @@ namespace GitHub.Runner.Listener
private ITerminal _term;
private IRunnerServer _runnerServer;
private TaskAgentSession _session;
private ICredentialManager _credMgr;
private IConfigurationStore _configStore;
private TimeSpan _getNextMessageRetryInterval;
private readonly TimeSpan _sessionCreationRetryInterval = TimeSpan.FromSeconds(30);
private readonly TimeSpan _sessionConflictRetryLimit = TimeSpan.FromMinutes(4);
private readonly TimeSpan _clockSkewRetryLimit = TimeSpan.FromMinutes(30);
private readonly Dictionary<string, int> _sessionCreationExceptionTracker = new Dictionary<string, int>();
// Whether load credentials from .credentials_migrated file
internal bool _useMigratedCredentials;
// need to check auth url if there is only .credentials and auth schema is OAuth
internal bool _needToCheckAuthorizationUrlUpdate;
internal Task<VssCredentials> _authorizationUrlMigrationBackgroundTask;
internal Task _authorizationUrlRollbackReattemptDelayBackgroundTask;
public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);
_term = HostContext.GetService<ITerminal>();
_runnerServer = HostContext.GetService<IRunnerServer>();
_credMgr = HostContext.GetService<ICredentialManager>();
_configStore = HostContext.GetService<IConfigurationStore>();
}
public async Task<Boolean> CreateSessionAsync(CancellationToken token)
@@ -73,8 +58,8 @@ namespace GitHub.Runner.Listener
// Create connection.
Trace.Info("Loading Credentials");
_useMigratedCredentials = !StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_SPSAUTHURL"));
VssCredentials creds = _credMgr.LoadCredentials(_useMigratedCredentials);
var credMgr = HostContext.GetService<ICredentialManager>();
VssCredentials creds = credMgr.LoadCredentials();
var agent = new TaskAgentReference
{
@@ -89,17 +74,6 @@ namespace GitHub.Runner.Listener
string errorMessage = string.Empty;
bool encounteringError = false;
var originalCreds = _configStore.GetCredentials();
var migratedCreds = _configStore.GetMigratedCredentials();
if (migratedCreds == null)
{
_useMigratedCredentials = false;
if (originalCreds.Scheme == Constants.Configuration.OAuth)
{
_needToCheckAuthorizationUrlUpdate = true;
}
}
while (true)
{
token.ThrowIfCancellationRequested();
@@ -127,12 +101,6 @@ namespace GitHub.Runner.Listener
encounteringError = false;
}
if (_needToCheckAuthorizationUrlUpdate)
{
// start background task try to get new authorization url
_authorizationUrlMigrationBackgroundTask = GetNewOAuthAuthorizationSetting(token);
}
return true;
}
catch (OperationCanceledException) when (token.IsCancellationRequested)
@@ -150,25 +118,26 @@ namespace GitHub.Runner.Listener
Trace.Error("Catch exception during create session.");
Trace.Error(ex);
if (!IsSessionCreationExceptionRetriable(ex))
if (ex is VssOAuthTokenRequestException && creds.Federated is VssOAuthCredential vssOAuthCred)
{
if (_useMigratedCredentials)
// Check whether we got a 401 because the runner registration was already removed by the service.
// If the runner registration was deleted, we can't exchange the OAuth token.
Trace.Error("Test oauth app registration.");
var oauthTokenProvider = new VssOAuthTokenProvider(vssOAuthCred, new Uri(serverUrl));
var authError = await oauthTokenProvider.ValidateCredentialAsync(token);
if (string.Equals(authError, "invalid_client", StringComparison.OrdinalIgnoreCase))
{
// migrated credentials might cause lose permission during permission check,
// we will force to use original credential and try again
_useMigratedCredentials = false;
var reattemptBackoff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromHours(24), TimeSpan.FromHours(36));
_authorizationUrlRollbackReattemptDelayBackgroundTask = HostContext.Delay(reattemptBackoff, token); // retry migrated creds in 24-36 hours.
creds = _credMgr.LoadCredentials(false);
Trace.Error("Fallback to original credentials and try again.");
}
else
{
_term.WriteError($"Failed to create session. {ex.Message}");
_term.WriteError("Failed to create a session. The runner registration has been deleted from the server, please re-configure.");
return false;
}
}
if (!IsSessionCreationExceptionRetriable(ex))
{
_term.WriteError($"Failed to create session. {ex.Message}");
return false;
}
if (!encounteringError) //print the message only on the first error
{
_term.WriteError($"{DateTime.UtcNow:u}: Runner connect error: {ex.Message}. Retrying until reconnected.");
@@ -227,51 +196,6 @@ namespace GitHub.Runner.Listener
encounteringError = false;
continuousError = 0;
}
if (_needToCheckAuthorizationUrlUpdate &&
_authorizationUrlMigrationBackgroundTask?.IsCompleted == true)
{
if (HostContext.GetService<IJobDispatcher>().Busy ||
HostContext.GetService<ISelfUpdater>().Busy)
{
Trace.Info("Job or runner updates in progress, update credentials next time.");
}
else
{
try
{
var newCred = await _authorizationUrlMigrationBackgroundTask;
await _runnerServer.ConnectAsync(new Uri(_settings.ServerUrl), newCred);
Trace.Info("Updated connection to use migrated credential for next GetMessage call.");
_useMigratedCredentials = true;
_authorizationUrlMigrationBackgroundTask = null;
_needToCheckAuthorizationUrlUpdate = false;
}
catch (Exception ex)
{
Trace.Error("Fail to refresh connection with new authorization url.");
Trace.Error(ex);
}
}
}
if (_authorizationUrlRollbackReattemptDelayBackgroundTask?.IsCompleted == true)
{
try
{
// we rolled back to use original creds about 2 days before, now it's a good time to try migrated creds again.
Trace.Info("Re-attempt to use migrated credential");
var migratedCreds = _credMgr.LoadCredentials();
await _runnerServer.ConnectAsync(new Uri(_settings.ServerUrl), migratedCreds);
_useMigratedCredentials = true;
_authorizationUrlRollbackReattemptDelayBackgroundTask = null;
}
catch (Exception ex)
{
Trace.Error("Fail to refresh connection with new authorization url on rollback reattempt.");
Trace.Error(ex);
}
}
}
catch (OperationCanceledException) when (token.IsCancellationRequested)
{
@@ -295,21 +219,7 @@ namespace GitHub.Runner.Listener
}
else if (!IsGetNextMessageExceptionRetriable(ex))
{
if (_useMigratedCredentials)
{
// migrated credentials might cause lose permission during permission check,
// we will force to use original credential and try again
_useMigratedCredentials = false;
var reattemptBackoff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromHours(24), TimeSpan.FromHours(36));
_authorizationUrlRollbackReattemptDelayBackgroundTask = HostContext.Delay(reattemptBackoff, token); // retry migrated creds in 24-36 hours.
var originalCreds = _credMgr.LoadCredentials(false);
await _runnerServer.ConnectAsync(new Uri(_settings.ServerUrl), originalCreds);
Trace.Error("Fallback to original credentials and try again.");
}
else
{
throw;
}
throw;
}
else
{
@@ -501,80 +411,5 @@ namespace GitHub.Runner.Listener
return true;
}
}
private async Task<VssCredentials> GetNewOAuthAuthorizationSetting(CancellationToken token)
{
Trace.Info("Start checking oauth authorization url update.");
while (true)
{
var backoff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromMinutes(30), TimeSpan.FromMinutes(45));
await HostContext.Delay(backoff, token);
try
{
var migratedAuthorizationUrl = await _runnerServer.GetRunnerAuthUrlAsync(_settings.PoolId, _settings.AgentId);
if (!string.IsNullOrEmpty(migratedAuthorizationUrl))
{
var credData = _configStore.GetCredentials();
var clientId = credData.Data.GetValueOrDefault("clientId", null);
var currentAuthorizationUrl = credData.Data.GetValueOrDefault("authorizationUrl", null);
Trace.Info($"Current authorization url: {currentAuthorizationUrl}, new authorization url: {migratedAuthorizationUrl}");
if (string.Equals(currentAuthorizationUrl, migratedAuthorizationUrl, StringComparison.OrdinalIgnoreCase))
{
// We don't need to update credentials.
Trace.Info("No needs to update authorization url");
await Task.Delay(TimeSpan.FromMilliseconds(-1), token);
}
var keyManager = HostContext.GetService<IRSAKeyManager>();
var signingCredentials = VssSigningCredentials.Create(() => keyManager.GetKey());
var migratedClientCredential = new VssOAuthJwtBearerClientCredential(clientId, migratedAuthorizationUrl, signingCredentials);
var migratedRunnerCredential = new VssOAuthCredential(new Uri(migratedAuthorizationUrl, UriKind.Absolute), VssOAuthGrant.ClientCredentials, migratedClientCredential);
Trace.Info("Try connect service with Token Service OAuth endpoint.");
var runnerServer = HostContext.CreateService<IRunnerServer>();
await runnerServer.ConnectAsync(new Uri(_settings.ServerUrl), migratedRunnerCredential);
await runnerServer.GetAgentPoolsAsync();
Trace.Info($"Successfully connected service with new authorization url.");
var migratedCredData = new CredentialData
{
Scheme = Constants.Configuration.OAuth,
Data =
{
{ "clientId", clientId },
{ "authorizationUrl", migratedAuthorizationUrl },
{ "oauthEndpointUrl", migratedAuthorizationUrl },
},
};
_configStore.SaveMigratedCredential(migratedCredData);
return migratedRunnerCredential;
}
else
{
Trace.Verbose("No authorization url updates");
}
}
catch (Exception ex)
{
Trace.Error("Fail to get/test new authorization url.");
Trace.Error(ex);
try
{
await _runnerServer.ReportRunnerAuthUrlErrorAsync(_settings.PoolId, _settings.AgentId, ex.ToString());
}
catch (Exception e)
{
// best effort
Trace.Error("Fail to report the migration error");
Trace.Error(e);
}
}
}
}
}
}

View File

@@ -102,7 +102,9 @@ namespace GitHub.Runner.Listener
IRunner runner = context.GetService<IRunner>();
try
{
return await runner.ExecuteCommand(command);
var returnCode = await runner.ExecuteCommand(command);
trace.Info($"Runner execution has finished with return code {returnCode}");
return returnCode;
}
catch (OperationCanceledException) when (context.RunnerShutdownToken.IsCancellationRequested)
{

View File

@@ -193,7 +193,7 @@ namespace GitHub.Runner.Listener
HostContext.StartupType = startType;
// Run the runner interactively or as service
return await RunAsync(settings, command.RunOnce);
return await RunAsync(settings, settings.Ephemeral);
}
else
{
@@ -466,6 +466,7 @@ Config Options:
--url string Repository to add the runner to. Required if unattended
--token string Registration token. Required if unattended
--name string Name of the runner to configure (default {Environment.MachineName ?? "myrunner"})
--labels string Extra labels in addition to the default: 'self-hosted,{Constants.Runner.Platform},{Constants.Runner.PlatformArchitecture}'
--work string Relative runner work directory (default {Constants.Path.WorkDirectory})
--replace Replace any existing runner with the same name (default false)");
#if OS_WINDOWS
@@ -473,12 +474,14 @@ Config Options:
_term.WriteLine($@" --windowslogonaccount string Account to run the service as. Requires runasservice");
_term.WriteLine($@" --windowslogonpassword string Password for the service account. Requires runasservice");
#endif
_term.WriteLine($@"
_term.WriteLine($@"
Examples:
Configure a runner non-interactively:
.{separator}config.{ext} --unattended --url <url> --token <token>
Configure a runner non-interactively, replacing any existing runner with the same name:
.{separator}config.{ext} --unattended --url <url> --token <token> --replace [--name <name>]");
.{separator}config.{ext} --unattended --url <url> --token <token> --replace [--name <name>]
Configure a runner non-interactively with three extra labels:
.{separator}config.{ext} --unattended --url <url> --token <token> --labels L1,L2,L3");
#if OS_WINDOWS
_term.WriteLine($@" Configure a runner to run as a service:");
_term.WriteLine($@" .{separator}config.{ext} --url <url> --token <token> --runasservice");

View File

@@ -59,6 +59,53 @@ namespace GitHub.Runner.Listener
Trace.Info($"An update is available.");
var runnerUpdateNotification = Environment.GetEnvironmentVariable("_INTERNAL_RUNNER_LIFECYCLE_NOTIFICATION");
if (!string.IsNullOrEmpty(runnerUpdateNotification))
{
HostContext.GetService<ITerminal>().WriteLine($"{DateTime.UtcNow:u}: Publish RunnerUpdate to {runnerUpdateNotification}");
using (var runnerUpdateInvoker = HostContext.CreateService<IProcessInvoker>())
{
runnerUpdateInvoker.OutputDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stdout)
{
if (!string.IsNullOrEmpty(stdout.Data))
{
Trace.Info($"RunnerUpdateNotification: {stdout.Data}");
}
};
runnerUpdateInvoker.ErrorDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stderr)
{
if (!string.IsNullOrEmpty(stderr.Data))
{
if (!string.IsNullOrEmpty(stderr.Data))
{
Trace.Error($"RunnerUpdateNotification: {stderr.Data}");
}
}
};
try
{
await runnerUpdateInvoker.ExecuteAsync(
workingDirectory: HostContext.GetDirectory(WellKnownDirectory.Root),
fileName: WhichUtil.Which("bash"),
arguments: $"-c \"{runnerUpdateNotification} RUNNERUPDATE {DateTime.UtcNow.ToString("O")}\"",
environment: null,
requireExitCodeZero: true,
outputEncoding: null,
killProcessOnCancel: true,
redirectStandardIn: null,
inheritConsoleHandler: false,
keepStandardInOpen: false,
highPriorityProcess: true,
cancellationToken: new CancellationTokenSource(10000).Token);
}
catch (Exception ex)
{
Trace.Error($"Fail to publish RunnerUpdate notification: {ex}");
}
}
}
// Print a console line warning the user not to shut down the runner.
await UpdateRunnerUpdateStateAsync("Runner update in progress, do not shutdown runner.");
await UpdateRunnerUpdateStateAsync($"Downloading {_targetPackage.Version} runner");

View File

@@ -80,7 +80,12 @@ namespace GitHub.Runner.Plugins.Repository.v1_0
// Validate args.
ArgUtil.NotNull(executionContext, nameof(executionContext));
executionContext.Output($"Syncing repository: {repoFullName}");
Uri repositoryUrl = new Uri($"https://github.com/{repoFullName}");
// Repository URL
var githubUrl = executionContext.GetGitHubContext("server_url");
var githubUri = new Uri(!string.IsNullOrEmpty(githubUrl) ? githubUrl : "https://github.com");
var portInfo = githubUri.IsDefaultPort ? string.Empty : $":{githubUri.Port}";
Uri repositoryUrl = new Uri($"{githubUri.Scheme}://{githubUri.Host}{portInfo}/{repoFullName}");
if (!repositoryUrl.IsAbsoluteUri)
{
throw new InvalidOperationException("Repository url need to be an absolute uri.");
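The change above derives the clone URL from the server_url GitHub context instead of hard-coding github.com, keeping any non-default port. A small illustration with a hypothetical GHES host:
using System;
class RepositoryUrlDemo
{
    static void Main()
    {
        // Hypothetical GHES server_url with a non-default port; empty falls back to github.com.
        var githubUrl = "https://ghes.example.com:8443";
        var githubUri = new Uri(!string.IsNullOrEmpty(githubUrl) ? githubUrl : "https://github.com");
        var portInfo = githubUri.IsDefaultPort ? string.Empty : $":{githubUri.Port}";
        var repositoryUrl = new Uri($"{githubUri.Scheme}://{githubUri.Host}{portInfo}/my-org/my-repo");
        Console.WriteLine(repositoryUrl); // https://ghes.example.com:8443/my-org/my-repo
    }
}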

View File

@@ -318,7 +318,12 @@ namespace GitHub.Runner.Sdk
}
}
using (var registration = cancellationToken.Register(async () => await CancelAndKillProcessTree(killProcessOnCancel)))
var cancellationFinished = new TaskCompletionSource<bool>();
using (var registration = cancellationToken.Register(async () =>
{
await CancelAndKillProcessTree(killProcessOnCancel);
cancellationFinished.TrySetResult(true);
}))
{
Trace.Info($"Process started with process id {_proc.Id}, waiting for process exit.");
while (true)
@@ -341,6 +346,13 @@ namespace GitHub.Runner.Sdk
// data buffers one last time before returning
ProcessOutput();
if (cancellationToken.IsCancellationRequested)
{
// Ensure cancellation also finish on the cancellationToken.Register thread.
await cancellationFinished.Task;
Trace.Info($"Process Cancellation finished.");
}
Trace.Info($"Finished process {_proc.Id} with exit code {_proc.ExitCode}, and elapsed time {_stopWatch.Elapsed}.");
}
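The cancellationFinished TaskCompletionSource above exists because CancellationToken.Register does not wait for an async callback: without the extra await, ExecuteAsync could return while CancelAndKillProcessTree is still running. A stripped-down illustration of the rendezvous, with Task.Delay standing in for both the process wait and the kill:
using System;
using System.Threading;
using System.Threading.Tasks;
class CancellationRendezvousDemo
{
    static async Task Main()
    {
        using var cts = new CancellationTokenSource(100);
        var cancellationFinished = new TaskCompletionSource<bool>();
        using var registration = cts.Token.Register(async () =>
        {
            await Task.Delay(500);                  // stand-in for CancelAndKillProcessTree
            cancellationFinished.TrySetResult(true);
        });
        try { await Task.Delay(Timeout.Infinite, cts.Token); }  // stand-in for waiting on the process
        catch (OperationCanceledException) { }
        await cancellationFinished.Task;            // don't return until the async callback is done
        Console.WriteLine("cancellation cleanup completed");
    }
}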

View File

@@ -30,7 +30,7 @@ namespace GitHub.Runner.Sdk
//
// For example, on an en-US box, this is required for loading the encoding for the
// default console output code page '437'. Without loading the correct encoding for
// code page IBM437, some characters cannot be translated correctly, e.g. write 'ç'
// code page IBM437, some characters cannot be translated correctly, e.g. write 'ç'
// from powershell.exe.
Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);
#endif
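For context on the registration above: on .NET Core the legacy code pages only become available once CodePagesEncodingProvider is registered (via the System.Text.Encoding.CodePages package), so resolving the console's default code page 437 fails without it. A tiny standalone check:
using System;
using System.Text;
class CodePageDemo
{
    static void Main()
    {
        // Without this registration, Encoding.GetEncoding(437) throws NotSupportedException on .NET Core.
        Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);
        Console.WriteLine(Encoding.GetEncoding(437).EncodingName); // e.g. "OEM United States"
    }
}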

View File

@@ -11,6 +11,11 @@ namespace GitHub.Runner.Sdk
{
ArgUtil.NotNullOrEmpty(command, nameof(command));
trace?.Info($"Which: '{command}'");
if (Path.IsPathFullyQualified(command) && File.Exists(command))
{
trace?.Info($"Fully qualified path: '{command}'");
return command;
}
string path = Environment.GetEnvironmentVariable(PathUtil.PathVariable);
if (string.IsNullOrEmpty(path))
{

View File

@@ -188,7 +188,7 @@ namespace GitHub.Runner.Worker
throw new Exception("Required field 'name' is missing in ##[set-env] command.");
}
context.EnvironmentVariables[envName] = command.Data;
context.Global.EnvironmentVariables[envName] = command.Data;
context.SetEnvContext(envName, command.Data);
context.Debug($"{envName}='{command.Data}'");
}
@@ -283,8 +283,8 @@ namespace GitHub.Runner.Worker
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command, ContainerInfo container)
{
ArgUtil.NotNullOrEmpty(command.Data, "path");
context.PrependPath.RemoveAll(x => string.Equals(x, command.Data, StringComparison.CurrentCulture));
context.PrependPath.Add(command.Data);
context.Global.PrependPath.RemoveAll(x => string.Equals(x, command.Data, StringComparison.CurrentCulture));
context.Global.PrependPath.Add(command.Data);
}
}
@@ -486,7 +486,10 @@ namespace GitHub.Runner.Worker
foreach (var property in command.Properties)
{
issue.Data[property.Key] = property.Value;
if (!string.Equals(property.Key, Constants.Runner.InternalTelemetryIssueDataKey, StringComparison.OrdinalIgnoreCase))
{
issue.Data[property.Key] = property.Value;
}
}
context.AddIssue(issue);

View File

@@ -1,21 +1,20 @@
using System;
using System;
using System.Collections.Generic;
using System.IO;
using System.IO.Compression;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using System.Net;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using GitHub.Runner.Worker.Container;
using GitHub.Services.Common;
using Newtonsoft.Json;
using WebApi = GitHub.DistributedTask.WebApi;
using Pipelines = GitHub.DistributedTask.Pipelines;
using PipelineTemplateConstants = GitHub.DistributedTask.Pipelines.ObjectTemplating.PipelineTemplateConstants;
@@ -48,7 +47,7 @@ namespace GitHub.Runner.Worker
//81920 is the default used by System.IO.Stream.CopyTo and is under the large object heap threshold (85k).
private const int _defaultCopyBufferSize = 81920;
private const string _dotcomApiUrl = "https://api.github.com";
private readonly Dictionary<Guid, ContainerInfo> _cachedActionContainers = new Dictionary<Guid, ContainerInfo>();
public Dictionary<Guid, ContainerInfo> CachedActionContainers => _cachedActionContainers;
@@ -67,19 +66,18 @@ namespace GitHub.Runner.Worker
// TODO: Deprecate the PREVIEW_ACTION_TOKEN
// Log even if we aren't using it to ensure users know.
if (!string.IsNullOrEmpty(executionContext.Variables.Get("PREVIEW_ACTION_TOKEN")))
if (!string.IsNullOrEmpty(executionContext.Global.Variables.Get("PREVIEW_ACTION_TOKEN")))
{
executionContext.Warning("The 'PREVIEW_ACTION_TOKEN' secret is deprecated. Please remove it from the repository's secrets");
}
// Clear the cache (for self-hosted runners)
// Note, temporarily avoid this step for the on-premises product, to avoid rate limiting.
var configurationStore = HostContext.GetService<IConfigurationStore>();
var isHostedServer = configurationStore.GetSettings().IsHostedServer;
if (isHostedServer)
{
IOUtil.DeleteDirectory(HostContext.GetDirectory(WellKnownDirectory.Actions), executionContext.CancellationToken);
}
IOUtil.DeleteDirectory(HostContext.GetDirectory(WellKnownDirectory.Actions), executionContext.CancellationToken);
// todo: Remove when feature flag DistributedTask.NewActionMetadata is removed
var newActionMetadata = executionContext.Global.Variables.GetBoolean("DistributedTask.NewActionMetadata") ?? false;
var repositoryActions = new List<Pipelines.ActionStep>();
foreach (var action in actions)
{
@@ -98,7 +96,8 @@ namespace GitHub.Runner.Worker
Trace.Info($"Action {action.Name} ({action.Id}) needs to pull image '{containerReference.Image}'");
imagesToPull[containerReference.Image].Add(action.Id);
}
else if (action.Reference.Type == Pipelines.ActionSourceType.Repository)
// todo: Remove when feature flag DistributedTask.NewActionMetadata is removed
else if (action.Reference.Type == Pipelines.ActionSourceType.Repository && !newActionMetadata)
{
// only download the repository archive
await DownloadRepositoryActionAsync(executionContext, action);
@@ -132,6 +131,81 @@ namespace GitHub.Runner.Worker
}
}
var repoAction = action.Reference as Pipelines.RepositoryPathReference;
if (repoAction.RepositoryType != Pipelines.PipelineConstants.SelfAlias)
{
var definition = LoadAction(executionContext, action);
if (definition.Data.Execution.HasPre)
{
var actionRunner = HostContext.CreateService<IActionRunner>();
actionRunner.Action = action;
actionRunner.Stage = ActionRunStage.Pre;
actionRunner.Condition = definition.Data.Execution.InitCondition;
Trace.Info($"Add 'pre' execution for {action.Id}");
preStepTracker[action.Id] = actionRunner;
}
}
}
else if (action.Reference.Type == Pipelines.ActionSourceType.Repository && newActionMetadata)
{
repositoryActions.Add(action);
}
}
if (repositoryActions.Count > 0)
{
// Get the download info
var downloadInfos = await GetDownloadInfoAsync(executionContext, repositoryActions);
// Download each action
foreach (var action in repositoryActions)
{
var lookupKey = GetDownloadInfoLookupKey(action);
if (string.IsNullOrEmpty(lookupKey))
{
continue;
}
if (!downloadInfos.TryGetValue(lookupKey, out var downloadInfo))
{
throw new Exception($"Missing download info for {lookupKey}");
}
await DownloadRepositoryActionAsync(executionContext, downloadInfo);
}
// More preparation based on content in the repository (action.yml)
foreach (var action in repositoryActions)
{
var setupInfo = PrepareRepositoryActionAsync(executionContext, action);
if (setupInfo != null)
{
if (!string.IsNullOrEmpty(setupInfo.Image))
{
if (!imagesToPull.ContainsKey(setupInfo.Image))
{
imagesToPull[setupInfo.Image] = new List<Guid>();
}
Trace.Info($"Action {action.Name} ({action.Id}) from repository '{setupInfo.ActionRepository}' needs to pull image '{setupInfo.Image}'");
imagesToPull[setupInfo.Image].Add(action.Id);
}
else
{
ArgUtil.NotNullOrEmpty(setupInfo.ActionRepository, nameof(setupInfo.ActionRepository));
if (!imagesToBuild.ContainsKey(setupInfo.ActionRepository))
{
imagesToBuild[setupInfo.ActionRepository] = new List<Guid>();
}
Trace.Info($"Action {action.Name} ({action.Id}) from repository '{setupInfo.ActionRepository}' needs to build image '{setupInfo.Dockerfile}'");
imagesToBuild[setupInfo.ActionRepository].Add(action.Id);
imagesToBuildInfo[setupInfo.ActionRepository] = setupInfo;
}
}
var repoAction = action.Reference as Pipelines.RepositoryPathReference;
if (repoAction.RepositoryType != Pipelines.PipelineConstants.SelfAlias)
{
@@ -321,6 +395,14 @@ namespace GitHub.Runner.Worker
Trace.Info($"Action cleanup plugin: {plugin.PluginTypeName}.");
}
}
else if (definition.Data.Execution.ExecutionType == ActionExecutionType.Composite && !string.IsNullOrEmpty(Environment.GetEnvironmentVariable("TESTING_COMPOSITE_ACTIONS_ALPHA")))
{
var compositeAction = definition.Data.Execution as CompositeActionExecutionData;
Trace.Info($"Load {compositeAction.Steps?.Count ?? 0} action steps.");
Trace.Verbose($"Details: {StringUtil.ConvertToJson(compositeAction?.Steps)}");
Trace.Info($"Load: {compositeAction.Outputs?.Count ?? 0} number of outputs");
Trace.Info($"Details: {StringUtil.ConvertToJson(compositeAction?.Outputs)}");
}
else
{
throw new NotSupportedException(definition.Data.Execution.ExecutionType.ToString());
@@ -386,7 +468,7 @@ namespace GitHub.Runner.Worker
ArgUtil.NotNull(setupInfo, nameof(setupInfo));
ArgUtil.NotNullOrEmpty(setupInfo.Container.Image, nameof(setupInfo.Container.Image));
executionContext.Output($"Pull down action image '{setupInfo.Container.Image}'");
executionContext.Output($"##[group]Pull down action image '{setupInfo.Container.Image}'");
// Pull down docker image with retry up to 3 times
var dockerManger = HostContext.GetService<IDockerCommandManager>();
@@ -410,6 +492,7 @@ namespace GitHub.Runner.Worker
}
}
}
executionContext.Output("##[endgroup");
if (retryCount == 3 && pullExitCode != 0)
{
@@ -429,7 +512,7 @@ namespace GitHub.Runner.Worker
ArgUtil.NotNull(setupInfo, nameof(setupInfo));
ArgUtil.NotNullOrEmpty(setupInfo.Container.Dockerfile, nameof(setupInfo.Container.Dockerfile));
executionContext.Output($"Build container for action use: '{setupInfo.Container.Dockerfile}'.");
executionContext.Output($"##[group]Build container for action use: '{setupInfo.Container.Dockerfile}'.");
// Build docker image with retry up to 3 times
var dockerManger = HostContext.GetService<IDockerCommandManager>();
@@ -438,7 +521,12 @@ namespace GitHub.Runner.Worker
var imageName = $"{dockerManger.DockerInstanceLabel}:{Guid.NewGuid().ToString("N")}";
while (retryCount < 3)
{
buildExitCode = await dockerManger.DockerBuild(executionContext, setupInfo.Container.WorkingDirectory, Directory.GetParent(setupInfo.Container.Dockerfile).FullName, imageName);
buildExitCode = await dockerManger.DockerBuild(
executionContext,
setupInfo.Container.WorkingDirectory,
setupInfo.Container.Dockerfile,
Directory.GetParent(setupInfo.Container.Dockerfile).FullName,
imageName);
if (buildExitCode == 0)
{
break;
@@ -454,6 +542,7 @@ namespace GitHub.Runner.Worker
}
}
}
executionContext.Output("##[endgroup]");
if (retryCount == 3 && buildExitCode != 0)
{
@@ -467,6 +556,80 @@ namespace GitHub.Runner.Worker
}
}
// This implementation is temporary and will be replaced with a REST API call to the service to resolve
private async Task<IDictionary<string, WebApi.ActionDownloadInfo>> GetDownloadInfoAsync(IExecutionContext executionContext, List<Pipelines.ActionStep> actions)
{
executionContext.Output("Getting action download info");
// Convert to action reference
var actionReferences = actions
.GroupBy(x => GetDownloadInfoLookupKey(x))
.Where(x => !string.IsNullOrEmpty(x.Key))
.Select(x =>
{
var action = x.First();
var repositoryReference = action.Reference as Pipelines.RepositoryPathReference;
ArgUtil.NotNull(repositoryReference, nameof(repositoryReference));
return new WebApi.ActionReference
{
NameWithOwner = repositoryReference.Name,
Ref = repositoryReference.Ref,
};
})
.ToList();
// Nothing to resolve?
if (actionReferences.Count == 0)
{
return new Dictionary<string, WebApi.ActionDownloadInfo>();
}
// Resolve download info
var jobServer = HostContext.GetService<IJobServer>();
var actionDownloadInfos = default(WebApi.ActionDownloadInfoCollection);
for (var attempt = 1; attempt <= 3; attempt++)
{
try
{
actionDownloadInfos = await jobServer.ResolveActionDownloadInfoAsync(executionContext.Global.Plan.ScopeIdentifier, executionContext.Global.Plan.PlanType, executionContext.Global.Plan.PlanId, new WebApi.ActionReferenceList { Actions = actionReferences }, executionContext.CancellationToken);
break;
}
catch (Exception ex) when (attempt < 3)
{
executionContext.Output($"Failed to resolve action download info. Error: {ex.Message}");
executionContext.Debug(ex.ToString());
if (String.IsNullOrEmpty(Environment.GetEnvironmentVariable("_GITHUB_ACTION_DOWNLOAD_NO_BACKOFF")))
{
var backoff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(10), TimeSpan.FromSeconds(30));
executionContext.Output($"Retrying in {backoff.TotalSeconds} seconds");
await Task.Delay(backoff);
}
}
}
ArgUtil.NotNull(actionDownloadInfos, nameof(actionDownloadInfos));
ArgUtil.NotNull(actionDownloadInfos.Actions, nameof(actionDownloadInfos.Actions));
var apiUrl = GetApiUrl(executionContext);
var defaultAccessToken = executionContext.GetGitHubContext("token");
var configurationStore = HostContext.GetService<IConfigurationStore>();
var runnerSettings = configurationStore.GetSettings();
foreach (var actionDownloadInfo in actionDownloadInfos.Actions.Values)
{
// Add secret
HostContext.SecretMasker.AddValue(actionDownloadInfo.Authentication?.Token);
// Default auth token
if (string.IsNullOrEmpty(actionDownloadInfo.Authentication?.Token))
{
actionDownloadInfo.Authentication = new WebApi.ActionDownloadAuthentication { Token = defaultAccessToken };
}
}
return actionDownloadInfos.Actions;
}
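The resolve loop above tries up to three times, pausing a random 10-30 seconds between failures (skippable through _GITHUB_ACTION_DOWNLOAD_NO_BACKOFF). As an illustration of what BackoffTimerHelper.GetRandomBackoff is assumed to do, a jittered-backoff stand-in:
using System;
static class BackoffDemo
{
    private static readonly Random _random = new Random();
    // Pick a uniformly random delay between the two bounds.
    private static TimeSpan GetRandomBackoff(TimeSpan min, TimeSpan max) =>
        min + TimeSpan.FromMilliseconds(_random.NextDouble() * (max - min).TotalMilliseconds);
    static void Main()
    {
        Console.WriteLine(GetRandomBackoff(TimeSpan.FromSeconds(10), TimeSpan.FromSeconds(30)));
    }
}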
// todo: Remove when feature flag DistributedTask.NewActionMetadata is removed
private async Task DownloadRepositoryActionAsync(IExecutionContext executionContext, Pipelines.ActionStep repositoryAction)
{
Trace.Entering();
@@ -490,7 +653,7 @@ namespace GitHub.Runner.Worker
ArgUtil.NotNullOrEmpty(repositoryReference.Ref, nameof(repositoryReference.Ref));
string destDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Actions), repositoryReference.Name.Replace(Path.AltDirectorySeparatorChar, Path.DirectorySeparatorChar), repositoryReference.Ref);
string watermarkFile = destDirectory + ".completed";
string watermarkFile = GetWatermarkFilePath(destDirectory);
if (File.Exists(watermarkFile))
{
executionContext.Debug($"Action '{repositoryReference.Name}@{repositoryReference.Ref}' already downloaded at '{destDirectory}'.");
@@ -504,27 +667,116 @@ namespace GitHub.Runner.Worker
executionContext.Output($"Download action repository '{repositoryReference.Name}@{repositoryReference.Ref}'");
}
#if OS_WINDOWS
string archiveLink = $"https://api.github.com/repos/{repositoryReference.Name}/zipball/{repositoryReference.Ref}";
#else
string archiveLink = $"https://api.github.com/repos/{repositoryReference.Name}/tarball/{repositoryReference.Ref}";
#endif
Trace.Info($"Download archive '{archiveLink}' to '{destDirectory}'.");
var configurationStore = HostContext.GetService<IConfigurationStore>();
var isHostedServer = configurationStore.GetSettings().IsHostedServer;
if (isHostedServer)
{
string apiUrl = GetApiUrl(executionContext);
string archiveLink = BuildLinkToActionArchive(apiUrl, repositoryReference.Name, repositoryReference.Ref);
var downloadDetails = new ActionDownloadDetails(archiveLink, ConfigureAuthorizationFromContext);
await DownloadRepositoryActionAsync(executionContext, downloadDetails, null, destDirectory);
return;
}
else
{
string apiUrl = GetApiUrl(executionContext);
// URLs to try:
var downloadAttempts = new List<ActionDownloadDetails> {
// A built-in action or an action the user has created, on their GHES instance
// Example: https://my-ghes/api/v3/repos/my-org/my-action/tarball/v1
new ActionDownloadDetails(
BuildLinkToActionArchive(apiUrl, repositoryReference.Name, repositoryReference.Ref),
ConfigureAuthorizationFromContext),
// The same action, on GitHub.com
// Example: https://api.github.com/repos/my-org/my-action/tarball/v1
new ActionDownloadDetails(
BuildLinkToActionArchive(_dotcomApiUrl, repositoryReference.Name, repositoryReference.Ref),
configureAuthorization: (e,h) => { /* no authorization for dotcom */ })
};
foreach (var downloadAttempt in downloadAttempts)
{
try
{
await DownloadRepositoryActionAsync(executionContext, downloadAttempt, null, destDirectory);
return;
}
catch (ActionNotFoundException)
{
Trace.Info($"Failed to find the action '{repositoryReference.Name}' at ref '{repositoryReference.Ref}' at {downloadAttempt.ArchiveLink}");
continue;
}
}
throw new ActionNotFoundException($"Failed to find the action '{repositoryReference.Name}' at ref '{repositoryReference.Ref}'. Paths attempted: {string.Join(", ", downloadAttempts.Select(d => d.ArchiveLink))}");
}
}
private async Task DownloadRepositoryActionAsync(IExecutionContext executionContext, WebApi.ActionDownloadInfo downloadInfo)
{
Trace.Entering();
ArgUtil.NotNull(executionContext, nameof(executionContext));
ArgUtil.NotNull(downloadInfo, nameof(downloadInfo));
ArgUtil.NotNullOrEmpty(downloadInfo.NameWithOwner, nameof(downloadInfo.NameWithOwner));
ArgUtil.NotNullOrEmpty(downloadInfo.Ref, nameof(downloadInfo.Ref));
string destDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Actions), downloadInfo.NameWithOwner.Replace(Path.AltDirectorySeparatorChar, Path.DirectorySeparatorChar), downloadInfo.Ref);
string watermarkFile = GetWatermarkFilePath(destDirectory);
if (File.Exists(watermarkFile))
{
executionContext.Debug($"Action '{downloadInfo.NameWithOwner}@{downloadInfo.Ref}' already downloaded at '{destDirectory}'.");
return;
}
else
{
// make sure we get a clean folder ready to use.
IOUtil.DeleteDirectory(destDirectory, executionContext.CancellationToken);
Directory.CreateDirectory(destDirectory);
executionContext.Output($"Download action repository '{downloadInfo.NameWithOwner}@{downloadInfo.Ref}'");
}
await DownloadRepositoryActionAsync(executionContext, null, downloadInfo, destDirectory);
}
private string GetApiUrl(IExecutionContext executionContext)
{
string apiUrl = executionContext.GetGitHubContext("api_url");
if (!string.IsNullOrEmpty(apiUrl))
{
return apiUrl;
}
// Once the api_url is set for hosted, we can remove this fallback (it doesn't make sense for GHES)
return _dotcomApiUrl;
}
private static string BuildLinkToActionArchive(string apiUrl, string repository, string @ref)
{
#if OS_WINDOWS
return $"{apiUrl}/repos/{repository}/zipball/{@ref}";
#else
return $"{apiUrl}/repos/{repository}/tarball/{@ref}";
#endif
}
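Taken together, GetApiUrl and BuildLinkToActionArchive produce the per-platform archive URL. A sketch with hypothetical values; the local helper below mirrors the method above rather than calling it:
using System;
class ArchiveLinkDemo
{
    // Mirrors the shape of BuildLinkToActionArchive above (illustrative stand-in).
    static string BuildLink(string apiUrl, string repository, string gitRef, bool windows) =>
        windows
            ? $"{apiUrl}/repos/{repository}/zipball/{gitRef}"
            : $"{apiUrl}/repos/{repository}/tarball/{gitRef}";
    static void Main()
    {
        // Hypothetical GHES API URL and action reference.
        Console.WriteLine(BuildLink("https://my-ghes/api/v3", "actions/checkout", "v2", windows: false));
        // -> https://my-ghes/api/v3/repos/actions/checkout/tarball/v2
    }
}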
// todo: Remove the parameter "actionDownloadDetails" when feature flag DistributedTask.NewActionMetadata is removed
private async Task DownloadRepositoryActionAsync(IExecutionContext executionContext, ActionDownloadDetails actionDownloadDetails, WebApi.ActionDownloadInfo downloadInfo, string destDirectory)
{
//download and extract action in a temp folder and rename it on success
string tempDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Actions), "_temp_" + Guid.NewGuid());
Directory.CreateDirectory(tempDirectory);
#if OS_WINDOWS
string archiveFile = Path.Combine(tempDirectory, $"{Guid.NewGuid()}.zip");
string link = downloadInfo?.ZipballUrl ?? actionDownloadDetails.ArchiveLink;
#else
string archiveFile = Path.Combine(tempDirectory, $"{Guid.NewGuid()}.tar.gz");
string link = downloadInfo?.TarballUrl ?? actionDownloadDetails.ArchiveLink;
#endif
Trace.Info($"Save archive '{archiveLink}' into {archiveFile}.");
Trace.Info($"Save archive '{link}' into {archiveFile}.");
try
{
int retryCount = 0;
// Allow up to 20 * 60s for any action to be downloaded from github graph.
@@ -541,64 +793,67 @@ namespace GitHub.Runner.Worker
using (var httpClientHandler = HostContext.CreateHttpClientHandler())
using (var httpClient = new HttpClient(httpClientHandler))
{
var configurationStore = HostContext.GetService<IConfigurationStore>();
var isHostedServer = configurationStore.GetSettings().IsHostedServer;
if (isHostedServer)
// Legacy
if (downloadInfo == null)
{
var authToken = Environment.GetEnvironmentVariable("_GITHUB_ACTION_TOKEN");
if (string.IsNullOrEmpty(authToken))
{
// TODO: Deprecate the PREVIEW_ACTION_TOKEN
authToken = executionContext.Variables.Get("PREVIEW_ACTION_TOKEN");
}
if (!string.IsNullOrEmpty(authToken))
{
HostContext.SecretMasker.AddValue(authToken);
var base64EncodingToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"PAT:{authToken}"));
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Basic", base64EncodingToken);
}
else
{
var accessToken = executionContext.GetGitHubContext("token");
var base64EncodingToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"x-access-token:{accessToken}"));
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Basic", base64EncodingToken);
}
actionDownloadDetails.ConfigureAuthorization(executionContext, httpClient);
}
// FF DistributedTask.NewActionMetadata
else
{
// Intentionally empty. Temporary for GHES alpha release, download from dotcom unauthenticated.
httpClient.DefaultRequestHeaders.Authorization = CreateAuthHeader(downloadInfo.Authentication?.Token);
}
httpClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents);
using (var result = await httpClient.GetStreamAsync(archiveLink))
using (var response = await httpClient.GetAsync(link))
{
await result.CopyToAsync(fs, _defaultCopyBufferSize, actionDownloadCancellation.Token);
await fs.FlushAsync(actionDownloadCancellation.Token);
if (response.IsSuccessStatusCode)
{
using (var result = await response.Content.ReadAsStreamAsync())
{
await result.CopyToAsync(fs, _defaultCopyBufferSize, actionDownloadCancellation.Token);
await fs.FlushAsync(actionDownloadCancellation.Token);
// download succeed, break out the retry loop.
break;
// download succeed, break out the retry loop.
break;
}
}
else if (response.StatusCode == HttpStatusCode.NotFound)
{
// It doesn't make sense to retry in this case, so just stop
throw new ActionNotFoundException(new Uri(link));
}
else
{
// Something else bad happened, let's go to our retry logic
response.EnsureSuccessStatusCode();
}
}
}
}
catch (OperationCanceledException) when (executionContext.CancellationToken.IsCancellationRequested)
{
Trace.Info($"Action download has been cancelled.");
Trace.Info("Action download has been cancelled.");
throw;
}
catch (ActionNotFoundException)
{
Trace.Info($"The action at '{link}' does not exist");
throw;
}
catch (Exception ex) when (retryCount < 2)
{
retryCount++;
Trace.Error($"Fail to download archive '{archiveLink}' -- Attempt: {retryCount}");
Trace.Error($"Fail to download archive '{link}' -- Attempt: {retryCount}");
Trace.Error(ex);
if (actionDownloadTimeout.Token.IsCancellationRequested)
{
// action download didn't finish within timeout
executionContext.Warning($"Action '{archiveLink}' didn't finish download within {timeoutSeconds} seconds.");
executionContext.Warning($"Action '{link}' didn't finish download within {timeoutSeconds} seconds.");
}
else
{
executionContext.Warning($"Failed to download action '{archiveLink}'. Error {ex.Message}");
executionContext.Warning($"Failed to download action '{link}'. Error: {ex.Message}");
}
}
}
@@ -612,7 +867,7 @@ namespace GitHub.Runner.Worker
}
ArgUtil.NotNullOrEmpty(archiveFile, nameof(archiveFile));
executionContext.Debug($"Download '{archiveLink}' to '{archiveFile}'");
executionContext.Debug($"Download '{link}' to '{archiveFile}'");
var stagingDirectory = Path.Combine(tempDirectory, "_staging");
Directory.CreateDirectory(stagingDirectory);
@@ -662,6 +917,7 @@ namespace GitHub.Runner.Worker
}
Trace.Verbose("Create watermark file indicate action download succeed.");
string watermarkFile = GetWatermarkFilePath(destDirectory);
File.WriteAllText(watermarkFile, DateTime.UtcNow.ToString());
executionContext.Debug($"Archive '{archiveFile}' has been unzipped into '{destDirectory}'.");
@@ -686,6 +942,32 @@ namespace GitHub.Runner.Worker
}
}
// todo: Remove when feature flag DistributedTask.NewActionMetadata is removed
private void ConfigureAuthorizationFromContext(IExecutionContext executionContext, HttpClient httpClient)
{
var authToken = Environment.GetEnvironmentVariable("_GITHUB_ACTION_TOKEN");
if (string.IsNullOrEmpty(authToken))
{
// TODO: Deprecate the PREVIEW_ACTION_TOKEN
authToken = executionContext.Global.Variables.Get("PREVIEW_ACTION_TOKEN");
}
if (!string.IsNullOrEmpty(authToken))
{
HostContext.SecretMasker.AddValue(authToken);
var base64EncodingToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"PAT:{authToken}"));
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Basic", base64EncodingToken);
}
else
{
var accessToken = executionContext.GetGitHubContext("token");
var base64EncodingToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"x-access-token:{accessToken}"));
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Basic", base64EncodingToken);
}
}
private string GetWatermarkFilePath(string directory) => directory + ".completed";
private ActionContainer PrepareRepositoryActionAsync(IExecutionContext executionContext, Pipelines.ActionStep repositoryAction)
{
var repositoryReference = repositoryAction.Reference as Pipelines.RepositoryPathReference;
@@ -766,6 +1048,11 @@ namespace GitHub.Runner.Worker
Trace.Info($"Action plugin: {(actionDefinitionData.Execution as PluginActionExecutionData).Plugin}, no more preparation.");
return null;
}
else if (actionDefinitionData.Execution.ExecutionType == ActionExecutionType.Composite && !string.IsNullOrEmpty(Environment.GetEnvironmentVariable("TESTING_COMPOSITE_ACTIONS_ALPHA")))
{
Trace.Info($"Action composite: {(actionDefinitionData.Execution as CompositeActionExecutionData).Steps}, no more preparation.");
return null;
}
else
{
throw new NotSupportedException(actionDefinitionData.Execution.ExecutionType.ToString());
@@ -791,6 +1078,64 @@ namespace GitHub.Runner.Worker
throw new InvalidOperationException($"Can't find 'action.yml', 'action.yaml' or 'Dockerfile' under '{fullPath}'. Did you forget to run actions/checkout before running your local action?");
}
}
private static string GetDownloadInfoLookupKey(Pipelines.ActionStep action)
{
if (action.Reference.Type != Pipelines.ActionSourceType.Repository)
{
return null;
}
var repositoryReference = action.Reference as Pipelines.RepositoryPathReference;
ArgUtil.NotNull(repositoryReference, nameof(repositoryReference));
if (string.Equals(repositoryReference.RepositoryType, Pipelines.PipelineConstants.SelfAlias, StringComparison.OrdinalIgnoreCase))
{
return null;
}
if (!string.Equals(repositoryReference.RepositoryType, Pipelines.RepositoryTypes.GitHub, StringComparison.OrdinalIgnoreCase))
{
throw new NotSupportedException(repositoryReference.RepositoryType);
}
ArgUtil.NotNullOrEmpty(repositoryReference.Name, nameof(repositoryReference.Name));
ArgUtil.NotNullOrEmpty(repositoryReference.Ref, nameof(repositoryReference.Ref));
return $"{repositoryReference.Name}@{repositoryReference.Ref}";
}
private static string GetDownloadInfoLookupKey(WebApi.ActionDownloadInfo info)
{
ArgUtil.NotNullOrEmpty(info.NameWithOwner, nameof(info.NameWithOwner));
ArgUtil.NotNullOrEmpty(info.Ref, nameof(info.Ref));
return $"{info.NameWithOwner}@{info.Ref}";
}
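Both overloads of GetDownloadInfoLookupKey reduce an action reference to "{NameWithOwner}@{Ref}", which is what lets GetDownloadInfoAsync group duplicate references and resolve each one only once. A tiny illustration with hypothetical steps:
using System;
using System.Linq;
class LookupKeyDemo
{
    static void Main()
    {
        // Hypothetical job steps; duplicates collapse to a single download per name@ref.
        var steps = new[] { "actions/checkout@v2", "actions/setup-node@v1", "actions/checkout@v2" };
        foreach (var key in steps.Distinct())
        {
            Console.WriteLine(key); // actions/checkout@v2, actions/setup-node@v1
        }
    }
}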
private AuthenticationHeaderValue CreateAuthHeader(string token)
{
if (string.IsNullOrEmpty(token))
{
return null;
}
var base64EncodingToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"x-access-token:{token}"));
HostContext.SecretMasker.AddValue(base64EncodingToken);
return new AuthenticationHeaderValue("Basic", base64EncodingToken);
}
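CreateAuthHeader above wraps the per-action download token into a Basic header and registers the encoded value with the secret masker; the encoding itself is just base64 of "x-access-token:{token}". A minimal standalone version with a placeholder token:
using System;
using System.Net.Http.Headers;
using System.Text;
class AuthHeaderDemo
{
    static void Main()
    {
        var token = "example-download-token"; // placeholder; real tokens are secret values
        var base64 = Convert.ToBase64String(Encoding.UTF8.GetBytes($"x-access-token:{token}"));
        Console.WriteLine(new AuthenticationHeaderValue("Basic", base64)); // Basic <base64>
    }
}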
// todo: Remove when feature flag DistributedTask.NewActionMetadata is removed
private class ActionDownloadDetails
{
public string ArchiveLink { get; }
public Action<IExecutionContext, HttpClient> ConfigureAuthorization { get; }
public ActionDownloadDetails(string archiveLink, Action<IExecutionContext, HttpClient> configureAuthorization)
{
ArchiveLink = archiveLink;
ConfigureAuthorization = configureAuthorization;
}
}
}
public sealed class Definition
@@ -818,6 +1163,7 @@ namespace GitHub.Runner.Worker
NodeJS,
Plugin,
Script,
Composite,
}
public sealed class ContainerActionExecutionData : ActionExecutionData
@@ -874,6 +1220,15 @@ namespace GitHub.Runner.Worker
public override bool HasPost => false;
}
public sealed class CompositeActionExecutionData : ActionExecutionData
{
public override ActionExecutionType ExecutionType => ActionExecutionType.Composite;
public override bool HasPre => false;
public override bool HasPost => false;
public List<Pipelines.ActionStep> Steps { get; set; }
public MappingToken Outputs { get; set; }
}
public abstract class ActionExecutionData
{
private string _initCondition = $"{Constants.Expressions.Always}()";
@@ -931,4 +1286,3 @@ namespace GitHub.Runner.Worker
public string ActionRepository { get; set; }
}
}

View File

@@ -14,6 +14,7 @@ using YamlDotNet.Core;
using YamlDotNet.Core.Events;
using System.Globalization;
using System.Linq;
using Pipelines = GitHub.DistributedTask.Pipelines;
namespace GitHub.Runner.Worker
{
@@ -22,6 +23,8 @@ namespace GitHub.Runner.Worker
{
ActionDefinitionData Load(IExecutionContext executionContext, string manifestFile);
DictionaryContextData EvaluateCompositeOutputs(IExecutionContext executionContext, TemplateToken token, IDictionary<string, PipelineContextData> extraExpressionValues);
List<string> EvaluateContainerArguments(IExecutionContext executionContext, SequenceToken token, IDictionary<string, PipelineContextData> extraExpressionValues);
Dictionary<string, string> EvaluateContainerEnvironment(IExecutionContext executionContext, MappingToken token, IDictionary<string, PipelineContextData> extraExpressionValues);
@@ -32,8 +35,6 @@ namespace GitHub.Runner.Worker
public sealed class ActionManifestManager : RunnerService, IActionManifestManager
{
private TemplateSchema _actionManifestSchema;
private IReadOnlyList<String> _fileTable;
public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);
@@ -54,25 +55,45 @@ namespace GitHub.Runner.Worker
public ActionDefinitionData Load(IExecutionContext executionContext, string manifestFile)
{
var context = CreateContext(executionContext);
var templateContext = CreateTemplateContext(executionContext);
ActionDefinitionData actionDefinition = new ActionDefinitionData();
// Clean up file name real quick
// Instead of using Regex which can be computationally expensive,
// we can just remove the # of characters from the fileName according to the length of the basePath
string basePath = HostContext.GetDirectory(WellKnownDirectory.Actions);
string fileRelativePath = manifestFile;
if (manifestFile.Contains(basePath))
{
fileRelativePath = manifestFile.Remove(0, basePath.Length + 1);
}
try
{
var token = default(TemplateToken);
// Get the file ID
var fileId = context.GetFileId(manifestFile);
_fileTable = context.GetFileTable();
var fileId = templateContext.GetFileId(fileRelativePath);
// Add this file to the FileTable in executionContext if it hasn't been added already
// we use > since fileID is 1 indexed
if (fileId > executionContext.Global.FileTable.Count)
{
executionContext.Global.FileTable.Add(fileRelativePath);
}
// Read the file
var fileContent = File.ReadAllText(manifestFile);
using (var stringReader = new StringReader(fileContent))
{
var yamlObjectReader = new YamlObjectReader(null, stringReader);
token = TemplateReader.Read(context, "action-root", yamlObjectReader, fileId, out _);
var yamlObjectReader = new YamlObjectReader(fileId, stringReader);
token = TemplateReader.Read(templateContext, "action-root", yamlObjectReader, fileId, out _);
}
var actionMapping = token.AssertMapping("action manifest root");
var actionOutputs = default(MappingToken);
var actionRunValueToken = default(TemplateToken);
foreach (var actionPair in actionMapping)
{
var propertyName = actionPair.Key.AssertString($"action.yml property key");
@@ -83,44 +104,61 @@ namespace GitHub.Runner.Worker
actionDefinition.Name = actionPair.Value.AssertString("name").Value;
break;
case "outputs":
if (!string.IsNullOrEmpty(Environment.GetEnvironmentVariable("TESTING_COMPOSITE_ACTIONS_ALPHA")))
{
actionOutputs = actionPair.Value.AssertMapping("outputs");
break;
}
Trace.Info($"Ignore action property outputs. Outputs for a whole action is not supported yet.");
break;
case "description":
actionDefinition.Description = actionPair.Value.AssertString("description").Value;
break;
case "inputs":
ConvertInputs(context, actionPair.Value, actionDefinition);
ConvertInputs(actionPair.Value, actionDefinition);
break;
case "runs":
actionDefinition.Execution = ConvertRuns(context, actionPair.Value);
// Defer runs token evaluation to after for loop to ensure that order of outputs doesn't matter.
actionRunValueToken = actionPair.Value;
break;
default:
Trace.Info($"Ignore action property {propertyName}.");
break;
}
}
// Evaluate Runs Last
if (actionRunValueToken != null)
{
actionDefinition.Execution = ConvertRuns(executionContext, templateContext, actionRunValueToken, fileRelativePath, actionOutputs);
}
}
catch (Exception ex)
{
Trace.Error(ex);
context.Errors.Add(ex);
templateContext.Errors.Add(ex);
}
if (context.Errors.Count > 0)
if (templateContext.Errors.Count > 0)
{
foreach (var error in context.Errors)
foreach (var error in templateContext.Errors)
{
Trace.Error($"Action.yml load error: {error.Message}");
executionContext.Error(error.Message);
}
throw new ArgumentException($"Fail to load {manifestFile}");
throw new ArgumentException($"Fail to load {fileRelativePath}");
}
if (actionDefinition.Execution == null)
{
executionContext.Debug($"Loaded action.yml file: {StringUtil.ConvertToJson(actionDefinition)}");
throw new ArgumentException($"Top level 'runs:' section is required for {manifestFile}");
throw new ArgumentException($"Top level 'runs:' section is required for {fileRelativePath}");
}
else
{
@@ -130,6 +168,33 @@ namespace GitHub.Runner.Worker
return actionDefinition;
}
public DictionaryContextData EvaluateCompositeOutputs(
IExecutionContext executionContext,
TemplateToken token,
IDictionary<string, PipelineContextData> extraExpressionValues)
{
var result = default(DictionaryContextData);
if (token != null)
{
var templateContext = CreateTemplateContext(executionContext, extraExpressionValues);
try
{
token = TemplateEvaluator.Evaluate(templateContext, "outputs", token, 0, null, omitHeader: true);
templateContext.Errors.Check();
result = token.ToContextData().AssertDictionary("composite outputs");
}
catch (Exception ex) when (!(ex is TemplateValidationException))
{
templateContext.Errors.Add(ex);
}
templateContext.Errors.Check();
}
return result ?? new DictionaryContextData();
}
public List<string> EvaluateContainerArguments(
IExecutionContext executionContext,
SequenceToken token,
@@ -139,11 +204,11 @@ namespace GitHub.Runner.Worker
if (token != null)
{
var context = CreateContext(executionContext, extraExpressionValues);
var templateContext = CreateTemplateContext(executionContext, extraExpressionValues);
try
{
var evaluateResult = TemplateEvaluator.Evaluate(context, "container-runs-args", token, 0, null, omitHeader: true);
context.Errors.Check();
var evaluateResult = TemplateEvaluator.Evaluate(templateContext, "container-runs-args", token, 0, null, omitHeader: true);
templateContext.Errors.Check();
Trace.Info($"Arguments evaluate result: {StringUtil.ConvertToJson(evaluateResult)}");
@@ -160,10 +225,10 @@ namespace GitHub.Runner.Worker
catch (Exception ex) when (!(ex is TemplateValidationException))
{
Trace.Error(ex);
context.Errors.Add(ex);
templateContext.Errors.Add(ex);
}
context.Errors.Check();
templateContext.Errors.Check();
}
return result;
@@ -178,11 +243,11 @@ namespace GitHub.Runner.Worker
if (token != null)
{
var context = CreateContext(executionContext, extraExpressionValues);
var templateContext = CreateTemplateContext(executionContext, extraExpressionValues);
try
{
var evaluateResult = TemplateEvaluator.Evaluate(context, "container-runs-env", token, 0, null, omitHeader: true);
context.Errors.Check();
var evaluateResult = TemplateEvaluator.Evaluate(templateContext, "container-runs-env", token, 0, null, omitHeader: true);
templateContext.Errors.Check();
Trace.Info($"Environments evaluate result: {StringUtil.ConvertToJson(evaluateResult)}");
@@ -204,10 +269,10 @@ namespace GitHub.Runner.Worker
catch (Exception ex) when (!(ex is TemplateValidationException))
{
Trace.Error(ex);
context.Errors.Add(ex);
templateContext.Errors.Add(ex);
}
context.Errors.Check();
templateContext.Errors.Check();
}
return result;
@@ -221,11 +286,11 @@ namespace GitHub.Runner.Worker
string result = "";
if (token != null)
{
var context = CreateContext(executionContext);
var templateContext = CreateTemplateContext(executionContext);
try
{
var evaluateResult = TemplateEvaluator.Evaluate(context, "input-default-context", token, 0, null, omitHeader: true);
context.Errors.Check();
var evaluateResult = TemplateEvaluator.Evaluate(templateContext, "input-default-context", token, 0, null, omitHeader: true);
templateContext.Errors.Check();
Trace.Info($"Input '{inputName}': default value evaluate result: {StringUtil.ConvertToJson(evaluateResult)}");
@@ -235,16 +300,16 @@ namespace GitHub.Runner.Worker
catch (Exception ex) when (!(ex is TemplateValidationException))
{
Trace.Error(ex);
context.Errors.Add(ex);
templateContext.Errors.Add(ex);
}
context.Errors.Check();
templateContext.Errors.Check();
}
return result;
}
private TemplateContext CreateContext(
private TemplateContext CreateTemplateContext(
IExecutionContext executionContext,
IDictionary<string, PipelineContextData> extraExpressionValues = null)
{
@@ -281,21 +346,21 @@ namespace GitHub.Runner.Worker
result.ExpressionFunctions.Add(item);
}
// Add the file table
if (_fileTable?.Count > 0)
// Add the file table from the Execution Context
for (var i = 0; i < executionContext.Global.FileTable.Count; i++)
{
for (var i = 0 ; i < _fileTable.Count ; i++)
{
result.GetFileId(_fileTable[i]);
}
result.GetFileId(executionContext.Global.FileTable[i]);
}
return result;
}
private ActionExecutionData ConvertRuns(
TemplateContext context,
TemplateToken inputsToken)
IExecutionContext executionContext,
TemplateContext templateContext,
TemplateToken inputsToken,
String fileRelativePath,
MappingToken outputs = null)
{
var runsMapping = inputsToken.AssertMapping("runs");
var usingToken = default(StringToken);
@@ -311,6 +376,8 @@ namespace GitHub.Runner.Worker
var postToken = default(StringToken);
var postEntrypointToken = default(StringToken);
var postIfToken = default(StringToken);
var steps = default(List<Pipelines.Step>);
foreach (var run in runsMapping)
{
var runsKey = run.Key.AssertString("runs key").Value;
@@ -355,6 +422,15 @@ namespace GitHub.Runner.Worker
case "pre-if":
preIfToken = run.Value.AssertString("pre-if");
break;
case "steps":
if (!string.IsNullOrEmpty(Environment.GetEnvironmentVariable("TESTING_COMPOSITE_ACTIONS_ALPHA")))
{
var stepsToken = run.Value.AssertSequence("steps");
steps = PipelineTemplateConverter.ConvertToSteps(templateContext, stepsToken);
templateContext.Errors.Check();
break;
}
throw new Exception("You aren't supposed to be using Composite Actions yet!");
default:
Trace.Info($"Ignore run property {runsKey}.");
break;
@@ -367,7 +443,7 @@ namespace GitHub.Runner.Worker
{
if (string.IsNullOrEmpty(imageToken?.Value))
{
throw new ArgumentNullException($"Image is not provided.");
throw new ArgumentNullException($"You are using a Container Action but an image is not provided in {fileRelativePath}.");
}
else
{
@@ -388,7 +464,7 @@ namespace GitHub.Runner.Worker
{
if (string.IsNullOrEmpty(mainToken?.Value))
{
throw new ArgumentNullException($"Entry javascript file is not provided.");
throw new ArgumentNullException($"You are using a JavaScript Action but there is not an entry JavaScript file provided in {fileRelativePath}.");
}
else
{
@@ -402,6 +478,21 @@ namespace GitHub.Runner.Worker
};
}
}
else if (string.Equals(usingToken.Value, "composite", StringComparison.OrdinalIgnoreCase) && !string.IsNullOrEmpty(Environment.GetEnvironmentVariable("TESTING_COMPOSITE_ACTIONS_ALPHA")))
{
if (steps == null)
{
throw new ArgumentNullException($"You are using a composite action but there are no steps provided in {fileRelativePath}.");
}
else
{
return new CompositeActionExecutionData()
{
Steps = steps.Cast<Pipelines.ActionStep>().ToList(),
Outputs = outputs
};
}
}
else
{
throw new ArgumentOutOfRangeException($"'using: {usingToken.Value}' is not supported, use 'docker' or 'node12' instead.");
@@ -419,7 +510,6 @@ namespace GitHub.Runner.Worker
}
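
ConvertRuns dispatches on the `using:` value: `docker` requires an image, `node12` requires a `main` entry file, and `composite` (gated behind the TESTING_COMPOSITE_ACTIONS_ALPHA environment variable) requires a `steps` sequence. A compact sketch of that dispatch, with simplified placeholder classes standing in for the runner's execution-data types:

    using System;

    abstract class ExecutionDataSketch { }
    sealed class ContainerData : ExecutionDataSketch { public string Image; }
    sealed class NodeScriptData : ExecutionDataSketch { public string Script; }
    sealed class CompositeData : ExecutionDataSketch { public string[] Steps; }

    static class RunsDispatchSketch
    {
        public static ExecutionDataSketch Convert(string usingValue, string image, string main, string[] steps)
        {
            bool compositeEnabled =
                !string.IsNullOrEmpty(Environment.GetEnvironmentVariable("TESTING_COMPOSITE_ACTIONS_ALPHA"));

            if (string.Equals(usingValue, "docker", StringComparison.OrdinalIgnoreCase))
            {
                if (string.IsNullOrEmpty(image)) throw new ArgumentNullException(nameof(image));
                return new ContainerData { Image = image };
            }
            if (string.Equals(usingValue, "node12", StringComparison.OrdinalIgnoreCase))
            {
                if (string.IsNullOrEmpty(main)) throw new ArgumentNullException(nameof(main));
                return new NodeScriptData { Script = main };
            }
            if (compositeEnabled && string.Equals(usingValue, "composite", StringComparison.OrdinalIgnoreCase))
            {
                if (steps == null) throw new ArgumentNullException(nameof(steps));
                return new CompositeData { Steps = steps };
            }
            throw new ArgumentOutOfRangeException(
                $"'using: {usingValue}' is not supported, use 'docker' or 'node12' instead.");
        }
    }
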
private void ConvertInputs(
TemplateContext context,
TemplateToken inputsToken,
ActionDefinitionData actionDefinition)
{

View File

@@ -0,0 +1,33 @@
using System;
using System.Runtime.Serialization;
namespace GitHub.Runner.Worker
{
public class ActionNotFoundException : Exception
{
public ActionNotFoundException(Uri actionUri)
: base(FormatMessage(actionUri))
{
}
public ActionNotFoundException(string message)
: base(message)
{
}
public ActionNotFoundException(string message, System.Exception inner)
: base(message, inner)
{
}
protected ActionNotFoundException(SerializationInfo info, StreamingContext context)
: base(info, context)
{
}
private static string FormatMessage(Uri actionUri)
{
return $"An action could not be found at the URI '{actionUri}'";
}
}
}

View File

@@ -94,6 +94,13 @@ namespace GitHub.Runner.Worker
if (handlerData.HasPost && (Stage == ActionRunStage.Pre || Stage == ActionRunStage.Main))
{
string postDisplayName = $"Post {this.DisplayName}";
if (Stage == ActionRunStage.Pre &&
this.DisplayName.StartsWith("Pre ", StringComparison.OrdinalIgnoreCase))
{
// Trim the leading `Pre ` from the display name.
// Otherwise, we will get `Post Pre xxx` as DisplayName for the Post step.
postDisplayName = $"Post {this.DisplayName.Substring("Pre ".Length)}";
}
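
For actions that define both pre and post, the post step's display name is derived from the current step's name; when registered during the pre stage, the leading "Pre " is trimmed so the result reads "Post xxx" rather than "Post Pre xxx". A small illustration of that naming rule:

    using System;

    static class PostDisplayNameSketch
    {
        public static string ForPostStep(string displayName, bool registeredDuringPre)
        {
            if (registeredDuringPre && displayName.StartsWith("Pre ", StringComparison.OrdinalIgnoreCase))
            {
                // "Pre Checkout" -> "Post Checkout" instead of "Post Pre Checkout".
                return $"Post {displayName.Substring("Pre ".Length)}";
            }
            return $"Post {displayName}";
        }
    }
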
var repositoryReference = Action.Reference as RepositoryPathReference;
var pathString = string.IsNullOrEmpty(repositoryReference.Path) ? string.Empty : $"/{repositoryReference.Path}";
var repoString = string.IsNullOrEmpty(repositoryReference.Ref) ? $"{repositoryReference.Name}{pathString}" :
@@ -129,12 +136,12 @@ namespace GitHub.Runner.Worker
}
// Setup container stephost for running inside the container.
if (ExecutionContext.Container != null)
if (ExecutionContext.Global.Container != null)
{
// Make sure required container is already created.
ArgUtil.NotNullOrEmpty(ExecutionContext.Container.ContainerId, nameof(ExecutionContext.Container.ContainerId));
ArgUtil.NotNullOrEmpty(ExecutionContext.Global.Container.ContainerId, nameof(ExecutionContext.Global.Container.ContainerId));
var containerStepHost = HostContext.CreateService<IContainerStepHost>();
containerStepHost.Container = ExecutionContext.Container;
containerStepHost.Container = ExecutionContext.Global.Container;
stepHost = containerStepHost;
}
@@ -143,8 +150,10 @@ namespace GitHub.Runner.Worker
var templateEvaluator = ExecutionContext.ToPipelineTemplateEvaluator();
var inputs = templateEvaluator.EvaluateStepInputs(Action.Inputs, ExecutionContext.ExpressionValues, ExecutionContext.ExpressionFunctions);
var userInputs = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
foreach (KeyValuePair<string, string> input in inputs)
{
userInputs.Add(input.Key);
string message = "";
if (definition.Data?.Deprecated?.TryGetValue(input.Key, out message) == true)
{
@@ -152,13 +161,22 @@ namespace GitHub.Runner.Worker
}
}
var validInputs = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
if (handlerData.ExecutionType == ActionExecutionType.Container)
{
// container action always accept 'entryPoint' and 'args' as inputs
// https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions#jobsjob_idstepswithargs
validInputs.Add("entryPoint");
validInputs.Add("args");
}
// Merge the default inputs from the definition
if (definition.Data?.Inputs != null)
{
var manifestManager = HostContext.GetService<IActionManifestManager>();
foreach (var input in (definition.Data?.Inputs))
foreach (var input in definition.Data.Inputs)
{
string key = input.Key.AssertString("action input name").Value;
validInputs.Add(key);
if (!inputs.ContainsKey(key))
{
inputs[key] = manifestManager.EvaluateDefaultInput(ExecutionContext, key, input.Value);
@@ -166,6 +184,24 @@ namespace GitHub.Runner.Worker
}
}
// Validate inputs only for actions with action.yml
if (Action.Reference.Type == Pipelines.ActionSourceType.Repository)
{
var unexpectedInputs = new List<string>();
foreach (var input in userInputs)
{
if (!validInputs.Contains(input))
{
unexpectedInputs.Add(input);
}
}
if (unexpectedInputs.Count > 0)
{
ExecutionContext.Warning($"Unexpected input(s) '{string.Join("', '", unexpectedInputs)}', valid inputs are ['{string.Join("', '", validInputs)}']");
}
}
// Load the action environment.
ExecutionContext.Debug("Loading env");
var environment = new Dictionary<String, String>(VarUtil.EnvironmentVariableKeyComparer);
@@ -195,7 +231,7 @@ namespace GitHub.Runner.Worker
handlerData,
inputs,
environment,
ExecutionContext.Variables,
ExecutionContext.Global.Variables,
actionDirectory: definition.Directory);
// Print out action details

View File

@@ -17,7 +17,7 @@ namespace GitHub.Runner.Worker.Container
string DockerInstanceLabel { get; }
Task<DockerVersion> DockerVersion(IExecutionContext context);
Task<int> DockerPull(IExecutionContext context, string image);
Task<int> DockerBuild(IExecutionContext context, string workingDirectory, string dockerFile, string tag);
Task<int> DockerBuild(IExecutionContext context, string workingDirectory, string dockerFile, string dockerContext, string tag);
Task<string> DockerCreate(IExecutionContext context, ContainerInfo container);
Task<int> DockerRun(IExecutionContext context, ContainerInfo container, EventHandler<ProcessDataReceivedEventArgs> stdoutDataReceived, EventHandler<ProcessDataReceivedEventArgs> stderrDataReceived);
Task<int> DockerStart(IExecutionContext context, string containerId);
@@ -87,9 +87,9 @@ namespace GitHub.Runner.Worker.Container
return await ExecuteDockerCommandAsync(context, "pull", image, context.CancellationToken);
}
public async Task<int> DockerBuild(IExecutionContext context, string workingDirectory, string dockerFile, string tag)
public async Task<int> DockerBuild(IExecutionContext context, string workingDirectory, string dockerFile, string dockerContext, string tag)
{
return await ExecuteDockerCommandAsync(context, "build", $"-t {tag} \"{dockerFile}\"", workingDirectory, context.CancellationToken);
return await ExecuteDockerCommandAsync(context, "build", $"-t {tag} -f \"{dockerFile}\" \"{dockerContext}\"", workingDirectory, context.CancellationToken);
}
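
DockerBuild now receives the Dockerfile path and the build context as separate arguments, so the resulting command has the form `docker build -t <tag> -f "<dockerFile>" "<dockerContext>"` instead of treating the Dockerfile's directory as the implicit context. A minimal sketch of how the argument string is assembled (the command execution itself is omitted):

    static class DockerBuildArgsSketch
    {
        public static string Format(string dockerFile, string dockerContext, string tag)
        {
            // Quote both paths so spaces in the checkout location do not break the command.
            return $"-t {tag} -f \"{dockerFile}\" \"{dockerContext}\"";
        }
    }
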
public async Task<string> DockerCreate(IExecutionContext context, ContainerInfo container)

View File

@@ -91,7 +91,10 @@ namespace GitHub.Runner.Worker
#endif
// Check docker client/server version
executionContext.Output("##[group]Checking docker version");
DockerVersion dockerVersion = await _dockerManger.DockerVersion(executionContext);
executionContext.Output("##[endgroup]");
ArgUtil.NotNull(dockerVersion.ServerVersion, nameof(dockerVersion.ServerVersion));
ArgUtil.NotNull(dockerVersion.ClientVersion, nameof(dockerVersion.ClientVersion));
@@ -111,7 +114,7 @@ namespace GitHub.Runner.Worker
}
// Clean up containers left by previous runs
executionContext.Debug($"Delete stale containers from previous jobs");
executionContext.Output("##[group]Clean up resources from previous jobs");
var staleContainers = await _dockerManger.DockerPS(executionContext, $"--all --quiet --no-trunc --filter \"label={_dockerManger.DockerInstanceLabel}\"");
foreach (var staleContainer in staleContainers)
{
@@ -122,18 +125,25 @@ namespace GitHub.Runner.Worker
}
}
executionContext.Debug($"Delete stale container networks from previous jobs");
int networkPruneExitCode = await _dockerManger.DockerNetworkPrune(executionContext);
if (networkPruneExitCode != 0)
{
executionContext.Warning($"Delete stale container networks failed, docker network prune fail with exit code {networkPruneExitCode}");
}
executionContext.Output("##[endgroup]");
// Create local docker network for this job to avoid port conflict when multiple runners run on same machine.
// All containers within a job join the same network
executionContext.Output("##[group]Create local container network");
var containerNetwork = $"github_network_{Guid.NewGuid().ToString("N")}";
await CreateContainerNetworkAsync(executionContext, containerNetwork);
executionContext.JobContext.Container["network"] = new StringContextData(containerNetwork);
executionContext.Output("##[endgroup]");
if (Environment.GetEnvironmentVariable("K8S_POD_NAME") != null)
{
IOUtil.CopyDirectory(HostContext.GetDirectory(WellKnownDirectory.Externals), Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Work), "__externals_copy"), CancellationToken.None);
}
foreach (var container in containers)
{
@@ -141,10 +151,12 @@ namespace GitHub.Runner.Worker
await StartContainerAsync(executionContext, container);
}
executionContext.Output("##[group]Waiting for all services to be ready");
foreach (var container in containers.Where(c => !c.IsJobContainer))
{
await ContainerHealthcheck(executionContext, container);
}
executionContext.Output("##[endgroup]");
}
public async Task StopContainersAsync(IExecutionContext executionContext, object data)
@@ -173,6 +185,10 @@ namespace GitHub.Runner.Worker
Trace.Info($"Container name: {container.ContainerName}");
Trace.Info($"Container image: {container.ContainerImage}");
Trace.Info($"Container options: {container.ContainerCreateOptions}");
var groupName = container.IsJobContainer ? "Starting job container" : $"Starting {container.ContainerNetworkAlias} service container";
executionContext.Output($"##[group]{groupName}");
foreach (var port in container.UserPortMappings)
{
Trace.Info($"User provided port: {port.Value}");
@@ -225,7 +241,14 @@ namespace GitHub.Runner.Worker
#if OS_WINDOWS
container.MountVolumes.Add(new MountVolume(HostContext.GetDirectory(WellKnownDirectory.Externals), container.TranslateToContainerPath(HostContext.GetDirectory(WellKnownDirectory.Externals))));
#else
container.MountVolumes.Add(new MountVolume(HostContext.GetDirectory(WellKnownDirectory.Externals), container.TranslateToContainerPath(HostContext.GetDirectory(WellKnownDirectory.Externals)), true));
if (Environment.GetEnvironmentVariable("K8S_POD_NAME") != null)
{
container.MountVolumes.Add(new MountVolume(Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Work), "__externals_copy"), container.TranslateToContainerPath(HostContext.GetDirectory(WellKnownDirectory.Externals)), true));
}
else
{
container.MountVolumes.Add(new MountVolume(HostContext.GetDirectory(WellKnownDirectory.Externals), container.TranslateToContainerPath(HostContext.GetDirectory(WellKnownDirectory.Externals)), true));
}
#endif
container.MountVolumes.Add(new MountVolume(HostContext.GetDirectory(WellKnownDirectory.Temp), container.TranslateToContainerPath(HostContext.GetDirectory(WellKnownDirectory.Temp))));
container.MountVolumes.Add(new MountVolume(HostContext.GetDirectory(WellKnownDirectory.Actions), container.TranslateToContainerPath(HostContext.GetDirectory(WellKnownDirectory.Actions))));
@@ -304,6 +327,7 @@ namespace GitHub.Runner.Worker
container.ContainerRuntimePath = DockerUtil.ParsePathFromConfigEnv(containerEnv);
executionContext.JobContext.Container["id"] = new StringContextData(container.ContainerId);
}
executionContext.Output("##[endgroup]");
}
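
The container operations now fold their log output into collapsible sections by emitting `##[group]<name>` before a block of commands and `##[endgroup]` after it. A sketch of that pattern with a hypothetical output writer standing in for executionContext.Output:

    using System;

    static class LogGroupSketch
    {
        public static void RunGrouped(string groupName, Action body, Action<string> writeLine)
        {
            writeLine($"##[group]{groupName}");
            body();
            writeLine("##[endgroup]");
        }
    }
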
private async Task StopContainerAsync(IExecutionContext executionContext, ContainerInfo container)

View File

@@ -86,9 +86,9 @@ namespace GitHub.Runner.Worker
executionContext.Debug("Zipping diagnostic files.");
string buildNumber = executionContext.Variables.Build_Number ?? "UnknownBuildNumber";
string buildNumber = executionContext.Global.Variables.Build_Number ?? "UnknownBuildNumber";
string buildName = $"Build {buildNumber}";
string phaseName = executionContext.Variables.System_PhaseDisplayName ?? "UnknownPhaseName";
string phaseName = executionContext.Global.Variables.System_PhaseDisplayName ?? "UnknownPhaseName";
// zip the files
string diagnosticsZipFileName = $"{buildName}-{phaseName}.zip";

View File

@@ -6,6 +6,7 @@ using System.Globalization;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading;
using System.Threading.Tasks;
using System.Web;
@@ -43,39 +44,30 @@ namespace GitHub.Runner.Worker
string ResultCode { get; set; }
TaskResult? CommandResult { get; set; }
CancellationToken CancellationToken { get; }
List<ServiceEndpoint> Endpoints { get; }
GlobalContext Global { get; }
PlanFeatures Features { get; }
Variables Variables { get; }
Dictionary<string, string> IntraActionState { get; }
IDictionary<String, IDictionary<String, String>> JobDefaults { get; }
Dictionary<string, VariableValue> JobOutputs { get; }
IDictionary<String, String> EnvironmentVariables { get; }
IDictionary<String, ContextScope> Scopes { get; }
IList<String> FileTable { get; }
StepsContext StepsContext { get; }
DictionaryContextData ExpressionValues { get; }
IList<IFunctionInfo> ExpressionFunctions { get; }
List<string> PrependPath { get; }
ContainerInfo Container { get; set; }
List<ContainerInfo> ServiceContainers { get; }
JobContext JobContext { get; }
// Only job level ExecutionContext has JobSteps
Queue<IStep> JobSteps { get; }
List<IStep> JobSteps { get; }
// Only job level ExecutionContext has PostJobSteps
Stack<IStep> PostJobSteps { get; }
bool EchoOnActionCommand { get; set; }
ExecutionContext Root { get; }
// Initialize
void InitializeJob(Pipelines.AgentJobRequestMessage message, CancellationToken token);
void CancelToken();
IExecutionContext CreateChild(Guid recordId, string displayName, string refName, string scopeName, string contextName, Dictionary<string, string> intraActionState = null, int? recordOrder = null);
IExecutionContext CreateChild(Guid recordId, string displayName, string refName, string scopeName, string contextName, Dictionary<string, string> intraActionState = null, int? recordOrder = null, IPagingLogger logger = null, CancellationTokenSource cancellationTokenSource = null);
// logging
bool WriteDebug { get; }
long Write(string tag, string message);
void QueueAttachFile(string type, string name, string filePath);
@@ -104,11 +96,13 @@ namespace GitHub.Runner.Worker
// others
void ForceTaskComplete();
void RegisterPostJobStep(IStep step);
IStep CreateCompositeStep(string scopeName, IActionRunner step, DictionaryContextData inputsData, Dictionary<string, string> envData);
}
public sealed class ExecutionContext : RunnerService, IExecutionContext
{
private const int _maxIssueCount = 10;
private const int _throttlingDelayReportThreshold = 10 * 1000; // Don't report throttling with less than 10 seconds delay
private readonly TimelineRecord _record = new TimelineRecord();
private readonly Dictionary<Guid, TimelineRecord> _detailRecords = new Dictionary<Guid, TimelineRecord>();
@@ -139,24 +133,16 @@ namespace GitHub.Runner.Worker
public string ContextName { get; private set; }
public Task ForceCompleted => _forceCompleted.Task;
public CancellationToken CancellationToken => _cancellationTokenSource.Token;
public List<ServiceEndpoint> Endpoints { get; private set; }
public Variables Variables { get; private set; }
public Dictionary<string, string> IntraActionState { get; private set; }
public IDictionary<String, IDictionary<String, String>> JobDefaults { get; private set; }
public Dictionary<string, VariableValue> JobOutputs { get; private set; }
public IDictionary<String, String> EnvironmentVariables { get; private set; }
public IDictionary<String, ContextScope> Scopes { get; private set; }
public IList<String> FileTable { get; private set; }
public StepsContext StepsContext { get; private set; }
public DictionaryContextData ExpressionValues { get; } = new DictionaryContextData();
public IList<IFunctionInfo> ExpressionFunctions { get; } = new List<IFunctionInfo>();
public bool WriteDebug { get; private set; }
public List<string> PrependPath { get; private set; }
public ContainerInfo Container { get; set; }
public List<ContainerInfo> ServiceContainers { get; private set; }
// Shared pointer across job-level execution context and step-level execution contexts
public GlobalContext Global { get; private set; }
// Only job level ExecutionContext has JobSteps
public Queue<IStep> JobSteps { get; private set; }
public List<IStep> JobSteps { get; private set; }
// Only job level ExecutionContext has PostJobSteps
public Stack<IStep> PostJobSteps { get; private set; }
@@ -166,7 +152,6 @@ namespace GitHub.Runner.Worker
public bool EchoOnActionCommand { get; set; }
public TaskResult? Result
{
get
@@ -197,9 +182,7 @@ namespace GitHub.Runner.Worker
}
}
public PlanFeatures Features { get; private set; }
private ExecutionContext Root
public ExecutionContext Root
{
get
{
@@ -263,17 +246,44 @@ namespace GitHub.Runner.Worker
Root.PostJobSteps.Push(step);
}
public IExecutionContext CreateChild(Guid recordId, string displayName, string refName, string scopeName, string contextName, Dictionary<string, string> intraActionState = null, int? recordOrder = null)
/// <summary>
/// Helper function used in CompositeActionHandler::RunAsync to
/// create a child execution context for a nested step of the composite action and seed its inputs, steps, and env expression values.
/// </summary>
public IStep CreateCompositeStep(
string scopeName,
IActionRunner step,
DictionaryContextData inputsData,
Dictionary<string, string> envData)
{
step.ExecutionContext = Root.CreateChild(_record.Id, step.DisplayName, _record.Id.ToString("N"), scopeName, step.Action.ContextName, logger: _logger, cancellationTokenSource: CancellationTokenSource.CreateLinkedTokenSource(_cancellationTokenSource.Token));
step.ExecutionContext.ExpressionValues["inputs"] = inputsData;
step.ExecutionContext.ExpressionValues["steps"] = Global.StepsContext.GetScope(step.ExecutionContext.GetFullyQualifiedContextName());
// Add the composite action environment variables to each step.
#if OS_WINDOWS
var envContext = new DictionaryContextData();
#else
var envContext = new CaseSensitiveDictionaryContextData();
#endif
foreach (var pair in envData)
{
envContext[pair.Key] = new StringContextData(pair.Value ?? string.Empty);
}
step.ExecutionContext.ExpressionValues["env"] = envContext;
return step;
}
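
CreateCompositeStep seeds each nested step's expression values: `inputs` comes from the composite action's resolved inputs, `steps` points at the composite scope in the shared StepsContext, and `env` is rebuilt from the handler's environment dictionary (case-insensitive on Windows, case-sensitive elsewhere). A simplified sketch of the env-context construction, using a plain dictionary in place of the pipeline context types:

    using System;
    using System.Collections.Generic;

    static class CompositeEnvSketch
    {
        public static Dictionary<string, string> BuildEnvContext(
            IDictionary<string, string> envData, bool caseInsensitive)
        {
            var comparer = caseInsensitive ? StringComparer.OrdinalIgnoreCase : StringComparer.Ordinal;
            var envContext = new Dictionary<string, string>(comparer);
            foreach (var pair in envData)
            {
                // Null values are normalized to empty strings, as in the runner.
                envContext[pair.Key] = pair.Value ?? string.Empty;
            }
            return envContext;
        }
    }
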
public IExecutionContext CreateChild(Guid recordId, string displayName, string refName, string scopeName, string contextName, Dictionary<string, string> intraActionState = null, int? recordOrder = null, IPagingLogger logger = null, CancellationTokenSource cancellationTokenSource = null)
{
Trace.Entering();
var child = new ExecutionContext();
child.Initialize(HostContext);
child.Global = Global;
child.ScopeName = scopeName;
child.ContextName = contextName;
child.Features = Features;
child.Variables = Variables;
child.Endpoints = Endpoints;
if (intraActionState == null)
{
child.IntraActionState = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
@@ -282,11 +292,6 @@ namespace GitHub.Runner.Worker
{
child.IntraActionState = intraActionState;
}
child.EnvironmentVariables = EnvironmentVariables;
child.JobDefaults = JobDefaults;
child.Scopes = Scopes;
child.FileTable = FileTable;
child.StepsContext = StepsContext;
foreach (var pair in ExpressionValues)
{
child.ExpressionValues[pair.Key] = pair.Value;
@@ -295,12 +300,8 @@ namespace GitHub.Runner.Worker
{
child.ExpressionFunctions.Add(item);
}
child._cancellationTokenSource = new CancellationTokenSource();
child.WriteDebug = WriteDebug;
child._cancellationTokenSource = cancellationTokenSource ?? new CancellationTokenSource();
child._parentExecutionContext = this;
child.PrependPath = PrependPath;
child.Container = Container;
child.ServiceContainers = ServiceContainers;
child.EchoOnActionCommand = EchoOnActionCommand;
if (recordOrder != null)
@@ -311,9 +312,15 @@ namespace GitHub.Runner.Worker
{
child.InitializeTimelineRecord(_mainTimelineId, recordId, _record.Id, ExecutionContextType.Task, displayName, refName, ++_childTimelineRecordOrder);
}
child._logger = HostContext.CreateService<IPagingLogger>();
child._logger.Setup(_mainTimelineId, recordId);
if (logger != null)
{
child._logger = logger;
}
else
{
child._logger = HostContext.CreateService<IPagingLogger>();
child._logger.Setup(_mainTimelineId, recordId);
}
return child;
}
@@ -335,7 +342,7 @@ namespace GitHub.Runner.Worker
}
// report total delay caused by server throttling.
if (_totalThrottlingDelayInMilliseconds > 0)
if (_totalThrottlingDelayInMilliseconds > _throttlingDelayReportThreshold)
{
this.Warning($"The job has experienced {TimeSpan.FromMilliseconds(_totalThrottlingDelayInMilliseconds).TotalSeconds} seconds total delay caused by server throttling.");
}
@@ -363,14 +370,19 @@ namespace GitHub.Runner.Worker
}
}
_cancellationTokenSource?.Dispose();
if (Root != this)
{
// only dispose TokenSource for step level ExecutionContext
_cancellationTokenSource?.Dispose();
}
_logger.End();
// todo: Skip if generated context name. After M271-ish the server will never send an empty context name. Generated context names will start with "__"
if (!string.IsNullOrEmpty(ContextName))
{
StepsContext.SetOutcome(ScopeName, ContextName, (Outcome ?? Result ?? TaskResult.Succeeded).ToActionResult().ToString());
StepsContext.SetConclusion(ScopeName, ContextName, (Result ?? TaskResult.Succeeded).ToActionResult().ToString());
Global.StepsContext.SetOutcome(ScopeName, ContextName, (Outcome ?? Result ?? TaskResult.Succeeded).ToActionResult());
Global.StepsContext.SetConclusion(ScopeName, ContextName, (Result ?? TaskResult.Succeeded).ToActionResult());
}
return Result.Value;
@@ -429,7 +441,8 @@ namespace GitHub.Runner.Worker
{
ArgUtil.NotNullOrEmpty(name, nameof(name));
if (String.IsNullOrEmpty(ContextName))
// todo: Skip if generated context name. After M271-ish the server will never send an empty context name. Generated context names will start with "__"
if (string.IsNullOrEmpty(ContextName))
{
reference = null;
return;
@@ -437,7 +450,7 @@ namespace GitHub.Runner.Worker
// todo: restrict multiline?
StepsContext.SetOutput(ScopeName, ContextName, name, value, out reference);
Global.StepsContext.SetOutput(ScopeName, ContextName, name, value, out reference);
}
public void SetTimeout(TimeSpan? timeout)
@@ -571,42 +584,35 @@ namespace GitHub.Runner.Worker
_cancellationTokenSource = CancellationTokenSource.CreateLinkedTokenSource(token);
// Features
Features = PlanUtil.GetFeatures(message.Plan);
Global = new GlobalContext();
// Plan
Global.Plan = message.Plan;
Global.Features = PlanUtil.GetFeatures(message.Plan);
// Endpoints
Endpoints = message.Resources.Endpoints;
Global.Endpoints = message.Resources.Endpoints;
// Variables
Variables = new Variables(HostContext, message.Variables);
Global.Variables = new Variables(HostContext, message.Variables);
// Environment variables shared across all actions
EnvironmentVariables = new Dictionary<string, string>(VarUtil.EnvironmentVariableKeyComparer);
Global.EnvironmentVariables = new Dictionary<string, string>(VarUtil.EnvironmentVariableKeyComparer);
// Job defaults shared across all actions
JobDefaults = new Dictionary<string, IDictionary<string, string>>(StringComparer.OrdinalIgnoreCase);
Global.JobDefaults = new Dictionary<string, IDictionary<string, string>>(StringComparer.OrdinalIgnoreCase);
// Job Outputs
JobOutputs = new Dictionary<string, VariableValue>(StringComparer.OrdinalIgnoreCase);
// Service container info
ServiceContainers = new List<ContainerInfo>();
Global.ServiceContainers = new List<ContainerInfo>();
// Steps context (StepsRunner manages adding the scoped steps context)
StepsContext = new StepsContext();
// Scopes
Scopes = new Dictionary<String, ContextScope>(StringComparer.OrdinalIgnoreCase);
if (message.Scopes?.Count > 0)
{
foreach (var scope in message.Scopes)
{
Scopes[scope.Name] = scope;
}
}
Global.StepsContext = new StepsContext();
// File table
FileTable = new List<String>(message.FileTable ?? new string[0]);
Global.FileTable = new List<String>(message.FileTable ?? new string[0]);
// Expression values
if (message.ContextData?.Count > 0)
@@ -617,15 +623,15 @@ namespace GitHub.Runner.Worker
}
}
ExpressionValues["secrets"] = Variables.ToSecretsContext();
ExpressionValues["secrets"] = Global.Variables.ToSecretsContext();
ExpressionValues["runner"] = new RunnerContext();
ExpressionValues["job"] = new JobContext();
Trace.Info("Initialize GitHub context");
var githubAccessToken = new StringContextData(Variables.Get("system.github.token"));
var githubAccessToken = new StringContextData(Global.Variables.Get("system.github.token"));
var base64EncodedToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"x-access-token:{githubAccessToken}"));
HostContext.SecretMasker.AddValue(base64EncodedToken);
var githubJob = Variables.Get("system.github.job");
var githubJob = Global.Variables.Get("system.github.job");
var githubContext = new GitHubContext();
githubContext["token"] = githubAccessToken;
if (!string.IsNullOrEmpty(githubJob))
@@ -648,10 +654,10 @@ namespace GitHub.Runner.Worker
#endif
// Prepend Path
PrependPath = new List<string>();
Global.PrependPath = new List<string>();
// JobSteps for job ExecutionContext
JobSteps = new Queue<IStep>();
JobSteps = new List<IStep>();
// PostJobSteps for job ExecutionContext
PostJobSteps = new Stack<IStep>();
@@ -674,10 +680,10 @@ namespace GitHub.Runner.Worker
_logger.Setup(_mainTimelineId, _record.Id);
// Initialize 'echo on action command success' property, default to false, unless Step_Debug is set
EchoOnActionCommand = Variables.Step_Debug ?? false;
EchoOnActionCommand = Global.Variables.Step_Debug ?? false;
// Verbosity (from GitHub.Step_Debug).
WriteDebug = Variables.Step_Debug ?? false;
Global.WriteDebug = Global.Variables.Step_Debug ?? false;
// Hook up JobServerQueueThrottling event, we will log warning on server tarpit.
_jobServerQueue.JobServerQueueThrottling += JobServerQueueThrottling_EventReceived;
@@ -851,7 +857,8 @@ namespace GitHub.Runner.Worker
{
Interlocked.Add(ref _totalThrottlingDelayInMilliseconds, Convert.ToInt64(data.Delay.TotalMilliseconds));
if (!_throttlingReported)
if (!_throttlingReported &&
_totalThrottlingDelayInMilliseconds > _throttlingDelayReportThreshold)
{
this.Warning(string.Format("The job is currently being throttled by the server. You may experience delays in console line output, job status reporting, and action log uploads."));
@@ -877,6 +884,16 @@ namespace GitHub.Runner.Worker
// Otherwise individual overloads would need to be implemented (depending on the unit test).
public static class ExecutionContextExtension
{
public static string GetFullyQualifiedContextName(this IExecutionContext context)
{
if (!string.IsNullOrEmpty(context.ScopeName))
{
return $"{context.ScopeName}.{context.ContextName}";
}
return context.ContextName;
}
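
GetFullyQualifiedContextName joins the scope and context names with a dot when a scope is present, which is what nested composite steps use for GITHUB_ACTION and for their StepsContext scope. A sketch with illustrative values:

    static class ContextNameSketch
    {
        public static string FullyQualified(string scopeName, string contextName)
            => string.IsNullOrEmpty(scopeName) ? contextName : $"{scopeName}.{contextName}";

        // FullyQualified(null, "checkout")           -> "checkout"
        // FullyQualified("my_composite", "__step1")  -> "my_composite.__step1"
    }
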
public static void Error(this IExecutionContext context, Exception ex)
{
context.Error(ex.Message);
@@ -915,7 +932,7 @@ namespace GitHub.Runner.Worker
// Do not add a format string overload. See comment on ExecutionContext.Write().
public static void Debug(this IExecutionContext context, string message)
{
if (context.WriteDebug)
if (context.Global.WriteDebug)
{
var multilines = message?.Replace("\r\n", "\n")?.Split("\n");
if (multilines != null)
@@ -940,7 +957,7 @@ namespace GitHub.Runner.Worker
traceWriter = context.ToTemplateTraceWriter();
}
var schema = PipelineTemplateSchemaFactory.GetSchema();
return new PipelineTemplateEvaluator(traceWriter, schema, context.FileTable);
return new PipelineTemplateEvaluator(traceWriter, schema, context.Global.FileTable);
}
public static ObjectTemplating.ITraceWriter ToTemplateTraceWriter(this IExecutionContext context)

View File

@@ -12,6 +12,8 @@ namespace GitHub.Runner.Worker.Expressions
{
public sealed class HashFilesFunction : Function
{
private const int _hashFileTimeoutSeconds = 120;
protected sealed override Object EvaluateCore(
EvaluationContext context,
out ResultMemory resultMemory)
@@ -89,19 +91,29 @@ namespace GitHub.Runner.Worker.Expressions
}
env["patterns"] = string.Join(Environment.NewLine, patterns);
int exitCode = p.ExecuteAsync(workingDirectory: githubWorkspace,
fileName: node,
arguments: $"\"{hashFilesScript.Replace("\"", "\\\"")}\"",
environment: env,
requireExitCodeZero: false,
cancellationToken: new CancellationTokenSource(TimeSpan.FromSeconds(120)).Token).GetAwaiter().GetResult();
if (exitCode != 0)
using (var tokenSource = new CancellationTokenSource(TimeSpan.FromSeconds(_hashFileTimeoutSeconds)))
{
throw new InvalidOperationException($"hashFiles('{ExpressionUtility.StringEscape(string.Join(", ", patterns))}') failed. Fail to hash files under directory '{githubWorkspace}'");
}
try
{
int exitCode = p.ExecuteAsync(workingDirectory: githubWorkspace,
fileName: node,
arguments: $"\"{hashFilesScript.Replace("\"", "\\\"")}\"",
environment: env,
requireExitCodeZero: false,
cancellationToken: tokenSource.Token).GetAwaiter().GetResult();
return hashResult;
if (exitCode != 0)
{
throw new InvalidOperationException($"hashFiles('{ExpressionUtility.StringEscape(string.Join(", ", patterns))}') failed. Fail to hash files under directory '{githubWorkspace}'");
}
}
catch (OperationCanceledException) when (tokenSource.IsCancellationRequested)
{
throw new TimeoutException($"hashFiles('{ExpressionUtility.StringEscape(string.Join(", ", patterns))}') couldn't finish within {_hashFileTimeoutSeconds} seconds.");
}
return hashResult;
}
}
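
The hashFiles helper now bounds the node subprocess with a 120-second CancellationTokenSource and converts the resulting OperationCanceledException into a TimeoutException only when the timeout token itself fired. A generic sketch of that pattern, where runAsync is a hypothetical placeholder for the process invocation:

    using System;
    using System.Threading;
    using System.Threading.Tasks;

    static class TimeoutSketch
    {
        public static async Task<int> RunWithTimeoutAsync(
            Func<CancellationToken, Task<int>> runAsync, TimeSpan timeout)
        {
            using (var tokenSource = new CancellationTokenSource(timeout))
            {
                try
                {
                    return await runAsync(tokenSource.Token);
                }
                catch (OperationCanceledException) when (tokenSource.IsCancellationRequested)
                {
                    // The exception filter distinguishes "we timed out" from an external cancellation.
                    throw new TimeoutException($"Operation couldn't finish within {timeout.TotalSeconds} seconds.");
                }
            }
        }
    }
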
private sealed class HashFilesTrace : ITraceWriter

View File

@@ -10,10 +10,11 @@ namespace GitHub.Runner.Worker
{
"action",
"actor",
"api_url", // temp for GHES alpha release
"api_url",
"base_ref",
"event_name",
"event_path",
"graphql_url",
"head_ref",
"job",
"ref",
@@ -21,8 +22,8 @@ namespace GitHub.Runner.Worker
"repository_owner",
"run_id",
"run_number",
"server_url",
"sha",
"url", // temp for GHES alpha release
"workflow",
"workspace",
};

View File

@@ -0,0 +1,24 @@
using System;
using System.Collections.Generic;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Worker.Container;
namespace GitHub.Runner.Worker
{
public sealed class GlobalContext
{
public ContainerInfo Container { get; set; }
public List<ServiceEndpoint> Endpoints { get; set; }
public IDictionary<String, String> EnvironmentVariables { get; set; }
public PlanFeatures Features { get; set; }
public IList<String> FileTable { get; set; }
public IDictionary<String, IDictionary<String, String>> JobDefaults { get; set; }
public TaskOrchestrationPlanReference Plan { get; set; }
public List<string> PrependPath { get; set; }
public List<ContainerInfo> ServiceContainers { get; set; }
public StepsContext StepsContext { get; set; }
public Variables Variables { get; set; }
public bool WriteDebug { get; set; }
}
}
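
GlobalContext gathers the job-wide state (endpoints, variables, environment, file table, containers, steps context, and so on) into one object so CreateChild can hand every step-level ExecutionContext a pointer to the same instance instead of copying a dozen properties. A minimal sketch of that sharing, with trimmed-down stand-ins for the runner's types:

    using System.Collections.Generic;

    sealed class GlobalStateSketch
    {
        public Dictionary<string, string> EnvironmentVariables { get; } = new Dictionary<string, string>();
    }

    sealed class ContextSketch
    {
        public GlobalStateSketch Global { get; private set; }

        public static ContextSketch CreateRoot() => new ContextSketch { Global = new GlobalStateSketch() };

        // Children share the same Global instance, so mutations are visible to every step.
        public ContextSketch CreateChild() => new ContextSketch { Global = Global };
    }
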

View File

@@ -0,0 +1,275 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using Pipelines = GitHub.DistributedTask.Pipelines;
namespace GitHub.Runner.Worker.Handlers
{
[ServiceLocator(Default = typeof(CompositeActionHandler))]
public interface ICompositeActionHandler : IHandler
{
CompositeActionExecutionData Data { get; set; }
}
public sealed class CompositeActionHandler : Handler, ICompositeActionHandler
{
public CompositeActionExecutionData Data { get; set; }
public async Task RunAsync(ActionRunStage stage)
{
// Validate args.
Trace.Entering();
ArgUtil.NotNull(ExecutionContext, nameof(ExecutionContext));
ArgUtil.NotNull(Inputs, nameof(Inputs));
ArgUtil.NotNull(Data.Steps, nameof(Data.Steps));
var githubContext = ExecutionContext.ExpressionValues["github"] as GitHubContext;
ArgUtil.NotNull(githubContext, nameof(githubContext));
// Resolve action steps
var actionSteps = Data.Steps;
// Create Context Data to reuse for each composite action step
var inputsData = new DictionaryContextData();
foreach (var i in Inputs)
{
inputsData[i.Key] = new StringContextData(i.Value);
}
// Initialize Composite Steps List of Steps
var compositeSteps = new List<IStep>();
// Temporary hack until after M271-ish. After M271-ish the server will never send an empty
// context name. Generated context names start with "__"
var childScopeName = ExecutionContext.GetFullyQualifiedContextName();
if (string.IsNullOrEmpty(childScopeName))
{
childScopeName = $"__{Guid.NewGuid()}";
}
foreach (Pipelines.ActionStep actionStep in actionSteps)
{
var actionRunner = HostContext.CreateService<IActionRunner>();
actionRunner.Action = actionStep;
actionRunner.Stage = stage;
actionRunner.Condition = actionStep.Condition;
var step = ExecutionContext.CreateCompositeStep(childScopeName, actionRunner, inputsData, Environment);
compositeSteps.Add(step);
}
try
{
// This is where we run each step.
await RunStepsAsync(compositeSteps);
// Point the ExecutionContext at the composite scope's "steps" object so the outputs can be processed correctly
ExecutionContext.ExpressionValues["inputs"] = inputsData;
ExecutionContext.ExpressionValues["steps"] = ExecutionContext.Global.StepsContext.GetScope(ExecutionContext.GetFullyQualifiedContextName());
ProcessCompositeActionOutputs();
}
catch (Exception ex)
{
// Composite StepRunner should never throw exception out.
Trace.Error($"Caught exception from composite steps {nameof(CompositeActionHandler)}: {ex}");
ExecutionContext.Error(ex);
ExecutionContext.Result = TaskResult.Failed;
}
}
private void ProcessCompositeActionOutputs()
{
ArgUtil.NotNull(ExecutionContext, nameof(ExecutionContext));
// Evaluate the mapped outputs value
if (Data.Outputs != null)
{
// Evaluate the outputs in the steps context to easily retrieve the values
var actionManifestManager = HostContext.GetService<IActionManifestManager>();
// Format ExpressionValues to Dictionary<string, PipelineContextData>
var evaluateContext = new Dictionary<string, PipelineContextData>(StringComparer.OrdinalIgnoreCase);
foreach (var pair in ExecutionContext.ExpressionValues)
{
evaluateContext[pair.Key] = pair.Value;
}
// Get the evaluated composite outputs' values, keyed by output name
DictionaryContextData actionOutputs = actionManifestManager.EvaluateCompositeOutputs(ExecutionContext, Data.Outputs, evaluateContext);
// Set the outputs for the outputs object in the whole composite action
// Each pair is structured like this
// We ignore "description" for now
// {
// "the-output-name": {
// "description": "",
// "value": "the value"
// },
// ...
// }
foreach (var pair in actionOutputs)
{
var outputsName = pair.Key;
var outputsAttributes = pair.Value as DictionaryContextData;
outputsAttributes.TryGetValue("value", out var val);
var outputsValue = val as StringContextData;
// Set output in the whole composite scope.
if (!String.IsNullOrEmpty(outputsName) && !String.IsNullOrEmpty(outputsValue))
{
ExecutionContext.SetOutput(outputsName, outputsValue, out _);
}
}
}
}
private async Task RunStepsAsync(List<IStep> compositeSteps)
{
ArgUtil.NotNull(compositeSteps, nameof(compositeSteps));
// The parent StepsRunner of the whole Composite Action Step handles the cancellation stuff already.
foreach (IStep step in compositeSteps)
{
Trace.Info($"Processing composite step: DisplayName='{step.DisplayName}'");
step.ExecutionContext.ExpressionValues["steps"] = ExecutionContext.Global.StepsContext.GetScope(step.ExecutionContext.ScopeName);
// Populate env context for each step
Trace.Info("Initialize Env context for step");
#if OS_WINDOWS
var envContext = new DictionaryContextData();
#else
var envContext = new CaseSensitiveDictionaryContextData();
#endif
// Global env
foreach (var pair in ExecutionContext.Global.EnvironmentVariables)
{
envContext[pair.Key] = new StringContextData(pair.Value ?? string.Empty);
}
// The step's own env (seeded by CreateCompositeStep) overrides the global env
if (step.ExecutionContext.ExpressionValues.TryGetValue("env", out var envContextData))
{
#if OS_WINDOWS
var dict = envContextData as DictionaryContextData;
#else
var dict = envContextData as CaseSensitiveDictionaryContextData;
#endif
foreach (var pair in dict)
{
envContext[pair.Key] = pair.Value;
}
}
step.ExecutionContext.ExpressionValues["env"] = envContext;
var actionStep = step as IActionRunner;
// Set GITHUB_ACTION
step.ExecutionContext.SetGitHubContext("action", step.ExecutionContext.GetFullyQualifiedContextName());
try
{
// Evaluate and merge action's env block to env context
var templateEvaluator = step.ExecutionContext.ToPipelineTemplateEvaluator();
var actionEnvironment = templateEvaluator.EvaluateStepEnvironment(actionStep.Action.Environment, step.ExecutionContext.ExpressionValues, step.ExecutionContext.ExpressionFunctions, Common.Util.VarUtil.EnvironmentVariableKeyComparer);
foreach (var env in actionEnvironment)
{
envContext[env.Key] = new StringContextData(env.Value ?? string.Empty);
}
}
catch (Exception ex)
{
// fail the step since there is an evaluate error.
Trace.Info("Caught exception in Composite Steps Runner from expression for step.env");
// evaluateStepEnvFailed = true;
step.ExecutionContext.Error(ex);
step.ExecutionContext.Complete(TaskResult.Failed);
}
await RunStepAsync(step);
// Directly after the step, check if the step has failed or cancelled
// If so, return that to the output
if (step.ExecutionContext.Result == TaskResult.Failed || step.ExecutionContext.Result == TaskResult.Canceled)
{
ExecutionContext.Result = step.ExecutionContext.Result;
break;
}
// TODO: Add compat for other types of steps.
}
// Completion Status handled by StepsRunner for the whole Composite Action Step
}
private async Task RunStepAsync(IStep step)
{
// Try to evaluate the display name
if (step is IActionRunner actionRunner && actionRunner.Stage == ActionRunStage.Main)
{
actionRunner.TryEvaluateDisplayName(step.ExecutionContext.ExpressionValues, step.ExecutionContext);
}
// Start the step.
Trace.Info("Starting the step.");
step.ExecutionContext.Debug($"Starting: {step.DisplayName}");
// TODO: Fix for Step Level Timeout Attributes for an individual Composite Run Step
// For now, we are not going to support this for an individual composite run step
var templateEvaluator = step.ExecutionContext.ToPipelineTemplateEvaluator();
await Common.Util.EncodingUtil.SetEncoding(HostContext, Trace, step.ExecutionContext.CancellationToken);
try
{
await step.RunAsync();
}
catch (OperationCanceledException ex)
{
if (step.ExecutionContext.CancellationToken.IsCancellationRequested &&
!ExecutionContext.Root.CancellationToken.IsCancellationRequested)
{
Trace.Error($"Caught timeout exception from step: {ex.Message}");
step.ExecutionContext.Error("The action has timed out.");
step.ExecutionContext.Result = TaskResult.Failed;
}
else
{
Trace.Error($"Caught cancellation exception from step: {ex}");
step.ExecutionContext.Error(ex);
step.ExecutionContext.Result = TaskResult.Canceled;
}
}
catch (Exception ex)
{
// Log the error and fail the step.
Trace.Error($"Caught exception from step: {ex}");
step.ExecutionContext.Error(ex);
step.ExecutionContext.Result = TaskResult.Failed;
}
// Merge execution context result with command result
if (step.ExecutionContext.CommandResult != null)
{
step.ExecutionContext.Result = Common.Util.TaskResultUtil.MergeTaskResults(step.ExecutionContext.Result, step.ExecutionContext.CommandResult.Value);
}
Trace.Info($"Step result: {step.ExecutionContext.Result}");
// Complete the step context.
step.ExecutionContext.Debug($"Finishing: {step.DisplayName}");
}
}
}
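
RunStepsAsync runs the nested composite steps sequentially and stops at the first failure or cancellation, propagating that result to the composite step as a whole. A stripped-down sketch of that control flow (StepSketch and ResultSketch are illustrative stand-ins, not the runner's types):

    using System.Collections.Generic;
    using System.Threading.Tasks;

    enum ResultSketch { Succeeded, Failed, Canceled }

    sealed class StepSketch
    {
        public ResultSketch Result { get; set; } = ResultSketch.Succeeded;
        public Task RunAsync() => Task.CompletedTask;
    }

    static class CompositeRunSketch
    {
        public static async Task<ResultSketch> RunStepsAsync(List<StepSketch> steps)
        {
            foreach (var step in steps)
            {
                await step.RunAsync();

                // Fail fast: a failed or canceled nested step ends the composite action.
                if (step.Result == ResultSketch.Failed || step.Result == ResultSketch.Canceled)
                {
                    return step.Result;
                }
            }
            return ResultSketch.Succeeded;
        }
    }
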

View File

@@ -49,10 +49,18 @@ namespace GitHub.Runner.Worker.Handlers
// ensure docker file exist
var dockerFile = Path.Combine(ActionDirectory, Data.Image);
ArgUtil.File(dockerFile, nameof(Data.Image));
ExecutionContext.Output($"Dockerfile for action: '{dockerFile}'.");
ExecutionContext.Output($"##[group]Building docker image");
ExecutionContext.Output($"Dockerfile for action: '{dockerFile}'.");
var imageName = $"{dockerManger.DockerInstanceLabel}:{ExecutionContext.Id.ToString("N")}";
var buildExitCode = await dockerManger.DockerBuild(ExecutionContext, ExecutionContext.GetGitHubContext("workspace"), Directory.GetParent(dockerFile).FullName, imageName);
var buildExitCode = await dockerManger.DockerBuild(
ExecutionContext,
ExecutionContext.GetGitHubContext("workspace"),
dockerFile,
Directory.GetParent(dockerFile).FullName,
imageName);
ExecutionContext.Output("##[endgroup]");
if (buildExitCode != 0)
{
throw new InvalidOperationException($"Docker build failed with exit code {buildExitCode}");
@@ -180,7 +188,7 @@ namespace GitHub.Runner.Worker.Handlers
}
// Add Actions Runtime server info
var systemConnection = ExecutionContext.Endpoints.Single(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
var systemConnection = ExecutionContext.Global.Endpoints.Single(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
Environment["ACTIONS_RUNTIME_URL"] = systemConnection.Url.AbsoluteUri;
Environment["ACTIONS_RUNTIME_TOKEN"] = systemConnection.Authorization.Parameters[EndpointAuthorizationParameters.AccessToken];
if (systemConnection.Data.TryGetValue("CacheServerUrl", out var cacheUrl) && !string.IsNullOrEmpty(cacheUrl))

View File

@@ -148,14 +148,14 @@ namespace GitHub.Runner.Worker.Handlers
{
// Validate args.
Trace.Entering();
ArgUtil.NotNull(ExecutionContext.PrependPath, nameof(ExecutionContext.PrependPath));
if (ExecutionContext.PrependPath.Count == 0)
ArgUtil.NotNull(ExecutionContext.Global.PrependPath, nameof(ExecutionContext.Global.PrependPath));
if (ExecutionContext.Global.PrependPath.Count == 0)
{
return;
}
// Prepend path.
string prepend = string.Join(Path.PathSeparator.ToString(), ExecutionContext.PrependPath.Reverse<string>());
string prepend = string.Join(Path.PathSeparator.ToString(), ExecutionContext.Global.PrependPath.Reverse<string>());
var containerStepHost = StepHost as ContainerStepHost;
if (containerStepHost != null)
{

View File

@@ -66,6 +66,11 @@ namespace GitHub.Runner.Worker.Handlers
handler = HostContext.CreateService<IRunnerPluginHandler>();
(handler as IRunnerPluginHandler).Data = data as PluginActionExecutionData;
}
else if (data.ExecutionType == ActionExecutionType.Composite)
{
handler = HostContext.CreateService<ICompositeActionHandler>();
(handler as ICompositeActionHandler).Data = data as CompositeActionExecutionData;
}
else
{
// This should never happen.

View File

@@ -46,7 +46,7 @@ namespace GitHub.Runner.Worker.Handlers
}
// Add Actions Runtime server info
var systemConnection = ExecutionContext.Endpoints.Single(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
var systemConnection = ExecutionContext.Global.Endpoints.Single(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
Environment["ACTIONS_RUNTIME_URL"] = systemConnection.Url.AbsoluteUri;
Environment["ACTIONS_RUNTIME_TOKEN"] = systemConnection.Authorization.Parameters[EndpointAuthorizationParameters.AccessToken];
if (systemConnection.Data.TryGetValue("CacheServerUrl", out var cacheUrl) && !string.IsNullOrEmpty(cacheUrl))
@@ -82,6 +82,10 @@ namespace GitHub.Runner.Worker.Handlers
var nodeRuntimeVersion = await StepHost.DetermineNodeRuntimeVersion(ExecutionContext);
string file = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Externals), nodeRuntimeVersion, "bin", $"node{IOUtil.ExeExtension}");
if (System.Environment.GetEnvironmentVariable("K8S_POD_NAME") != null)
{
file = Path.Combine(Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Work), "__externals_copy"), nodeRuntimeVersion, "bin", $"node{IOUtil.ExeExtension}");
}
// Format the arguments passed to node.
// 1) Wrap the script file path in double quotes.
@@ -113,7 +117,7 @@ namespace GitHub.Runner.Worker.Handlers
requireExitCodeZero: false,
outputEncoding: outputEncoding,
killProcessOnCancel: false,
inheritConsoleHandler: !ExecutionContext.Variables.Retain_Default_Encoding,
inheritConsoleHandler: !ExecutionContext.Global.Variables.Retain_Default_Encoding,
cancellationToken: ExecutionContext.CancellationToken);
// Wait for either the node exit or force finish through ##vso command

View File

@@ -31,7 +31,7 @@ namespace GitHub.Runner.Worker.Handlers
{
_executionContext = executionContext;
_commandManager = commandManager;
_container = container ?? executionContext.Container;
_container = container ?? executionContext.Global.Container;
// Recursion failsafe (test override)
var failsafeString = Environment.GetEnvironmentVariable("RUNNER_TEST_GET_REPOSITORY_PATH_FAILSAFE");
@@ -41,7 +41,7 @@ namespace GitHub.Runner.Worker.Handlers
}
// Determine the timeout
var timeoutStr = _executionContext.Variables.Get(_timeoutKey);
var timeoutStr = _executionContext.Global.Variables.Get(_timeoutKey);
if (string.IsNullOrEmpty(timeoutStr) ||
!TimeSpan.TryParse(timeoutStr, CultureInfo.InvariantCulture, out _timeout) ||
_timeout <= TimeSpan.Zero)
@@ -352,15 +352,24 @@ namespace GitHub.Runner.Worker.Handlers
if (File.Exists(gitConfigPath))
{
// Check if the config contains the workflow repository url
var qualifiedRepository = _executionContext.GetGitHubContext("repository");
var configMatch = $"url = https://github.com/{qualifiedRepository}";
var serverUrl = _executionContext.GetGitHubContext("server_url");
serverUrl = !string.IsNullOrEmpty(serverUrl) ? serverUrl : "https://github.com";
var host = new Uri(serverUrl, UriKind.Absolute).Host;
var nameWithOwner = _executionContext.GetGitHubContext("repository");
var patterns = new[] {
$"url = {serverUrl}/{nameWithOwner}",
$"url = git@{host}:{nameWithOwner}.git",
};
var content = File.ReadAllText(gitConfigPath);
foreach (var line in content.Split("\n").Select(x => x.Trim()))
{
if (String.Equals(line, configMatch, StringComparison.OrdinalIgnoreCase))
foreach (var pattern in patterns)
{
repositoryPath = directoryPath;
break;
if (String.Equals(line, pattern, StringComparison.OrdinalIgnoreCase))
{
repositoryPath = directoryPath;
break;
}
}
}
}
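
The self-hosted path detection now derives its match patterns from the github.server_url context (falling back to https://github.com) and accepts both the HTTPS and the SSH form of the workflow repository's remote URL. A small sketch of building those patterns:

    using System;

    static class RepoUrlPatternsSketch
    {
        public static string[] Build(string serverUrl, string nameWithOwner)
        {
            serverUrl = string.IsNullOrEmpty(serverUrl) ? "https://github.com" : serverUrl;
            var host = new Uri(serverUrl, UriKind.Absolute).Host;

            return new[]
            {
                $"url = {serverUrl}/{nameWithOwner}",      // e.g. url = https://github.com/owner/repo
                $"url = git@{host}:{nameWithOwner}.git",   // e.g. url = git@github.com:owner/repo.git
            };
        }
    }
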

View File

@@ -57,13 +57,13 @@ namespace GitHub.Runner.Worker.Handlers
string shellCommand;
string shellCommandPath = null;
bool validateShellOnHost = !(StepHost is ContainerStepHost);
string prependPath = string.Join(Path.PathSeparator.ToString(), ExecutionContext.PrependPath.Reverse<string>());
string prependPath = string.Join(Path.PathSeparator.ToString(), ExecutionContext.Global.PrependPath.Reverse<string>());
string shell = null;
if (!Inputs.TryGetValue("shell", out shell) || string.IsNullOrEmpty(shell))
{
// TODO: figure out how defaults interact with template later
// for now, we won't check job.defaults if we are inside a template.
if (string.IsNullOrEmpty(ExecutionContext.ScopeName) && ExecutionContext.JobDefaults.TryGetValue("run", out var runDefaults))
if (string.IsNullOrEmpty(ExecutionContext.ScopeName) && ExecutionContext.Global.JobDefaults.TryGetValue("run", out var runDefaults))
{
runDefaults.TryGetValue("shell", out shell);
}
@@ -153,7 +153,7 @@ namespace GitHub.Runner.Worker.Handlers
{
// TODO: figure out how defaults interact with template later
// for now, we won't check job.defaults if we are inside a template.
if (string.IsNullOrEmpty(ExecutionContext.ScopeName) && ExecutionContext.JobDefaults.TryGetValue("run", out var runDefaults))
if (string.IsNullOrEmpty(ExecutionContext.ScopeName) && ExecutionContext.Global.JobDefaults.TryGetValue("run", out var runDefaults))
{
if (runDefaults.TryGetValue("working-directory", out workingDirectory))
{
@@ -169,7 +169,7 @@ namespace GitHub.Runner.Worker.Handlers
{
// TODO: figure out how defaults interact with template later
// for now, we won't check job.defaults if we are inside a template.
if (string.IsNullOrEmpty(ExecutionContext.ScopeName) && ExecutionContext.JobDefaults.TryGetValue("run", out var runDefaults))
if (string.IsNullOrEmpty(ExecutionContext.ScopeName) && ExecutionContext.Global.JobDefaults.TryGetValue("run", out var runDefaults))
{
if (runDefaults.TryGetValue("shell", out shell))
{
@@ -180,7 +180,7 @@ namespace GitHub.Runner.Worker.Handlers
var isContainerStepHost = StepHost is ContainerStepHost;
string prependPath = string.Join(Path.PathSeparator.ToString(), ExecutionContext.PrependPath.Reverse<string>());
string prependPath = string.Join(Path.PathSeparator.ToString(), ExecutionContext.Global.PrependPath.Reverse<string>());
string commandPath, argFormat, shellCommand;
// Set up default command and arguments
if (string.IsNullOrEmpty(shell))
@@ -232,7 +232,7 @@ namespace GitHub.Runner.Worker.Handlers
#if OS_WINDOWS
// Normalize Windows line endings
contents = contents.Replace("\r\n", "\n").Replace("\n", "\r\n");
var encoding = ExecutionContext.Variables.Retain_Default_Encoding && Console.InputEncoding.CodePage != 65001
var encoding = ExecutionContext.Global.Variables.Retain_Default_Encoding && Console.InputEncoding.CodePage != 65001
? Console.InputEncoding
: new UTF8Encoding(false);
#else
@@ -285,7 +285,7 @@ namespace GitHub.Runner.Worker.Handlers
requireExitCodeZero: false,
outputEncoding: null,
killProcessOnCancel: false,
inheritConsoleHandler: !ExecutionContext.Variables.Retain_Default_Encoding,
inheritConsoleHandler: !ExecutionContext.Global.Variables.Retain_Default_Encoding,
cancellationToken: ExecutionContext.CancellationToken);
// Error

View File

@@ -149,14 +149,14 @@ namespace GitHub.Runner.Worker.Handlers
throw new NotSupportedException(msg);
}
nodeExternal = "node12_alpine";
executionContext.Output($"Container distribution is alpine. Running JavaScript Action with external tool: {nodeExternal}");
executionContext.Debug($"Container distribution is alpine. Running JavaScript Action with external tool: {nodeExternal}");
return nodeExternal;
}
}
}
// Optimistically use the default
nodeExternal = "node12";
executionContext.Output($"Running JavaScript Action with default external tool: {nodeExternal}");
executionContext.Debug($"Running JavaScript Action with default external tool: {nodeExternal}");
return nodeExternal;
}

View File

@@ -21,7 +21,7 @@ namespace GitHub.Runner.Worker
}
set
{
this["status"] = new StringContextData(value.ToString());
this["status"] = new StringContextData(value.ToString().ToLowerInvariant());
}
}

View File

@@ -64,6 +64,20 @@ namespace GitHub.Runner.Worker
context.Debug($"Starting: Set up job");
context.Output($"Current runner version: '{BuildConstants.RunnerPackage.Version}'");
var setting = HostContext.GetService<IConfigurationStore>().GetSettings();
var credFile = HostContext.GetConfigFile(WellKnownConfigFile.Credentials);
if (File.Exists(credFile))
{
var credData = IOUtil.LoadObject<CredentialData>(credFile);
if (credData != null &&
credData.Data.TryGetValue("clientId", out var clientId))
{
// print out runner name and machine name for self-hosted runners
context.Output($"Runner name: '{setting.AgentName}'");
context.Output($"Machine name: '{Environment.MachineName}'");
}
}
var setupInfoFile = HostContext.GetConfigFile(WellKnownConfigFile.SetupInfo);
if (File.Exists(setupInfoFile))
{
@@ -131,12 +145,13 @@ namespace GitHub.Runner.Worker
// Temporary hack for GHES alpha
var configurationStore = HostContext.GetService<IConfigurationStore>();
var runnerSettings = configurationStore.GetSettings();
if (!runnerSettings.IsHostedServer && !string.IsNullOrEmpty(runnerSettings.GitHubUrl))
if (string.IsNullOrEmpty(context.GetGitHubContext("server_url")) && !runnerSettings.IsHostedServer && !string.IsNullOrEmpty(runnerSettings.GitHubUrl))
{
var url = new Uri(runnerSettings.GitHubUrl);
var portInfo = url.IsDefaultPort ? string.Empty : $":{url.Port.ToString(CultureInfo.InvariantCulture)}";
context.SetGitHubContext("url", $"{url.Scheme}://{url.Host}{portInfo}");
context.SetGitHubContext("server_url", $"{url.Scheme}://{url.Host}{portInfo}");
context.SetGitHubContext("api_url", $"{url.Scheme}://{url.Host}{portInfo}/api/v3");
context.SetGitHubContext("graphql_url", $"{url.Scheme}://{url.Host}{portInfo}/api/graphql");
}
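
For GHES, when the server did not supply github.server_url, the runner derives the server, API, and GraphQL URLs from its configured GitHub URL, preserving any non-default port. A sketch of that derivation (return values are illustrative):

    using System;
    using System.Globalization;

    static class GhesUrlsSketch
    {
        public static (string ServerUrl, string ApiUrl, string GraphQlUrl) Derive(string configuredGitHubUrl)
        {
            var url = new Uri(configuredGitHubUrl);
            var portInfo = url.IsDefaultPort
                ? string.Empty
                : $":{url.Port.ToString(CultureInfo.InvariantCulture)}";
            var server = $"{url.Scheme}://{url.Host}{portInfo}";

            // e.g. https://ghes.example.com -> API at /api/v3, GraphQL at /api/graphql
            return (server, $"{server}/api/v3", $"{server}/api/graphql");
        }
    }
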
// Evaluate the job-level environment variables
@@ -147,7 +162,7 @@ namespace GitHub.Runner.Worker
var environmentVariables = templateEvaluator.EvaluateStepEnvironment(token, jobContext.ExpressionValues, jobContext.ExpressionFunctions, VarUtil.EnvironmentVariableKeyComparer);
foreach (var pair in environmentVariables)
{
context.EnvironmentVariables[pair.Key] = pair.Value ?? string.Empty;
context.Global.EnvironmentVariables[pair.Key] = pair.Value ?? string.Empty;
context.SetEnvContext(pair.Key, pair.Value ?? string.Empty);
}
}
@@ -157,7 +172,7 @@ namespace GitHub.Runner.Worker
var container = templateEvaluator.EvaluateJobContainer(message.JobContainer, jobContext.ExpressionValues, jobContext.ExpressionFunctions);
if (container != null)
{
jobContext.Container = new Container.ContainerInfo(HostContext, container);
jobContext.Global.Container = new Container.ContainerInfo(HostContext, container);
}
// Evaluate the job service containers
@@ -169,7 +184,7 @@ namespace GitHub.Runner.Worker
{
var networkAlias = pair.Key;
var serviceContainer = pair.Value;
jobContext.ServiceContainers.Add(new Container.ContainerInfo(HostContext, serviceContainer, false, networkAlias));
jobContext.Global.ServiceContainers.Add(new Container.ContainerInfo(HostContext, serviceContainer, false, networkAlias));
}
}
@@ -180,14 +195,14 @@ namespace GitHub.Runner.Worker
var defaults = token.AssertMapping("defaults");
if (defaults.Any(x => string.Equals(x.Key.AssertString("defaults key").Value, "run", StringComparison.OrdinalIgnoreCase)))
{
context.JobDefaults["run"] = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
context.Global.JobDefaults["run"] = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
var defaultsRun = defaults.First(x => string.Equals(x.Key.AssertString("defaults key").Value, "run", StringComparison.OrdinalIgnoreCase));
var jobDefaults = templateEvaluator.EvaluateJobDefaultsRun(defaultsRun.Value, jobContext.ExpressionValues, jobContext.ExpressionFunctions);
foreach (var pair in jobDefaults)
{
if (!string.IsNullOrEmpty(pair.Value))
{
context.JobDefaults["run"][pair.Key] = pair.Value;
context.Global.JobDefaults["run"][pair.Key] = pair.Value;
}
}
}
@@ -201,15 +216,15 @@ namespace GitHub.Runner.Worker
preJobSteps.AddRange(prepareResult.ContainerSetupSteps);
// Add start-container steps, record and stop-container steps
if (jobContext.Container != null || jobContext.ServiceContainers.Count > 0)
if (jobContext.Global.Container != null || jobContext.Global.ServiceContainers.Count > 0)
{
var containerProvider = HostContext.GetService<IContainerOperationProvider>();
var containers = new List<Container.ContainerInfo>();
if (jobContext.Container != null)
if (jobContext.Global.Container != null)
{
containers.Add(jobContext.Container);
containers.Add(jobContext.Global.Container);
}
containers.AddRange(jobContext.ServiceContainers);
containers.AddRange(jobContext.Global.ServiceContainers);
preJobSteps.Add(new JobExtensionRunner(runAsync: containerProvider.StartContainersAsync,
condition: $"{PipelineTemplateConstants.Success}()",
@@ -281,7 +296,7 @@ namespace GitHub.Runner.Worker
{
ArgUtil.NotNull(actionStep, step.DisplayName);
intraActionStates.TryGetValue(actionStep.Action.Id, out var intraActionState);
actionStep.ExecutionContext = jobContext.CreateChild(actionStep.Action.Id, actionStep.DisplayName, actionStep.Action.Name, actionStep.Action.ScopeName, actionStep.Action.ContextName, intraActionState);
actionStep.ExecutionContext = jobContext.CreateChild(actionStep.Action.Id, actionStep.DisplayName, actionStep.Action.Name, null, actionStep.Action.ContextName, intraActionState);
}
}
@@ -290,7 +305,7 @@ namespace GitHub.Runner.Worker
steps.AddRange(jobSteps);
// Prepare for orphan process cleanup
_processCleanup = jobContext.Variables.GetBoolean("process.clean") ?? true;
_processCleanup = jobContext.Global.Variables.GetBoolean("process.clean") ?? true;
if (_processCleanup)
{
// Set the RUNNER_TRACKING_ID env variable.
@@ -361,13 +376,13 @@ namespace GitHub.Runner.Worker
var envContext = new CaseSensitiveDictionaryContextData();
#endif
context.ExpressionValues["env"] = envContext;
foreach (var pair in context.EnvironmentVariables)
foreach (var pair in context.Global.EnvironmentVariables)
{
envContext[pair.Key] = new StringContextData(pair.Value ?? string.Empty);
}
Trace.Info("Initialize steps context for evaluating job outputs");
context.ExpressionValues["steps"] = context.StepsContext.GetScope(context.ScopeName);
context.ExpressionValues["steps"] = context.Global.StepsContext.GetScope(context.ScopeName);
var templateEvaluator = context.ToPipelineTemplateEvaluator();
var outputs = templateEvaluator.EvaluateJobOutput(message.JobOutputs, context.ExpressionValues, context.ExpressionFunctions);
@@ -398,7 +413,7 @@ namespace GitHub.Runner.Worker
}
}
if (context.Variables.GetBoolean(Constants.Variables.Actions.RunnerDebug) ?? false)
if (context.Global.Variables.GetBoolean(Constants.Variables.Actions.RunnerDebug) ?? false)
{
Trace.Info("Support log upload starting.");
context.Output("Uploading runner diagnostic logs");

View File

@@ -5,21 +5,13 @@ using GitHub.Services.Common;
using GitHub.Services.WebApi;
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using System.Net.Http;
using System.Text;
using System.IO.Compression;
using System.Diagnostics;
using Newtonsoft.Json.Linq;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.ObjectTemplating;
namespace GitHub.Runner.Worker
{
@@ -107,7 +99,7 @@ namespace GitHub.Runner.Worker
return await CompleteJobAsync(jobServer, jobContext, message, TaskResult.Failed);
}
if (jobContext.WriteDebug)
if (jobContext.Global.WriteDebug)
{
jobContext.SetRunnerContext("debug", "1");
}
@@ -122,13 +114,6 @@ namespace GitHub.Runner.Worker
_tempDirectoryManager = HostContext.GetService<ITempDirectoryManager>();
_tempDirectoryManager.InitializeTempDirectory(jobContext);
// // Expand container properties
// jobContext.Container?.ExpandProperties(jobContext.Variables);
// foreach (var sidecar in jobContext.SidecarContainers)
// {
// sidecar.ExpandProperties(jobContext.Variables);
// }
// Get the job extension.
Trace.Info("Getting job extension.");
IJobExtension jobExtension = HostContext.CreateService<IJobExtension>();
@@ -167,7 +152,7 @@ namespace GitHub.Runner.Worker
{
foreach (var step in jobSteps)
{
jobContext.JobSteps.Enqueue(step);
jobContext.JobSteps.Add(step);
}
await stepsRunner.RunAsync(jobContext);
@@ -224,7 +209,7 @@ namespace GitHub.Runner.Worker
// Clean TEMP after the job server queue finishes processing, since a pending file upload might still be using the TEMP dir.
_tempDirectoryManager?.CleanupTempDirectory();
if (!jobContext.Features.HasFlag(PlanFeatures.JobCompletedPlanEvent))
if (!jobContext.Global.Features.HasFlag(PlanFeatures.JobCompletedPlanEvent))
{
Trace.Info($"Skip raise job completed event call from worker because Plan version is {message.Plan.Version}");
return result;
@@ -254,6 +239,12 @@ namespace GitHub.Runner.Worker
Trace.Error(ex);
return TaskResult.Failed;
}
catch (TaskOrchestrationPlanTerminatedException ex)
{
Trace.Error($"TaskOrchestrationPlanTerminatedException received, while attempting to raise JobCompletedEvent for job {message.JobId}.");
Trace.Error(ex);
return TaskResult.Failed;
}
catch (Exception ex)
{
Trace.Error($"Catch exception while attempting to raise JobCompletedEvent for job {message.JobId}, job request {message.RequestId}.");

View File

@@ -100,12 +100,12 @@ namespace GitHub.Runner.Worker
RunnerActionPluginExecutionContext pluginContext = new RunnerActionPluginExecutionContext
{
Inputs = inputs,
Endpoints = context.Endpoints,
Endpoints = context.Global.Endpoints,
Context = context.ExpressionValues
};
// variables
foreach (var variable in context.Variables.AllVariables)
foreach (var variable in context.Global.Variables.AllVariables)
{
pluginContext.Variables[variable.Name] = new VariableValue(variable.Value, variable.Secret);
}

View File

@@ -59,19 +59,19 @@ namespace GitHub.Runner.Worker
public void SetConclusion(
string scopeName,
string stepName,
string conclusion)
ActionResult conclusion)
{
var step = GetStep(scopeName, stepName);
step["conclusion"] = new StringContextData(conclusion);
step["conclusion"] = new StringContextData(conclusion.ToString().ToLowerInvariant());
}
public void SetOutcome(
string scopeName,
string stepName,
string outcome)
ActionResult outcome)
{
var step = GetStep(scopeName, stepName);
step["outcome"] = new StringContextData(outcome);
step["outcome"] = new StringContextData(outcome.ToString().ToLowerInvariant());
}
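With this change, SetConclusion and SetOutcome take the ActionResult enum and store its lower-cased name, so workflow expressions compare against strings such as 'success' or 'failure'. A minimal usage sketch, with the assumptions noted in the comments:

```csharp
// Hypothetical usage of the updated StepsContext API; assumes StepsContext has a
// parameterless constructor, that a null scope means the root scope, and that
// ActionResult includes Success and Failure. Only the lower-casing shown above matters here.
var stepsContext = new StepsContext();
stepsContext.SetOutcome(scopeName: null, stepName: "build", outcome: ActionResult.Failure);
stepsContext.SetConclusion(scopeName: null, stepName: "build", conclusion: ActionResult.Success); // e.g. continue-on-error: true

// Expressions then see lower-cased strings:
//   steps.build.outcome    == 'failure'
//   steps.build.conclusion == 'success'
```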
private DictionaryContextData GetStep(string scopeName, string stepName)

View File

@@ -59,18 +59,19 @@ namespace GitHub.Runner.Worker
checkPostJobActions = true;
while (jobContext.PostJobSteps.TryPop(out var postStep))
{
jobContext.JobSteps.Enqueue(postStep);
jobContext.JobSteps.Add(postStep);
}
continue;
}
var step = jobContext.JobSteps.Dequeue();
var nextStep = jobContext.JobSteps.Count > 0 ? jobContext.JobSteps.Peek() : null;
var step = jobContext.JobSteps[0];
jobContext.JobSteps.RemoveAt(0);
Trace.Info($"Processing step: DisplayName='{step.DisplayName}'");
ArgUtil.NotNull(step.ExecutionContext, nameof(step.ExecutionContext));
ArgUtil.NotNull(step.ExecutionContext.Variables, nameof(step.ExecutionContext.Variables));
ArgUtil.NotNull(step.ExecutionContext.Global, nameof(step.ExecutionContext.Global));
ArgUtil.NotNull(step.ExecutionContext.Global.Variables, nameof(step.ExecutionContext.Global.Variables));
// Start
step.ExecutionContext.Start();
@@ -82,27 +83,41 @@ namespace GitHub.Runner.Worker
step.ExecutionContext.ExpressionFunctions.Add(new FunctionInfo<SuccessFunction>(PipelineTemplateConstants.Success, 0, 0));
step.ExecutionContext.ExpressionFunctions.Add(new FunctionInfo<HashFilesFunction>(PipelineTemplateConstants.HashFiles, 1, byte.MaxValue));
// Initialize scope
if (InitializeScope(step, scopeInputs))
{
// Populate env context for each step
Trace.Info("Initialize Env context for step");
step.ExecutionContext.ExpressionValues["steps"] = step.ExecutionContext.Global.StepsContext.GetScope(step.ExecutionContext.ScopeName);
// Populate env context for each step
Trace.Info("Initialize Env context for step");
#if OS_WINDOWS
var envContext = new DictionaryContextData();
var envContext = new DictionaryContextData();
#else
var envContext = new CaseSensitiveDictionaryContextData();
var envContext = new CaseSensitiveDictionaryContextData();
#endif
step.ExecutionContext.ExpressionValues["env"] = envContext;
foreach (var pair in step.ExecutionContext.EnvironmentVariables)
// Global env
foreach (var pair in step.ExecutionContext.Global.EnvironmentVariables)
{
envContext[pair.Key] = new StringContextData(pair.Value ?? string.Empty);
}
step.ExecutionContext.ExpressionValues["env"] = envContext;
bool evaluateStepEnvFailed = false;
if (step is IActionRunner actionStep)
{
// Set GITHUB_ACTION
// Warning: Do not turn on FF DistributedTask.UseContextNameForGITHUBACTION until after M271-ish. After M271-ish
// the server will never send an empty context name. Generated context names start with "__"
if (step.ExecutionContext.Global.Variables.GetBoolean("DistributedTask.UseContextNameForGITHUBACTION") ?? false)
{
envContext[pair.Key] = new StringContextData(pair.Value ?? string.Empty);
step.ExecutionContext.SetGitHubContext("action", actionStep.Action.Name);
}
else
{
step.ExecutionContext.SetGitHubContext("action", step.ExecutionContext.GetFullyQualifiedContextName());
}
if (step is IActionRunner actionStep)
try
{
// Set GITHUB_ACTION
step.ExecutionContext.SetGitHubContext("action", actionStep.Action.Name);
// Evaluate and merge action's env block to env context
var templateEvaluator = step.ExecutionContext.ToPipelineTemplateEvaluator();
var actionEnvironment = templateEvaluator.EvaluateStepEnvironment(actionStep.Action.Environment, step.ExecutionContext.ExpressionValues, step.ExecutionContext.ExpressionFunctions, VarUtil.EnvironmentVariableKeyComparer);
@@ -111,7 +126,18 @@ namespace GitHub.Runner.Worker
envContext[env.Key] = new StringContextData(env.Value ?? string.Empty);
}
}
catch (Exception ex)
{
// fail the step since there is an evaluate error.
Trace.Info("Caught exception from expression for step.env");
evaluateStepEnvFailed = true;
step.ExecutionContext.Error(ex);
CompleteStep(step, TaskResult.Failed);
}
}
if (!evaluateStepEnvFailed)
{
try
{
// Register the job cancellation callback only if the job cancellation token has not been fired before each step run
@@ -195,19 +221,19 @@ namespace GitHub.Runner.Worker
{
// Condition == false
Trace.Info("Skipping step due to condition evaluation.");
CompleteStep(step, nextStep, TaskResult.Skipped, resultCode: conditionTraceWriter.Trace);
CompleteStep(step, TaskResult.Skipped, resultCode: conditionTraceWriter.Trace);
}
else if (conditionEvaluateError != null)
{
// fail the step since there is an evaluate error.
step.ExecutionContext.Error(conditionEvaluateError);
CompleteStep(step, nextStep, TaskResult.Failed);
CompleteStep(step, TaskResult.Failed);
}
else
{
// Run the step.
await RunStepAsync(step, jobContext.CancellationToken);
CompleteStep(step, nextStep);
CompleteStep(step);
}
}
finally
@@ -270,40 +296,7 @@ namespace GitHub.Runner.Worker
step.ExecutionContext.SetTimeout(timeout);
}
#if OS_WINDOWS
try
{
if (Console.InputEncoding.CodePage != 65001)
{
using (var p = HostContext.CreateService<IProcessInvoker>())
{
// Use UTF8 code page
int exitCode = await p.ExecuteAsync(workingDirectory: HostContext.GetDirectory(WellKnownDirectory.Work),
fileName: WhichUtil.Which("chcp", true, Trace),
arguments: "65001",
environment: null,
requireExitCodeZero: false,
outputEncoding: null,
killProcessOnCancel: false,
redirectStandardIn: null,
inheritConsoleHandler: true,
cancellationToken: step.ExecutionContext.CancellationToken);
if (exitCode == 0)
{
Trace.Info("Successfully returned to code page 65001 (UTF8)");
}
else
{
Trace.Warning($"'chcp 65001' failed with exit code {exitCode}");
}
}
}
}
catch (Exception ex)
{
Trace.Warning($"'chcp 65001' failed with exception {ex.Message}");
}
#endif
await EncodingUtil.SetEncoding(HostContext, Trace, step.ExecutionContext.CancellationToken);
try
{
@@ -369,117 +362,9 @@ namespace GitHub.Runner.Worker
step.ExecutionContext.Debug($"Finishing: {step.DisplayName}");
}
private bool InitializeScope(IStep step, Dictionary<string, PipelineContextData> scopeInputs)
private void CompleteStep(IStep step, TaskResult? result = null, string resultCode = null)
{
var executionContext = step.ExecutionContext;
var stepsContext = executionContext.StepsContext;
if (!string.IsNullOrEmpty(executionContext.ScopeName))
{
// Gather uninitialized current and ancestor scopes
var scope = executionContext.Scopes[executionContext.ScopeName];
var scopesToInitialize = default(Stack<ContextScope>);
while (scope != null && !scopeInputs.ContainsKey(scope.Name))
{
if (scopesToInitialize == null)
{
scopesToInitialize = new Stack<ContextScope>();
}
scopesToInitialize.Push(scope);
scope = string.IsNullOrEmpty(scope.ParentName) ? null : executionContext.Scopes[scope.ParentName];
}
// Initialize current and ancestor scopes
while (scopesToInitialize?.Count > 0)
{
scope = scopesToInitialize.Pop();
executionContext.Debug($"Initializing scope '{scope.Name}'");
executionContext.ExpressionValues["steps"] = stepsContext.GetScope(scope.ParentName);
executionContext.ExpressionValues["inputs"] = !String.IsNullOrEmpty(scope.ParentName) ? scopeInputs[scope.ParentName] : null;
var templateEvaluator = executionContext.ToPipelineTemplateEvaluator();
var inputs = default(DictionaryContextData);
try
{
inputs = templateEvaluator.EvaluateStepScopeInputs(scope.Inputs, executionContext.ExpressionValues, executionContext.ExpressionFunctions);
}
catch (Exception ex)
{
Trace.Info($"Caught exception from initialize scope '{scope.Name}'");
Trace.Error(ex);
executionContext.Error(ex);
executionContext.Complete(TaskResult.Failed);
return false;
}
scopeInputs[scope.Name] = inputs;
}
}
// Setup expression values
var scopeName = executionContext.ScopeName;
executionContext.ExpressionValues["steps"] = stepsContext.GetScope(scopeName);
executionContext.ExpressionValues["inputs"] = string.IsNullOrEmpty(scopeName) ? null : scopeInputs[scopeName];
return true;
}
private void CompleteStep(IStep step, IStep nextStep, TaskResult? result = null, string resultCode = null)
{
var executionContext = step.ExecutionContext;
if (!string.IsNullOrEmpty(executionContext.ScopeName))
{
// Gather current and ancestor scopes to finalize
var scope = executionContext.Scopes[executionContext.ScopeName];
var scopesToFinalize = default(Queue<ContextScope>);
var nextStepScopeName = nextStep?.ExecutionContext.ScopeName;
while (scope != null &&
!string.Equals(nextStepScopeName, scope.Name, StringComparison.OrdinalIgnoreCase) &&
!(nextStepScopeName ?? string.Empty).StartsWith($"{scope.Name}.", StringComparison.OrdinalIgnoreCase))
{
if (scopesToFinalize == null)
{
scopesToFinalize = new Queue<ContextScope>();
}
scopesToFinalize.Enqueue(scope);
scope = string.IsNullOrEmpty(scope.ParentName) ? null : executionContext.Scopes[scope.ParentName];
}
// Finalize current and ancestor scopes
var stepsContext = step.ExecutionContext.StepsContext;
while (scopesToFinalize?.Count > 0)
{
scope = scopesToFinalize.Dequeue();
executionContext.Debug($"Finalizing scope '{scope.Name}'");
executionContext.ExpressionValues["steps"] = stepsContext.GetScope(scope.Name);
executionContext.ExpressionValues["inputs"] = null;
var templateEvaluator = executionContext.ToPipelineTemplateEvaluator();
var outputs = default(DictionaryContextData);
try
{
outputs = templateEvaluator.EvaluateStepScopeOutputs(scope.Outputs, executionContext.ExpressionValues, executionContext.ExpressionFunctions);
}
catch (Exception ex)
{
Trace.Info($"Caught exception from finalize scope '{scope.Name}'");
Trace.Error(ex);
executionContext.Error(ex);
executionContext.Complete(TaskResult.Failed);
return;
}
if (outputs?.Count > 0)
{
var parentScopeName = scope.ParentName;
var contextName = scope.ContextName;
foreach (var pair in outputs)
{
var outputName = pair.Key;
var outputValue = pair.Value.ToString();
stepsContext.SetOutput(parentScopeName, contextName, outputName, outputValue, out var reference);
executionContext.Debug($"{reference}='{outputValue}'");
}
}
}
}
executionContext.Complete(result, resultCode: resultCode);
}

View File

@@ -7,7 +7,8 @@
"name": "string",
"description": "string",
"inputs": "inputs",
"runs": "runs"
"runs": "runs",
"outputs": "outputs"
},
"loose-key-type": "non-empty-string",
"loose-value-type": "any"
@@ -28,11 +29,26 @@
"loose-value-type": "any"
}
},
"outputs": {
"mapping": {
"loose-key-type": "non-empty-string",
"loose-value-type": "outputs-attributes"
}
},
"outputs-attributes": {
"mapping": {
"properties": {
"description": "string",
"value": "output-value"
}
}
},
"runs": {
"one-of": [
"container-runs",
"node12-runs",
"plugin-runs"
"plugin-runs",
"composite-runs"
]
},
"container-runs": {
@@ -83,12 +99,49 @@
}
}
},
"composite-runs": {
"mapping": {
"properties": {
"using": "non-empty-string",
"steps": "composite-steps"
}
}
},
"composite-steps": {
"context": [
"github",
"strategy",
"matrix",
"steps",
"inputs",
"job",
"runner",
"env",
"hashFiles(1,255)"
],
"sequence": {
"item-type": "any"
}
},
"container-runs-context": {
"context": [
"inputs"
],
"string": {}
},
"output-value": {
"context": [
"github",
"strategy",
"matrix",
"steps",
"inputs",
"job",
"runner",
"env"
],
"string": {}
},
"input-default-context": {
"context": [
"github",

View File

@@ -317,5 +317,37 @@ namespace GitHub.DistributedTask.WebApi
userState: userState,
cancellationToken: cancellationToken);
}
/// <summary>
/// [Preview API] Resolves information required to download actions (URL, token) defined in an orchestration.
/// </summary>
/// <param name="scopeIdentifier">The project GUID to scope the request</param>
/// <param name="hubName">The name of the server hub: "build" for the Build server or "rm" for the Release Management server</param>
/// <param name="planId"></param>
/// <param name="actionReferenceList"></param>
/// <param name="userState"></param>
/// <param name="cancellationToken">The cancellation token to cancel operation.</param>
public virtual Task<ActionDownloadInfoCollection> ResolveActionDownloadInfoAsync(
Guid scopeIdentifier,
string hubName,
Guid planId,
ActionReferenceList actionReferenceList,
object userState = null,
CancellationToken cancellationToken = default)
{
HttpMethod httpMethod = new HttpMethod("POST");
Guid locationId = new Guid("27d7f831-88c1-4719-8ca1-6a061dad90eb");
object routeValues = new { scopeIdentifier = scopeIdentifier, hubName = hubName, planId = planId };
HttpContent content = new ObjectContent<ActionReferenceList>(actionReferenceList, new VssJsonMediaTypeFormatter(true));
return SendAsync<ActionDownloadInfoCollection>(
httpMethod,
locationId,
routeValues: routeValues,
version: new ApiResourceVersion(6.0, 1),
userState: userState,
cancellationToken: cancellationToken,
content: content);
}
}
}
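The new ResolveActionDownloadInfoAsync call resolves download URLs and an auth token for every action referenced by a plan in a single POST. A hedged caller sketch; the client instance, GUIDs, and action references below are invented for illustration:

```csharp
// Fragment assumed to run inside an async method with an authenticated WebApi client
// ('taskAgentClient'), a plan scope GUID, and a plan GUID already in hand.
var actionList = new ActionReferenceList
{
    Actions = new List<ActionReference>
    {
        new ActionReference { NameWithOwner = "actions/checkout", Ref = "v2" },
    }
};

ActionDownloadInfoCollection downloads = await taskAgentClient.ResolveActionDownloadInfoAsync(
    scopeIdentifier: planScopeGuid,   // project GUID from the job message
    hubName: "build",
    planId: planGuid,
    actionReferenceList: actionList);

foreach (var action in downloads.Actions)
{
    Console.WriteLine($"{action.Key}: {action.Value.TarballUrl} @ {action.Value.ResolvedSha}");
}
```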

View File

@@ -60,6 +60,20 @@ namespace GitHub.DistributedTask.Logging
return SecurityElement.Escape(value);
}
public static String TrimDoubleQuotes(String value)
{
var trimmed = string.Empty;
if (!string.IsNullOrEmpty(value) &&
value.Length > 8 &&
value.StartsWith('"') &&
value.EndsWith('"'))
{
trimmed = value.Substring(1, value.Length - 2);
}
return trimmed;
}
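The TrimDoubleQuotes helper above only strips quotes when the value is longer than eight characters and is wrapped in double quotes; anything else yields an empty string rather than the original input. A self-contained sketch of that behavior, with made-up inputs:

```csharp
using System;

// The local function mirrors the new TrimDoubleQuotes helper shown above;
// the sample inputs are invented to exercise each branch of the guard.
static string TrimDoubleQuotes(string value)
{
    var trimmed = string.Empty;
    if (!string.IsNullOrEmpty(value) &&
        value.Length > 8 &&
        value.StartsWith('"') &&
        value.EndsWith('"'))
    {
        trimmed = value.Substring(1, value.Length - 2);
    }
    return trimmed;
}

Console.WriteLine(TrimDoubleQuotes("\"some-registration-token\"")); // some-registration-token
Console.WriteLine(TrimDoubleQuotes("\"abc\""));                     // "" (fails the length > 8 guard)
Console.WriteLine(TrimDoubleQuotes("no-quotes"));                   // "" (not wrapped in quotes)
```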
private static string Base64StringEscapeShift(String value, int shift)
{
var bytes = Encoding.UTF8.GetBytes(value);

View File

@@ -24,7 +24,6 @@ namespace GitHub.DistributedTask.Pipelines
Environment = actionToClone.Environment?.Clone();
Inputs = actionToClone.Inputs?.Clone();
ContextName = actionToClone?.ContextName;
ScopeName = actionToClone?.ScopeName;
DisplayNameToken = actionToClone.DisplayNameToken?.Clone();
}
@@ -41,9 +40,6 @@ namespace GitHub.DistributedTask.Pipelines
[DataMember(EmitDefaultValue = false)]
public TemplateToken DisplayNameToken { get; set; }
[DataMember(EmitDefaultValue = false)]
public String ScopeName { get; set; }
[DataMember(EmitDefaultValue = false)]
public String ContextName { get; set; }

View File

@@ -39,7 +39,6 @@ namespace GitHub.DistributedTask.Pipelines
DictionaryContextData contextData,
WorkspaceOptions workspaceOptions,
IEnumerable<JobStep> steps,
IEnumerable<ContextScope> scopes,
IList<String> fileTable,
TemplateToken jobOutputs,
IList<TemplateToken> defaults)
@@ -60,11 +59,6 @@ namespace GitHub.DistributedTask.Pipelines
m_maskHints = new List<MaskHint>(maskHints);
m_steps = new List<JobStep>(steps);
if (scopes != null)
{
m_scopes = new List<ContextScope>(scopes);
}
if (environmentVariables?.Count > 0)
{
m_environmentVariables = new List<TemplateToken>(environmentVariables);
@@ -261,18 +255,6 @@ namespace GitHub.DistributedTask.Pipelines
}
}
public IList<ContextScope> Scopes
{
get
{
if (m_scopes == null)
{
m_scopes = new List<ContextScope>();
}
return m_scopes;
}
}
/// <summary>
/// Gets the table of files used when parsing the pipeline (e.g. yaml files)
/// </summary>
@@ -415,11 +397,6 @@ namespace GitHub.DistributedTask.Pipelines
m_maskHints = new List<MaskHint>(this.m_maskHints.Distinct());
}
if (m_scopes?.Count == 0)
{
m_scopes = null;
}
if (m_variables?.Count == 0)
{
m_variables = null;
@@ -447,9 +424,6 @@ namespace GitHub.DistributedTask.Pipelines
[DataMember(Name = "Steps", EmitDefaultValue = false)]
private List<JobStep> m_steps;
[DataMember(Name = "Scopes", EmitDefaultValue = false)]
private List<ContextScope> m_scopes;
[DataMember(Name = "Variables", EmitDefaultValue = false)]
private IDictionary<String, VariableValue> m_variables;

View File

@@ -42,7 +42,12 @@ namespace GitHub.DistributedTask.Pipelines.ContextData
var floored = Math.Floor(m_value);
if (m_value == floored && m_value <= (Double)Int32.MaxValue && m_value >= (Double)Int32.MinValue)
{
Int32 flooredInt = (Int32)floored;
var flooredInt = (Int32)floored;
return (JToken)flooredInt;
}
else if (m_value == floored && m_value <= (Double)Int64.MaxValue && m_value >= (Double)Int64.MinValue)
{
var flooredInt = (Int64)floored;
return (JToken)flooredInt;
}
else

View File

@@ -1,53 +0,0 @@
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Runtime.Serialization;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using Newtonsoft.Json;
namespace GitHub.DistributedTask.Pipelines
{
[DataContract]
[EditorBrowsable(EditorBrowsableState.Never)]
public sealed class ContextScope
{
[DataMember(EmitDefaultValue = false)]
public String Name { get; set; }
[IgnoreDataMember]
public String ContextName
{
get
{
var index = Name.LastIndexOf('.');
if (index >= 0)
{
return Name.Substring(index + 1);
}
return Name;
}
}
[IgnoreDataMember]
public String ParentName
{
get
{
var index = Name.LastIndexOf('.');
if (index >= 0)
{
return Name.Substring(0, index);
}
return String.Empty;
}
}
[DataMember(EmitDefaultValue = false)]
public TemplateToken Inputs { get; set; }
[DataMember(EmitDefaultValue = false)]
public TemplateToken Outputs { get; set; }
}
}

View File

@@ -11,8 +11,7 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
public const String BooleanStrategyContext = "boolean-strategy-context";
public const String CancelTimeoutMinutes = "cancel-timeout-minutes";
public const String Cancelled = "cancelled";
public const String Checkout = "checkout";
public const String Clean = "clean";
public const String Clean= "clean";
public const String Container = "container";
public const String ContinueOnError = "continue-on-error";
public const String Defaults = "defaults";
@@ -23,7 +22,6 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
public const String FailFast = "fail-fast";
public const String Failure = "failure";
public const String FetchDepth = "fetch-depth";
public const String GeneratedId = "generated-id";
public const String GitHub = "github";
public const String HashFiles = "hashFiles";
public const String Id = "id";
@@ -36,7 +34,6 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
public const String JobIfResult = "job-if-result";
public const String JobOutputs = "job-outputs";
public const String Jobs = "jobs";
public const String Labels = "labels";
public const String Lfs = "lfs";
public const String Matrix = "matrix";
public const String MaxParallel = "max-parallel";
@@ -52,23 +49,18 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
public const String Pool = "pool";
public const String Ports = "ports";
public const String Result = "result";
public const String RunDisplayPrefix = "Run ";
public const String Run = "run";
public const String RunDisplayPrefix = "Run ";
public const String Runner = "runner";
public const String RunsOn = "runs-on";
public const String Scope = "scope";
public const String Scopes = "scopes";
public const String Secrets = "secrets";
public const String Services = "services";
public const String Shell = "shell";
public const String Skipped = "skipped";
public const String StepEnv = "step-env";
public const String StepIfResult = "step-if-result";
public const String Steps = "steps";
public const String StepsScopeInputs = "steps-scope-inputs";
public const String StepsScopeOutputs = "steps-scope-outputs";
public const String StepsTemplateRoot = "steps-template-root";
public const String StepWith = "step-with";
public const String Steps = "steps";
public const String Strategy = "strategy";
public const String StringStepsContext = "string-steps-context";
public const String StringStrategyContext = "string-strategy-context";
@@ -76,7 +68,6 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
public const String Success = "success";
public const String Template = "template";
public const String TimeoutMinutes = "timeout-minutes";
public const String Token = "token";
public const String Uses = "uses";
public const String VmImage = "vmImage";
public const String Volumes = "volumes";

View File

@@ -1,6 +1,6 @@
using System;
using System.Collections.Generic;
using System.Globalization;
using System.ComponentModel;
using System.Linq;
using GitHub.DistributedTask.Expressions2;
using GitHub.DistributedTask.Expressions2.Sdk;
@@ -14,8 +14,62 @@ using Newtonsoft.Json.Linq;
namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
{
internal static class PipelineTemplateConverter
[EditorBrowsable(EditorBrowsableState.Never)]
public static class PipelineTemplateConverter
{
public static List<Step> ConvertToSteps(
TemplateContext context,
TemplateToken steps)
{
var stepsSequence = steps.AssertSequence($"job {PipelineTemplateConstants.Steps}");
var result = new List<Step>();
var nameBuilder = new ReferenceNameBuilder();
foreach (var stepsItem in stepsSequence)
{
var step = ConvertToStep(context, stepsItem, nameBuilder);
if (step != null) // step == null means we hit an error during step conversion; there should be an error in context.errors
{
if (step.Enabled)
{
result.Add(step);
}
}
}
// Generate context name if empty
foreach (ActionStep step in result)
{
if (!String.IsNullOrEmpty(step.ContextName))
{
continue;
}
var name = default(string);
switch (step.Reference.Type)
{
case ActionSourceType.ContainerRegistry:
var containerReference = step.Reference as ContainerRegistryReference;
name = containerReference.Image;
break;
case ActionSourceType.Repository:
var repositoryReference = step.Reference as RepositoryPathReference;
name = !String.IsNullOrEmpty(repositoryReference.Name) ? repositoryReference.Name : PipelineConstants.SelfAlias;
break;
}
if (String.IsNullOrEmpty(name))
{
name = "run";
}
nameBuilder.AppendSegment($"__{name}");
step.ContextName = nameBuilder.Build();
}
return result;
}
internal static Boolean ConvertToIfResult(
TemplateContext context,
TemplateToken ifResult)
@@ -264,5 +318,311 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
return result;
}
private static ActionStep ConvertToStep(
TemplateContext context,
TemplateToken stepsItem,
ReferenceNameBuilder nameBuilder)
{
var step = stepsItem.AssertMapping($"{PipelineTemplateConstants.Steps} item");
var continueOnError = default(ScalarToken);
var env = default(TemplateToken);
var id = default(StringToken);
var ifCondition = default(String);
var ifToken = default(ScalarToken);
var name = default(ScalarToken);
var run = default(ScalarToken);
var timeoutMinutes = default(ScalarToken);
var uses = default(StringToken);
var with = default(TemplateToken);
var workingDir = default(ScalarToken);
var path = default(ScalarToken);
var clean = default(ScalarToken);
var fetchDepth = default(ScalarToken);
var lfs = default(ScalarToken);
var submodules = default(ScalarToken);
var shell = default(ScalarToken);
foreach (var stepProperty in step)
{
var propertyName = stepProperty.Key.AssertString($"{PipelineTemplateConstants.Steps} item key");
switch (propertyName.Value)
{
case PipelineTemplateConstants.Clean:
clean = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Clean}");
break;
case PipelineTemplateConstants.ContinueOnError:
ConvertToStepContinueOnError(context, stepProperty.Value, allowExpressions: true); // Validate early if possible
continueOnError = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} {PipelineTemplateConstants.ContinueOnError}");
break;
case PipelineTemplateConstants.Env:
ConvertToStepEnvironment(context, stepProperty.Value, StringComparer.Ordinal, allowExpressions: true); // Validate early if possible
env = stepProperty.Value;
break;
case PipelineTemplateConstants.FetchDepth:
fetchDepth = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.FetchDepth}");
break;
case PipelineTemplateConstants.Id:
id = stepProperty.Value.AssertString($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Id}");
if (!String.IsNullOrEmpty(id.Value))
{
if (!nameBuilder.TryAddKnownName(id.Value, out var error))
{
context.Error(id, error);
}
}
break;
case PipelineTemplateConstants.If:
ifToken = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.If}");
break;
case PipelineTemplateConstants.Lfs:
lfs = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Lfs}");
break;
case PipelineTemplateConstants.Name:
name = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Name}");
break;
case PipelineTemplateConstants.Path:
path = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Path}");
break;
case PipelineTemplateConstants.Run:
run = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Run}");
break;
case PipelineTemplateConstants.Shell:
shell = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Shell}");
break;
case PipelineTemplateConstants.Submodules:
submodules = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Submodules}");
break;
case PipelineTemplateConstants.TimeoutMinutes:
ConvertToStepTimeout(context, stepProperty.Value, allowExpressions: true); // Validate early if possible
timeoutMinutes = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.TimeoutMinutes}");
break;
case PipelineTemplateConstants.Uses:
uses = stepProperty.Value.AssertString($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Uses}");
break;
case PipelineTemplateConstants.With:
ConvertToStepInputs(context, stepProperty.Value, allowExpressions: true); // Validate early if possible
with = stepProperty.Value;
break;
case PipelineTemplateConstants.WorkingDirectory:
workingDir = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.WorkingDirectory}");
break;
default:
propertyName.AssertUnexpectedValue($"{PipelineTemplateConstants.Steps} item key"); // throws
break;
}
}
// Fixup the if-condition
ifCondition = ConvertToIfCondition(context, ifToken, false);
if (run != null)
{
var result = new ActionStep
{
ContextName = id?.Value,
ContinueOnError = continueOnError,
DisplayNameToken = name,
Condition = ifCondition,
TimeoutInMinutes = timeoutMinutes,
Environment = env,
Reference = new ScriptReference(),
};
var inputs = new MappingToken(null, null, null);
inputs.Add(new StringToken(null, null, null, PipelineConstants.ScriptStepInputs.Script), run);
if (workingDir != null)
{
inputs.Add(new StringToken(null, null, null, PipelineConstants.ScriptStepInputs.WorkingDirectory), workingDir);
}
if (shell != null)
{
inputs.Add(new StringToken(null, null, null, PipelineConstants.ScriptStepInputs.Shell), shell);
}
result.Inputs = inputs;
return result;
}
else
{
uses.AssertString($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Uses}");
var result = new ActionStep
{
ContextName = id?.Value,
ContinueOnError = continueOnError,
DisplayNameToken = name,
Condition = ifCondition,
TimeoutInMinutes = timeoutMinutes,
Inputs = with,
Environment = env,
};
if (uses.Value.StartsWith("docker://", StringComparison.Ordinal))
{
var image = uses.Value.Substring("docker://".Length);
result.Reference = new ContainerRegistryReference { Image = image };
}
else if (uses.Value.StartsWith("./") || uses.Value.StartsWith(".\\"))
{
result.Reference = new RepositoryPathReference
{
RepositoryType = PipelineConstants.SelfAlias,
Path = uses.Value
};
}
else
{
var usesSegments = uses.Value.Split('@');
var pathSegments = usesSegments[0].Split(new[] { '/', '\\' }, StringSplitOptions.RemoveEmptyEntries);
var gitRef = usesSegments.Length == 2 ? usesSegments[1] : String.Empty;
if (usesSegments.Length != 2 ||
pathSegments.Length < 2 ||
String.IsNullOrEmpty(pathSegments[0]) ||
String.IsNullOrEmpty(pathSegments[1]) ||
String.IsNullOrEmpty(gitRef))
{
// todo: loc
context.Error(uses, $"Expected format {{org}}/{{repo}}[/path]@ref. Actual '{uses.Value}'");
}
else
{
var repositoryName = $"{pathSegments[0]}/{pathSegments[1]}";
var directoryPath = pathSegments.Length > 2 ? String.Join("/", pathSegments.Skip(2)) : String.Empty;
result.Reference = new RepositoryPathReference
{
RepositoryType = RepositoryTypes.GitHub,
Name = repositoryName,
Ref = gitRef,
Path = directoryPath,
};
}
}
return result;
}
}
/// <summary>
/// When empty, default to "success()".
/// When a status function is not referenced, format as "success() &amp;&amp; &lt;CONDITION&gt;".
/// </summary>
private static String ConvertToIfCondition(
TemplateContext context,
TemplateToken token,
Boolean isJob)
{
String condition;
if (token is null)
{
condition = null;
}
else if (token is BasicExpressionToken expressionToken)
{
condition = expressionToken.Expression;
}
else
{
var stringToken = token.AssertString($"{(isJob ? "job" : "step")} {PipelineTemplateConstants.If}");
condition = stringToken.Value;
}
if (String.IsNullOrWhiteSpace(condition))
{
return $"{PipelineTemplateConstants.Success}()";
}
var expressionParser = new ExpressionParser();
var functions = default(IFunctionInfo[]);
var namedValues = default(INamedValueInfo[]);
if (isJob)
{
namedValues = s_jobIfNamedValues;
// TODO: refactor into separate functions
// functions = PhaseCondition.FunctionInfo;
}
else
{
namedValues = s_stepNamedValues;
functions = s_stepConditionFunctions;
}
var node = default(ExpressionNode);
try
{
node = expressionParser.CreateTree(condition, null, namedValues, functions) as ExpressionNode;
}
catch (Exception ex)
{
context.Error(token, ex);
return null;
}
if (node == null)
{
return $"{PipelineTemplateConstants.Success}()";
}
var hasStatusFunction = node.Traverse().Any(x =>
{
if (x is Function function)
{
return String.Equals(function.Name, PipelineTemplateConstants.Always, StringComparison.OrdinalIgnoreCase) ||
String.Equals(function.Name, PipelineTemplateConstants.Cancelled, StringComparison.OrdinalIgnoreCase) ||
String.Equals(function.Name, PipelineTemplateConstants.Failure, StringComparison.OrdinalIgnoreCase) ||
String.Equals(function.Name, PipelineTemplateConstants.Success, StringComparison.OrdinalIgnoreCase);
}
return false;
});
return hasStatusFunction ? condition : $"{PipelineTemplateConstants.Success}() && ({condition})";
}
private static readonly INamedValueInfo[] s_jobIfNamedValues = new INamedValueInfo[]
{
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.GitHub),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Needs),
};
private static readonly INamedValueInfo[] s_stepNamedValues = new INamedValueInfo[]
{
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Strategy),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Matrix),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Steps),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.GitHub),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Job),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Runner),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Env),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Needs),
};
private static readonly IFunctionInfo[] s_stepConditionFunctions = new IFunctionInfo[]
{
new FunctionInfo<NoOperation>(PipelineTemplateConstants.Always, 0, 0),
new FunctionInfo<NoOperation>(PipelineTemplateConstants.Cancelled, 0, 0),
new FunctionInfo<NoOperation>(PipelineTemplateConstants.Failure, 0, 0),
new FunctionInfo<NoOperation>(PipelineTemplateConstants.Success, 0, 0),
new FunctionInfo<NoOperation>(PipelineTemplateConstants.HashFiles, 1, Byte.MaxValue),
};
}
}

View File

@@ -51,60 +51,6 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
public Int32 MaxResultSize { get; set; } = 10 * 1024 * 1024; // 10 mb
public DictionaryContextData EvaluateStepScopeInputs(
TemplateToken token,
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions)
{
var result = default(DictionaryContextData);
if (token != null && token.Type != TokenType.Null)
{
var context = CreateContext(contextData, expressionFunctions);
try
{
token = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.StepsScopeInputs, token, 0, null, omitHeader: true);
context.Errors.Check();
result = token.ToContextData().AssertDictionary("steps scope inputs");
}
catch (Exception ex) when (!(ex is TemplateValidationException))
{
context.Errors.Add(ex);
}
context.Errors.Check();
}
return result ?? new DictionaryContextData();
}
public DictionaryContextData EvaluateStepScopeOutputs(
TemplateToken token,
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions)
{
var result = default(DictionaryContextData);
if (token != null && token.Type != TokenType.Null)
{
var context = CreateContext(contextData, expressionFunctions);
try
{
token = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.StepsScopeOutputs, token, 0, null, omitHeader: true);
context.Errors.Check();
result = token.ToContextData().AssertDictionary("steps scope outputs");
}
catch (Exception ex) when (!(ex is TemplateValidationException))
{
context.Errors.Add(ex);
}
context.Errors.Check();
}
return result ?? new DictionaryContextData();
}
public Boolean EvaluateStepContinueOnError(
TemplateToken token,
DictionaryContextData contextData,

View File

@@ -0,0 +1,121 @@
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Text;
using GitHub.DistributedTask.Pipelines.Validation;
namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
{
internal sealed class ReferenceNameBuilder
{
internal void AppendSegment(String value)
{
if (String.IsNullOrEmpty(value))
{
return;
}
if (m_name.Length == 0)
{
var first = value[0];
if ((first >= 'a' && first <= 'z') ||
(first >= 'A' && first <= 'Z') ||
first == '_')
{
// Legal first char
}
else if ((first >= '0' && first <= '9') || first == '-')
{
// Illegal first char, but legal char.
// Prepend "_".
m_name.Append("_");
}
else
{
// Illegal char
}
}
else
{
// Separator
m_name.Append(c_separator);
}
foreach (var c in value)
{
if ((c >= 'a' && c <= 'z') ||
(c >= 'A' && c <= 'Z') ||
(c >= '0' && c <= '9') ||
c == '_' ||
c == '-')
{
// Legal
m_name.Append(c);
}
else
{
// Illegal
m_name.Append("_");
}
}
}
internal String Build()
{
var original = m_name.Length > 0 ? m_name.ToString() : "job";
var attempt = 1;
var suffix = default(String);
while (true)
{
if (attempt == 1)
{
suffix = String.Empty;
}
else if (attempt < 1000)
{
suffix = String.Format(CultureInfo.InvariantCulture, "_{0}", attempt);
}
else
{
throw new InvalidOperationException("Unable to create a unique name");
}
var candidate = original.Substring(0, Math.Min(original.Length, PipelineConstants.MaxNodeNameLength - suffix.Length)) + suffix;
if (m_distinctNames.Add(candidate))
{
m_name.Clear();
return candidate;
}
attempt++;
}
}
internal Boolean TryAddKnownName(
String value,
out String error)
{
if (!NameValidation.IsValid(value, allowHyphens: true) && value.Length < PipelineConstants.MaxNodeNameLength)
{
error = $"The identifier '{value}' is invalid. IDs may only contain alphanumeric characters, '_', and '-'. IDs must start with a letter or '_' and and must be less than {PipelineConstants.MaxNodeNameLength} characters.";
return false;
}
else if (!m_distinctNames.Add(value))
{
error = $"The identifier '{value}' may not be used more than once within the same scope.";
return false;
}
else
{
error = null;
return true;
}
}
private const String c_separator = "_";
private readonly HashSet<String> m_distinctNames = new HashSet<String>(StringComparer.OrdinalIgnoreCase);
private readonly StringBuilder m_name = new StringBuilder();
}
}
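ReferenceNameBuilder backs the generated step context names used above: generated names start with "__", user-supplied ids are reserved first, and numeric suffixes keep names unique. A hedged usage sketch; the class is internal to the pipelines library, so this is illustrative only:

```csharp
using System;

// Hypothetical driver for the builder above.
var nameBuilder = new ReferenceNameBuilder();

// A user-supplied id is reserved up front so generated names cannot collide with it.
nameBuilder.TryAddKnownName("build", out _);

// Two anonymous `run` steps: the second generated name gets a numeric suffix.
nameBuilder.AppendSegment("__run");
Console.WriteLine(nameBuilder.Build()); // __run

nameBuilder.AppendSegment("__run");
Console.WriteLine(nameBuilder.Build()); // __run_2
```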

View File

@@ -94,5 +94,12 @@ namespace GitHub.DistributedTask.Pipelines
public static readonly String Resources = "resources";
public static readonly String All = "all";
}
public static class ScriptStepInputs
{
public static readonly String Script = "script";
public static readonly String WorkingDirectory = "workingDirectory";
public static readonly String Shell = "shell";
}
}
}

View File

@@ -16,116 +16,6 @@
}
},
"steps-template-root": {
"description": "Steps template file",
"mapping": {
"properties": {
"inputs": "steps-template-inputs",
"outputs": "steps-template-outputs",
"steps": "steps-in-template"
}
}
},
"steps-scope-inputs": {
"description": "Used when evaluating steps scope inputs",
"mapping": {
"loose-key-type": "non-empty-string",
"loose-value-type": "steps-scope-input-value"
}
},
"steps-scope-input-value": {
"context": [
"github",
"needs",
"strategy",
"matrix",
"secrets",
"steps",
"inputs",
"job",
"runner",
"env"
],
"one-of": [
"string",
"sequence",
"mapping"
]
},
"steps-scope-outputs": {
"description": "Used when evaluating steps scope outputs",
"mapping": {
"loose-key-type": "non-empty-string",
"loose-value-type": "steps-scope-output-value"
}
},
"steps-scope-output-value": {
"context": [
"github",
"needs",
"strategy",
"matrix",
"secrets",
"steps",
"inputs",
"job",
"runner",
"env"
],
"string": {}
},
"steps-template-inputs": {
"description": "Allowed inputs in a steps template",
"mapping": {
"loose-key-type": "non-empty-string",
"loose-value-type": "steps-template-input-value"
}
},
"steps-template-input-value": {
"description": "Default input values for a steps template",
"context": [
"github",
"needs",
"strategy",
"matrix"
],
"one-of": [
"string",
"sequence",
"mapping"
]
},
"steps-template-outputs": {
"description": "Output mapping for a steps template",
"mapping": {
"loose-key-type": "non-empty-string",
"loose-value-type": "steps-template-output-value"
}
},
"steps-template-output-value": {
"description": "Output values for a steps template",
"context": [
"github",
"needs",
"strategy",
"matrix",
"secrets",
"steps",
"job",
"runner",
"env"
],
"string": {}
},
"workflow-defaults": {
"mapping": {
"properties": {
@@ -240,54 +130,27 @@
"matrix": {
"mapping": {
"properties": {
"include": "matrix-include",
"exclude": "matrix-exclude"
"include": "matrix-filter",
"exclude": "matrix-filter"
},
"loose-key-type": "non-empty-string",
"loose-value-type": "sequence"
}
},
"matrix-include": {
"matrix-filter": {
"sequence": {
"item-type": "matrix-include-item"
"item-type": "matrix-filter-item"
}
},
"matrix-include-item": {
"matrix-filter-item": {
"mapping": {
"loose-key-type": "non-empty-string",
"loose-value-type": "any"
}
},
"matrix-exclude": {
"sequence": {
"item-type": "matrix-exclude-item"
}
},
"matrix-exclude-item": {
"mapping": {
"loose-key-type": "non-empty-string",
"loose-value-type": "matrix-exclude-filter-item"
}
},
"matrix-exclude-filter-item": {
"one-of": [
"string",
"matrix-exclude-mapping-filter"
]
},
"matrix-exclude-mapping-filter": {
"mapping": {
"loose-key-type": "non-empty-string",
"loose-value-type": "matrix-exclude-filter-item"
}
},
"runs-on": {
"context": [
"github",
@@ -364,25 +227,10 @@
}
},
"steps-in-template": {
"sequence": {
"item-type": "steps-item-in-template"
}
},
"steps-item": {
"one-of": [
"run-step",
"regular-step",
"steps-template-reference"
]
},
"steps-item-in-template": {
"one-of": [
"run-step-in-template",
"regular-step-in-template",
"steps-template-reference-in-template"
"regular-step"
]
},
@@ -405,25 +253,6 @@
}
},
"run-step-in-template": {
"mapping": {
"properties": {
"name": "string-steps-context-in-template",
"id": "non-empty-string",
"if": "step-if-in-template",
"timeout-minutes": "number-steps-context-in-template",
"run": {
"type": "string-steps-context-in-template",
"required": true
},
"continue-on-error": "boolean-steps-context-in-template",
"env": "step-env-in-template",
"working-directory": "string-steps-context-in-template",
"shell": "non-empty-string"
}
}
},
"regular-step": {
"mapping": {
"properties": {
@@ -442,24 +271,6 @@
}
},
"regular-step-in-template": {
"mapping": {
"properties": {
"name": "string-steps-context-in-template",
"id": "non-empty-string",
"if": "step-if-in-template",
"continue-on-error": "boolean-steps-context-in-template",
"timeout-minutes": "number-steps-context-in-template",
"uses": {
"type": "non-empty-string",
"required": true
},
"with": "step-with-in-template",
"env": "step-env-in-template"
}
}
},
"step-if": {
"context": [
"github",
@@ -479,26 +290,6 @@
"string": {}
},
"step-if-in-template": {
"context": [
"github",
"needs",
"strategy",
"matrix",
"steps",
"inputs",
"job",
"runner",
"env",
"always(0,0)",
"failure(0,0)",
"cancelled(0,0)",
"success(0,0)",
"hashFiles(1,255)"
],
"string": {}
},
"step-if-result": {
"context": [
"github",
@@ -524,89 +315,6 @@
]
},
"step-if-result-in-template": {
"context": [
"github",
"strategy",
"matrix",
"steps",
"inputs",
"job",
"runner",
"env",
"always(0,0)",
"failure(0,0)",
"cancelled(0,0)",
"success(0,0)",
"hashFiles(1,255)"
],
"one-of": [
"null",
"boolean",
"number",
"string",
"sequence",
"mapping"
]
},
"steps-template-reference": {
"mapping": {
"properties": {
"template": "non-empty-string",
"id": "non-empty-string",
"inputs": "steps-template-reference-inputs"
}
}
},
"steps-template-reference-in-template": {
"mapping": {
"properties": {
"template": "non-empty-string",
"id": "non-empty-string",
"inputs": "steps-template-reference-inputs-in-template"
}
}
},
"steps-template-reference-inputs": {
"context": [
"github",
"needs",
"strategy",
"matrix",
"secrets",
"steps",
"job",
"runner",
"env"
],
"mapping": {
"loose-key-type": "non-empty-string",
"loose-value-type": "string"
}
},
"steps-template-reference-inputs-in-template": {
"context": [
"github",
"needs",
"strategy",
"matrix",
"secrets",
"steps",
"inputs",
"job",
"runner",
"env"
],
"mapping": {
"loose-key-type": "non-empty-string",
"loose-value-type": "string"
}
},
"step-env": {
"context": [
"github",
@@ -626,26 +334,6 @@
}
},
"step-env-in-template": {
"context": [
"github",
"needs",
"strategy",
"matrix",
"secrets",
"steps",
"inputs",
"job",
"runner",
"env",
"hashFiles(1,255)"
],
"mapping": {
"loose-key-type": "non-empty-string",
"loose-value-type": "string"
}
},
"step-with": {
"context": [
"github",
@@ -665,26 +353,6 @@
}
},
"step-with-in-template": {
"context": [
"github",
"needs",
"strategy",
"matrix",
"secrets",
"steps",
"inputs",
"job",
"runner",
"env",
"hashFiles(1,255)"
],
"mapping": {
"loose-key-type": "non-empty-string",
"loose-value-type": "string"
}
},
"container": {
"context": [
"github",
@@ -801,23 +469,6 @@
"boolean": {}
},
"boolean-steps-context-in-template": {
"context": [
"github",
"needs",
"strategy",
"matrix",
"secrets",
"steps",
"inputs",
"job",
"runner",
"env",
"hashFiles(1,255)"
],
"boolean": {}
},
"number-steps-context": {
"context": [
"github",
@@ -834,23 +485,6 @@
"number": {}
},
"number-steps-context-in-template": {
"context": [
"github",
"needs",
"strategy",
"matrix",
"secrets",
"steps",
"inputs",
"job",
"runner",
"env",
"hashFiles(1,255)"
],
"number": {}
},
"string-runner-context": {
"context": [
"github",
@@ -880,23 +514,6 @@
"hashFiles(1,255)"
],
"string": {}
},
"string-steps-context-in-template": {
"context": [
"github",
"needs",
"strategy",
"matrix",
"secrets",
"steps",
"inputs",
"job",
"runner",
"env",
"hashFiles(1,255)"
],
"string": {}
}
}
}

View File

@@ -0,0 +1,40 @@
using System;
using System.Runtime.Serialization;
namespace GitHub.DistributedTask.WebApi
{
[DataContract]
public class ActionDownloadInfo
{
[DataMember(EmitDefaultValue = false)]
public ActionDownloadAuthentication Authentication { get; set; }
[DataMember(EmitDefaultValue = false)]
public string NameWithOwner { get; set; }
[DataMember(EmitDefaultValue = false)]
public string ResolvedNameWithOwner { get; set; }
[DataMember(EmitDefaultValue = false)]
public string ResolvedSha { get; set; }
[DataMember(EmitDefaultValue = false)]
public string TarballUrl { get; set; }
[DataMember(EmitDefaultValue = false)]
public string Ref { get; set; }
[DataMember(EmitDefaultValue = false)]
public string ZipballUrl { get; set; }
}
[DataContract]
public class ActionDownloadAuthentication
{
[DataMember(EmitDefaultValue = false)]
public DateTime ExpiresAt { get; set; }
[DataMember(EmitDefaultValue = false)]
public string Token { get; set; }
}
}

View File

@@ -0,0 +1,16 @@
using System.Collections.Generic;
using System.Runtime.Serialization;
namespace GitHub.DistributedTask.WebApi
{
[DataContract]
public class ActionDownloadInfoCollection
{
[DataMember]
public IDictionary<string, ActionDownloadInfo> Actions
{
get;
set;
}
}
}

View File

@@ -0,0 +1,22 @@
using System.Runtime.Serialization;
namespace GitHub.DistributedTask.WebApi
{
[DataContract]
public class ActionReference
{
[DataMember]
public string NameWithOwner
{
get;
set;
}
[DataMember]
public string Ref
{
get;
set;
}
}
}

View File

@@ -0,0 +1,16 @@
using System.Collections.Generic;
using System.Runtime.Serialization;
namespace GitHub.DistributedTask.WebApi
{
[DataContract]
public class ActionReferenceList
{
[DataMember]
public IList<ActionReference> Actions
{
get;
set;
}
}
}

View File

@@ -24,6 +24,7 @@ namespace GitHub.DistributedTask.WebApi
this.OSDescription = referenceToBeCloned.OSDescription;
this.ProvisioningState = referenceToBeCloned.ProvisioningState;
this.AccessPoint = referenceToBeCloned.AccessPoint;
this.Ephemeral = referenceToBeCloned.Ephemeral;
if (referenceToBeCloned.m_links != null)
{
@@ -81,6 +82,16 @@ namespace GitHub.DistributedTask.WebApi
set;
}
/// <summary>
/// Signifies that this Agent can only run one job and will be removed by the server after that one job finishes.
/// </summary>
[DataMember]
public bool? Ephemeral
{
get;
set;
}
/// <summary>
/// Whether or not the agent is online.
/// </summary>

View File

@@ -119,6 +119,15 @@ namespace GitHub.Services.OAuth
}
}
public async Task<string> ValidateCredentialAsync(CancellationToken cancellationToken)
{
var tokenHttpClient = new VssOAuthTokenHttpClient(this.SignInUrl);
var tokenResponse = await tokenHttpClient.GetTokenAsync(this.Grant, this.ClientCredential, this.TokenParameters, cancellationToken);
// return the underlying authentication error
return tokenResponse.Error;
}
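ValidateCredentialAsync issues a token request solely to surface the underlying authentication error (null or empty on success), which lets a runner validate its OAuth credential up front. A hedged fragment; the credential instance name is assumed since the class declaration is outside this hunk:

```csharp
// Fragment assumed to run inside an async method; 'oauthCredential' stands in for the
// OAuth credential instance this method was added to (not shown in the hunk above).
string authError = await oauthCredential.ValidateCredentialAsync(CancellationToken.None);
if (!string.IsNullOrEmpty(authError))
{
    Console.WriteLine($"Credential validation failed: {authError}");
}
```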
/// <summary>
/// Issues a token request to the configured secure token service. On success, the access token issued by the
/// token service is returned to the caller
@@ -131,7 +140,7 @@ namespace GitHub.Services.OAuth
CancellationToken cancellationToken)
{
if (this.SignInUrl == null ||
this.Grant == null ||
this.Grant == null ||
this.ClientCredential == null)
{
return null;

View File

@@ -39,7 +39,7 @@ namespace GitHub.Services.OAuth
/// <summary>
/// Gets or sets the error description for the response.
/// </summary>
[DataMember(Name = "errordescription", EmitDefaultValue = false)]
[DataMember(Name = "error_description", EmitDefaultValue = false)]
public String ErrorDescription
{
get;

View File

@@ -1,58 +1,58 @@
using Xunit;
using System.IO;
using System.Net.Http;
using System.Threading.Tasks;
// using Xunit;
// using System.IO;
// using System.Net.Http;
// using System.Threading.Tasks;
namespace GitHub.Runner.Common.Tests
{
public sealed class DotnetsdkDownloadScriptL0
{
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Runner")]
public async Task EnsureDotnetsdkBashDownloadScriptUpToDate()
{
string shDownloadUrl = "https://dot.net/v1/dotnet-install.sh";
// namespace GitHub.Runner.Common.Tests
// {
// public sealed class DotnetsdkDownloadScriptL0
// {
// [Fact]
// [Trait("Level", "L0")]
// [Trait("Category", "Runner")]
// public async Task EnsureDotnetsdkBashDownloadScriptUpToDate()
// {
// string shDownloadUrl = "https://dot.net/v1/dotnet-install.sh";
using (HttpClient downloadClient = new HttpClient())
{
var response = await downloadClient.GetAsync("https://www.bing.com");
if (!response.IsSuccessStatusCode)
{
return;
}
// using (HttpClient downloadClient = new HttpClient())
// {
// var response = await downloadClient.GetAsync("https://www.bing.com");
// if (!response.IsSuccessStatusCode)
// {
// return;
// }
string shScript = await downloadClient.GetStringAsync(shDownloadUrl);
// string shScript = await downloadClient.GetStringAsync(shDownloadUrl);
string existingShScript = File.ReadAllText(Path.Combine(TestUtil.GetSrcPath(), "Misc/dotnet-install.sh"));
// string existingShScript = File.ReadAllText(Path.Combine(TestUtil.GetSrcPath(), "Misc/dotnet-install.sh"));
bool shScriptMatched = string.Equals(shScript.TrimEnd('\n', '\r', '\0').Replace("\r\n", "\n").Replace("\r", "\n"), existingShScript.TrimEnd('\n', '\r', '\0').Replace("\r\n", "\n").Replace("\r", "\n"));
Assert.True(shScriptMatched, "Fix the test by updating Src/Misc/dotnet-install.sh with content from https://dot.net/v1/dotnet-install.sh");
}
}
// bool shScriptMatched = string.Equals(shScript.TrimEnd('\n', '\r', '\0').Replace("\r\n", "\n").Replace("\r", "\n"), existingShScript.TrimEnd('\n', '\r', '\0').Replace("\r\n", "\n").Replace("\r", "\n"));
// Assert.True(shScriptMatched, "Fix the test by updating Src/Misc/dotnet-install.sh with content from https://dot.net/v1/dotnet-install.sh");
// }
// }
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Runner")]
public async Task EnsureDotnetsdkPowershellDownloadScriptUpToDate()
{
string ps1DownloadUrl = "https://dot.net/v1/dotnet-install.ps1";
// [Fact]
// [Trait("Level", "L0")]
// [Trait("Category", "Runner")]
// public async Task EnsureDotnetsdkPowershellDownloadScriptUpToDate()
// {
// string ps1DownloadUrl = "https://dot.net/v1/dotnet-install.ps1";
using (HttpClient downloadClient = new HttpClient())
{
var response = await downloadClient.GetAsync("https://www.bing.com");
if (!response.IsSuccessStatusCode)
{
return;
}
// using (HttpClient downloadClient = new HttpClient())
// {
// var response = await downloadClient.GetAsync("https://www.bing.com");
// if (!response.IsSuccessStatusCode)
// {
// return;
// }
string ps1Script = await downloadClient.GetStringAsync(ps1DownloadUrl);
// string ps1Script = await downloadClient.GetStringAsync(ps1DownloadUrl);
string existingPs1Script = File.ReadAllText(Path.Combine(TestUtil.GetSrcPath(), "Misc/dotnet-install.ps1"));
// string existingPs1Script = File.ReadAllText(Path.Combine(TestUtil.GetSrcPath(), "Misc/dotnet-install.ps1"));
bool ps1ScriptMatched = string.Equals(ps1Script.TrimEnd('\n', '\r', '\0').Replace("\r\n", "\n").Replace("\r", "\n"), existingPs1Script.TrimEnd('\n', '\r', '\0').Replace("\r\n", "\n").Replace("\r", "\n"));
Assert.True(ps1ScriptMatched, "Fix the test by updating Src/Misc/dotnet-install.ps1 with content from https://dot.net/v1/dotnet-install.ps1");
}
}
}
}
// bool ps1ScriptMatched = string.Equals(ps1Script.TrimEnd('\n', '\r', '\0').Replace("\r\n", "\n").Replace("\r", "\n"), existingPs1Script.TrimEnd('\n', '\r', '\0').Replace("\r\n", "\n").Replace("\r", "\n"));
// Assert.True(ps1ScriptMatched, "Fix the test by updating Src/Misc/dotnet-install.ps1 with content from https://dot.net/v1/dotnet-install.ps1");
// }
// }
// }
// }

Some files were not shown because too many files have changed in this diff.