Compare commits


147 Commits

Author SHA1 Message Date
Ethan Chiu
68b05f63c9 Add filetable to testing file, remove ? since we know filetable should never be non null 2020-07-08 16:25:20 -04:00
Ethan Chiu
635baa5295 Add null check 2020-07-08 14:51:07 -04:00
Ethan Chiu
af9202bd8d Fix logic for trimming manifestfile path 2020-07-08 14:43:03 -04:00
Ethan Chiu
b3eea21e4f Fix unit tests to instantiate a FileTable 2020-07-08 14:37:30 -04:00
Ethan Chiu
9a2b3e2662 Revert 2020-07-08 14:11:53 -04:00
Ethan Chiu
6d7efa9906 add line 2020-07-08 14:08:04 -04:00
Ethan Chiu
8828263795 Address situation if FileTable is null + add sanity check for adding file to fileTable 2020-07-08 14:00:12 -04:00
Ethan Chiu
35879fc3b1 Fix null errors 2020-07-08 12:33:22 -04:00
Ethan Chiu
7c57d41b3a Merge branch 'master' of https://github.com/actions/runner into users/ethanchewy/compositeFileTable 2020-07-08 11:33:32 -04:00
Ethan Chiu
41a8c8c3aa Add back line break 2020-07-08 11:20:44 -04:00
Ethan Chiu
5822a38c39 Add bash command for running custom runner (#569) 2020-07-08 11:20:38 -04:00
Ethan Chiu
d2d0ecf1ed revert back 2020-07-08 11:16:08 -04:00
Ethan Chiu
9a1dd80bcb Sanity check for fileTable add + remove unn. code 2020-07-08 11:14:46 -04:00
Ethan Chiu
3836574cce Fix period 2020-07-08 10:52:36 -04:00
Ethan Chiu
b6526db74e Merge branch 'users/ethanchewy/compositeenv' of https://github.com/actions/runner into users/ethanchewy/compositeFileTable 2020-07-08 10:29:19 -04:00
Ethan Chiu
d42c9da2d7 Composite Actions: Support Env Flow (#557)
* Composite Action Run Steps

* Env Flow => Able to get env variables and overwrite current env variables => but it doesn't 'stick'

* clean up

* Clean up trace messages + add Trace debug in ActionManager

* Add debugging message

* Optimize runtime of code

* Change String to string

* Add comma to Composite

* Change JobSteps to a List, Change Register Step function name

* Add TODO, remove unn. content

* Remove unnecessary code

* Fix unit tests

* Fix env format

* Remove comment

* Remove TODO message for context

* Add verbose trace logs which are only viewable by devs

* Sort usings in Composite Action Handler

* Change 0 to location

* Update context variables in composite action yaml

* Add helpful error message for null steps

* Fix Workflow Step Env overiding Parent Env

* Remove env in composite action scope

* Clean up

* Revert back

* revert back

* add back envToken

* Remove unnecessary code

* Figure out how to handle set-env edge cases

* formatting

* fix unit tests

* Fix windows unit test syntax error
2020-07-08 10:16:51 -04:00
Ethan Chiu
881e8e72a3 Fix windows unit test syntax error 2020-07-08 10:06:03 -04:00
Ethan Chiu
51784687f9 Merge branch 'users/ethanchewy/compositeenv' of https://github.com/actions/runner into users/ethanchewy/compositeenv 2020-07-08 10:00:43 -04:00
Ethan Chiu
a58ac2ef59 fix unit tests 2020-07-08 10:00:24 -04:00
Ethan Chiu
b712ba2648 Merge branch 'master' into users/ethanchewy/compositeenv 2020-07-07 18:47:48 -04:00
Ethan Chiu
11b9cace75 formatting 2020-07-07 18:18:09 -04:00
Ethan Chiu
96bff6a206 Figure out how to handle set-env edge cases 2020-07-07 18:16:54 -04:00
Ethan Chiu
14a2cbd473 Clean up 2020-07-07 13:02:07 -04:00
Ethan Chiu
20bc6a9a5c Add file length check 2020-07-07 12:57:59 -04:00
Ethan Chiu
191a096f8d Merge branch 'users/ethanchewy/compositeenv' of https://github.com/actions/runner into users/ethanchewy/compositeFileTable 2020-07-07 12:53:19 -04:00
Ethan Chiu
4b3ec9fbe6 Remove unnecessary code 2020-07-07 12:45:59 -04:00
Ethan Chiu
0041023399 add back envToken 2020-07-07 12:44:54 -04:00
Ethan Chiu
4ccac8c0e2 revert back 2020-07-07 12:44:11 -04:00
Ethan Chiu
5d6114548e Revert back 2020-07-07 12:43:13 -04:00
Ethan Chiu
da1e2b0a84 Clean up 2020-07-07 12:40:41 -04:00
Ethan Chiu
b63f98714c Remove env in composite action scope 2020-07-07 12:38:39 -04:00
eric sciple
121deedeb5 Fix trailing '.0' for Int64 values (#572) 2020-06-30 17:25:47 -04:00
Ethan Chiu
0bd97d6597 Merge branch 'users/ethanchewy/compositeenv' of https://github.com/actions/runner into users/ethanchewy/compositeFileTable 2020-06-25 13:59:07 -04:00
Ethan Chiu
57d59fcd6e Fix Workflow Step Env overiding Parent Env 2020-06-25 13:53:41 -04:00
Ethan Chiu
a0942ed345 Composite Actions Support for Multiple Run Steps (#549)
* Composite Action Run Steps

* Clean up trace messages + add Trace debug in ActionManager

* Change String to string

* Add comma to Composite

* Change JobSteps to a List, Change Register Step function name

* Add TODO, remove unn. content

* Remove unnecessary code

* Fix unit tests

* Add verbose trace logs which are only viewable by devs

* Sort usings in Composite Action Handler

* Change 0 to location

* Update context variables in composite action yaml

* Add helpful error message for null steps
2020-06-23 15:35:32 -04:00
Ethan Chiu
1f6518dfad Merge branch 'users/ethanchewy/compositetest2' of https://github.com/actions/runner into users/ethanchewy/compositeenv 2020-06-23 14:30:04 -04:00
Ethan Chiu
54ed6eabce Remove todo 2020-06-23 14:27:06 -04:00
Ethan Chiu
0a6453e241 Clean up file path for error message 2020-06-23 14:19:51 -04:00
TingluoHuang
7cef9a27ca release 2.267.0 runner. 2020-06-23 14:05:28 -04:00
Tingluo Huang
df7e16954e print runner and machine name to log. (#539) 2020-06-23 13:57:37 -04:00
Ethan Chiu
368b6254ed Remove unnessary FileID attribute from CompositeActionExecutionData 2020-06-23 13:40:06 -04:00
Ethan Chiu
9c60f1a264 Merge branch 'users/ethanchewy/compositetest2' of https://github.com/actions/runner into users/ethanchewy/compositeFileTable 2020-06-23 13:26:32 -04:00
Ethan Chiu
ba4ce9c3d3 Change confusing term context => templateContext, Eliminate _fileTable and only use ExecutionContext.FileTable + update this table when need be 2020-06-23 13:22:20 -04:00
Ethan Chiu
da2da85766 Pass fileID to all children token of root action token 2020-06-23 12:56:52 -04:00
Ethan Chiu
f9b28c7210 Add helpful error message for null steps 2020-06-23 12:23:02 -04:00
Ethan Chiu
8aadbbdb8e Update context variables in composite action yaml 2020-06-23 12:21:34 -04:00
Ethan Chiu
9ec7047441 Change 0 to location 2020-06-23 12:20:39 -04:00
Ethan Chiu
0d5e84b183 Sort usings in Composite Action Handler 2020-06-23 12:20:12 -04:00
Ethan Chiu
58d11ef80a Progress towards passing FileTable or FileID or FileName 2020-06-22 18:29:48 -04:00
Ethan Chiu
43a3006e7b Initial Start for FileTable stuff 2020-06-22 14:43:50 -04:00
Ethan Chiu
fbef557fd3 Merge branch 'users/ethanchewy/compositetest2' of https://github.com/actions/runner into users/ethanchewy/compositeenv 2020-06-22 12:56:50 -04:00
Ethan Chiu
a254442dcc Add verbose trace logs which are only viewable by devs 2020-06-22 11:37:52 -04:00
Ethan Chiu
94e7b474e1 Remove TODO message for context 2020-06-22 11:31:43 -04:00
Ethan Chiu
37849dc6e3 Remove comment 2020-06-22 10:23:52 -04:00
Ethan Chiu
45ddd4233e Fix env format 2020-06-20 14:20:04 -04:00
Ethan Chiu
f8054f9c2e Add/Merge changes from Multiple Steps PR 2020-06-19 15:04:34 -04:00
Ethan Chiu
e4dfd0e8fd Fix unit tests 2020-06-19 14:51:16 -04:00
Ethan Chiu
00a736d9cc Remove unnecessary code 2020-06-19 14:43:31 -04:00
Ethan Chiu
5988076fcf Add TODO, remove unn. content 2020-06-19 14:38:37 -04:00
Ethan Chiu
9939cf527e Change JobSteps to a List, Change Register Step function name 2020-06-19 14:29:48 -04:00
Ethan Chiu
28be3dffc0 Merge branch 'users/ethanchewy/compositetest2' of https://github.com/actions/runner into users/ethanchewy/compositeenv 2020-06-19 11:30:06 -04:00
Ethan Chiu
941a24ee37 Add comma to Composite 2020-06-19 11:27:19 -04:00
Ethan Chiu
5843362bd4 Change String to string 2020-06-19 11:22:49 -04:00
Ethan Chiu
496064f72d Optimize runtime of code 2020-06-18 17:34:17 -04:00
Ethan Chiu
96e003706f Add debugging message 2020-06-18 16:16:55 -04:00
Ethan Chiu
66cadebeb8 Merge branch 'users/ethanchewy/compositetest2' of https://github.com/actions/runner into users/ethanchewy/compositeenv 2020-06-18 16:14:41 -04:00
Ethan Chiu
180a687f30 Clean up trace messages + add Trace debug in ActionManager 2020-06-18 16:13:56 -04:00
Ethan Chiu
6552263369 clean up 2020-06-18 15:57:23 -04:00
Ethan Chiu
e56b2439b9 Env Flow => Able to get env variables and overwrite current env variables => but it doesn't 'stick' 2020-06-18 15:34:48 -04:00
Ethan Chiu
038e5e2c2e Composite Action Run Steps 2020-06-18 09:48:18 -04:00
eric sciple
4e7d27a53c remove temporary logic when resolving action download info (#550) 2020-06-15 13:13:47 -04:00
Lokesh Gopu
89d1418e48 Update exception message (#540) 2020-06-11 17:25:50 -04:00
Tingluo Huang
e728b8594d fix race condition. (#538) 2020-06-11 16:17:24 -04:00
Tingluo Huang
de4490d06d Restore SELinux context on service file when SELinux is enabled (#525) 2020-06-11 15:40:09 -04:00
Tingluo Huang
2e800f857e Skip search $PATH on command with fully qualified path (#526) 2020-06-11 13:52:42 -04:00
Tingluo Huang
312c7668a8 Fix DataContract with Token service (#532) 2020-06-11 12:11:35 -04:00
Tingluo Huang
eaf39bb058 add libicu66 for Ubuntu 20.04 (#535) 2020-06-11 12:11:13 -04:00
eric sciple
5815819f24 Resolve action download info (#515) 2020-06-09 08:53:28 -04:00
Ethan Chiu
1aea046932 Add substep for developer flow for clarity (#523) 2020-06-08 10:47:58 -04:00
Ethan Chiu
eda463601c Update Links and Language to Git + VSCode (#522) 2020-06-08 10:19:17 -04:00
Nick Fields
f994ae0542 Reduce input validation warnings (#506)
* Only raise a single warning for unexpected inputs

* Update invalid input test to raise single warning
2020-06-05 23:09:14 -04:00
Tingluo Huang
3c5aef791c Fix null ref exception in SecretMasker caused by hashfiles timeout. (#516) 2020-06-05 23:02:10 -04:00
Tingluo Huang
c4626d0c3a Remove SPS/Token migration code. Remove GHES url manipulate code. (#513)
* Remove SPS/Token migration code. Remove GHES url manipulate code.

* feedback.
2020-06-03 23:24:53 -04:00
eric sciple
416a7ac4b8 prepare to switch to service resolves archive download info (#508) 2020-06-02 17:21:50 -04:00
TingluoHuang
11435857e4 prepare 2.263.0 runner release. 2020-05-21 15:49:10 -04:00
Tingluo Huang
6f260012a3 Fix inputs validation warning, fix post step display name, fix worker crash due to error in step.env (#490) 2020-05-21 11:09:50 -04:00
eric sciple
4fc87ddfc6 fix problem matcher for GHES (#488) 2020-05-19 16:15:03 -04:00
eric sciple
b45c1b9440 switch GITHUB_URL to GITHUB_SERVER_URL (#482) 2020-05-18 13:02:30 -04:00
Quan TRAN
73307c0a30 jq returns "null" if the field does not exist (#478) 2020-05-14 11:09:20 -04:00
Tingluo Huang
cd8e4ddba1 Validate inputs only for repo action, no warning for small delay. (#476)
* validate inputs only for repo action, no warning for small delay.

* l0
2020-05-12 16:09:13 -04:00
Tingluo Huang
abf59bdcb6 Fix configure as service with runner name has space. (#474) 2020-05-12 16:08:50 -04:00
Brian Cristante
09cf59c1e0 Use an env var to point to an Actions Service dev instance (#468)
* Use an environment variable for the testing backdoor

* Make the env var a boolean

We'll still have to pass the URL on the command line

* Empty commit to rerun CI
2020-05-11 17:14:02 -04:00
TingluoHuang
7a65236022 prepare 2.262.0 runner release. 2020-05-11 15:05:59 -04:00
David Kale
462b5117c8 docker build using -f instead of implied default (#471)
* pass -f to docker build

* Wrong place

* build path

* Also pass docker context path

* Tidy up format

* PR Feedback
2020-05-11 13:57:31 -04:00
Tingluo Huang
6922f3cb86 sps/token migration tweak, ActionResult casing. (#462) 2020-05-11 12:36:35 -04:00
Tingluo Huang
911135e66c add help info for '--labels' (#472) 2020-05-11 12:36:16 -04:00
PJ Quirk
01c9a8a8af Remove community actions munging and add dotcom fallback for downloading actions (#469)
* Fallback to dotcom rather than munged community actions

* Encapsulate the link and the auth details

* Rename the method to be clearer

* Remove BOM
2020-05-11 12:23:02 -04:00
eric sciple
33d2d2c328 update checkout@v1 for GHES (#470) 2020-05-11 11:41:16 -04:00
Justin Hutchings
a246b3b29d Add CodeQL Analysis workflow (#459)
* Add CodeQL Analysis workflow

* Fix path

* Add manual build step

Import manual build step from build.yml
2020-05-07 11:17:34 -04:00
Brian Cristante
c7768d4a7b Adapt create-latest-svc.sh to work with GHES (#452)
* Add README

* Adapt create-latest-svc to work with GHES

* Fix inverted conditions
2020-04-27 23:53:53 -04:00
Tingluo Huang
70729fb3c4 Help trace worker crash in Kusto. (#450)
* Help trace worker crash in Kusto.

* more

* feedback.
2020-04-27 23:44:17 -04:00
Tingluo Huang
1470a3b6e2 better error when runner removed from service. (#441) 2020-04-27 23:02:00 -04:00
eric sciple
2fadf430e4 post-alpha fixes for github.url github.api_url and github.graphql_url (#451) 2020-04-24 13:38:59 -04:00
PJ Quirk
f798f5606b Use the API_URL and munge action URLs for GHES (#437)
* First pass at logic for GHES, not all correct

* Need to mock out file downloading

* Allowed for mocking of HTTP responses

* Added test for builtin GHES action download

* More tests

* Don't retry on action 404

* Remove commented out code

* Add a using statement back, because Windows

* Make windows happy again

* Another windows fix

* Always delete the cache since it isn't fully implemented

* Use RunnerService base class

* Add examples, update URL path

* Remove forceDotCom

* Fix a bug

* Remove a test that's no longer relevant

* PR feedback

* Add missing return

* More trace info

* Use the new agreed-upon format

* Use the auth token since we're hitting GHES directly

* Fixing tests on windows

* Fixed one more test
2020-04-23 17:02:13 -04:00
Tingluo Huang
3f7a01af93 add secret masker for trimming double qoutes. (#440) 2020-04-21 22:07:55 -04:00
Tingluo Huang
d5c54f9819 Raise warning when action input does not match action.yml. (#429) 2020-04-21 19:42:53 -04:00
Mathias LANG
9f78ad3b34 Make release notes code blocks copy-paste-able (#430)
The release notes used C-style comments (`//`) instead of shell-style (`#`),
causing the code snippet to not be easily copy-paste-able.

Additionally, the code fence for Windows had the extra `powershell` added,
as well as a comment to direct users towards `powershell` over `cmd`.
2020-04-19 22:11:44 -04:00
Jan Pazdziora
97883c8cd5 Fix spelling of RHEL and CentOS. (#436) 2020-04-19 16:53:06 -04:00
Tingluo Huang
c5fa9fb062 Print node version in debug instead of output. (#433) 2020-04-17 11:53:51 -04:00
Bryan MacFarlane
b2dcdc21dc Sample scripts to automate scaleable runners (#427) 2020-04-17 11:08:45 -04:00
David Kale
c126b52fe5 ArgumentNullException: Value cannot be null, for anonymous volume mounts (#426)
* Dont check if path starts with null

* Check SourceVolumePath not MountVolume obj

* Prefer string.IsNullOrEmpty
2020-04-14 14:36:39 -04:00
Lokesh Gopu
117ec1fff9 Fix optional parameter for unattended (#425) 2020-04-14 12:14:26 -04:00
Tingluo Huang
d5c7097d2c support 'pre' execution for actions. (#389) 2020-04-13 21:46:30 -04:00
Lokesh Gopu
f9baec4b32 Added support for custom labels (#414)
* Added support for custom labels

* ignore case

* Added interactive config for labels

* Fixing L0s

* pr comments
2020-04-13 21:33:13 -04:00
chenrui
a20ad4e121 Update releaseVersion to v2.168.0 (#420)
v2.168.0 is the latest release version, update the meta file to reflect that.
2020-04-11 13:59:20 -04:00
Tingluo Huang
2bd0b1af0e launch middle man process on macOS to workaround SIP limit (#416) 2020-04-09 16:13:06 -04:00
Tingluo Huang
baa6ded3bc Better Kusto Tracing for self-hosted runner. (#405) 2020-04-09 14:33:16 -04:00
TingluoHuang
7817e1a976 Prepare 2.169.0 runner release for GHES Alpha. 2020-04-08 11:32:56 -04:00
Tingluo Huang
d90273a068 Raise warning when volume mount root. (#413) 2020-04-08 11:17:54 -04:00
TingluoHuang
2cdde6cb16 fix L0 test. 2020-04-07 14:04:37 -04:00
TingluoHuang
1f52dfa636 bump dev-dependency version. 2020-04-07 14:00:37 -04:00
dependabot[bot]
83b5742278 Bump acorn from 6.4.0 to 6.4.1 in /src/Misc/expressionFunc/hashFiles (#371)
Bumps [acorn](https://github.com/acornjs/acorn) from 6.4.0 to 6.4.1.
- [Release notes](https://github.com/acornjs/acorn/releases)
- [Commits](https://github.com/acornjs/acorn/compare/6.4.0...6.4.1)

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2020-04-07 13:49:48 -04:00
eric sciple
ba69b5bc93 Fix runner config IsHostedServer detection for GHES alpha (#401) 2020-04-01 17:29:57 -04:00
Tingluo Huang
0e8777ebda ADR for wrapper action (#361)
* wrapper action adr.

* rename

* updates.

* Update 0361-wrapper-action.md
2020-03-31 10:11:02 -04:00
Bryan MacFarlane
a5f06b3ec2 ADR 397: Configuration time custom labels (#397)
Since configuring self-hosted runners is commonly automated via scripts, the labels need to be able to be created during configuration. The runner currently registers the built-in labels (os, arch) during registration but does not accept labels via command line args to extend the set registered.
2020-03-30 17:46:32 -04:00
Tingluo Huang
be325f26a6 support config with GHES url. (#393) 2020-03-30 14:48:02 -04:00
Josh Soref
dec260920f spelling: deprecate (#394) 2020-03-30 07:45:06 -04:00
eric sciple
b0a1294ef5 Fix API URL for GHES (#390) 2020-03-27 00:16:02 -04:00
Tingluo Huang
3d70ef2da1 update workflow schema file. (#388) 2020-03-26 23:01:17 -04:00
eric sciple
e23d68f6e2 add github.url and github.api_url for ghes alpha (#386) 2020-03-25 15:11:52 -04:00
eric sciple
dff1024cd3 cache actions on premises (#381) 2020-03-24 21:51:37 -04:00
TingluoHuang
9fc0686dc2 prepare 2.168.0 runner release. 2020-03-24 16:25:11 -04:00
David Kale
ab001a7004 Add expanded volumes strings to container mounts (#384) 2020-03-23 18:53:01 -04:00
Tingluo Huang
178a618e01 expose GITHUB_REPOSITORY_OWNER. (#378) 2020-03-20 13:02:07 -04:00
eric sciple
dfaf6e06ee switch hashFiles to extension function (#362) 2020-03-18 12:08:51 -04:00
Tingluo Huang
b0a71481f0 support defaults. (#369) 2020-03-17 23:40:37 -04:00
Tingluo Huang
88875ca1b0 set steps.<id>.outcome and steps.<id>.conclusion. (#372) 2020-03-17 21:18:42 -04:00
Tingluo Huang
a5eb8cb5c4 set CI=true when launch process in actions runner. (#374) 2020-03-17 19:58:12 -04:00
Josh Soref
41f4ca3414 grammar (#373) 2020-03-16 22:19:57 -04:00
eric sciple
aa9f5bf070 adr step output and conclusion (#274) 2020-03-16 14:56:07 -04:00
Tingluo Huang
2d6042421f add support for job outputs. (#365)
* add support for job outputs.
2020-03-14 17:54:58 -04:00
Tingluo Huang
c8890d0f3f Expose job name as $GITHUB_JOB (#366) 2020-03-12 20:47:25 -04:00
Konrad Pabjan
53fb6297cb Change problem matchers output to debug (#363) 2020-03-11 21:52:46 -04:00
Tingluo Huang
f9b5d626c5 load and print machine setup info from .setup_info (#364) 2020-03-11 10:36:56 -04:00
Tingluo Huang
d34afb54b1 ADR for expose runner's machine info in log. (#354)
* ADR for expose runner's machine info in log.

* rename

* Update docs/adrs/0354-runner-machine-info.md

Co-Authored-By: Hugo van Kemenade <hugovk@users.noreply.github.com>

* Update docs/adrs/0354-runner-machine-info.md

Co-Authored-By: Hugo van Kemenade <hugovk@users.noreply.github.com>

* Update docs/adrs/0354-runner-machine-info.md

Co-Authored-By: Hugo van Kemenade <hugovk@users.noreply.github.com>

* Update 0354-runner-machine-info.md

Co-authored-by: Hugo van Kemenade <hugovk@users.noreply.github.com>
2020-03-10 15:45:35 -04:00
Julio Barba
e291ebc58a Add runner auth documentation (#357)
Add runner authentication/authorization documentation.

This doc explains how auth is used at all phases of the runner lifetime (i.e. configuration, listener start, and workflow run), for both self-hosted and hosted runners.
2020-03-09 13:01:41 -04:00
Tingluo Huang
6bec1e3bb8 Switch to use token service instead of SPS for exchanging oauth token. (#325)
* Gracefully switch the runner to use Token Service instead of SPS.

* PR feedback.

* feedback2

* report error.
2020-03-04 21:40:58 -05:00
141 changed files with 8465 additions and 1981 deletions

35
.github/workflows/codeql.yml vendored Normal file

@@ -0,0 +1,35 @@
name: "Code Scanning - Action"
on:
push:
schedule:
- cron: '0 0 * * 0'
jobs:
CodeQL-Build:
strategy:
fail-fast: false
# CodeQL runs on ubuntu-latest, windows-latest, and macos-latest
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v2
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v1
# Override language selection by uncommenting this and choosing your languages
# with:
# languages: go, javascript, csharp, python, cpp, java
- name: Manual build
run : |
./dev.sh layout Release linux-x64
working-directory: src
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v1


@@ -0,0 +1,62 @@
# ADR 0274: Step outcome and conclusion
**Date**: 2020-01-13
**Status**: Accepted
## Context
This ADR proposes adding `steps.<id>.outcome` and `steps.<id>.conclusion` to the steps context.
This allows a downstream step to run based on whether a previous step succeeded or failed.
As a reminder, the steps context currently contains `steps.<id>.outputs`.
## Decision
For steps that have completed, populate `steps.<id>.outcome` and `steps.<id>.conclusion` with one of the following values:
- `success`
- `failure`
- `cancelled`
- `skipped`
When a continue-on-error step fails, the outcome will be `failure` even though the final conclusion is `success`.
### Example
```yaml
steps:
  - id: experimental
    continue-on-error: true
    run: ./build.sh experimental
  - if: ${{ steps.experimental.outcome == 'success' }}
    run: ./publish.sh experimental
```
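For instance, if `./build.sh experimental` exits non-zero in the snippet above, `steps.experimental.outcome` would be `failure` while `steps.experimental.conclusion` would be `success` (because of `continue-on-error`). A sketch of a follow-up step that reacts to the raw result:
```yaml
- if: ${{ steps.experimental.outcome == 'failure' }}
  run: echo "experimental build failed, but the job continues"
```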
### Terminology
The runs API uses the term `conclusion`.
Therefore we use a different term, `outcome`, for the value prior to `continue-on-error`.
The following is a snippet from the runs API response payload:
```json
"steps": [
{
"name": "Set up job",
"status": "completed",
"conclusion": "success",
"number": 1,
"started_at": "2020-01-09T11:06:16.000-05:00",
"completed_at": "2020-01-09T11:06:18.000-05:00"
},
```
## Consequences
- Update runner
- Update [docs](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/contexts-and-expression-syntax-for-github-actions#steps-context)


@@ -0,0 +1,35 @@
# ADR 354: Expose runner machine info
**Date**: 2020-03-02
**Status**: Pending
## Context
- Provide a mechanism in the runner to include extra information in the `Set up job` step's log.
Ex: include OS/software info from the hosted image.
## Decision
The runner will look for a file named `.setup_info` under the runner's root directory. The file is JSON with a simple schema.
```json
[
  {
    "group": "OS Detail",
    "detail": "........"
  },
  {
    "group": "Software Detail",
    "detail": "........"
  }
]
```
The runner will use `##[group]` and `##[endgroup]` to fold all detail info into an expandable group.
Both [virtual-environments](https://github.com/actions/virtual-environments) and self-hosted runners can use this mechanism to add extra logging info to the `Set up job` step's log.
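For example, with the `.setup_info` file above, the `Set up job` log would contain collapsible sections along these lines (illustrative output):
```
##[group]OS Detail
........
##[endgroup]
##[group]Software Detail
........
##[endgroup]
```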
## Consequences
1. Change the runner to read and parse the `.setup_info` file under the runner root directory on a best-effort basis.
2. [virtual-environments](https://github.com/actions/virtual-environments) generates the file during image generation.
3. Change the MMS provisioner to copy the file to the runner root directory at runtime.


@@ -0,0 +1,75 @@
# ADR 361: Wrapper Action
**Date**: 2020-03-06
**Status**: Pending
## Context
In addition to an action's regular execution, an action author may want their action to participate in:
- Job initialization
An action that collects machine resource usage (CPU/RAM/Disk) during a workflow job needs to start its perf recorder at the beginning of the job.
- Job cleanup
An action that dirties the local workspace or machine environment during execution needs to clean up those changes at the end of the job.
Ex: `actions/checkout@v2` writes `github.token` into the local `.git/config` during execution, so it defines a post-job cleanup to undo the change.
## Decision
### Add `pre` and `post` execution to action
Node Action Example:
```yaml
name: 'My action with pre'
description: 'My action with pre'
runs:
  using: 'node12'
  pre: 'setup.js'
  pre-if: 'success()'   # Optional
  main: 'index.js'
  post: 'cleanup.js'
  post-if: 'success()'  # Optional
```
Container Action Example:
```yaml
name: 'My action with pre'
description: 'My action with pre'
runs:
  using: 'docker'
  image: 'mycontainer:latest'
  pre-entrypoint: 'setup.sh'
  pre-if: 'success()'    # Optional
  entrypoint: 'entrypoint.sh'
  post-entrypoint: 'cleanup.sh'
  post-if: 'success()'   # Optional
```
Both `pre` and `post` have their `pre-if`/`post-if` default set to `always()`.
Setting `pre-if` to `always()` ensures that, no matter how `main`'s condition evaluates at runtime, `pre` has already run.
`pre` steps execute in the order the steps are defined.
`pre` is always added to the job's step list during job setup.
> An action referenced from the local repository (`./my-action`) won't get its `pre` set up correctly, since the repository hasn't been checked out during job initialization.
> We can't use the GitHub API to download the repository, because there is roughly a 3-minute delay between `git push` and the new commit becoming available for download via the GitHub API.
`post` is pushed onto a `poststeps` stack lazily, when the action's `pre` or `main` execution has passed its `if` condition check and is about to run. You can't have an action that only contains a `post`; each `post` is popped and run after all `pre` and `main` steps have finished.
> Currently `post` works for both repository actions (`org/repo@v1`) and local actions (`./my-action`).
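To illustrate the ordering, consider a job whose steps reference two hypothetical actions that both define `pre` and `post`:
```yaml
steps:
  - uses: org/action-a@v1   # defines pre and post
  - uses: org/action-b@v1   # defines pre and post
  - run: ./build.sh
```
The effective execution order would be: `action-a` pre, `action-b` pre, `action-a` main, `action-b` main, `./build.sh`, then `action-b` post and `action-a` post (posts pop off the stack in reverse order).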
Valid action:
- only has `main`
- has `pre` and `main`
- has `main` and `post`
- has `pre`, `main` and `post`
Invalid action:
- only has `pre`
- only has `post`
- has `pre` and `post`
Potential downsides of introducing `pre`:
- Extra magic with respect to step order. Users should control the step order, especially once we introduce templates.
- Eliminates the possibility of lazily downloading the action tarball: since `pre` runs by default, we have to download the tarball just to check whether the action defines a `pre`.
- `pre` doesn't work with local actions. We suggest customers use local actions to test their action changes (e.g. CI for their action) to avoid the delay between `git push` and the GitHub repo tarball download API.
- The condition on `pre` can't be controlled using dynamic step outputs; `pre` executes too early.


@@ -0,0 +1,56 @@
# ADR 0397: Support adding custom labels during runner config
**Date**: 2020-03-30
**Status**: Approved
## Context
Since configuring self-hosted runners is commonly automated via scripts, labels need to be assignable during configuration. The runner currently registers the built-in labels (os, arch) during registration but does not accept labels via command-line arguments to extend the registered set.
See Issue: https://github.com/actions/runner/issues/262
This is another version of [ADR275](https://github.com/actions/runner/pull/275)
## Decision
This ADR proposes that we add a `--labels` option to `config`, which could be used to add custom additional labels to the configured runner.
For example, to add a single extra label the operator could run:
```bash
./config.sh --labels mylabel
```
> Note: the current runner command line parsing and envvar override algorithm only supports a single argument (key).
This would add the label `mylabel` to the runner, and enable users to select the runner in their workflow using this label:
```yaml
runs-on: [self-hosted, mylabel]
```
To add multiple labels the operator could run:
```bash
./config.sh --labels mylabel,anotherlabel
```
> Note: the current runner command line parsing and envvar override algorithm only supports a single argument (key).
This would add the labels `mylabel` and `anotherlabel` to the runner, and enable users to select the runner in their workflow using these labels:
```yaml
runs-on: [self-hosted, mylabel, anotherlabel]
```
It would not be possible to remove labels from an existing runner using `config.sh`; instead, labels would have to be removed using the GitHub UI.
The labels argument is split on commas; each label is trimmed and empty strings are discarded. That effectively means commas cannot be used in unattended-config label names. Alternatively we could support escaping commas, but that is a nice-to-have.
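In an automated (unattended) setup, the labels option would typically be combined with the existing configuration flags, for example (illustrative values):
```bash
./config.sh --unattended \
  --url https://github.com/yourorg/yourrepo \
  --token <registration-token> \
  --name my-runner \
  --labels mylabel,anotherlabel
```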
## Replace
If a runner already exists and the option to replace it is chosen (interactively or via unattended config, as in this scenario), then the labels will be replaced / overwritten (not merged).
## Overriding built-in labels
Note that it is possible to register "built-in" hosted labels like `ubuntu-latest`, and this is not considered an error. It is an effective way for the org / runner admin to dictate by policy, through registration, that this set of runners will be used, without having to edit all the workflow files now and in the future.
We will also not impose other restrictions, such as limiting or validating explicitly added os / arch labels. We will assume that explicit labels were added for a reason; not restricting them offers the most flexibility and future-proofing / compatibility.
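As a concrete sketch of that policy, an admin could register a self-hosted pool with:
```bash
./config.sh --labels ubuntu-latest
```
so that, per the behavior described above, existing workflows declaring `runs-on: ubuntu-latest` use that pool without any workflow edits.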
## Consequences
The ability to add custom labels to a self-hosted runner would enable most scenarios where job runner selection based on runner capabilities or characteristics is required.

57
docs/automate.md Normal file

@@ -0,0 +1,57 @@
# Automate Configuring Self-Hosted Runners
## Export PAT
Before running any of these sample scripts, create a GitHub PAT and export it:
```bash
export RUNNER_CFG_PAT=yourPAT
```
## Create running as a service
**Scenario**: Run on a machine or VM (not container) which automates:
- Resolving latest released runner
- Download and extract latest
- Acquire a registration token
- Configure the runner
- Run as a systemd (linux) or Launchd (osx) service
:point_right: [Sample script here](../scripts/create-latest-svc.sh) :point_left:
Run as a one-liner. NOTE: replace with yourorg/yourrepo (repo level) or just yourorg (org level)
```bash
curl -s https://raw.githubusercontent.com/actions/runner/automate/scripts/create-latest-svc.sh | bash -s yourorg/yourrepo
```
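The script also accepts optional arguments for a GitHub Enterprise Server domain, runner name, and service user (see the script header further down this page); for example, to target a GHES deployment (using the hostname from the script's own example):
```bash
curl -s https://raw.githubusercontent.com/actions/runner/automate/scripts/create-latest-svc.sh | bash -s yourorg/yourrepo my.ghe.deployment.net
```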
## Uninstall running as service
**Scenario**: Run on a machine or VM (not container) which automates:
- Stops and uninstalls the systemd (linux) or Launchd (osx) service
- Acquires a removal token
- Removes the runner
:point_right: [Sample script here](../scripts/remove-svc.sh) :point_left:
Repo level one liner. NOTE: replace with yourorg/yourrepo (repo level) or just yourorg (org level)
```bash
curl -s https://raw.githubusercontent.com/actions/runner/automate/scripts/remove-svc.sh | bash -s yourorg/yourrepo
```
### Delete an offline runner
**Scenario**: Deletes a registered runner that is offline:
- Ensures the runner is offline
- Resolves id from name
- Deletes the runner
:point_right: [Sample script here](../scripts/delete.sh) :point_left:
Repo level one-liner. NOTE: replace with yourorg/yourrepo (repo level) or just yourorg (org level) and replace runnername
```bash
curl -s https://raw.githubusercontent.com/actions/runner/automate/scripts/delete.sh | bash -s yourorg/yourrepo runnername
```


@@ -23,7 +23,7 @@ An ADR is an Architectural Decision Record. This allows consensus on the direct
### Required Dev Dependencies
![Win](res/win_sm.png) Git for Windows [Install Here](https://git-scm.com/downloads) (needed for dev sh script)
![Win](res/win_sm.png) ![*nix](res/linux_sm.png) Git for Windows and Linux [Install Here](https://git-scm.com/downloads) (needed for dev sh script)
### To Build, Test, Layout
@@ -43,17 +43,31 @@ Sample developer flow:
```bash
git clone https://github.com/actions/runner
cd runner
cd ./src
./dev.(sh/cmd) layout # the runner that build from source is in {root}/_layout
./dev.(sh/cmd) layout # the runner that built from source is in {root}/_layout
<make code changes>
./dev.(sh/cmd) build # {root}/_layout will get updated
./dev.(sh/cmd) test # run all unit tests before git commit/push
```
View logs:
```bash
cd runner/_layout/_diag
ls
cat (Runner/Worker)_TIMESTAMP.log # view your log file
```
Run Runner:
```bash
cd runner/_layout
./run.sh # run your custom runner
```
### Editors
[Using Visual Studio Code](https://code.visualstudio.com/)
[Using Visual Studio 2019](https://www.visualstudio.com/vs/)
[Using Visual Studio](https://code.visualstudio.com/docs)
### Styling


@@ -40,7 +40,7 @@ Debian based OS (Debian, Ubuntu, Linux Mint)
- libssl1.1, libssl1.0.2 or libssl1.0.0
- libicu63, libicu60, libicu57 or libicu55
Fedora based OS (Fedora, Redhat, Centos, Oracle Linux 7)
Fedora based OS (Fedora, Red Hat Enterprise Linux, CentOS, Oracle Linux 7)
- lttng-ust
- openssl-libs


@@ -1,36 +1,29 @@
## Features
- Expose whether debug is on/off via RUNNER_DEBUG. (#253)
- Upload log on runner when worker get killed due to cancellation timeout. (#255)
- Update config.sh/cmd --help documentation (#282)
- Set http_proxy and related env vars for job/service containers (#304)
- Set both http_proxy and HTTP_PROXY env for runner/worker processes. (#298)
- Resolve action download info from server (#508, #515, #550)
- Print runner and machine name to log. (#539)
## Bugs
- Verify runner Windows service hash started successfully after configuration (#236)
- Detect source file path in L0 without using env. (#257)
- Handle escaped '%' in commands data section (#200)
- Allow container to be null/empty during matrix expansion (#266)
- Translate problem matcher file to host path (#272)
- Change hashFiles() expression function to use @actions/glob. (#268)
- Default post-job action's condition to always(). (#293)
- Support action.yaml file as action's entry file (#288)
- Trace javascript action exit code to debug instead of user logs (#290)
- Change prompt message when removing a runner to lines up with GitHub.com UI (#303)
- Include step.env as part of env context. (#300)
- Update Base64 Encoders to deal with suffixes (#284)
- Reduce input validation warnings (#506)
- Fix null ref exception in SecretMasker caused by `hashfiles` timeout. (#516)
- Add libicu66 to `./installDependencies.sh` for Ubuntu 20.04 (#535)
- Fix DataContract with Token service (#532)
- Skip search $PATH on command with fully qualified path (#526)
- Restore SELinux context on service file when SELinux is enabled (#525)
## Misc
- Move .sln file under ./src (#238)
- Treat warnings as errors during compile (#249)
- Remove SPS/Token migration code. Remove GHES url manipulate code. (#513)
- Add sub-step for developer flow for clarity (#523)
- Update Links and Language to Git + VSCode (#522)
- Update runner configuration exception message (#540)
## Windows x64
We recommend configuring the runner in a root folder of the Windows drive (e.g. "C:\actions-runner"). This will help avoid issues related to service identity folder permissions and long file path restrictions on Windows
```
// Create a folder under the drive root
We recommend configuring the runner in a root folder of the Windows drive (e.g. "C:\actions-runner"). This will help avoid issues related to service identity folder permissions and long file path restrictions on Windows.
The following snippet needs to be run in `powershell`:
``` powershell
# Create a folder under the drive root
mkdir \actions-runner ; cd \actions-runner
// Download the latest runner package
# Download the latest runner package
Invoke-WebRequest -Uri https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-win-x64-<RUNNER_VERSION>.zip -OutFile actions-runner-win-x64-<RUNNER_VERSION>.zip
// Extract the installer
# Extract the installer
Add-Type -AssemblyName System.IO.Compression.FileSystem ;
[System.IO.Compression.ZipFile]::ExtractToDirectory("$PWD\actions-runner-win-x64-<RUNNER_VERSION>.zip", "$PWD")
```
@@ -38,44 +31,44 @@ Add-Type -AssemblyName System.IO.Compression.FileSystem ;
## OSX
``` bash
// Create a folder
# Create a folder
mkdir actions-runner && cd actions-runner
// Download the latest runner package
# Download the latest runner package
curl -O -L https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-osx-x64-<RUNNER_VERSION>.tar.gz
// Extract the installer
# Extract the installer
tar xzf ./actions-runner-osx-x64-<RUNNER_VERSION>.tar.gz
```
## Linux x64
``` bash
// Create a folder
# Create a folder
mkdir actions-runner && cd actions-runner
// Download the latest runner package
# Download the latest runner package
curl -O -L https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-linux-x64-<RUNNER_VERSION>.tar.gz
// Extract the installer
# Extract the installer
tar xzf ./actions-runner-linux-x64-<RUNNER_VERSION>.tar.gz
```
## Linux arm64 (Pre-release)
``` bash
// Create a folder
# Create a folder
mkdir actions-runner && cd actions-runner
// Download the latest runner package
# Download the latest runner package
curl -O -L https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-linux-arm64-<RUNNER_VERSION>.tar.gz
// Extract the installer
# Extract the installer
tar xzf ./actions-runner-linux-arm64-<RUNNER_VERSION>.tar.gz
```
## Linux arm (Pre-release)
``` bash
// Create a folder
# Create a folder
mkdir actions-runner && cd actions-runner
// Download the latest runner package
# Download the latest runner package
curl -O -L https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-linux-arm-<RUNNER_VERSION>.tar.gz
// Extract the installer
# Extract the installer
tar xzf ./actions-runner-linux-arm-<RUNNER_VERSION>.tar.gz
```


@@ -1 +1 @@
2.164.0
<Update to ./src/runnerversion when creating release>

4
scripts/README.md Normal file

@@ -0,0 +1,4 @@
# Sample scripts for self-hosted runners
Here are some examples to work from if you'd like to automate your use of self-hosted runners.
See the docs [here](../docs/automate.md).

147
scripts/create-latest-svc.sh Executable file

@@ -0,0 +1,147 @@
#!/bin/bash
set -e
#
# Downloads the latest released (not pre-release) runner
# Configures it as a service
#
# Examples:
# RUNNER_CFG_PAT=<yourPAT> ./create-latest-svc.sh myuser/myrepo my.ghe.deployment.net
# RUNNER_CFG_PAT=<yourPAT> ./create-latest-svc.sh myorg my.ghe.deployment.net
#
# Usage:
# export RUNNER_CFG_PAT=<yourPAT>
# ./create-latest-svc scope [ghe_domain] [name] [user]
#
# scope required repo (:owner/:repo) or org (:organization)
# ghe_domain optional the fully qualified domain name of your GitHub Enterprise Server deployment
# name optional defaults to hostname
# user optional user svc will run as. defaults to current
#
# Notes:
# PATS over envvars are more secure
# Should be used on VMs and not containers
# Works on OSX and Linux
# Assumes x64 arch
#
runner_scope=${1}
ghe_hostname=${2}
runner_name=${3:-$(hostname)}
svc_user=${4:-$USER}
echo "Configuring runner @ ${runner_scope}"
sudo echo
#---------------------------------------
# Validate Environment
#---------------------------------------
runner_plat=linux
[ ! -z "$(which sw_vers)" ] && runner_plat=osx;
function fatal()
{
echo "error: $1" >&2
exit 1
}
if [ -z "${runner_scope}" ]; then fatal "supply scope as argument 1"; fi
if [ -z "${RUNNER_CFG_PAT}" ]; then fatal "RUNNER_CFG_PAT must be set before calling"; fi
which curl || fatal "curl required. Please install in PATH with apt-get, brew, etc"
which jq || fatal "jq required. Please install in PATH with apt-get, brew, etc"
# bail early if there's already a runner there. also sudo early
if [ -d ./runner ]; then
fatal "Runner already exists. Use a different directory or delete ./runner"
fi
sudo -u ${svc_user} mkdir runner
# TODO: validate not in a container
# TODO: validate systemd or osx svc installer
#--------------------------------------
# Get a config token
#--------------------------------------
echo
echo "Generating a registration token..."
base_api_url="https://api.github.com"
if [ -n "${ghe_hostname}" ]; then
base_api_url="https://${ghe_hostname}/api/v3"
fi
# if the scope has a slash, it's a repo runner
orgs_or_repos="orgs"
if [[ "$runner_scope" == *\/* ]]; then
orgs_or_repos="repos"
fi
export RUNNER_TOKEN=$(curl -s -X POST ${base_api_url}/${orgs_or_repos}/${runner_scope}/actions/runners/registration-token -H "accept: application/vnd.github.everest-preview+json" -H "authorization: token ${RUNNER_CFG_PAT}" | jq -r '.token')
if [ "null" == "$RUNNER_TOKEN" -o -z "$RUNNER_TOKEN" ]; then fatal "Failed to get a token"; fi
#---------------------------------------
# Download latest released and extract
#---------------------------------------
echo
echo "Downloading latest runner ..."
# For the GHES Alpha, download the runner from github.com
latest_version_label=$(curl -s -X GET 'https://api.github.com/repos/actions/runner/releases/latest' | jq -r '.tag_name')
latest_version=$(echo ${latest_version_label:1})
runner_file="actions-runner-${runner_plat}-x64-${latest_version}.tar.gz"
if [ -f "${runner_file}" ]; then
echo "${runner_file} exists. skipping download."
else
runner_url="https://github.com/actions/runner/releases/download/${latest_version_label}/${runner_file}"
echo "Downloading ${latest_version_label} for ${runner_plat} ..."
echo $runner_url
curl -O -L ${runner_url}
fi
ls -la *.tar.gz
#---------------------------------------------------
# extract to runner directory in this directory
#---------------------------------------------------
echo
echo "Extracting ${runner_file} to ./runner"
tar xzf "./${runner_file}" -C runner
# export of pass
sudo chown -R $svc_user ./runner
pushd ./runner
#---------------------------------------
# Unattend config
#---------------------------------------
runner_url="https://github.com/${runner_scope}"
if [ -n "${ghe_hostname}" ]; then
runner_url="https://${ghe_hostname}/${runner_scope}"
fi
echo
echo "Configuring ${runner_name} @ $runner_url"
echo "./config.sh --unattended --url $runner_url --token *** --name $runner_name"
sudo -E -u ${svc_user} ./config.sh --unattended --url $runner_url --token $RUNNER_TOKEN --name $runner_name
#---------------------------------------
# Configuring as a service
#---------------------------------------
echo
echo "Configuring as a service ..."
prefix=""
if [ "${runner_plat}" == "linux" ]; then
prefix="sudo "
fi
${prefix}./svc.sh install ${svc_user}
${prefix}./svc.sh start

83
scripts/delete.sh Executable file

@@ -0,0 +1,83 @@
#!/bin/bash
set -e
#
# Force deletes a runner from the service
# The caller should have already ensured the runner is gone and/or stopped
#
# Examples:
# RUNNER_CFG_PAT=<yourPAT> ./delete.sh myuser/myrepo myname
# RUNNER_CFG_PAT=<yourPAT> ./delete.sh myorg
#
# Usage:
# export RUNNER_CFG_PAT=<yourPAT>
# ./delete.sh scope name
#
# scope required repo (:owner/:repo) or org (:organization)
# name optional defaults to hostname. name to delete
#
# Notes:
# PATS over envvars are more secure
# Works on OSX and Linux
# Assumes x64 arch
#
runner_scope=${1}
runner_name=${2}
echo "Deleting runner ${runner_name} @ ${runner_scope}"
function fatal()
{
echo "error: $1" >&2
exit 1
}
if [ -z "${runner_scope}" ]; then fatal "supply scope as argument 1"; fi
if [ -z "${runner_name}" ]; then fatal "supply name as argument 2"; fi
if [ -z "${RUNNER_CFG_PAT}" ]; then fatal "RUNNER_CFG_PAT must be set before calling"; fi
which curl || fatal "curl required. Please install in PATH with apt-get, brew, etc"
which jq || fatal "jq required. Please install in PATH with apt-get, brew, etc"
base_api_url="https://api.github.com/orgs"
if [[ "$runner_scope" == *\/* ]]; then
base_api_url="https://api.github.com/repos"
fi
#--------------------------------------
# Ensure offline
#--------------------------------------
runner_status=$(curl -s -X GET ${base_api_url}/${runner_scope}/actions/runners?per_page=100 -H "accept: application/vnd.github.everest-preview+json" -H "authorization: token ${RUNNER_CFG_PAT}" \
| jq -M -j ".runners | .[] | [select(.name == \"${runner_name}\")] | .[0].status")
if [ -z "${runner_status}" ]; then
fatal "Could not find runner with name ${runner_name}"
fi
echo "Status: ${runner_status}"
if [ "${runner_status}" != "offline" ]; then
fatal "Runner should be offline before removing"
fi
#--------------------------------------
# Get id of runner to remove
#--------------------------------------
runner_id=$(curl -s -X GET ${base_api_url}/${runner_scope}/actions/runners?per_page=100 -H "accept: application/vnd.github.everest-preview+json" -H "authorization: token ${RUNNER_CFG_PAT}" \
| jq -M -j ".runners | .[] | [select(.name == \"${runner_name}\")] | .[0].id")
if [ -z "${runner_id}" ]; then
fatal "Could not find runner with name ${runner_name}"
fi
echo "Removing id ${runner_id}"
#--------------------------------------
# Remove the runner
#--------------------------------------
curl -s -X DELETE ${base_api_url}/${runner_scope}/actions/runners/${runner_id} -H "authorization: token ${RUNNER_CFG_PAT}"
echo "Done."

76
scripts/remove-svc.sh Executable file

@@ -0,0 +1,76 @@
#!/bin/bash
set -e
#
# Removes a runner running as a service
# Must be run on the machine where the service is run
#
# Examples:
# RUNNER_CFG_PAT=<yourPAT> ./remove-svc.sh myuser/myrepo
# RUNNER_CFG_PAT=<yourPAT> ./remove-svc.sh myorg
#
# Usage:
# export RUNNER_CFG_PAT=<yourPAT>
# ./remove-svc scope name
#
# scope required repo (:owner/:repo) or org (:organization)
# name optional defaults to hostname. name to uninstall and remove
#
# Notes:
# PATS over envvars are more secure
# Should be used on VMs and not containers
# Works on OSX and Linux
# Assumes x64 arch
#
runner_scope=${1}
runner_name=${2:-$(hostname)}
echo "Uninstalling runner ${runner_name} @ ${runner_scope}"
sudo echo
function fatal()
{
echo "error: $1" >&2
exit 1
}
if [ -z "${runner_scope}" ]; then fatal "supply scope as argument 1"; fi
if [ -z "${RUNNER_CFG_PAT}" ]; then fatal "RUNNER_CFG_PAT must be set before calling"; fi
which curl || fatal "curl required. Please install in PATH with apt-get, brew, etc"
which jq || fatal "jq required. Please install in PATH with apt-get, brew, etc"
runner_plat=linux
[ ! -z "$(which sw_vers)" ] && runner_plat=osx;
#--------------------------------------
# Get a remove token
#--------------------------------------
echo
echo "Generating a removal token..."
# if the scope has a slash, it's a repo runner
base_api_url="https://api.github.com/orgs"
if [[ "$runner_scope" == *\/* ]]; then
base_api_url="https://api.github.com/repos"
fi
export REMOVE_TOKEN=$(curl -s -X POST ${base_api_url}/${runner_scope}/actions/runners/remove-token -H "accept: application/vnd.github.everest-preview+json" -H "authorization: token ${RUNNER_CFG_PAT}" | jq -r '.token')
if [ -z "$REMOVE_TOKEN" ]; then fatal "Failed to get a token"; fi
#---------------------------------------
# Stop and uninstall the service
#---------------------------------------
echo
echo "Uninstall the service ..."
pushd ./runner
prefix=""
if [ "${runner_plat}" == "linux" ]; then
prefix="sudo "
fi
${prefix}./svc.sh stop
${prefix}./svc.sh uninstall
${prefix}./config.sh remove --token $REMOVE_TOKEN


@@ -154,7 +154,16 @@ function Invoke-With-Retry([ScriptBlock]$ScriptBlock, [int]$MaxAttempts = 3, [in
function Get-Machine-Architecture() {
    Say-Invocation $MyInvocation
    # possible values: amd64, x64, x86, arm64, arm
    # On PS x86, PROCESSOR_ARCHITECTURE reports x86 even on x64 systems.
    # To get the correct architecture, we need to use PROCESSOR_ARCHITEW6432.
    # PS x64 doesn't define this, so we fall back to PROCESSOR_ARCHITECTURE.
    # Possible values: amd64, x64, x86, arm64, arm
    if( $ENV:PROCESSOR_ARCHITEW6432 -ne $null )
    {
        return $ENV:PROCESSOR_ARCHITEW6432
    }
    return $ENV:PROCESSOR_ARCHITECTURE
}
@@ -684,3 +693,196 @@ Prepend-Sdk-InstallRoot-To-Path -InstallRoot $InstallRoot -BinFolderRelativePath
Say "Installation finished"
exit 0
# SIG # Begin signature block
# t26Y/yjBaQImz32psrfJEMbQ7cl789s8WOx8
# SIG # End signature block

View File

@@ -172,7 +172,7 @@ get_current_os_name() {
return 0
elif [ "$uname" = "FreeBSD" ]; then
echo "freebsd"
return 0
return 0
elif [ "$uname" = "Linux" ]; then
local linux_platform_name
linux_platform_name="$(get_linux_platform_name)" || { echo "linux" && return 0 ; }
@@ -728,11 +728,12 @@ downloadcurl() {
# Append feed_credential as late as possible before calling curl to avoid logging feed_credential
remote_path="${remote_path}${feed_credential}"
local curl_options="--retry 20 --retry-delay 2 --connect-timeout 15 -sSL -f --create-dirs "
local failed=false
if [ -z "$out_path" ]; then
curl --retry 10 -sSL -f --create-dirs "$remote_path" || failed=true
curl $curl_options "$remote_path" || failed=true
else
curl --retry 10 -sSL -f --create-dirs -o "$out_path" "$remote_path" || failed=true
curl $curl_options -o "$out_path" "$remote_path" || failed=true
fi
if [ "$failed" = true ]; then
say_verbose "Curl download failed"
@@ -748,12 +749,12 @@ downloadwget() {
# Append feed_credential as late as possible before calling wget to avoid logging feed_credential
remote_path="${remote_path}${feed_credential}"
local wget_options="--tries 20 --waitretry 2 --connect-timeout 15 "
local failed=false
if [ -z "$out_path" ]; then
wget -q --tries 10 -O - "$remote_path" || failed=true
wget -q $wget_options -O - "$remote_path" || failed=true
else
wget --tries 10 -O "$out_path" "$remote_path" || failed=true
wget $wget_options -O "$out_path" "$remote_path" || failed=true
fi
if [ "$failed" = true ]; then
say_verbose "Wget download failed"

File diff suppressed because it is too large

View File

@@ -27,7 +27,7 @@
"@types/node": "^12.7.12",
"@typescript-eslint/parser": "^2.8.0",
"@zeit/ncc": "^0.20.5",
"eslint": "^5.16.0",
"eslint": "^6.8.0",
"eslint-plugin-github": "^2.0.0",
"prettier": "^1.19.1",
"typescript": "^3.6.4"

View File

@@ -1,6 +1,7 @@
#!/bin/bash
SVC_NAME="{{SvcNameVar}}"
SVC_NAME=${SVC_NAME// /_}
SVC_DESCRIPTION="{{SvcDescription}}"
user_id=`id -u`

View File

@@ -9,7 +9,7 @@ fi
# Determine OS type
# Debian based OS (Debian, Ubuntu, Linux Mint) has /etc/debian_version
# Fedora based OS (Fedora, Redhat, Centos, Oracle Linux 7) has /etc/redhat-release
# Fedora based OS (Fedora, Red Hat Enterprise Linux, CentOS, Oracle Linux 7) has /etc/redhat-release
# SUSE based OS (OpenSUSE, SUSE Enterprise) has ID_LIKE=suse in /etc/os-release
function print_errormessage()
@@ -70,8 +70,8 @@ then
exit 1
fi
# libicu version prefer: libicu63 -> libicu60 -> libicu57 -> libicu55 -> libicu52
apt install -y libicu63 || apt install -y libicu60 || apt install -y libicu57 || apt install -y libicu55 || apt install -y libicu52
# libicu version prefer: libicu66 -> libicu63 -> libicu60 -> libicu57 -> libicu55 -> libicu52
apt install -y libicu66 || apt install -y libicu63 || apt install -y libicu60 || apt install -y libicu57 || apt install -y libicu55 || apt install -y libicu52
if [ $? -ne 0 ]
then
echo "'apt' failed with exit code '$?'"
@@ -99,8 +99,8 @@ then
exit 1
fi
# libicu version prefer: libicu63 -> libicu60 -> libicu57 -> libicu55 -> libicu52
apt-get install -y libicu63 || apt-get install -y libicu60 || apt install -y libicu57 || apt install -y libicu55 || apt install -y libicu52
# libicu version prefer: libicu66 -> libicu63 -> libicu60 -> libicu57 -> libicu55 -> libicu52
apt-get install -y libicu66 || apt-get install -y libicu63 || apt-get install -y libicu60 || apt install -y libicu57 || apt install -y libicu55 || apt install -y libicu52
if [ $? -ne 0 ]
then
echo "'apt-get' failed with exit code '$?'"
@@ -116,12 +116,12 @@ then
elif [ -e /etc/redhat-release ]
then
echo "The current OS is Fedora based"
echo "--------Redhat Version--------"
echo "--Fedora/RHEL/CentOS Version--"
cat /etc/redhat-release
echo "------------------------------"
# use dnf on fedora
# use yum on centos and redhat
# use yum on centos and rhel
if [ -e /etc/fedora-release ]
then
command -v dnf
@@ -191,7 +191,7 @@ then
redhatRelease=$(</etc/redhat-release)
if [[ $redhatRelease == "CentOS release 6."* || $redhatRelease == "Red Hat Enterprise Linux Server release 6."* ]]
then
echo "The current OS is Red Hat Enterprise Linux 6 or Centos 6"
echo "The current OS is Red Hat Enterprise Linux 6 or CentOS 6"
# Install known dependencies, as a best effort.
# The remaining dependencies are covered by the GitHub doc that will be shown by `print_rhel6message`

View File

@@ -0,0 +1,13 @@
const { spawn } = require('child_process');
// argv[0] = node
// argv[1] = macos-run-invoker.js
var shell = process.argv[2];
var args = process.argv.slice(3);
console.log(`::debug::macos-run-invoker: ${shell}`);
console.log(`::debug::macos-run-invoker: ${JSON.stringify(args)}`);
var launch = spawn(shell, args, { stdio: 'inherit' });
launch.on('exit', function (code) {
if (code !== 0) {
process.exit(code);
}
});

View File

@@ -1,6 +1,7 @@
#!/bin/bash
SVC_NAME="{{SvcNameVar}}"
SVC_NAME=${SVC_NAME// /_}
SVC_DESCRIPTION="{{SvcDescription}}"
SVC_CMD=$1
@@ -62,12 +63,25 @@ function install()
sed "s/{{User}}/${run_as_user}/g; s/{{Description}}/$(echo ${SVC_DESCRIPTION} | sed -e 's/[\/&]/\\&/g')/g; s/{{RunnerRoot}}/$(echo ${RUNNER_ROOT} | sed -e 's/[\/&]/\\&/g')/g;" "${TEMPLATE_PATH}" > "${TEMP_PATH}" || failed "failed to create replacement temp file"
mv "${TEMP_PATH}" "${UNIT_PATH}" || failed "failed to copy unit file"
# Recent Fedora based Linux (CentOS/Redhat) has SELinux enabled by default
# We need to restore security context on the unit file we added otherwise SystemD have no access to it.
command -v getenforce > /dev/null
if [ $? -eq 0 ]
then
selinuxEnabled=$(getenforce)
if [[ $selinuxEnabled == "Enforcing" ]]
then
# SELinux is enabled, we will need to Restore SELinux Context for the service file
restorecon -r -v "${UNIT_PATH}" || failed "failed to restore SELinux context on ${UNIT_PATH}"
fi
fi
# unit file should not be executable and world writable
chmod 664 ${UNIT_PATH} || failed "failed to set permissions on ${UNIT_PATH}"
chmod 664 "${UNIT_PATH}" || failed "failed to set permissions on ${UNIT_PATH}"
systemctl daemon-reload || failed "failed to reload daemons"
# Since we started with sudo, runsvc.sh will be owned by root. Change this to current login user.
# Since we started with sudo, runsvc.sh will be owned by root. Change this to current login user.
cp ./bin/runsvc.sh ./runsvc.sh || failed "failed to copy runsvc.sh"
chown ${run_as_uid}:${run_as_gid} ./runsvc.sh || failed "failed to set owner for runsvc.sh"
chmod 755 ./runsvc.sh || failed "failed to set permission for runsvc.sh"

View File

@@ -15,6 +15,9 @@ namespace GitHub.Runner.Common
[DataContract]
public sealed class RunnerSettings
{
[DataMember(Name = "IsHostedServer", EmitDefaultValue = false)]
private bool? _isHostedServer;
[DataMember(EmitDefaultValue = false)]
public int AgentId { get; set; }
@@ -42,6 +45,21 @@ namespace GitHub.Runner.Common
[DataMember(EmitDefaultValue = false)]
public string MonitorSocketAddress { get; set; }
[IgnoreDataMember]
public bool IsHostedServer
{
get
{
// Old runners do not have this property. Hosted runners likely don't have this property either.
return _isHostedServer ?? true;
}
set
{
_isHostedServer = value;
}
}
/// <summary>
// Computed property for convenience. Can either return:
// 1. If runner was configured at the repo level, returns something like: "myorg/myrepo"
@@ -69,6 +87,15 @@ namespace GitHub.Runner.Common
return repoOrOrgName;
}
}
[OnSerializing]
private void OnSerializing(StreamingContext context)
{
if (_isHostedServer.HasValue && _isHostedServer.Value)
{
_isHostedServer = null;
}
}
}
[ServiceLocator(Default = typeof(ConfigurationStore))]
@@ -78,10 +105,12 @@ namespace GitHub.Runner.Common
bool IsServiceConfigured();
bool HasCredentials();
CredentialData GetCredentials();
CredentialData GetMigratedCredentials();
RunnerSettings GetSettings();
void SaveCredential(CredentialData credential);
void SaveSettings(RunnerSettings settings);
void DeleteCredential();
void DeleteMigratedCredential();
void DeleteSettings();
}
@@ -90,9 +119,11 @@ namespace GitHub.Runner.Common
private string _binPath;
private string _configFilePath;
private string _credFilePath;
private string _migratedCredFilePath;
private string _serviceConfigFilePath;
private CredentialData _creds;
private CredentialData _migratedCreds;
private RunnerSettings _settings;
public override void Initialize(IHostContext hostContext)
@@ -114,6 +145,9 @@ namespace GitHub.Runner.Common
_credFilePath = hostContext.GetConfigFile(WellKnownConfigFile.Credentials);
Trace.Info("CredFilePath: {0}", _credFilePath);
_migratedCredFilePath = hostContext.GetConfigFile(WellKnownConfigFile.MigratedCredentials);
Trace.Info("MigratedCredFilePath: {0}", _migratedCredFilePath);
_serviceConfigFilePath = hostContext.GetConfigFile(WellKnownConfigFile.Service);
Trace.Info("ServiceConfigFilePath: {0}", _serviceConfigFilePath);
}
@@ -123,7 +157,7 @@ namespace GitHub.Runner.Common
public bool HasCredentials()
{
Trace.Info("HasCredentials()");
bool credsStored = (new FileInfo(_credFilePath)).Exists;
bool credsStored = (new FileInfo(_credFilePath)).Exists || (new FileInfo(_migratedCredFilePath)).Exists;
Trace.Info("stored {0}", credsStored);
return credsStored;
}
@@ -154,6 +188,16 @@ namespace GitHub.Runner.Common
return _creds;
}
public CredentialData GetMigratedCredentials()
{
if (_migratedCreds == null && File.Exists(_migratedCredFilePath))
{
_migratedCreds = IOUtil.LoadObject<CredentialData>(_migratedCredFilePath);
}
return _migratedCreds;
}
public RunnerSettings GetSettings()
{
if (_settings == null)
@@ -206,6 +250,12 @@ namespace GitHub.Runner.Common
public void DeleteCredential()
{
IOUtil.Delete(_credFilePath, default(CancellationToken));
IOUtil.Delete(_migratedCredFilePath, default(CancellationToken));
}
public void DeleteMigratedCredential()
{
IOUtil.Delete(_migratedCredFilePath, default(CancellationToken));
}
public void DeleteSettings()

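A minimal, self-contained sketch of the serialization pattern introduced above for IsHostedServer: a nullable backing field with EmitDefaultValue = false, an [IgnoreDataMember] property that falls back to "hosted" when the value is missing, and an [OnSerializing] hook that drops the field when it holds the hosted default. The Settings class, the Demo harness, and the use of DataContractJsonSerializer are illustrative assumptions, not the runner's actual IOUtil persistence path.

```csharp
using System;
using System.IO;
using System.Runtime.Serialization;
using System.Runtime.Serialization.Json;

[DataContract]
public sealed class Settings
{
    // Nullable backing field: omitted from JSON when null (EmitDefaultValue = false).
    [DataMember(Name = "IsHostedServer", EmitDefaultValue = false)]
    private bool? _isHostedServer;

    [IgnoreDataMember]
    public bool IsHostedServer
    {
        // A missing value is treated as "hosted", matching the fallback in the diff above.
        get { return _isHostedServer ?? true; }
        set { _isHostedServer = value; }
    }

    [OnSerializing]
    private void OnSerializing(StreamingContext context)
    {
        // Drop the field when it holds the default (true) so nothing is persisted for hosted runners.
        if (_isHostedServer.HasValue && _isHostedServer.Value)
        {
            _isHostedServer = null;
        }
    }
}

public static class Demo
{
    private static string ToJson(Settings settings)
    {
        var serializer = new DataContractJsonSerializer(typeof(Settings));
        using (var stream = new MemoryStream())
        {
            serializer.WriteObject(stream, settings);
            return System.Text.Encoding.UTF8.GetString(stream.ToArray());
        }
    }

    public static void Main()
    {
        Console.WriteLine(ToJson(new Settings { IsHostedServer = true }));   // {}
        Console.WriteLine(ToJson(new Settings { IsHostedServer = false }));  // {"IsHostedServer":false}
    }
}
```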
View File

@@ -19,11 +19,13 @@ namespace GitHub.Runner.Common
{
Runner,
Credentials,
MigratedCredentials,
RSACredentials,
Service,
CredentialStore,
Certificates,
Options,
SetupInfo,
}
public static class Constants
@@ -85,6 +87,7 @@ namespace GitHub.Runner.Common
public static class Args
{
public static readonly string Auth = "auth";
public static readonly string Labels = "labels";
public static readonly string MonitorSocketAddress = "monitorsocketaddress";
public static readonly string Name = "name";
public static readonly string Pool = "pool";
@@ -134,6 +137,9 @@ namespace GitHub.Runner.Common
public const int RunnerUpdating = 3;
public const int RunOnceRunnerUpdating = 4;
}
public static readonly string InternalTelemetryIssueDataKey = "_internal_telemetry";
public static readonly string WorkerCrash = "WORKER_CRASH";
}
public static class RunnerEvent

View File

@@ -1,19 +1,18 @@
using GitHub.Runner.Common.Util;
using System;
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Diagnostics;
using System.Diagnostics.Tracing;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Reflection;
using System.Runtime.Loader;
using System.Threading;
using System.Threading.Tasks;
using System.Diagnostics;
using System.Net.Http;
using System.Diagnostics.Tracing;
using GitHub.DistributedTask.Logging;
using System.Net.Http.Headers;
using GitHub.Runner.Sdk;
namespace GitHub.Runner.Common
@@ -24,7 +23,7 @@ namespace GitHub.Runner.Common
CancellationToken RunnerShutdownToken { get; }
ShutdownReason RunnerShutdownReason { get; }
ISecretMasker SecretMasker { get; }
ProductInfoHeaderValue UserAgent { get; }
List<ProductInfoHeaderValue> UserAgents { get; }
RunnerWebProxy WebProxy { get; }
string GetDirectory(WellKnownDirectory directory);
string GetConfigFile(WellKnownConfigFile configFile);
@@ -54,7 +53,7 @@ namespace GitHub.Runner.Common
private readonly ConcurrentDictionary<Type, object> _serviceInstances = new ConcurrentDictionary<Type, object>();
private readonly ConcurrentDictionary<Type, Type> _serviceTypes = new ConcurrentDictionary<Type, Type>();
private readonly ISecretMasker _secretMasker = new SecretMasker();
private readonly ProductInfoHeaderValue _userAgent = new ProductInfoHeaderValue($"GitHubActionsRunner-{BuildConstants.RunnerPackage.PackageName}", BuildConstants.RunnerPackage.Version);
private readonly List<ProductInfoHeaderValue> _userAgents = new List<ProductInfoHeaderValue>() { new ProductInfoHeaderValue($"GitHubActionsRunner-{BuildConstants.RunnerPackage.PackageName}", BuildConstants.RunnerPackage.Version) };
private CancellationTokenSource _runnerShutdownTokenSource = new CancellationTokenSource();
private object _perfLock = new object();
private Tracing _trace;
@@ -72,7 +71,7 @@ namespace GitHub.Runner.Common
public CancellationToken RunnerShutdownToken => _runnerShutdownTokenSource.Token;
public ShutdownReason RunnerShutdownReason { get; private set; }
public ISecretMasker SecretMasker => _secretMasker;
public ProductInfoHeaderValue UserAgent => _userAgent;
public List<ProductInfoHeaderValue> UserAgents => _userAgents;
public RunnerWebProxy WebProxy => _webProxy;
public HostContext(string hostType, string logFile = null)
{
@@ -89,6 +88,7 @@ namespace GitHub.Runner.Common
this.SecretMasker.AddValueEncoder(ValueEncoders.JsonStringEscape);
this.SecretMasker.AddValueEncoder(ValueEncoders.UriDataEscape);
this.SecretMasker.AddValueEncoder(ValueEncoders.XmlDataEscape);
this.SecretMasker.AddValueEncoder(ValueEncoders.TrimDoubleQuotes);
// Create the trace manager.
if (string.IsNullOrEmpty(logFile))
@@ -189,6 +189,17 @@ namespace GitHub.Runner.Common
{
_trace.Info($"No proxy settings were found based on environmental variables (http_proxy/https_proxy/HTTP_PROXY/HTTPS_PROXY)");
}
var credFile = GetConfigFile(WellKnownConfigFile.Credentials);
if (File.Exists(credFile))
{
var credData = IOUtil.LoadObject<CredentialData>(credFile);
if (credData != null &&
credData.Data.TryGetValue("clientId", out var clientId))
{
_userAgents.Add(new ProductInfoHeaderValue($"RunnerId", clientId));
}
}
}
public string GetDirectory(WellKnownDirectory directory)
@@ -281,6 +292,12 @@ namespace GitHub.Runner.Common
".credentials");
break;
case WellKnownConfigFile.MigratedCredentials:
path = Path.Combine(
GetDirectory(WellKnownDirectory.Root),
".credentials_migrated");
break;
case WellKnownConfigFile.RSACredentials:
path = Path.Combine(
GetDirectory(WellKnownDirectory.Root),
@@ -316,6 +333,13 @@ namespace GitHub.Runner.Common
GetDirectory(WellKnownDirectory.Root),
".options");
break;
case WellKnownConfigFile.SetupInfo:
path = Path.Combine(
GetDirectory(WellKnownDirectory.Root),
".setup_info");
break;
default:
throw new NotSupportedException($"Unexpected well known config file: '{configFile}'");
}
@@ -590,9 +614,8 @@ namespace GitHub.Runner.Common
{
public static HttpClientHandler CreateHttpClientHandler(this IHostContext context)
{
HttpClientHandler clientHandler = new HttpClientHandler();
clientHandler.Proxy = context.WebProxy;
return clientHandler;
var handlerFactory = context.GetService<IHttpClientHandlerFactory>();
return handlerFactory.CreateClientHandler(context.WebProxy);
}
}

View File

@@ -0,0 +1,19 @@
using System.Net.Http;
using GitHub.Runner.Sdk;
namespace GitHub.Runner.Common
{
[ServiceLocator(Default = typeof(HttpClientHandlerFactory))]
public interface IHttpClientHandlerFactory : IRunnerService
{
HttpClientHandler CreateClientHandler(RunnerWebProxy webProxy);
}
public class HttpClientHandlerFactory : RunnerService, IHttpClientHandlerFactory
{
public HttpClientHandler CreateClientHandler(RunnerWebProxy webProxy)
{
return new HttpClientHandler() { Proxy = webProxy };
}
}
}

View File

@@ -22,6 +22,7 @@ namespace GitHub.Runner.Common
Task<List<TimelineRecord>> UpdateTimelineRecordsAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, IEnumerable<TimelineRecord> records, CancellationToken cancellationToken);
Task RaisePlanEventAsync<T>(Guid scopeIdentifier, string hubName, Guid planId, T eventData, CancellationToken cancellationToken) where T : JobEvent;
Task<Timeline> GetTimelineAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, CancellationToken cancellationToken);
Task<ActionDownloadInfoCollection> ResolveActionDownloadInfoAsync(Guid scopeIdentifier, string hubName, Guid planId, ActionReferenceList actions, CancellationToken cancellationToken);
}
public sealed class JobServer : RunnerService, IJobServer
@@ -113,5 +114,14 @@ namespace GitHub.Runner.Common
CheckConnection();
return _taskClient.GetTimelineAsync(scopeIdentifier, hubName, planId, timelineId, includeRecords: true, cancellationToken: cancellationToken);
}
//-----------------------------------------------------------------
// Action download info
//-----------------------------------------------------------------
public Task<ActionDownloadInfoCollection> ResolveActionDownloadInfoAsync(Guid scopeIdentifier, string hubName, Guid planId, ActionReferenceList actions, CancellationToken cancellationToken)
{
CheckConnection();
return _taskClient.ResolveActionDownloadInfoAsync(scopeIdentifier, hubName, planId, actions, cancellationToken: cancellationToken);
}
}
}

View File

@@ -41,7 +41,7 @@ namespace GitHub.Runner.Common
// job request
Task<TaskAgentJobRequest> GetAgentRequestAsync(int poolId, long requestId, CancellationToken cancellationToken);
Task<TaskAgentJobRequest> RenewAgentRequestAsync(int poolId, long requestId, Guid lockToken, CancellationToken cancellationToken);
Task<TaskAgentJobRequest> RenewAgentRequestAsync(int poolId, long requestId, Guid lockToken, string orchestrationId, CancellationToken cancellationToken);
Task<TaskAgentJobRequest> FinishAgentRequestAsync(int poolId, long requestId, Guid lockToken, DateTime finishTime, TaskResult result, CancellationToken cancellationToken);
// agent package
@@ -296,10 +296,10 @@ namespace GitHub.Runner.Common
// JobRequest
//-----------------------------------------------------------------
public Task<TaskAgentJobRequest> RenewAgentRequestAsync(int poolId, long requestId, Guid lockToken, CancellationToken cancellationToken = default(CancellationToken))
public Task<TaskAgentJobRequest> RenewAgentRequestAsync(int poolId, long requestId, Guid lockToken, string orchestrationId = null, CancellationToken cancellationToken = default(CancellationToken))
{
CheckConnection(RunnerConnectionType.JobRequest);
return _requestTaskAgentClient.RenewAgentRequestAsync(poolId, requestId, lockToken, cancellationToken: cancellationToken);
return _requestTaskAgentClient.RenewAgentRequestAsync(poolId, requestId, lockToken, orchestrationId: orchestrationId, cancellationToken: cancellationToken);
}
public Task<TaskAgentJobRequest> FinishAgentRequestAsync(int poolId, long requestId, Guid lockToken, DateTime finishTime, TaskResult result, CancellationToken cancellationToken = default(CancellationToken))
@@ -334,5 +334,20 @@ namespace GitHub.Runner.Common
CheckConnection(RunnerConnectionType.Generic);
return _genericTaskAgentClient.UpdateAgentUpdateStateAsync(agentPoolId, agentId, currentState);
}
//-----------------------------------------------------------------
// Runner Auth Url
//-----------------------------------------------------------------
public Task<string> GetRunnerAuthUrlAsync(int runnerPoolId, int runnerId)
{
CheckConnection(RunnerConnectionType.MessageQueue);
return _messageTaskAgentClient.GetAgentAuthUrlAsync(runnerPoolId, runnerId);
}
public Task ReportRunnerAuthUrlErrorAsync(int runnerPoolId, int runnerId, string error)
{
CheckConnection(RunnerConnectionType.MessageQueue);
return _messageTaskAgentClient.ReportAgentAuthUrlMigrationErrorAsync(runnerPoolId, runnerId, error);
}
}
}

View File

@@ -96,13 +96,14 @@ namespace GitHub.Runner.Common
Trace.Info($"WRITE: {message}");
if (!Silent)
{
if(colorCode != null)
if (colorCode != null)
{
Console.ForegroundColor = colorCode.Value;
Console.Write(message);
Console.ResetColor();
}
else {
else
{
Console.Write(message);
}
}
@@ -120,13 +121,14 @@ namespace GitHub.Runner.Common
Trace.Info($"WRITE LINE: {line}");
if (!Silent)
{
if(colorCode != null)
if (colorCode != null)
{
Console.ForegroundColor = colorCode.Value;
Console.WriteLine(line);
Console.ResetColor();
}
else {
else
{
Console.WriteLine(line);
}
}

View File

@@ -39,6 +39,7 @@ namespace GitHub.Runner.Listener
private readonly string[] validArgs =
{
Constants.Runner.CommandLine.Args.Auth,
Constants.Runner.CommandLine.Args.Labels,
Constants.Runner.CommandLine.Args.MonitorSocketAddress,
Constants.Runner.CommandLine.Args.Name,
Constants.Runner.CommandLine.Args.Pool,
@@ -249,6 +250,24 @@ namespace GitHub.Runner.Listener
return GetArg(Constants.Runner.CommandLine.Args.StartupType);
}
public ISet<string> GetLabels()
{
var labelSet = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
string labels = GetArgOrPrompt(
name: Constants.Runner.CommandLine.Args.Labels,
description: $"This runner will have the following labels: 'self-hosted', '{VarUtil.OS}', '{VarUtil.OSArchitecture}' \nEnter any additional labels (ex. label-1,label-2):",
defaultValue: string.Empty,
validator: Validators.LabelsValidator,
isOptional: true);
if (!string.IsNullOrEmpty(labels))
{
labelSet = labels.Split(',').Where(x => !string.IsNullOrEmpty(x)).ToHashSet<string>(StringComparer.OrdinalIgnoreCase);
}
return labelSet;
}
//
// Private helpers.
//
@@ -280,7 +299,8 @@ namespace GitHub.Runner.Listener
string name,
string description,
string defaultValue,
Func<string, bool> validator)
Func<string, bool> validator,
bool isOptional = false)
{
// Check for the arg in the command line parser.
ArgUtil.NotNull(validator, nameof(validator));
@@ -311,7 +331,8 @@ namespace GitHub.Runner.Listener
secret: Constants.Runner.CommandLine.Args.Secrets.Any(x => string.Equals(x, name, StringComparison.OrdinalIgnoreCase)),
defaultValue: defaultValue,
validator: validator,
unattended: Unattended);
unattended: Unattended,
isOptional: isOptional);
}
private string GetEnvArg(string name)

View File

@@ -86,17 +86,17 @@ namespace GitHub.Runner.Listener.Configuration
RunnerSettings runnerSettings = new RunnerSettings();
bool isHostedServer = false;
// Loop getting url and creds until you can connect
ICredentialProvider credProvider = null;
VssCredentials creds = null;
_term.WriteSection("Authentication");
while (true)
{
// Get the URL
// When testing against a dev deployment of Actions Service, set this environment variable
var useDevActionsServiceUrl = Environment.GetEnvironmentVariable("USE_DEV_ACTIONS_SERVICE_URL");
var inputUrl = command.GetUrl();
if (!inputUrl.Contains("github.com", StringComparison.OrdinalIgnoreCase) &&
!inputUrl.Contains("github.localhost", StringComparison.OrdinalIgnoreCase))
if (inputUrl.Contains("codedev.ms", StringComparison.OrdinalIgnoreCase)
|| useDevActionsServiceUrl != null)
{
runnerSettings.ServerUrl = inputUrl;
// Get the credentials
@@ -117,7 +117,20 @@ namespace GitHub.Runner.Listener.Configuration
try
{
// Determine the service deployment type based on connection data. (Hosted/OnPremises)
isHostedServer = await IsHostedServer(runnerSettings.ServerUrl, creds);
runnerSettings.IsHostedServer = runnerSettings.GitHubUrl == null || IsHostedServer(new UriBuilder(runnerSettings.GitHubUrl));
// Warn if the Actions server url and GHES server url has different Host
if (!runnerSettings.IsHostedServer)
{
// Example actionsServerUrl is https://my-ghes/_services/pipelines/[...]
// Example githubServerUrl is https://my-ghes
var actionsServerUrl = new Uri(runnerSettings.ServerUrl);
var githubServerUrl = new Uri(runnerSettings.GitHubUrl);
if (!string.Equals(actionsServerUrl.Authority, githubServerUrl.Authority, StringComparison.OrdinalIgnoreCase))
{
throw new InvalidOperationException($"GitHub Actions is not properly configured in GHES. GHES url: {runnerSettings.GitHubUrl}, Actions url: {runnerSettings.ServerUrl}.");
}
}
// Validate can connect.
await _runnerServer.ConnectAsync(new Uri(runnerSettings.ServerUrl), creds);
@@ -168,6 +181,9 @@ namespace GitHub.Runner.Listener.Configuration
_term.WriteLine();
var userLabels = command.GetLabels();
_term.WriteLine();
var agents = await _runnerServer.GetAgentsAsync(runnerSettings.PoolId, runnerSettings.AgentName);
Trace.Verbose("Returns {0} agents", agents.Count);
agent = agents.FirstOrDefault();
@@ -177,7 +193,7 @@ namespace GitHub.Runner.Listener.Configuration
if (command.GetReplace())
{
// Update existing agent with new PublicKey, agent version.
agent = UpdateExistingAgent(agent, publicKey);
agent = UpdateExistingAgent(agent, publicKey, userLabels);
try
{
@@ -194,13 +210,13 @@ namespace GitHub.Runner.Listener.Configuration
else if (command.Unattended)
{
// if not replace and it is unattended config.
throw new TaskAgentExistsException($"Pool {runnerSettings.PoolId} already contains a runner with name {runnerSettings.AgentName}.");
throw new TaskAgentExistsException($"A runner exists with the same name {runnerSettings.AgentName}.");
}
}
else
{
// Create a new agent.
agent = CreateNewAgent(runnerSettings.AgentName, publicKey);
// Create a new agent.
agent = CreateNewAgent(runnerSettings.AgentName, publicKey, userLabels);
try
{
@@ -218,44 +234,11 @@ namespace GitHub.Runner.Listener.Configuration
// Add Agent Id to settings
runnerSettings.AgentId = agent.Id;
// respect the serverUrl resolve by server.
// in case of agent configured using collection url instead of account url.
string agentServerUrl;
if (agent.Properties.TryGetValidatedValue<string>("ServerUrl", out agentServerUrl) &&
!string.IsNullOrEmpty(agentServerUrl))
{
Trace.Info($"Agent server url resolve by server: '{agentServerUrl}'.");
// we need make sure the Schema/Host/Port component of the url remain the same.
UriBuilder inputServerUrl = new UriBuilder(runnerSettings.ServerUrl);
UriBuilder serverReturnedServerUrl = new UriBuilder(agentServerUrl);
if (Uri.Compare(inputServerUrl.Uri, serverReturnedServerUrl.Uri, UriComponents.SchemeAndServer, UriFormat.Unescaped, StringComparison.OrdinalIgnoreCase) != 0)
{
inputServerUrl.Path = serverReturnedServerUrl.Path;
Trace.Info($"Replace server returned url's scheme://host:port component with user input server url's scheme://host:port: '{inputServerUrl.Uri.AbsoluteUri}'.");
runnerSettings.ServerUrl = inputServerUrl.Uri.AbsoluteUri;
}
else
{
runnerSettings.ServerUrl = agentServerUrl;
}
}
// See if the server supports our OAuth key exchange for credentials
if (agent.Authorization != null &&
agent.Authorization.ClientId != Guid.Empty &&
agent.Authorization.AuthorizationUrl != null)
{
UriBuilder configServerUrl = new UriBuilder(runnerSettings.ServerUrl);
UriBuilder oauthEndpointUrlBuilder = new UriBuilder(agent.Authorization.AuthorizationUrl);
if (!isHostedServer && Uri.Compare(configServerUrl.Uri, oauthEndpointUrlBuilder.Uri, UriComponents.SchemeAndServer, UriFormat.Unescaped, StringComparison.OrdinalIgnoreCase) != 0)
{
oauthEndpointUrlBuilder.Scheme = configServerUrl.Scheme;
oauthEndpointUrlBuilder.Host = configServerUrl.Host;
oauthEndpointUrlBuilder.Port = configServerUrl.Port;
Trace.Info($"Set oauth endpoint url's scheme://host:port component to match runner configure url's scheme://host:port: '{oauthEndpointUrlBuilder.Uri.AbsoluteUri}'.");
}
var credentialData = new CredentialData
{
Scheme = Constants.Configuration.OAuth,
@@ -263,7 +246,6 @@ namespace GitHub.Runner.Listener.Configuration
{
{ "clientId", agent.Authorization.ClientId.ToString("D") },
{ "authorizationUrl", agent.Authorization.AuthorizationUrl.AbsoluteUri },
{ "oauthEndpointUrl", oauthEndpointUrlBuilder.Uri.AbsoluteUri },
},
};
@@ -291,7 +273,7 @@ namespace GitHub.Runner.Listener.Configuration
{
// there are two exception messages server send that indicate clock skew.
// 1. The bearer token expired on {jwt.ValidTo}. Current server time is {DateTime.UtcNow}.
// 2. The bearer token is not valid until {jwt.ValidFrom}. Current server time is {DateTime.UtcNow}.
// 2. The bearer token is not valid until {jwt.ValidFrom}. Current server time is {DateTime.UtcNow}.
Trace.Error("Catch exception during test agent connection.");
Trace.Error(ex);
throw new Exception("The local machine's clock may be out of sync with the server time by more than five minutes. Please sync your clock with your domain or internet time and try again.");
@@ -381,7 +363,6 @@ namespace GitHub.Runner.Listener.Configuration
}
// Determine the service deployment type based on connection data. (Hosted/OnPremises)
bool isHostedServer = await IsHostedServer(settings.ServerUrl, creds);
await _runnerServer.ConnectAsync(new Uri(settings.ServerUrl), creds);
var agents = await _runnerServer.GetAgentsAsync(settings.PoolId, settings.AgentName);
@@ -404,7 +385,7 @@ namespace GitHub.Runner.Listener.Configuration
_term.WriteLine("Cannot connect to server, because config files are missing. Skipping removing runner from the server.");
}
//delete credential config files
//delete credential config files
currentAction = "Removing .credentials";
if (hasCredentials)
{
@@ -418,7 +399,7 @@ namespace GitHub.Runner.Listener.Configuration
_term.WriteLine("Does not exist. Skipping " + currentAction);
}
//delete settings config file
//delete settings config file
currentAction = "Removing .runner";
if (isConfigured)
{
@@ -459,7 +440,7 @@ namespace GitHub.Runner.Listener.Configuration
}
private TaskAgent UpdateExistingAgent(TaskAgent agent, RSAParameters publicKey)
private TaskAgent UpdateExistingAgent(TaskAgent agent, RSAParameters publicKey, ISet<string> userLabels)
{
ArgUtil.NotNull(agent, nameof(agent));
agent.Authorization = new TaskAgentAuthorization
@@ -467,18 +448,25 @@ namespace GitHub.Runner.Listener.Configuration
PublicKey = new TaskAgentPublicKey(publicKey.Exponent, publicKey.Modulus),
};
// update - update instead of delete so we don't lose labels etc...
// update should replace the existing labels
agent.Version = BuildConstants.RunnerPackage.Version;
agent.OSDescription = RuntimeInformation.OSDescription;
agent.Labels.Add("self-hosted");
agent.Labels.Add(VarUtil.OS);
agent.Labels.Add(VarUtil.OSArchitecture);
agent.Labels.Clear();
agent.Labels.Add(new AgentLabel("self-hosted", LabelType.System));
agent.Labels.Add(new AgentLabel(VarUtil.OS, LabelType.System));
agent.Labels.Add(new AgentLabel(VarUtil.OSArchitecture, LabelType.System));
foreach (var userLabel in userLabels)
{
agent.Labels.Add(new AgentLabel(userLabel, LabelType.User));
}
return agent;
}
private TaskAgent CreateNewAgent(string agentName, RSAParameters publicKey)
private TaskAgent CreateNewAgent(string agentName, RSAParameters publicKey, ISet<string> userLabels)
{
TaskAgent agent = new TaskAgent(agentName)
{
@@ -491,43 +479,43 @@ namespace GitHub.Runner.Listener.Configuration
OSDescription = RuntimeInformation.OSDescription,
};
agent.Labels.Add("self-hosted");
agent.Labels.Add(VarUtil.OS);
agent.Labels.Add(VarUtil.OSArchitecture);
agent.Labels.Add(new AgentLabel("self-hosted", LabelType.System));
agent.Labels.Add(new AgentLabel(VarUtil.OS, LabelType.System));
agent.Labels.Add(new AgentLabel(VarUtil.OSArchitecture, LabelType.System));
foreach (var userLabel in userLabels)
{
agent.Labels.Add(new AgentLabel(userLabel, LabelType.User));
}
return agent;
}
private async Task<bool> IsHostedServer(string serverUrl, VssCredentials credentials)
private bool IsHostedServer(UriBuilder gitHubUrl)
{
// Determine the service deployment type based on connection data. (Hosted/OnPremises)
var locationServer = HostContext.GetService<ILocationServer>();
VssConnection connection = VssUtil.CreateConnection(new Uri(serverUrl), credentials);
await locationServer.ConnectAsync(connection);
try
{
var connectionData = await locationServer.GetConnectionDataAsync();
Trace.Info($"Server deployment type: {connectionData.DeploymentType}");
return connectionData.DeploymentType.HasFlag(DeploymentFlags.Hosted);
}
catch (Exception ex)
{
// Since the DeploymentType is Enum, deserialization exception means there is a new Enum member been added.
// It's more likely to be Hosted since OnPremises is always behind and customer can update their agent if are on-prem
Trace.Error(ex);
return true;
}
return string.Equals(gitHubUrl.Host, "github.com", StringComparison.OrdinalIgnoreCase) ||
string.Equals(gitHubUrl.Host, "www.github.com", StringComparison.OrdinalIgnoreCase) ||
string.Equals(gitHubUrl.Host, "github.localhost", StringComparison.OrdinalIgnoreCase);
}
private async Task<GitHubAuthResult> GetTenantCredential(string githubUrl, string githubToken, string runnerEvent)
{
var githubApiUrl = "";
var gitHubUrlBuilder = new UriBuilder(githubUrl);
var githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/actions/runner-registration";
if (IsHostedServer(gitHubUrlBuilder))
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/actions/runner-registration";
}
else
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/actions/runner-registration";
}
using (var httpClientHandler = HostContext.CreateHttpClientHandler())
using (var httpClient = new HttpClient(httpClientHandler))
{
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("RemoteAuth", githubToken);
httpClient.DefaultRequestHeaders.UserAgent.Add(HostContext.UserAgent);
httpClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents);
var bodyObject = new Dictionary<string, string>()
{

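A minimal sketch of the registration-URL selection shown in the hunk above: for github.com (and github.localhost) the runner targets api.{host}/actions/runner-registration, while for a GHES host it targets {host}/api/v3/actions/runner-registration. RegistrationUrlDemo, BuildRegistrationUrl, and the example hostnames are hypothetical; only the host check and the two URL shapes come from the diff.

```csharp
using System;

public static class RegistrationUrlDemo
{
    // Mirrors the IsHostedServer(UriBuilder) check in the diff above.
    private static bool IsHostedServer(UriBuilder gitHubUrl)
    {
        return string.Equals(gitHubUrl.Host, "github.com", StringComparison.OrdinalIgnoreCase) ||
               string.Equals(gitHubUrl.Host, "www.github.com", StringComparison.OrdinalIgnoreCase) ||
               string.Equals(gitHubUrl.Host, "github.localhost", StringComparison.OrdinalIgnoreCase);
    }

    // Hypothetical helper name; the two URL templates match the diff.
    public static string BuildRegistrationUrl(string githubUrl)
    {
        var builder = new UriBuilder(githubUrl);
        return IsHostedServer(builder)
            ? $"{builder.Scheme}://api.{builder.Host}/actions/runner-registration"
            : $"{builder.Scheme}://{builder.Host}/api/v3/actions/runner-registration";
    }

    public static void Main()
    {
        // Hosted: https://api.github.com/actions/runner-registration
        Console.WriteLine(BuildRegistrationUrl("https://github.com/my-org/my-repo"));

        // GHES (hypothetical host): https://my-ghes/api/v3/actions/runner-registration
        Console.WriteLine(BuildRegistrationUrl("https://my-ghes/my-org/my-repo"));
    }
}
```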
View File

@@ -50,6 +50,18 @@ namespace GitHub.Runner.Listener.Configuration
}
CredentialData credData = store.GetCredentials();
var migratedCred = store.GetMigratedCredentials();
if (migratedCred != null)
{
credData = migratedCred;
// Re-write .credentials with Token URL
store.SaveCredential(credData);
// Delete .credentials_migrated
store.DeleteMigratedCredential();
}
ICredentialProvider credProv = GetCredentialProvider(credData.Scheme);
credProv.CredentialData = credData;

View File

@@ -1,6 +1,5 @@
using System;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using GitHub.Services.Common;
using GitHub.Services.OAuth;
@@ -29,7 +28,7 @@ namespace GitHub.Runner.Listener.Configuration
var authorizationUrl = this.CredentialData.Data.GetValueOrDefault("authorizationUrl", null);
// For back compat with .credential file that doesn't has 'oauthEndpointUrl' section
var oathEndpointUrl = this.CredentialData.Data.GetValueOrDefault("oauthEndpointUrl", authorizationUrl);
var oauthEndpointUrl = this.CredentialData.Data.GetValueOrDefault("oauthEndpointUrl", authorizationUrl);
ArgUtil.NotNullOrEmpty(clientId, nameof(clientId));
ArgUtil.NotNullOrEmpty(authorizationUrl, nameof(authorizationUrl));
@@ -39,7 +38,7 @@ namespace GitHub.Runner.Listener.Configuration
var keyManager = context.GetService<IRSAKeyManager>();
var signingCredentials = VssSigningCredentials.Create(() => keyManager.GetKey());
var clientCredential = new VssOAuthJwtBearerClientCredential(clientId, authorizationUrl, signingCredentials);
var agentCredential = new VssOAuthCredential(new Uri(oathEndpointUrl, UriKind.Absolute), VssOAuthGrant.ClientCredentials, clientCredential);
var agentCredential = new VssOAuthCredential(new Uri(oauthEndpointUrl, UriKind.Absolute), VssOAuthGrant.ClientCredentials, clientCredential);
// Construct a credentials cache with a single OAuth credential for communication. The windows credential
// is explicitly set to null to ensure we never do that negotiation.

View File

@@ -20,7 +20,8 @@ namespace GitHub.Runner.Listener.Configuration
bool secret,
string defaultValue,
Func<String, bool> validator,
bool unattended);
bool unattended,
bool isOptional = false);
}
public sealed class PromptManager : RunnerService, IPromptManager
@@ -56,7 +57,8 @@ namespace GitHub.Runner.Listener.Configuration
bool secret,
string defaultValue,
Func<string, bool> validator,
bool unattended)
bool unattended,
bool isOptional = false)
{
Trace.Info(nameof(ReadValue));
ArgUtil.NotNull(validator, nameof(validator));
@@ -70,6 +72,10 @@ namespace GitHub.Runner.Listener.Configuration
{
return defaultValue;
}
else if (isOptional)
{
return string.Empty;
}
// Otherwise throw.
throw new Exception($"Invalid configuration provided for {argName}. Terminating unattended configuration.");
@@ -85,18 +91,28 @@ namespace GitHub.Runner.Listener.Configuration
{
_terminal.Write($"[press Enter for {defaultValue}] ");
}
else if (isOptional){
_terminal.Write($"[press Enter to skip] ");
}
// Read and trim the value.
value = secret ? _terminal.ReadSecret() : _terminal.ReadLine();
value = value?.Trim() ?? string.Empty;
// Return the default if not specified.
if (string.IsNullOrEmpty(value) && !string.IsNullOrEmpty(defaultValue))
if (string.IsNullOrEmpty(value))
{
Trace.Info($"Falling back to the default: '{defaultValue}'");
return defaultValue;
if (!string.IsNullOrEmpty(defaultValue))
{
Trace.Info($"Falling back to the default: '{defaultValue}'");
return defaultValue;
}
else if (isOptional)
{
return string.Empty;
}
}
// Return the value if it is not empty and it is valid.
// Otherwise try the loop again.
if (!string.IsNullOrEmpty(value))

View File

@@ -1,6 +1,7 @@
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using System;
using System.Linq;
using System.IO;
using System.Security.Principal;
@@ -46,6 +47,21 @@ namespace GitHub.Runner.Listener.Configuration
string.Equals(value, "N", StringComparison.CurrentCultureIgnoreCase);
}
public static bool LabelsValidator(string labels)
{
if (!string.IsNullOrEmpty(labels))
{
var labelSet = labels.Split(',').Where(x => !string.IsNullOrEmpty(x)).ToHashSet<string>(StringComparer.OrdinalIgnoreCase);
if (labelSet.Any(x => x.Length > 256))
{
return false;
}
}
return true;
}
public static bool NonEmptyValidator(string value)
{
return !string.IsNullOrEmpty(value);

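A small illustrative sketch of the label parsing and validation pattern above: comma-separated input, empty entries dropped, case-insensitive de-duplication, and a 256-character limit per label. LabelsDemo and the sample input are assumptions for demonstration; only the Split/Where/ToHashSet pipeline and the length check mirror the diff.

```csharp
using System;
using System.Linq;

public static class LabelsDemo
{
    public static void Main()
    {
        // Hypothetical value typed at the labels prompt; the split below mirrors GetLabels().
        var input = "gpu,,linux,GPU";

        var labelSet = input.Split(',')
                            .Where(x => !string.IsNullOrEmpty(x))
                            .ToHashSet<string>(StringComparer.OrdinalIgnoreCase);

        // Empty entries are dropped and "GPU" collapses into "gpu" (case-insensitive set).
        Console.WriteLine(string.Join(", ", labelSet));   // gpu, linux (order not guaranteed)

        // Mirrors LabelsValidator: any single label over 256 characters makes the input invalid.
        bool valid = !labelSet.Any(x => x.Length > 256);
        Console.WriteLine(valid);                          // True
    }
}
```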
View File

@@ -12,12 +12,14 @@ using System.Linq;
using GitHub.Services.Common;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using GitHub.Services.WebApi.Jwt;
namespace GitHub.Runner.Listener
{
[ServiceLocator(Default = typeof(JobDispatcher))]
public interface IJobDispatcher : IRunnerService
{
bool Busy { get; }
TaskCompletionSource<bool> RunOnceJobCompleted { get; }
void Run(Pipelines.AgentJobRequestMessage message, bool runOnce = false);
bool Cancel(JobCancelMessage message);
@@ -69,6 +71,8 @@ namespace GitHub.Runner.Listener
public TaskCompletionSource<bool> RunOnceJobCompleted => _runOnceJobCompleted;
public bool Busy { get; private set; }
public void Run(Pipelines.AgentJobRequestMessage jobRequestMessage, bool runOnce = false)
{
Trace.Info($"Job request {jobRequestMessage.RequestId} for plan {jobRequestMessage.Plan.PlanId} job {jobRequestMessage.JobId} received.");
@@ -83,15 +87,30 @@ namespace GitHub.Runner.Listener
}
}
var orchestrationId = string.Empty;
var systemConnection = jobRequestMessage.Resources.Endpoints.SingleOrDefault(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
if (systemConnection?.Authorization != null &&
systemConnection.Authorization.Parameters.TryGetValue("AccessToken", out var accessToken) &&
!string.IsNullOrEmpty(accessToken))
{
var jwt = JsonWebToken.Create(accessToken);
var claims = jwt.ExtractClaims();
orchestrationId = claims.FirstOrDefault(x => string.Equals(x.Type, "orchid", StringComparison.OrdinalIgnoreCase))?.Value;
if (!string.IsNullOrEmpty(orchestrationId))
{
Trace.Info($"Pull OrchestrationId {orchestrationId} from JWT claims");
}
}
WorkerDispatcher newDispatch = new WorkerDispatcher(jobRequestMessage.JobId, jobRequestMessage.RequestId);
if (runOnce)
{
Trace.Info("Start dispatcher for one time used runner.");
newDispatch.WorkerDispatch = RunOnceAsync(jobRequestMessage, currentDispatch, newDispatch.WorkerCancellationTokenSource.Token, newDispatch.WorkerCancelTimeoutKillTokenSource.Token);
newDispatch.WorkerDispatch = RunOnceAsync(jobRequestMessage, orchestrationId, currentDispatch, newDispatch.WorkerCancellationTokenSource.Token, newDispatch.WorkerCancelTimeoutKillTokenSource.Token);
}
else
{
newDispatch.WorkerDispatch = RunAsync(jobRequestMessage, currentDispatch, newDispatch.WorkerCancellationTokenSource.Token, newDispatch.WorkerCancelTimeoutKillTokenSource.Token);
newDispatch.WorkerDispatch = RunAsync(jobRequestMessage, orchestrationId, currentDispatch, newDispatch.WorkerCancellationTokenSource.Token, newDispatch.WorkerCancelTimeoutKillTokenSource.Token);
}
_jobInfos.TryAdd(newDispatch.JobId, newDispatch);
@@ -247,7 +266,7 @@ namespace GitHub.Runner.Listener
Task completedTask = await Task.WhenAny(jobDispatch.WorkerDispatch, Task.Delay(TimeSpan.FromSeconds(45)));
if (completedTask != jobDispatch.WorkerDispatch)
{
// at this point, the job exectuion might encounter some dead lock and even not able to be canclled.
// at this point, the job execution might encounter some dead lock and even not able to be cancelled.
// no need to localize the exception string should never happen.
throw new InvalidOperationException($"Job dispatch process for {jobDispatch.JobId} has encountered unexpected error, the dispatch task is not able to be canceled within 45 seconds.");
}
@@ -281,11 +300,11 @@ namespace GitHub.Runner.Listener
}
}
private async Task RunOnceAsync(Pipelines.AgentJobRequestMessage message, WorkerDispatcher previousJobDispatch, CancellationToken jobRequestCancellationToken, CancellationToken workerCancelTimeoutKillToken)
private async Task RunOnceAsync(Pipelines.AgentJobRequestMessage message, string orchestrationId, WorkerDispatcher previousJobDispatch, CancellationToken jobRequestCancellationToken, CancellationToken workerCancelTimeoutKillToken)
{
try
{
await RunAsync(message, previousJobDispatch, jobRequestCancellationToken, workerCancelTimeoutKillToken);
await RunAsync(message, orchestrationId, previousJobDispatch, jobRequestCancellationToken, workerCancelTimeoutKillToken);
}
finally
{
@@ -294,192 +313,292 @@ namespace GitHub.Runner.Listener
}
}
private async Task RunAsync(Pipelines.AgentJobRequestMessage message, WorkerDispatcher previousJobDispatch, CancellationToken jobRequestCancellationToken, CancellationToken workerCancelTimeoutKillToken)
private async Task RunAsync(Pipelines.AgentJobRequestMessage message, string orchestrationId, WorkerDispatcher previousJobDispatch, CancellationToken jobRequestCancellationToken, CancellationToken workerCancelTimeoutKillToken)
{
if (previousJobDispatch != null)
Busy = true;
try
{
Trace.Verbose($"Make sure the previous job request {previousJobDispatch.JobId} has successfully finished on worker.");
await EnsureDispatchFinished(previousJobDispatch);
}
else
{
Trace.Verbose($"This is the first job request.");
}
var term = HostContext.GetService<ITerminal>();
term.WriteLine($"{DateTime.UtcNow:u}: Running job: {message.JobDisplayName}");
// first job request renew succeed.
TaskCompletionSource<int> firstJobRequestRenewed = new TaskCompletionSource<int>();
var notification = HostContext.GetService<IJobNotification>();
// lock renew cancellation token.
using (var lockRenewalTokenSource = new CancellationTokenSource())
using (var workerProcessCancelTokenSource = new CancellationTokenSource())
{
long requestId = message.RequestId;
Guid lockToken = Guid.Empty; // lockToken has never been used, keep this here of compat
// start renew job request
Trace.Info($"Start renew job request {requestId} for job {message.JobId}.");
Task renewJobRequest = RenewJobRequestAsync(_poolId, requestId, lockToken, firstJobRequestRenewed, lockRenewalTokenSource.Token);
// wait till first renew succeed or job request is canceled
// not even start worker if the first renew fail
await Task.WhenAny(firstJobRequestRenewed.Task, renewJobRequest, Task.Delay(-1, jobRequestCancellationToken));
if (renewJobRequest.IsCompleted)
if (previousJobDispatch != null)
{
// renew job request task complete means we run out of retry for the first job request renew.
Trace.Info($"Unable to renew job request for job {message.JobId} for the first time, stop dispatching job to worker.");
return;
Trace.Verbose($"Make sure the previous job request {previousJobDispatch.JobId} has successfully finished on worker.");
await EnsureDispatchFinished(previousJobDispatch);
}
else
{
Trace.Verbose($"This is the first job request.");
}
if (jobRequestCancellationToken.IsCancellationRequested)
var term = HostContext.GetService<ITerminal>();
term.WriteLine($"{DateTime.UtcNow:u}: Running job: {message.JobDisplayName}");
// first job request renew succeed.
TaskCompletionSource<int> firstJobRequestRenewed = new TaskCompletionSource<int>();
var notification = HostContext.GetService<IJobNotification>();
// lock renew cancellation token.
using (var lockRenewalTokenSource = new CancellationTokenSource())
using (var workerProcessCancelTokenSource = new CancellationTokenSource())
{
Trace.Info($"Stop renew job request for job {message.JobId}.");
// stop renew lock
lockRenewalTokenSource.Cancel();
// renew job request should never blows up.
await renewJobRequest;
long requestId = message.RequestId;
Guid lockToken = Guid.Empty; // lockToken has never been used, keep this here of compat
// complete job request with result Cancelled
await CompleteJobRequestAsync(_poolId, message, lockToken, TaskResult.Canceled);
return;
}
// start renew job request
Trace.Info($"Start renew job request {requestId} for job {message.JobId}.");
Task renewJobRequest = RenewJobRequestAsync(_poolId, requestId, lockToken, orchestrationId, firstJobRequestRenewed, lockRenewalTokenSource.Token);
HostContext.WritePerfCounter($"JobRequestRenewed_{requestId.ToString()}");
// wait till first renew succeed or job request is canceled
// not even start worker if the first renew fail
await Task.WhenAny(firstJobRequestRenewed.Task, renewJobRequest, Task.Delay(-1, jobRequestCancellationToken));
Task<int> workerProcessTask = null;
object _outputLock = new object();
List<string> workerOutput = new List<string>();
using (var processChannel = HostContext.CreateService<IProcessChannel>())
using (var processInvoker = HostContext.CreateService<IProcessInvoker>())
{
// Start the process channel.
// It's OK if StartServer bubbles an execption after the worker process has already started.
// The worker will shutdown after 30 seconds if it hasn't received the job message.
processChannel.StartServer(
// Delegate to start the child process.
startProcess: (string pipeHandleOut, string pipeHandleIn) =>
{
// Validate args.
ArgUtil.NotNullOrEmpty(pipeHandleOut, nameof(pipeHandleOut));
ArgUtil.NotNullOrEmpty(pipeHandleIn, nameof(pipeHandleIn));
// Save STDOUT from worker, worker will use STDOUT report unhandle exception.
processInvoker.OutputDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stdout)
{
if (!string.IsNullOrEmpty(stdout.Data))
{
lock (_outputLock)
{
workerOutput.Add(stdout.Data);
}
}
};
// Save STDERR from worker, worker will use STDERR on crash.
processInvoker.ErrorDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stderr)
{
if (!string.IsNullOrEmpty(stderr.Data))
{
lock (_outputLock)
{
workerOutput.Add(stderr.Data);
}
}
};
// Start the child process.
HostContext.WritePerfCounter("StartingWorkerProcess");
var assemblyDirectory = HostContext.GetDirectory(WellKnownDirectory.Bin);
string workerFileName = Path.Combine(assemblyDirectory, _workerProcessName);
workerProcessTask = processInvoker.ExecuteAsync(
workingDirectory: assemblyDirectory,
fileName: workerFileName,
arguments: "spawnclient " + pipeHandleOut + " " + pipeHandleIn,
environment: null,
requireExitCodeZero: false,
outputEncoding: null,
killProcessOnCancel: true,
redirectStandardIn: null,
inheritConsoleHandler: false,
keepStandardInOpen: false,
highPriorityProcess: true,
cancellationToken: workerProcessCancelTokenSource.Token);
});
// Send the job request message.
// Kill the worker process if sending the job message times out. The worker
// process may have successfully received the job message.
try
if (renewJobRequest.IsCompleted)
{
Trace.Info($"Send job request message to worker for job {message.JobId}.");
HostContext.WritePerfCounter($"RunnerSendingJobToWorker_{message.JobId}");
using (var csSendJobRequest = new CancellationTokenSource(_channelTimeout))
{
await processChannel.SendAsync(
messageType: MessageType.NewJobRequest,
body: JsonUtility.ToString(message),
cancellationToken: csSendJobRequest.Token);
}
// renew job request task complete means we run out of retry for the first job request renew.
Trace.Info($"Unable to renew job request for job {message.JobId} for the first time, stop dispatching job to worker.");
return;
}
catch (OperationCanceledException)
{
// message send been cancelled.
// timeout 30 sec. kill worker.
Trace.Info($"Job request message sending for job {message.JobId} been cancelled, kill running worker.");
workerProcessCancelTokenSource.Cancel();
try
{
await workerProcessTask;
}
catch (OperationCanceledException)
{
Trace.Info("worker process has been killed.");
}
if (jobRequestCancellationToken.IsCancellationRequested)
{
Trace.Info($"Stop renew job request for job {message.JobId}.");
// stop renew lock
lockRenewalTokenSource.Cancel();
// renew job request should never blows up.
await renewJobRequest;
// not finish the job request since the job haven't run on worker at all, we will not going to set a result to server.
// complete job request with result Cancelled
await CompleteJobRequestAsync(_poolId, message, lockToken, TaskResult.Canceled);
return;
}
// we get first jobrequest renew succeed and start the worker process with the job message.
// send notification to machine provisioner.
var systemConnection = message.Resources.Endpoints.SingleOrDefault(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
HostContext.WritePerfCounter($"JobRequestRenewed_{requestId.ToString()}");
try
{
    Task<int> workerProcessTask = null;
    object _outputLock = new object();
    List<string> workerOutput = new List<string>();
    using (var processChannel = HostContext.CreateService<IProcessChannel>())
    using (var processInvoker = HostContext.CreateService<IProcessInvoker>())
    {
        // Start the process channel.
        // It's OK if StartServer bubbles an exception after the worker process has already started.
        // The worker will shutdown after 30 seconds if it hasn't received the job message.
        processChannel.StartServer(
            // Delegate to start the child process.
            startProcess: (string pipeHandleOut, string pipeHandleIn) =>
            {
                // Validate args.
                ArgUtil.NotNullOrEmpty(pipeHandleOut, nameof(pipeHandleOut));
                ArgUtil.NotNullOrEmpty(pipeHandleIn, nameof(pipeHandleIn));
                // Save STDOUT from worker, worker will use STDOUT report unhandle exception.
                processInvoker.OutputDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stdout)
                {
                    if (!string.IsNullOrEmpty(stdout.Data))
                    {
                        lock (_outputLock)
                        {
                            workerOutput.Add(stdout.Data);
                        }
                    }
                };
                // Save STDERR from worker, worker will use STDERR on crash.
                processInvoker.ErrorDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stderr)
                {
                    if (!string.IsNullOrEmpty(stderr.Data))
                    {
                        lock (_outputLock)
                        {
                            workerOutput.Add(stderr.Data);
                        }
                    }
                };
                // Start the child process.
                HostContext.WritePerfCounter("StartingWorkerProcess");
                var assemblyDirectory = HostContext.GetDirectory(WellKnownDirectory.Bin);
                string workerFileName = Path.Combine(assemblyDirectory, _workerProcessName);
                workerProcessTask = processInvoker.ExecuteAsync(
                    workingDirectory: assemblyDirectory,
                    fileName: workerFileName,
                    arguments: "spawnclient " + pipeHandleOut + " " + pipeHandleIn,
                    environment: null,
                    requireExitCodeZero: false,
                    outputEncoding: null,
                    killProcessOnCancel: true,
                    redirectStandardIn: null,
                    inheritConsoleHandler: false,
                    keepStandardInOpen: false,
                    highPriorityProcess: true,
                    cancellationToken: workerProcessCancelTokenSource.Token);
            });
        // Send the job request message.
        // Kill the worker process if sending the job message times out. The worker
        // process may have successfully received the job message.
        try
        {
            Trace.Info($"Send job request message to worker for job {message.JobId}.");
            HostContext.WritePerfCounter($"RunnerSendingJobToWorker_{message.JobId}");
            using (var csSendJobRequest = new CancellationTokenSource(_channelTimeout))
            {
                await processChannel.SendAsync(
                    messageType: MessageType.NewJobRequest,
                    body: JsonUtility.ToString(message),
                    cancellationToken: csSendJobRequest.Token);
            }
        }
        catch (OperationCanceledException)
        {
            // message send been cancelled.
            // timeout 30 sec. kill worker.
            Trace.Info($"Job request message sending for job {message.JobId} been cancelled, kill running worker.");
            workerProcessCancelTokenSource.Cancel();
            try
            {
                await workerProcessTask;
            }
            catch (OperationCanceledException)
            {
                Trace.Info("worker process has been killed.");
            }
            Trace.Info($"Stop renew job request for job {message.JobId}.");
            // stop renew lock
            lockRenewalTokenSource.Cancel();
            // renew job request should never blows up.
            await renewJobRequest;
            // not finish the job request since the job haven't run on worker at all, we will not going to set a result to server.
            return;
        }
// we get first jobrequest renew succeed and start the worker process with the job message.
// send notification to machine provisioner.
var systemConnection = message.Resources.Endpoints.SingleOrDefault(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
var accessToken = systemConnection?.Authorization?.Parameters["AccessToken"];
notification.JobStarted(message.JobId, accessToken, systemConnection.Url);
HostContext.WritePerfCounter($"SentJobToWorker_{requestId.ToString()}");
try
{
TaskResult resultOnAbandonOrCancel = TaskResult.Succeeded;
// wait for renewlock, worker process or cancellation token been fired.
var completedTask = await Task.WhenAny(renewJobRequest, workerProcessTask, Task.Delay(-1, jobRequestCancellationToken));
if (completedTask == workerProcessTask)
{
// worker finished successfully, complete job request with result, attach unhandled exception reported by worker, stop renew lock, job has finished.
int returnCode = await workerProcessTask;
Trace.Info($"Worker finished for job {message.JobId}. Code: " + returnCode);
string detailInfo = null;
if (!TaskResultUtil.IsValidReturnCode(returnCode))
{
detailInfo = string.Join(Environment.NewLine, workerOutput);
Trace.Info($"Return code {returnCode} indicate worker encounter an unhandled exception or app crash, attach worker stdout/stderr to JobRequest result.");
await LogWorkerProcessUnhandledException(message, detailInfo);
}
TaskResult result = TaskResultUtil.TranslateFromReturnCode(returnCode);
Trace.Info($"finish job request for job {message.JobId} with result: {result}");
term.WriteLine($"{DateTime.UtcNow:u}: Job {message.JobDisplayName} completed with result: {result}");
Trace.Info($"Stop renew job request for job {message.JobId}.");
// stop renew lock
lockRenewalTokenSource.Cancel();
// renew job request should never blows up.
await renewJobRequest;
// complete job request
await CompleteJobRequestAsync(_poolId, message, lockToken, result, detailInfo);
// print out unhandled exception happened in worker after we complete job request.
// when we run out of disk space, report back to server has higher priority.
if (!string.IsNullOrEmpty(detailInfo))
{
Trace.Error("Unhandled exception happened in worker:");
Trace.Error(detailInfo);
}
return;
}
else if (completedTask == renewJobRequest)
{
resultOnAbandonOrCancel = TaskResult.Abandoned;
}
else
{
resultOnAbandonOrCancel = TaskResult.Canceled;
}
// renew job request completed or job request cancellation token been fired for RunAsync(jobrequestmessage)
// cancel worker gracefully first, then kill it after worker cancel timeout
try
{
Trace.Info($"Send job cancellation message to worker for job {message.JobId}.");
using (var csSendCancel = new CancellationTokenSource(_channelTimeout))
{
var messageType = MessageType.CancelRequest;
if (HostContext.RunnerShutdownToken.IsCancellationRequested)
{
switch (HostContext.RunnerShutdownReason)
{
case ShutdownReason.UserCancelled:
messageType = MessageType.RunnerShutdown;
break;
case ShutdownReason.OperatingSystemShutdown:
messageType = MessageType.OperatingSystemShutdown;
break;
}
}
await processChannel.SendAsync(
messageType: messageType,
body: string.Empty,
cancellationToken: csSendCancel.Token);
}
}
catch (OperationCanceledException)
{
// message send been cancelled.
Trace.Info($"Job cancel message sending for job {message.JobId} been cancelled, kill running worker.");
workerProcessCancelTokenSource.Cancel();
try
{
await workerProcessTask;
}
catch (OperationCanceledException)
{
Trace.Info("worker process has been killed.");
}
}
// wait worker to exit
// if worker doesn't exit within timeout, then kill worker.
completedTask = await Task.WhenAny(workerProcessTask, Task.Delay(-1, workerCancelTimeoutKillToken));
// worker haven't exit within cancellation timeout.
if (completedTask != workerProcessTask)
{
Trace.Info($"worker process for job {message.JobId} haven't exit within cancellation timout, kill running worker.");
workerProcessCancelTokenSource.Cancel();
try
{
await workerProcessTask;
}
catch (OperationCanceledException)
{
Trace.Info("worker process has been killed.");
}
// When worker doesn't exit within cancel timeout, the runner will kill the worker process and worker won't finish upload job logs.
// The runner will try to upload these logs at this time.
await TryUploadUnfinishedLogs(message);
}
Trace.Info($"finish job request for job {message.JobId} with result: {resultOnAbandonOrCancel}");
term.WriteLine($"{DateTime.UtcNow:u}: Job {message.JobDisplayName} completed with result: {resultOnAbandonOrCancel}");
// complete job request with cancel result, stop renew lock, job has finished.
Trace.Info($"Stop renew job request for job {message.JobId}.");
// stop renew lock
        lockRenewalTokenSource.Cancel();
        // renew job request should never blows up.
        await renewJobRequest;
        // complete job request
        await CompleteJobRequestAsync(_poolId, message, lockToken, resultOnAbandonOrCancel);
    }
    finally
    {
        // This should be the last thing to run so we don't notify external parties until actually finished
        await notification.JobCompleted(message.JobId);
    }
}
}
finally
{
Busy = false;
}
}
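The dispatcher above repeatedly races the worker-process task against Task.Delay(-1, token): the delay never completes on its own, so Task.WhenAny returns as soon as the worker exits, the lock-renewal task ends, or the cancellation token fires. A minimal standalone sketch of that pattern (standard .NET only; the "worker" here is a made-up placeholder, not the runner's worker process) might look like:

using System;
using System.Threading;
using System.Threading.Tasks;

class WhenAnySketch
{
    static async Task Main()
    {
        // Token that fires after two seconds, standing in for job cancellation.
        using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(2));

        // Placeholder "worker": finishes after five seconds with exit code 0.
        Task<int> worker = Task.Run(async () => { await Task.Delay(5000); return 0; });

        // Task.Delay(Timeout.Infinite, token) never completes on its own;
        // it only transitions to Canceled when the token fires.
        Task cancellation = Task.Delay(Timeout.Infinite, cts.Token);

        Task completed = await Task.WhenAny(worker, cancellation);
        if (completed == worker)
        {
            Console.WriteLine($"Worker finished with code {await worker}.");
        }
        else
        {
            Console.WriteLine("Cancellation fired before the worker finished.");
        }
    }
}

Task.WhenAny itself never throws; the caller inspects which task won the race before awaiting it for a result or an exception.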
public async Task RenewJobRequestAsync(int poolId, long requestId, Guid lockToken, TaskCompletionSource<int> firstJobRequestRenewed, CancellationToken token)
public async Task RenewJobRequestAsync(int poolId, long requestId, Guid lockToken, string orchestrationId, TaskCompletionSource<int> firstJobRequestRenewed, CancellationToken token)
{
var runnerServer = HostContext.GetService<IRunnerServer>();
TaskAgentJobRequest request = null;
@@ -609,7 +636,7 @@ namespace GitHub.Runner.Listener
{
try
{
request = await runnerServer.RenewAgentRequestAsync(poolId, requestId, lockToken, token);
request = await runnerServer.RenewAgentRequestAsync(poolId, requestId, lockToken, orchestrationId, token);
Trace.Info($"Successfully renew job request {requestId}, job is valid till {request.LockedUntil.Value}");
@@ -831,7 +858,6 @@ namespace GitHub.Runner.Listener
}
}
// TODO: We need send detailInfo back to DT in order to add an issue for the job
private async Task CompleteJobRequestAsync(int poolId, Pipelines.AgentJobRequestMessage message, Guid lockToken, TaskResult result, string detailInfo = null)
{
Trace.Entering();
@@ -925,8 +951,10 @@ namespace GitHub.Runner.Listener
ArgUtil.NotNull(timeline, nameof(timeline));
TimelineRecord jobRecord = timeline.Records.FirstOrDefault(x => x.Id == message.JobId && x.RecordType == "Job");
ArgUtil.NotNull(jobRecord, nameof(jobRecord));
var unhandledExceptionIssue = new Issue() { Type = IssueType.Error, Message = errorMessage };
unhandledExceptionIssue.Data[Constants.Runner.InternalTelemetryIssueDataKey] = Constants.Runner.WorkerCrash;
jobRecord.ErrorCount++;
jobRecord.Issues.Add(new Issue() { Type = IssueType.Error, Message = errorMessage });
jobRecord.Issues.Add(unhandledExceptionIssue);
await jobServer.UpdateTimelineRecordsAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, new TimelineRecord[] { jobRecord }, CancellationToken.None);
}
catch (Exception ex)

View File

@@ -83,7 +83,7 @@ namespace GitHub.Runner.Listener
Trace.Info("Connecting to the Runner Server...");
await _runnerServer.ConnectAsync(new Uri(serverUrl), creds);
Trace.Info("VssConnection created");
_term.WriteLine();
_term.WriteSuccessMessage("Connected to GitHub");
_term.WriteLine();
@@ -118,6 +118,20 @@ namespace GitHub.Runner.Listener
Trace.Error("Catch exception during create session.");
Trace.Error(ex);
if (ex is VssOAuthTokenRequestException && creds.Federated is VssOAuthCredential vssOAuthCred)
{
// Check whether we get 401 because the runner registration already removed by the service.
// If the runner registration get deleted, we can't exchange oauth token.
Trace.Error("Test oauth app registration.");
var oauthTokenProvider = new VssOAuthTokenProvider(vssOAuthCred, new Uri(serverUrl));
var authError = await oauthTokenProvider.ValidateCredentialAsync(token);
if (string.Equals(authError, "invalid_client", StringComparison.OrdinalIgnoreCase))
{
_term.WriteError("Failed to create a session. The runner registration has been deleted from the server, please re-configure.");
return false;
}
}
if (!IsSessionCreationExceptionRetriable(ex))
{
_term.WriteError($"Failed to create session. {ex.Message}");

View File

@@ -102,7 +102,9 @@ namespace GitHub.Runner.Listener
IRunner runner = context.GetService<IRunner>();
try
{
return await runner.ExecuteCommand(command);
var returnCode = await runner.ExecuteCommand(command);
trace.Info($"Runner execution has finished with return code {returnCode}");
return returnCode;
}
catch (OperationCanceledException) when (context.RunnerShutdownToken.IsCancellationRequested)
{

View File

@@ -37,7 +37,7 @@ namespace GitHub.Runner.Listener
{
try
{
VssUtil.InitializeVssClientSettings(HostContext.UserAgent, HostContext.WebProxy);
VssUtil.InitializeVssClientSettings(HostContext.UserAgents, HostContext.WebProxy);
_inConfigStage = true;
_completedCommand.Reset();
@@ -466,6 +466,7 @@ Config Options:
--url string Repository to add the runner to. Required if unattended
--token string Registration token. Required if unattended
--name string Name of the runner to configure (default {Environment.MachineName ?? "myrunner"})
--labels string Extra labels in addition to the default: 'self-hosted,{Constants.Runner.Platform},{Constants.Runner.PlatformArchitecture}'
--work string Relative runner work directory (default {Constants.Path.WorkDirectory})
--replace Replace any existing runner with the same name (default false)");
#if OS_WINDOWS
@@ -478,7 +479,9 @@ Examples:
Configure a runner non-interactively:
.{separator}config.{ext} --unattended --url <url> --token <token>
Configure a runner non-interactively, replacing any existing runner with the same name:
.{separator}config.{ext} --unattended --url <url> --token <token> --replace [--name <name>]");
.{separator}config.{ext} --unattended --url <url> --token <token> --replace [--name <name>]
Configure a runner non-interactively with three extra labels:
.{separator}config.{ext} --unattended --url <url> --token <token> --labels L1,L2,L3");
#if OS_WINDOWS
_term.WriteLine($@" Configure a runner to run as a service:");
_term.WriteLine($@" .{separator}config.{ext} --url <url> --token <token> --runasservice");

View File

@@ -17,6 +17,7 @@ namespace GitHub.Runner.Listener
[ServiceLocator(Default = typeof(SelfUpdater))]
public interface ISelfUpdater : IRunnerService
{
bool Busy { get; }
Task<bool> SelfUpdate(AgentRefreshMessage updateMessage, IJobDispatcher jobDispatcher, bool restartInteractiveRunner, CancellationToken token);
}
@@ -31,6 +32,8 @@ namespace GitHub.Runner.Listener
private int _poolId;
private int _agentId;
public bool Busy { get; private set; }
public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);
@@ -45,52 +48,60 @@ namespace GitHub.Runner.Listener
public async Task<bool> SelfUpdate(AgentRefreshMessage updateMessage, IJobDispatcher jobDispatcher, bool restartInteractiveRunner, CancellationToken token)
{
Busy = true;
try
{
    if (!await UpdateNeeded(updateMessage.TargetVersion, token))
    {
        Trace.Info($"Can't find available update package.");
        return false;
    }
    Trace.Info($"An update is available.");
    // Print console line that warn user not shutdown runner.
    await UpdateRunnerUpdateStateAsync("Runner update in progress, do not shutdown runner.");
    await UpdateRunnerUpdateStateAsync($"Downloading {_targetPackage.Version} runner");
    await DownloadLatestRunner(token);
    Trace.Info($"Download latest runner and unzip into runner root.");
    // wait till all running job finish
    await UpdateRunnerUpdateStateAsync("Waiting for current job finish running.");
    await jobDispatcher.WaitAsync(token);
    Trace.Info($"All running job has exited.");
    // delete runner backup
    DeletePreviousVersionRunnerBackup(token);
    Trace.Info($"Delete old version runner backup.");
    // generate update script from template
    await UpdateRunnerUpdateStateAsync("Generate and execute update script.");
    string updateScript = GenerateUpdateScript(restartInteractiveRunner);
    Trace.Info($"Generate update script into: {updateScript}");
    // kick off update script
    Process invokeScript = new Process();
#if OS_WINDOWS
    invokeScript.StartInfo.FileName = WhichUtil.Which("cmd.exe", trace: Trace);
    invokeScript.StartInfo.Arguments = $"/c \"{updateScript}\"";
#elif (OS_OSX || OS_LINUX)
    invokeScript.StartInfo.FileName = WhichUtil.Which("bash", trace: Trace);
    invokeScript.StartInfo.Arguments = $"\"{updateScript}\"";
#endif
    invokeScript.Start();
    Trace.Info($"Update script start running");
    await UpdateRunnerUpdateStateAsync("Runner will exit shortly for update, should back online within 10 seconds.");
    return true;
}
finally
{
    Busy = false;
}
}
private async Task<bool> UpdateNeeded(string targetVersion, CancellationToken token)

View File

@@ -80,7 +80,12 @@ namespace GitHub.Runner.Plugins.Repository.v1_0
// Validate args.
ArgUtil.NotNull(executionContext, nameof(executionContext));
executionContext.Output($"Syncing repository: {repoFullName}");
Uri repositoryUrl = new Uri($"https://github.com/{repoFullName}");
// Repository URL
var githubUrl = executionContext.GetGitHubContext("server_url");
var githubUri = new Uri(!string.IsNullOrEmpty(githubUrl) ? githubUrl : "https://github.com");
var portInfo = githubUri.IsDefaultPort ? string.Empty : $":{githubUri.Port}";
Uri repositoryUrl = new Uri($"{githubUri.Scheme}://{githubUri.Host}{portInfo}/{repoFullName}");
if (!repositoryUrl.IsAbsoluteUri)
{
throw new InvalidOperationException("Repository url need to be an absolute uri.");

View File

@@ -271,6 +271,14 @@ namespace GitHub.Runner.Sdk
// Indicate GitHub Actions process.
_proc.StartInfo.Environment["GITHUB_ACTIONS"] = "true";
// Set CI=true when no one else already set it.
// CI=true is common set in most CI provider in GitHub
if (!_proc.StartInfo.Environment.ContainsKey("CI") &&
Environment.GetEnvironmentVariable("CI") == null)
{
_proc.StartInfo.Environment["CI"] = "true";
}
// Hook up the events.
_proc.EnableRaisingEvents = true;
_proc.Exited += ProcessExitedHandler;
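The change above only defaults CI=true when neither the child's environment block nor the current process already defines CI. A rough standalone sketch of that guard (using System.Diagnostics directly rather than the runner's ProcessInvoker; printenv is just a convenient Unix command for the demo) could be:

using System;
using System.Diagnostics;

class CiEnvSketch
{
    static void Main()
    {
        var psi = new ProcessStartInfo("printenv")
        {
            UseShellExecute = false,
            RedirectStandardOutput = true,
        };

        // Mark the child as running under GitHub Actions.
        psi.Environment["GITHUB_ACTIONS"] = "true";

        // Only default CI=true when neither the child's environment block
        // (pre-populated from the current process) nor the current process defines CI.
        if (!psi.Environment.ContainsKey("CI") &&
            Environment.GetEnvironmentVariable("CI") == null)
        {
            psi.Environment["CI"] = "true";
        }

        using var proc = Process.Start(psi);
        Console.WriteLine(proc.StandardOutput.ReadToEnd());
        proc.WaitForExit();
    }
}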
@@ -310,7 +318,12 @@ namespace GitHub.Runner.Sdk
}
}
using (var registration = cancellationToken.Register(async () => await CancelAndKillProcessTree(killProcessOnCancel)))
var cancellationFinished = new TaskCompletionSource<bool>();
using (var registration = cancellationToken.Register(async () =>
{
await CancelAndKillProcessTree(killProcessOnCancel);
cancellationFinished.TrySetResult(true);
}))
{
Trace.Info($"Process started with process id {_proc.Id}, waiting for process exit.");
while (true)
@@ -333,6 +346,13 @@ namespace GitHub.Runner.Sdk
// data buffers one last time before returning
ProcessOutput();
if (cancellationToken.IsCancellationRequested)
{
// Ensure cancellation also finish on the cancellationToken.Register thread.
await cancellationFinished.Task;
Trace.Info($"Process Cancellation finished.");
}
Trace.Info($"Finished process {_proc.Id} with exit code {_proc.ExitCode}, and elapsed time {_stopWatch.Elapsed}.");
}

View File

@@ -14,10 +14,10 @@ namespace GitHub.Runner.Sdk
{
public static class VssUtil
{
public static void InitializeVssClientSettings(ProductInfoHeaderValue additionalUserAgent, IWebProxy proxy)
public static void InitializeVssClientSettings(List<ProductInfoHeaderValue> additionalUserAgents, IWebProxy proxy)
{
var headerValues = new List<ProductInfoHeaderValue>();
headerValues.Add(additionalUserAgent);
headerValues.AddRange(additionalUserAgents);
headerValues.Add(new ProductInfoHeaderValue($"({RuntimeInformation.OSDescription.Trim()})"));
if (VssClientHttpRequestSettings.Default.UserAgent != null && VssClientHttpRequestSettings.Default.UserAgent.Count > 0)

View File

@@ -11,6 +11,11 @@ namespace GitHub.Runner.Sdk
{
ArgUtil.NotNullOrEmpty(command, nameof(command));
trace?.Info($"Which: '{command}'");
if (Path.IsPathFullyQualified(command) && File.Exists(command))
{
trace?.Info($"Fully qualified path: '{command}'");
return command;
}
string path = Environment.GetEnvironmentVariable(PathUtil.PathVariable);
if (string.IsNullOrEmpty(path))
{
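The new check above lets a fully qualified path to an existing file bypass the PATH search entirely. A simplified, self-contained version of such a lookup (ignoring Windows PATHEXT handling and other details of the real WhichUtil) might be:

using System;
using System.IO;
using System.Linq;

class WhichSketch
{
    // Returns the command itself when it is already a fully qualified path to an
    // existing file; otherwise searches the directories listed in PATH.
    static string Which(string command)
    {
        if (Path.IsPathFullyQualified(command) && File.Exists(command))
        {
            return command;
        }

        var path = Environment.GetEnvironmentVariable("PATH") ?? string.Empty;
        return path.Split(new[] { Path.PathSeparator }, StringSplitOptions.RemoveEmptyEntries)
                   .Select(dir => Path.Combine(dir, command))
                   .FirstOrDefault(File.Exists);
    }

    static void Main()
    {
        // Paths below assume a Unix-like machine.
        Console.WriteLine(Which("/bin/sh") ?? "not found"); // short-circuits on the absolute path
        Console.WriteLine(Which("sh") ?? "not found");      // falls back to a PATH search
    }
}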

View File

@@ -486,7 +486,10 @@ namespace GitHub.Runner.Worker
foreach (var property in command.Properties)
{
issue.Data[property.Key] = property.Value;
if (!string.Equals(property.Key, Constants.Runner.InternalTelemetryIssueDataKey, StringComparison.OrdinalIgnoreCase))
{
issue.Data[property.Key] = property.Value;
}
}
context.AddIssue(issue);

View File

@@ -1,31 +1,43 @@
using System;
using System;
using System.Collections.Generic;
using System.IO;
using System.IO.Compression;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using System.Net;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using GitHub.Runner.Worker.Container;
using GitHub.Services.Common;
using Newtonsoft.Json;
using WebApi = GitHub.DistributedTask.WebApi;
using Pipelines = GitHub.DistributedTask.Pipelines;
using PipelineTemplateConstants = GitHub.DistributedTask.Pipelines.ObjectTemplating.PipelineTemplateConstants;
namespace GitHub.Runner.Worker
{
public class PrepareResult
{
public PrepareResult(List<JobExtensionRunner> containerSetupSteps, Dictionary<Guid, IActionRunner> preStepTracker)
{
this.ContainerSetupSteps = containerSetupSteps;
this.PreStepTracker = preStepTracker;
}
public List<JobExtensionRunner> ContainerSetupSteps { get; set; }
public Dictionary<Guid, IActionRunner> PreStepTracker { get; set; }
}
[ServiceLocator(Default = typeof(ActionManager))]
public interface IActionManager : IRunnerService
{
Dictionary<Guid, ContainerInfo> CachedActionContainers { get; }
Task<List<JobExtensionRunner>> PrepareActionsAsync(IExecutionContext executionContext, IEnumerable<Pipelines.JobStep> steps);
Task<PrepareResult> PrepareActionsAsync(IExecutionContext executionContext, IEnumerable<Pipelines.JobStep> steps);
Definition LoadAction(IExecutionContext executionContext, Pipelines.ActionStep action);
}
@@ -35,11 +47,11 @@ namespace GitHub.Runner.Worker
//81920 is the default used by System.IO.Stream.CopyTo and is under the large object heap threshold (85k).
private const int _defaultCopyBufferSize = 81920;
private const string _dotcomApiUrl = "https://api.github.com";
private readonly Dictionary<Guid, ContainerInfo> _cachedActionContainers = new Dictionary<Guid, ContainerInfo>();
public Dictionary<Guid, ContainerInfo> CachedActionContainers => _cachedActionContainers;
public async Task<List<JobExtensionRunner>> PrepareActionsAsync(IExecutionContext executionContext, IEnumerable<Pipelines.JobStep> steps)
public async Task<PrepareResult> PrepareActionsAsync(IExecutionContext executionContext, IEnumerable<Pipelines.JobStep> steps)
{
ArgUtil.NotNull(executionContext, nameof(executionContext));
ArgUtil.NotNull(steps, nameof(steps));
@@ -49,18 +61,24 @@ namespace GitHub.Runner.Worker
Dictionary<string, List<Guid>> imagesToBuild = new Dictionary<string, List<Guid>>(StringComparer.OrdinalIgnoreCase);
Dictionary<string, ActionContainer> imagesToBuildInfo = new Dictionary<string, ActionContainer>(StringComparer.OrdinalIgnoreCase);
List<JobExtensionRunner> containerSetupSteps = new List<JobExtensionRunner>();
Dictionary<Guid, IActionRunner> preStepTracker = new Dictionary<Guid, IActionRunner>();
IEnumerable<Pipelines.ActionStep> actions = steps.OfType<Pipelines.ActionStep>();
// TODO: Depreciate the PREVIEW_ACTION_TOKEN
// TODO: Deprecate the PREVIEW_ACTION_TOKEN
// Log even if we aren't using it to ensure users know.
if (!string.IsNullOrEmpty(executionContext.Variables.Get("PREVIEW_ACTION_TOKEN")))
{
executionContext.Warning("The 'PREVIEW_ACTION_TOKEN' secret is depreciated. Please remove it from the repository's secrets");
executionContext.Warning("The 'PREVIEW_ACTION_TOKEN' secret is deprecated. Please remove it from the repository's secrets");
}
// Clear the cache (local runner)
// Clear the cache (for self-hosted runners)
IOUtil.DeleteDirectory(HostContext.GetDirectory(WellKnownDirectory.Actions), executionContext.CancellationToken);
// todo: Remove when feature flag DistributedTask.NewActionMetadata is removed
var newActionMetadata = executionContext.Variables.GetBoolean("DistributedTask.NewActionMetadata") ?? false;
var repositoryActions = new List<Pipelines.ActionStep>();
foreach (var action in actions)
{
if (action.Reference.Type == Pipelines.ActionSourceType.ContainerRegistry)
@@ -78,7 +96,8 @@ namespace GitHub.Runner.Worker
Trace.Info($"Action {action.Name} ({action.Id}) needs to pull image '{containerReference.Image}'");
imagesToPull[containerReference.Image].Add(action.Id);
}
else if (action.Reference.Type == Pipelines.ActionSourceType.Repository)
// todo: Remove when feature flag DistributedTask.NewActionMetadata is removed
else if (action.Reference.Type == Pipelines.ActionSourceType.Repository && !newActionMetadata)
{
// only download the repository archive
await DownloadRepositoryActionAsync(executionContext, action);
@@ -111,6 +130,97 @@ namespace GitHub.Runner.Worker
imagesToBuildInfo[setupInfo.ActionRepository] = setupInfo;
}
}
var repoAction = action.Reference as Pipelines.RepositoryPathReference;
if (repoAction.RepositoryType != Pipelines.PipelineConstants.SelfAlias)
{
var definition = LoadAction(executionContext, action);
if (definition.Data.Execution.HasPre)
{
var actionRunner = HostContext.CreateService<IActionRunner>();
actionRunner.Action = action;
actionRunner.Stage = ActionRunStage.Pre;
actionRunner.Condition = definition.Data.Execution.InitCondition;
Trace.Info($"Add 'pre' execution for {action.Id}");
preStepTracker[action.Id] = actionRunner;
}
}
}
else if (action.Reference.Type == Pipelines.ActionSourceType.Repository && newActionMetadata)
{
repositoryActions.Add(action);
}
}
if (repositoryActions.Count > 0)
{
// Get the download info
var downloadInfos = await GetDownloadInfoAsync(executionContext, repositoryActions);
// Download each action
foreach (var action in repositoryActions)
{
var lookupKey = GetDownloadInfoLookupKey(action);
if (string.IsNullOrEmpty(lookupKey))
{
continue;
}
if (!downloadInfos.TryGetValue(lookupKey, out var downloadInfo))
{
throw new Exception($"Missing download info for {lookupKey}");
}
await DownloadRepositoryActionAsync(executionContext, downloadInfo);
}
// More preparation based on content in the repository (action.yml)
foreach (var action in repositoryActions)
{
var setupInfo = PrepareRepositoryActionAsync(executionContext, action);
if (setupInfo != null)
{
if (!string.IsNullOrEmpty(setupInfo.Image))
{
if (!imagesToPull.ContainsKey(setupInfo.Image))
{
imagesToPull[setupInfo.Image] = new List<Guid>();
}
Trace.Info($"Action {action.Name} ({action.Id}) from repository '{setupInfo.ActionRepository}' needs to pull image '{setupInfo.Image}'");
imagesToPull[setupInfo.Image].Add(action.Id);
}
else
{
ArgUtil.NotNullOrEmpty(setupInfo.ActionRepository, nameof(setupInfo.ActionRepository));
if (!imagesToBuild.ContainsKey(setupInfo.ActionRepository))
{
imagesToBuild[setupInfo.ActionRepository] = new List<Guid>();
}
Trace.Info($"Action {action.Name} ({action.Id}) from repository '{setupInfo.ActionRepository}' needs to build image '{setupInfo.Dockerfile}'");
imagesToBuild[setupInfo.ActionRepository].Add(action.Id);
imagesToBuildInfo[setupInfo.ActionRepository] = setupInfo;
}
}
var repoAction = action.Reference as Pipelines.RepositoryPathReference;
if (repoAction.RepositoryType != Pipelines.PipelineConstants.SelfAlias)
{
var definition = LoadAction(executionContext, action);
if (definition.Data.Execution.HasPre)
{
var actionRunner = HostContext.CreateService<IActionRunner>();
actionRunner.Action = action;
actionRunner.Stage = ActionRunStage.Pre;
actionRunner.Condition = definition.Data.Execution.InitCondition;
Trace.Info($"Add 'pre' execution for {action.Id}");
preStepTracker[action.Id] = actionRunner;
}
}
}
}
@@ -147,7 +257,7 @@ namespace GitHub.Runner.Worker
}
#endif
return containerSetupSteps;
return new PrepareResult(containerSetupSteps, preStepTracker);
}
public Definition LoadAction(IExecutionContext executionContext, Pipelines.ActionStep action)
@@ -239,14 +349,19 @@ namespace GitHub.Runner.Worker
Trace.Info($"Action container env: {StringUtil.ConvertToJson(containerAction.Environment)}.");
}
if (!string.IsNullOrEmpty(containerAction.Pre))
{
Trace.Info($"Action container pre entrypoint: {containerAction.Pre}.");
}
if (!string.IsNullOrEmpty(containerAction.EntryPoint))
{
Trace.Info($"Action container entrypoint: {containerAction.EntryPoint}.");
}
if (!string.IsNullOrEmpty(containerAction.Cleanup))
if (!string.IsNullOrEmpty(containerAction.Post))
{
Trace.Info($"Action container cleanup entrypoint: {containerAction.Cleanup}.");
Trace.Info($"Action container post entrypoint: {containerAction.Post}.");
}
if (CachedActionContainers.TryGetValue(action.Id, out var container))
@@ -258,8 +373,9 @@ namespace GitHub.Runner.Worker
else if (definition.Data.Execution.ExecutionType == ActionExecutionType.NodeJS)
{
var nodeAction = definition.Data.Execution as NodeJSActionExecutionData;
Trace.Info($"Action pre node.js file: {nodeAction.Pre ?? "N/A"}.");
Trace.Info($"Action node.js file: {nodeAction.Script}.");
Trace.Info($"Action cleanup node.js file: {nodeAction.Cleanup ?? "N/A"}.");
Trace.Info($"Action post node.js file: {nodeAction.Post ?? "N/A"}.");
}
else if (definition.Data.Execution.ExecutionType == ActionExecutionType.Plugin)
{
@@ -275,10 +391,16 @@ namespace GitHub.Runner.Worker
if (!string.IsNullOrEmpty(plugin.PostPluginTypeName))
{
pluginAction.Cleanup = plugin.PostPluginTypeName;
pluginAction.Post = plugin.PostPluginTypeName;
Trace.Info($"Action cleanup plugin: {plugin.PluginTypeName}.");
}
}
else if (definition.Data.Execution.ExecutionType == ActionExecutionType.Composite && !string.IsNullOrEmpty(Environment.GetEnvironmentVariable("TESTING_COMPOSITE_ACTIONS_ALPHA")))
{
var compositeAction = definition.Data.Execution as CompositeActionExecutionData;
Trace.Info($"Load {compositeAction.Steps.Count} action steps.");
Trace.Verbose($"Details: {StringUtil.ConvertToJson(compositeAction.Steps)}");
}
else
{
throw new NotSupportedException(definition.Data.Execution.ExecutionType.ToString());
@@ -396,7 +518,12 @@ namespace GitHub.Runner.Worker
var imageName = $"{dockerManger.DockerInstanceLabel}:{Guid.NewGuid().ToString("N")}";
while (retryCount < 3)
{
buildExitCode = await dockerManger.DockerBuild(executionContext, setupInfo.Container.WorkingDirectory, Directory.GetParent(setupInfo.Container.Dockerfile).FullName, imageName);
buildExitCode = await dockerManger.DockerBuild(
executionContext,
setupInfo.Container.WorkingDirectory,
setupInfo.Container.Dockerfile,
Directory.GetParent(setupInfo.Container.Dockerfile).FullName,
imageName);
if (buildExitCode == 0)
{
break;
@@ -425,6 +552,80 @@ namespace GitHub.Runner.Worker
}
}
// This implementation is temporary and will be replaced with a REST API call to the service to resolve
private async Task<IDictionary<string, WebApi.ActionDownloadInfo>> GetDownloadInfoAsync(IExecutionContext executionContext, List<Pipelines.ActionStep> actions)
{
executionContext.Output("Getting action download info");
// Convert to action reference
var actionReferences = actions
.GroupBy(x => GetDownloadInfoLookupKey(x))
.Where(x => !string.IsNullOrEmpty(x.Key))
.Select(x =>
{
var action = x.First();
var repositoryReference = action.Reference as Pipelines.RepositoryPathReference;
ArgUtil.NotNull(repositoryReference, nameof(repositoryReference));
return new WebApi.ActionReference
{
NameWithOwner = repositoryReference.Name,
Ref = repositoryReference.Ref,
};
})
.ToList();
// Nothing to resolve?
if (actionReferences.Count == 0)
{
return new Dictionary<string, WebApi.ActionDownloadInfo>();
}
// Resolve download info
var jobServer = HostContext.GetService<IJobServer>();
var actionDownloadInfos = default(WebApi.ActionDownloadInfoCollection);
for (var attempt = 1; attempt <= 3; attempt++)
{
try
{
actionDownloadInfos = await jobServer.ResolveActionDownloadInfoAsync(executionContext.Plan.ScopeIdentifier, executionContext.Plan.PlanType, executionContext.Plan.PlanId, new WebApi.ActionReferenceList { Actions = actionReferences }, executionContext.CancellationToken);
break;
}
catch (Exception ex) when (attempt < 3)
{
executionContext.Output($"Failed to resolve action download info. Error: {ex.Message}");
executionContext.Debug(ex.ToString());
if (String.IsNullOrEmpty(Environment.GetEnvironmentVariable("_GITHUB_ACTION_DOWNLOAD_NO_BACKOFF")))
{
var backoff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(10), TimeSpan.FromSeconds(30));
executionContext.Output($"Retrying in {backoff.TotalSeconds} seconds");
await Task.Delay(backoff);
}
}
}
ArgUtil.NotNull(actionDownloadInfos, nameof(actionDownloadInfos));
ArgUtil.NotNull(actionDownloadInfos.Actions, nameof(actionDownloadInfos.Actions));
var apiUrl = GetApiUrl(executionContext);
var defaultAccessToken = executionContext.GetGitHubContext("token");
var configurationStore = HostContext.GetService<IConfigurationStore>();
var runnerSettings = configurationStore.GetSettings();
foreach (var actionDownloadInfo in actionDownloadInfos.Actions.Values)
{
// Add secret
HostContext.SecretMasker.AddValue(actionDownloadInfo.Authentication?.Token);
// Default auth token
if (string.IsNullOrEmpty(actionDownloadInfo.Authentication?.Token))
{
actionDownloadInfo.Authentication = new WebApi.ActionDownloadAuthentication { Token = defaultAccessToken };
}
}
return actionDownloadInfos.Actions;
}
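The loop above retries the download-info resolution up to three times, sleeping a random 10 to 30 second backoff between attempts unless the no-backoff environment variable is set. Assuming BackoffTimerHelper.GetRandomBackoff simply picks a random interval in that range, a generic standalone sketch of the same retry shape could be:

using System;
using System.Threading.Tasks;

class RetryBackoffSketch
{
    static readonly Random _random = new Random();

    // Retries an operation up to maxAttempts times, sleeping a random interval
    // between minBackoff and maxBackoff after every failed attempt except the last.
    static async Task<T> WithRetryAsync<T>(Func<Task<T>> operation, int maxAttempts, TimeSpan minBackoff, TimeSpan maxBackoff)
    {
        for (var attempt = 1; ; attempt++)
        {
            try
            {
                return await operation();
            }
            catch (Exception ex) when (attempt < maxAttempts)
            {
                var backoff = TimeSpan.FromMilliseconds(
                    _random.Next((int)minBackoff.TotalMilliseconds, (int)maxBackoff.TotalMilliseconds));
                Console.WriteLine($"Attempt {attempt} failed ({ex.Message}); retrying in {backoff.TotalSeconds:F1}s");
                await Task.Delay(backoff);
            }
        }
    }

    static async Task Main()
    {
        var value = await WithRetryAsync(
            () => Task.FromResult(42),
            maxAttempts: 3,
            minBackoff: TimeSpan.FromSeconds(10),
            maxBackoff: TimeSpan.FromSeconds(30));
        Console.WriteLine(value);
    }
}

On the final attempt the exception filter no longer matches, so the original exception surfaces to the caller.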
// todo: Remove when feature flag DistributedTask.NewActionMetadata is removed
private async Task DownloadRepositoryActionAsync(IExecutionContext executionContext, Pipelines.ActionStep repositoryAction)
{
Trace.Entering();
@@ -448,7 +649,8 @@ namespace GitHub.Runner.Worker
ArgUtil.NotNullOrEmpty(repositoryReference.Ref, nameof(repositoryReference.Ref));
string destDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Actions), repositoryReference.Name.Replace(Path.AltDirectorySeparatorChar, Path.DirectorySeparatorChar), repositoryReference.Ref);
if (File.Exists(destDirectory + ".completed"))
string watermarkFile = GetWatermarkFilePath(destDirectory);
if (File.Exists(watermarkFile))
{
executionContext.Debug($"Action '{repositoryReference.Name}@{repositoryReference.Ref}' already downloaded at '{destDirectory}'.");
return;
@@ -461,27 +663,116 @@ namespace GitHub.Runner.Worker
executionContext.Output($"Download action repository '{repositoryReference.Name}@{repositoryReference.Ref}'");
}
#if OS_WINDOWS
string archiveLink = $"https://api.github.com/repos/{repositoryReference.Name}/zipball/{repositoryReference.Ref}";
#else
string archiveLink = $"https://api.github.com/repos/{repositoryReference.Name}/tarball/{repositoryReference.Ref}";
#endif
Trace.Info($"Download archive '{archiveLink}' to '{destDirectory}'.");
var configurationStore = HostContext.GetService<IConfigurationStore>();
var isHostedServer = configurationStore.GetSettings().IsHostedServer;
if (isHostedServer)
{
string apiUrl = GetApiUrl(executionContext);
string archiveLink = BuildLinkToActionArchive(apiUrl, repositoryReference.Name, repositoryReference.Ref);
var downloadDetails = new ActionDownloadDetails(archiveLink, ConfigureAuthorizationFromContext);
await DownloadRepositoryActionAsync(executionContext, downloadDetails, null, destDirectory);
return;
}
else
{
string apiUrl = GetApiUrl(executionContext);
// URLs to try:
var downloadAttempts = new List<ActionDownloadDetails> {
// A built-in action or an action the user has created, on their GHES instance
// Example: https://my-ghes/api/v3/repos/my-org/my-action/tarball/v1
new ActionDownloadDetails(
BuildLinkToActionArchive(apiUrl, repositoryReference.Name, repositoryReference.Ref),
ConfigureAuthorizationFromContext),
// The same action, on GitHub.com
// Example: https://api.github.com/repos/my-org/my-action/tarball/v1
new ActionDownloadDetails(
BuildLinkToActionArchive(_dotcomApiUrl, repositoryReference.Name, repositoryReference.Ref),
configureAuthorization: (e,h) => { /* no authorization for dotcom */ })
};
foreach (var downloadAttempt in downloadAttempts)
{
try
{
await DownloadRepositoryActionAsync(executionContext, downloadAttempt, null, destDirectory);
return;
}
catch (ActionNotFoundException)
{
Trace.Info($"Failed to find the action '{repositoryReference.Name}' at ref '{repositoryReference.Ref}' at {downloadAttempt.ArchiveLink}");
continue;
}
}
throw new ActionNotFoundException($"Failed to find the action '{repositoryReference.Name}' at ref '{repositoryReference.Ref}'. Paths attempted: {string.Join(", ", downloadAttempts.Select(d => d.ArchiveLink))}");
}
}
private async Task DownloadRepositoryActionAsync(IExecutionContext executionContext, WebApi.ActionDownloadInfo downloadInfo)
{
Trace.Entering();
ArgUtil.NotNull(executionContext, nameof(executionContext));
ArgUtil.NotNull(downloadInfo, nameof(downloadInfo));
ArgUtil.NotNullOrEmpty(downloadInfo.NameWithOwner, nameof(downloadInfo.NameWithOwner));
ArgUtil.NotNullOrEmpty(downloadInfo.Ref, nameof(downloadInfo.Ref));
string destDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Actions), downloadInfo.NameWithOwner.Replace(Path.AltDirectorySeparatorChar, Path.DirectorySeparatorChar), downloadInfo.Ref);
string watermarkFile = GetWatermarkFilePath(destDirectory);
if (File.Exists(watermarkFile))
{
executionContext.Debug($"Action '{downloadInfo.NameWithOwner}@{downloadInfo.Ref}' already downloaded at '{destDirectory}'.");
return;
}
else
{
// make sure we get a clean folder ready to use.
IOUtil.DeleteDirectory(destDirectory, executionContext.CancellationToken);
Directory.CreateDirectory(destDirectory);
executionContext.Output($"Download action repository '{downloadInfo.NameWithOwner}@{downloadInfo.Ref}'");
}
await DownloadRepositoryActionAsync(executionContext, null, downloadInfo, destDirectory);
}
private string GetApiUrl(IExecutionContext executionContext)
{
string apiUrl = executionContext.GetGitHubContext("api_url");
if (!string.IsNullOrEmpty(apiUrl))
{
return apiUrl;
}
// Once the api_url is set for hosted, we can remove this fallback (it doesn't make sense for GHES)
return _dotcomApiUrl;
}
private static string BuildLinkToActionArchive(string apiUrl, string repository, string @ref)
{
#if OS_WINDOWS
return $"{apiUrl}/repos/{repository}/zipball/{@ref}";
#else
return $"{apiUrl}/repos/{repository}/tarball/{@ref}";
#endif
}
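BuildLinkToActionArchive above picks zipball on Windows and tarball elsewhere at compile time via #if. For illustration only, the same construction with the platform switch turned into a runtime flag (the GHES API URL below is made up) produces URLs like these:

using System;

class ArchiveLinkSketch
{
    static string BuildLinkToActionArchive(string apiUrl, string repository, string @ref, bool windows)
        => windows
            ? $"{apiUrl}/repos/{repository}/zipball/{@ref}"
            : $"{apiUrl}/repos/{repository}/tarball/{@ref}";

    static void Main()
    {
        // Dotcom: https://api.github.com/repos/actions/checkout/tarball/v2
        Console.WriteLine(BuildLinkToActionArchive("https://api.github.com", "actions/checkout", "v2", windows: false));

        // GHES (made-up host): https://ghes.example.com/api/v3/repos/actions/checkout/zipball/v2
        Console.WriteLine(BuildLinkToActionArchive("https://ghes.example.com/api/v3", "actions/checkout", "v2", windows: true));
    }
}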
// todo: Remove the parameter "actionDownloadDetails" when feature flag DistributedTask.NewActionMetadata is removed
private async Task DownloadRepositoryActionAsync(IExecutionContext executionContext, ActionDownloadDetails actionDownloadDetails, WebApi.ActionDownloadInfo downloadInfo, string destDirectory)
{
//download and extract action in a temp folder and rename it on success
string tempDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Actions), "_temp_" + Guid.NewGuid());
Directory.CreateDirectory(tempDirectory);
#if OS_WINDOWS
string archiveFile = Path.Combine(tempDirectory, $"{Guid.NewGuid()}.zip");
string link = downloadInfo?.ZipballUrl ?? actionDownloadDetails.ArchiveLink;
#else
string archiveFile = Path.Combine(tempDirectory, $"{Guid.NewGuid()}.tar.gz");
string link = downloadInfo?.TarballUrl ?? actionDownloadDetails.ArchiveLink;
#endif
Trace.Info($"Save archive '{archiveLink}' into {archiveFile}.");
Trace.Info($"Save archive '{link}' into {archiveFile}.");
try
{
int retryCount = 0;
// Allow up to 20 * 60s for any action to be downloaded from github graph.
@@ -498,55 +789,67 @@ namespace GitHub.Runner.Worker
using (var httpClientHandler = HostContext.CreateHttpClientHandler())
using (var httpClient = new HttpClient(httpClientHandler))
{
// Legacy
if (downloadInfo == null)
{
    actionDownloadDetails.ConfigureAuthorization(executionContext, httpClient);
}
// FF DistributedTask.NewActionMetadata
else
{
    httpClient.DefaultRequestHeaders.Authorization = CreateAuthHeader(downloadInfo.Authentication?.Token);
}
httpClient.DefaultRequestHeaders.UserAgent.Add(HostContext.UserAgent);
using (var result = await httpClient.GetStreamAsync(archiveLink))
httpClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents);
using (var response = await httpClient.GetAsync(link))
{
await result.CopyToAsync(fs, _defaultCopyBufferSize, actionDownloadCancellation.Token);
await fs.FlushAsync(actionDownloadCancellation.Token);
if (response.IsSuccessStatusCode)
{
using (var result = await response.Content.ReadAsStreamAsync())
{
await result.CopyToAsync(fs, _defaultCopyBufferSize, actionDownloadCancellation.Token);
await fs.FlushAsync(actionDownloadCancellation.Token);
// download succeed, break out the retry loop.
break;
// download succeed, break out the retry loop.
break;
}
}
else if (response.StatusCode == HttpStatusCode.NotFound)
{
// It doesn't make sense to retry in this case, so just stop
throw new ActionNotFoundException(new Uri(link));
}
else
{
// Something else bad happened, let's go to our retry logic
response.EnsureSuccessStatusCode();
}
}
}
}
catch (OperationCanceledException) when (executionContext.CancellationToken.IsCancellationRequested)
{
Trace.Info($"Action download has been cancelled.");
Trace.Info("Action download has been cancelled.");
throw;
}
catch (ActionNotFoundException)
{
Trace.Info($"The action at '{link}' does not exist");
throw;
}
catch (Exception ex) when (retryCount < 2)
{
retryCount++;
Trace.Error($"Fail to download archive '{archiveLink}' -- Attempt: {retryCount}");
Trace.Error($"Fail to download archive '{link}' -- Attempt: {retryCount}");
Trace.Error(ex);
if (actionDownloadTimeout.Token.IsCancellationRequested)
{
// action download didn't finish within timeout
executionContext.Warning($"Action '{archiveLink}' didn't finish download within {timeoutSeconds} seconds.");
executionContext.Warning($"Action '{link}' didn't finish download within {timeoutSeconds} seconds.");
}
else
{
executionContext.Warning($"Failed to download action '{archiveLink}'. Error {ex.Message}");
executionContext.Warning($"Failed to download action '{link}'. Error: {ex.Message}");
}
}
}
@@ -560,7 +863,7 @@ namespace GitHub.Runner.Worker
}
ArgUtil.NotNullOrEmpty(archiveFile, nameof(archiveFile));
executionContext.Debug($"Download '{archiveLink}' to '{archiveFile}'");
executionContext.Debug($"Download '{link}' to '{archiveFile}'");
var stagingDirectory = Path.Combine(tempDirectory, "_staging");
Directory.CreateDirectory(stagingDirectory);
@@ -610,7 +913,8 @@ namespace GitHub.Runner.Worker
}
Trace.Verbose("Create watermark file indicate action download succeed.");
File.WriteAllText(destDirectory + ".completed", DateTime.UtcNow.ToString());
string watermarkFile = GetWatermarkFilePath(destDirectory);
File.WriteAllText(watermarkFile, DateTime.UtcNow.ToString());
executionContext.Debug($"Archive '{archiveFile}' has been unzipped into '{destDirectory}'.");
Trace.Info("Finished getting action repository.");
@@ -634,6 +938,32 @@ namespace GitHub.Runner.Worker
}
}
// todo: Remove when feature flag DistributedTask.NewActionMetadata is removed
private void ConfigureAuthorizationFromContext(IExecutionContext executionContext, HttpClient httpClient)
{
var authToken = Environment.GetEnvironmentVariable("_GITHUB_ACTION_TOKEN");
if (string.IsNullOrEmpty(authToken))
{
// TODO: Deprecate the PREVIEW_ACTION_TOKEN
authToken = executionContext.Variables.Get("PREVIEW_ACTION_TOKEN");
}
if (!string.IsNullOrEmpty(authToken))
{
HostContext.SecretMasker.AddValue(authToken);
var base64EncodingToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"PAT:{authToken}"));
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Basic", base64EncodingToken);
}
else
{
var accessToken = executionContext.GetGitHubContext("token");
var base64EncodingToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"x-access-token:{accessToken}"));
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Basic", base64EncodingToken);
}
}
private string GetWatermarkFilePath(string directory) => directory + ".completed";
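The watermark convention above marks a finished download by writing a sibling "<directory>.completed" file, so a later run can skip the download entirely. A self-contained sketch of that idempotent download pattern (the download delegate below is a stand-in, not the runner's HTTP logic) might look like:

using System;
using System.IO;
using System.Threading.Tasks;

class WatermarkSketch
{
    static string GetWatermarkFilePath(string directory) => directory + ".completed";

    // Downloads into destDirectory only when no "<destDirectory>.completed" watermark exists,
    // and writes the watermark after the download delegate succeeds.
    static async Task EnsureDownloadedAsync(string destDirectory, Func<string, Task> downloadAsync)
    {
        var watermarkFile = GetWatermarkFilePath(destDirectory);
        if (File.Exists(watermarkFile))
        {
            Console.WriteLine($"Already downloaded at '{destDirectory}'.");
            return;
        }

        // Start from a clean folder so a half-finished previous attempt cannot leak in.
        if (Directory.Exists(destDirectory))
        {
            Directory.Delete(destDirectory, recursive: true);
        }
        Directory.CreateDirectory(destDirectory);

        await downloadAsync(destDirectory);

        // Only now mark the directory as complete.
        File.WriteAllText(watermarkFile, DateTime.UtcNow.ToString());
        Console.WriteLine($"Downloaded '{destDirectory}' and wrote '{watermarkFile}'.");
    }

    static async Task Main()
    {
        var dest = Path.Combine(Path.GetTempPath(), "watermark-sketch", "v1");
        Directory.CreateDirectory(Path.GetDirectoryName(dest));

        await EnsureDownloadedAsync(dest, _ => Task.CompletedTask); // downloads and writes the watermark
        await EnsureDownloadedAsync(dest, _ => Task.CompletedTask); // second call is a no-op
    }
}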
private ActionContainer PrepareRepositoryActionAsync(IExecutionContext executionContext, Pipelines.ActionStep repositoryAction)
{
var repositoryReference = repositoryAction.Reference as Pipelines.RepositoryPathReference;
@@ -714,6 +1044,11 @@ namespace GitHub.Runner.Worker
Trace.Info($"Action plugin: {(actionDefinitionData.Execution as PluginActionExecutionData).Plugin}, no more preparation.");
return null;
}
else if (actionDefinitionData.Execution.ExecutionType == ActionExecutionType.Composite && !string.IsNullOrEmpty(Environment.GetEnvironmentVariable("TESTING_COMPOSITE_ACTIONS_ALPHA")))
{
Trace.Info($"Action composite: {(actionDefinitionData.Execution as CompositeActionExecutionData).Steps}, no more preparation.");
return null;
}
else
{
throw new NotSupportedException(actionDefinitionData.Execution.ExecutionType.ToString());
@@ -739,6 +1074,64 @@ namespace GitHub.Runner.Worker
throw new InvalidOperationException($"Can't find 'action.yml', 'action.yaml' or 'Dockerfile' under '{fullPath}'. Did you forget to run actions/checkout before running your local action?");
}
}
private static string GetDownloadInfoLookupKey(Pipelines.ActionStep action)
{
if (action.Reference.Type != Pipelines.ActionSourceType.Repository)
{
return null;
}
var repositoryReference = action.Reference as Pipelines.RepositoryPathReference;
ArgUtil.NotNull(repositoryReference, nameof(repositoryReference));
if (string.Equals(repositoryReference.RepositoryType, Pipelines.PipelineConstants.SelfAlias, StringComparison.OrdinalIgnoreCase))
{
return null;
}
if (!string.Equals(repositoryReference.RepositoryType, Pipelines.RepositoryTypes.GitHub, StringComparison.OrdinalIgnoreCase))
{
throw new NotSupportedException(repositoryReference.RepositoryType);
}
ArgUtil.NotNullOrEmpty(repositoryReference.Name, nameof(repositoryReference.Name));
ArgUtil.NotNullOrEmpty(repositoryReference.Ref, nameof(repositoryReference.Ref));
return $"{repositoryReference.Name}@{repositoryReference.Ref}";
}
private static string GetDownloadInfoLookupKey(WebApi.ActionDownloadInfo info)
{
ArgUtil.NotNullOrEmpty(info.NameWithOwner, nameof(info.NameWithOwner));
ArgUtil.NotNullOrEmpty(info.Ref, nameof(info.Ref));
return $"{info.NameWithOwner}@{info.Ref}";
}
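The lookup key format above is simply "<owner>/<name>@<ref>", which is what lets repeated references to the same action and ref collapse into a single download-info entry. For example:

using System;

class LookupKeySketch
{
    // The download-info dictionary is keyed by "<owner>/<name>@<ref>".
    static string GetDownloadInfoLookupKey(string nameWithOwner, string @ref)
        => $"{nameWithOwner}@{@ref}";

    static void Main()
    {
        Console.WriteLine(GetDownloadInfoLookupKey("actions/checkout", "v2"));     // actions/checkout@v2
        Console.WriteLine(GetDownloadInfoLookupKey("actions/setup-node", "main")); // actions/setup-node@main
    }
}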
private AuthenticationHeaderValue CreateAuthHeader(string token)
{
if (string.IsNullOrEmpty(token))
{
return null;
}
var base64EncodingToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"x-access-token:{token}"));
HostContext.SecretMasker.AddValue(base64EncodingToken);
return new AuthenticationHeaderValue("Basic", base64EncodingToken);
}
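CreateAuthHeader above is plain HTTP basic authentication with "x-access-token" as the user name and the token as the password, base64-encoded. A standalone sketch using only standard .NET types (the token below is a placeholder, not a real credential):

using System;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Text;

class AuthHeaderSketch
{
    // Encodes "x-access-token:<token>" as a basic-auth header.
    static AuthenticationHeaderValue CreateAuthHeader(string token)
    {
        if (string.IsNullOrEmpty(token))
        {
            return null;
        }
        var base64EncodingToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"x-access-token:{token}"));
        return new AuthenticationHeaderValue("Basic", base64EncodingToken);
    }

    static void Main()
    {
        using var client = new HttpClient();
        // Placeholder token, not a real credential.
        client.DefaultRequestHeaders.Authorization = CreateAuthHeader("example-token");
        Console.WriteLine(client.DefaultRequestHeaders.Authorization);
    }
}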
// todo: Remove when feature flag DistributedTask.NewActionMetadata is removed
private class ActionDownloadDetails
{
public string ArchiveLink { get; }
public Action<IExecutionContext, HttpClient> ConfigureAuthorization { get; }
public ActionDownloadDetails(string archiveLink, Action<IExecutionContext, HttpClient> configureAuthorization)
{
ArchiveLink = archiveLink;
ConfigureAuthorization = configureAuthorization;
}
}
}
public sealed class Definition
@@ -766,13 +1159,15 @@ namespace GitHub.Runner.Worker
NodeJS,
Plugin,
Script,
Composite,
}
public sealed class ContainerActionExecutionData : ActionExecutionData
{
public override ActionExecutionType ExecutionType => ActionExecutionType.Container;
public override bool HasCleanup => !string.IsNullOrEmpty(Cleanup);
public override bool HasPre => !string.IsNullOrEmpty(Pre);
public override bool HasPost => !string.IsNullOrEmpty(Post);
public string Image { get; set; }
@@ -782,51 +1177,74 @@ namespace GitHub.Runner.Worker
public MappingToken Environment { get; set; }
public string Cleanup { get; set; }
public string Pre { get; set; }
public string Post { get; set; }
}
public sealed class NodeJSActionExecutionData : ActionExecutionData
{
public override ActionExecutionType ExecutionType => ActionExecutionType.NodeJS;
public override bool HasCleanup => !string.IsNullOrEmpty(Cleanup);
public override bool HasPre => !string.IsNullOrEmpty(Pre);
public override bool HasPost => !string.IsNullOrEmpty(Post);
public string Script { get; set; }
public string Cleanup { get; set; }
public string Pre { get; set; }
public string Post { get; set; }
}
public sealed class PluginActionExecutionData : ActionExecutionData
{
public override ActionExecutionType ExecutionType => ActionExecutionType.Plugin;
public override bool HasCleanup => !string.IsNullOrEmpty(Cleanup);
public override bool HasPre => false;
public override bool HasPost => !string.IsNullOrEmpty(Post);
public string Plugin { get; set; }
public string Cleanup { get; set; }
public string Post { get; set; }
}
public sealed class ScriptActionExecutionData : ActionExecutionData
{
public override ActionExecutionType ExecutionType => ActionExecutionType.Script;
public override bool HasCleanup => false;
public override bool HasPre => false;
public override bool HasPost => false;
}
public sealed class CompositeActionExecutionData : ActionExecutionData
{
public override ActionExecutionType ExecutionType => ActionExecutionType.Composite;
public override bool HasPre => false;
public override bool HasPost => false;
public List<Pipelines.ActionStep> Steps { get; set; }
}
public abstract class ActionExecutionData
{
private string _initCondition = $"{Constants.Expressions.Always}()";
private string _cleanupCondition = $"{Constants.Expressions.Always}()";
public abstract ActionExecutionType ExecutionType { get; }
public abstract bool HasCleanup { get; }
public abstract bool HasPre { get; }
public abstract bool HasPost { get; }
public string CleanupCondition
{
get { return _cleanupCondition; }
set { _cleanupCondition = value; }
}
public string InitCondition
{
get { return _initCondition; }
set { _initCondition = value; }
}
}
public class ContainerSetupInfo
@@ -863,4 +1281,3 @@ namespace GitHub.Runner.Worker
public string ActionRepository { get; set; }
}
}

View File

@@ -14,6 +14,7 @@ using YamlDotNet.Core;
using YamlDotNet.Core.Events;
using System.Globalization;
using System.Linq;
using Pipelines = GitHub.DistributedTask.Pipelines;
namespace GitHub.Runner.Worker
{
@@ -22,18 +23,16 @@ namespace GitHub.Runner.Worker
{
ActionDefinitionData Load(IExecutionContext executionContext, string manifestFile);
List<string> EvaluateContainerArguments(IExecutionContext executionContext, SequenceToken token, IDictionary<string, PipelineContextData> contextData);
List<string> EvaluateContainerArguments(IExecutionContext executionContext, SequenceToken token, IDictionary<string, PipelineContextData> extraExpressionValues);
Dictionary<string, string> EvaluateContainerEnvironment(IExecutionContext executionContext, MappingToken token, IDictionary<string, PipelineContextData> contextData);
Dictionary<string, string> EvaluateContainerEnvironment(IExecutionContext executionContext, MappingToken token, IDictionary<string, PipelineContextData> extraExpressionValues);
string EvaluateDefaultInput(IExecutionContext executionContext, string inputName, TemplateToken token, IDictionary<string, PipelineContextData> contextData);
string EvaluateDefaultInput(IExecutionContext executionContext, string inputName, TemplateToken token);
}
public sealed class ActionManifestManager : RunnerService, IActionManifestManager
{
private TemplateSchema _actionManifestSchema;
private IReadOnlyList<String> _fileTable;
public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);
@@ -54,22 +53,39 @@ namespace GitHub.Runner.Worker
public ActionDefinitionData Load(IExecutionContext executionContext, string manifestFile)
{
var context = CreateContext(executionContext, null);
var templateContext = CreateContext(executionContext);
ActionDefinitionData actionDefinition = new ActionDefinitionData();
// Clean up file name real quick
// Instead of using Regex which can be computationally expensive,
// we can just remove the # of characters from the fileName according to the length of the basePath
string basePath = HostContext.GetDirectory(WellKnownDirectory.Actions);
string fileRelativePath = manifestFile;
if (manifestFile.Contains(basePath))
{
fileRelativePath = manifestFile.Remove(0, basePath.Length + 1);
}
try
{
var token = default(TemplateToken);
// Get the file ID
var fileId = context.GetFileId(manifestFile);
_fileTable = context.GetFileTable();
var fileId = templateContext.GetFileId(fileRelativePath);
// Add this file to the FileTable in executionContext if it hasn't been added already
// we use > since fileID is 1 indexed
if (fileId > executionContext.FileTable.Count)
{
executionContext.FileTable.Add(fileRelativePath);
}
// Read the file
var fileContent = File.ReadAllText(manifestFile);
using (var stringReader = new StringReader(fileContent))
{
var yamlObjectReader = new YamlObjectReader(null, stringReader);
token = TemplateReader.Read(context, "action-root", yamlObjectReader, fileId, out _);
var yamlObjectReader = new YamlObjectReader(fileId, stringReader);
token = TemplateReader.Read(templateContext, "action-root", yamlObjectReader, fileId, out _);
}
var actionMapping = token.AssertMapping("action manifest root");
@@ -88,11 +104,11 @@ namespace GitHub.Runner.Worker
break;
case "inputs":
ConvertInputs(context, actionPair.Value, actionDefinition);
ConvertInputs(templateContext, actionPair.Value, actionDefinition);
break;
case "runs":
actionDefinition.Execution = ConvertRuns(context, actionPair.Value);
actionDefinition.Execution = ConvertRuns(executionContext, templateContext, actionPair.Value);
break;
default:
Trace.Info($"Ignore action property {propertyName}.");
@@ -103,24 +119,24 @@ namespace GitHub.Runner.Worker
catch (Exception ex)
{
Trace.Error(ex);
context.Errors.Add(ex);
templateContext.Errors.Add(ex);
}
if (context.Errors.Count > 0)
if (templateContext.Errors.Count > 0)
{
foreach (var error in context.Errors)
foreach (var error in templateContext.Errors)
{
Trace.Error($"Action.yml load error: {error.Message}");
executionContext.Error(error.Message);
}
throw new ArgumentException($"Fail to load {manifestFile}");
throw new ArgumentException($"Fail to load {fileRelativePath}");
}
if (actionDefinition.Execution == null)
{
executionContext.Debug($"Loaded action.yml file: {StringUtil.ConvertToJson(actionDefinition)}");
throw new ArgumentException($"Top level 'runs:' section is required for {manifestFile}");
throw new ArgumentException($"Top level 'runs:' section is required for {fileRelativePath}");
}
else
{
@@ -133,13 +149,13 @@ namespace GitHub.Runner.Worker
public List<string> EvaluateContainerArguments(
IExecutionContext executionContext,
SequenceToken token,
IDictionary<string, PipelineContextData> contextData)
IDictionary<string, PipelineContextData> extraExpressionValues)
{
var result = new List<string>();
if (token != null)
{
var context = CreateContext(executionContext, contextData);
var context = CreateContext(executionContext, extraExpressionValues);
try
{
var evaluateResult = TemplateEvaluator.Evaluate(context, "container-runs-args", token, 0, null, omitHeader: true);
@@ -172,13 +188,13 @@ namespace GitHub.Runner.Worker
public Dictionary<string, string> EvaluateContainerEnvironment(
IExecutionContext executionContext,
MappingToken token,
IDictionary<string, PipelineContextData> contextData)
IDictionary<string, PipelineContextData> extraExpressionValues)
{
var result = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
if (token != null)
{
var context = CreateContext(executionContext, contextData);
var context = CreateContext(executionContext, extraExpressionValues);
try
{
var evaluateResult = TemplateEvaluator.Evaluate(context, "container-runs-env", token, 0, null, omitHeader: true);
@@ -216,13 +232,12 @@ namespace GitHub.Runner.Worker
public string EvaluateDefaultInput(
IExecutionContext executionContext,
string inputName,
TemplateToken token,
IDictionary<string, PipelineContextData> contextData)
TemplateToken token)
{
string result = "";
if (token != null)
{
var context = CreateContext(executionContext, contextData);
var context = CreateContext(executionContext);
try
{
var evaluateResult = TemplateEvaluator.Evaluate(context, "input-default-context", token, 0, null, omitHeader: true);
@@ -247,7 +262,7 @@ namespace GitHub.Runner.Worker
private TemplateContext CreateContext(
IExecutionContext executionContext,
IDictionary<string, PipelineContextData> contextData)
IDictionary<string, PipelineContextData> extraExpressionValues = null)
{
var result = new TemplateContext
{
@@ -261,27 +276,38 @@ namespace GitHub.Runner.Worker
TraceWriter = executionContext.ToTemplateTraceWriter(),
};
if (contextData?.Count > 0)
// Expression values from execution context
foreach (var pair in executionContext.ExpressionValues)
{
foreach (var pair in contextData)
result.ExpressionValues[pair.Key] = pair.Value;
}
// Extra expression values
if (extraExpressionValues?.Count > 0)
{
foreach (var pair in extraExpressionValues)
{
result.ExpressionValues[pair.Key] = pair.Value;
}
}
// Add the file table
if (_fileTable?.Count > 0)
// Expression functions from execution context
foreach (var item in executionContext.ExpressionFunctions)
{
for (var i = 0 ; i < _fileTable.Count ; i++)
{
result.GetFileId(_fileTable[i]);
}
result.ExpressionFunctions.Add(item);
}
// Add the file table from the Execution Context
for (var i = 0; i < executionContext.FileTable.Count; i++)
{
result.GetFileId(executionContext.FileTable[i]);
}
return result;
}
private ActionExecutionData ConvertRuns(
IExecutionContext executionContext,
TemplateContext context,
TemplateToken inputsToken)
{
@@ -293,9 +319,14 @@ namespace GitHub.Runner.Worker
var envToken = default(MappingToken);
var mainToken = default(StringToken);
var pluginToken = default(StringToken);
var preToken = default(StringToken);
var preEntrypointToken = default(StringToken);
var preIfToken = default(StringToken);
var postToken = default(StringToken);
var postEntrypointToken = default(StringToken);
var postIfToken = default(StringToken);
var stepsLoaded = default(List<Pipelines.ActionStep>);
foreach (var run in runsMapping)
{
var runsKey = run.Key.AssertString("runs key").Value;
@@ -331,6 +362,24 @@ namespace GitHub.Runner.Worker
case "post-if":
postIfToken = run.Value.AssertString("post-if");
break;
case "pre":
preToken = run.Value.AssertString("pre");
break;
case "pre-entrypoint":
preEntrypointToken = run.Value.AssertString("pre-entrypoint");
break;
case "pre-if":
preIfToken = run.Value.AssertString("pre-if");
break;
case "steps":
if (!string.IsNullOrEmpty(Environment.GetEnvironmentVariable("TESTING_COMPOSITE_ACTIONS_ALPHA")))
{
var steps = run.Value.AssertSequence("steps");
var evaluator = executionContext.ToPipelineTemplateEvaluator();
stepsLoaded = evaluator.LoadCompositeSteps(steps);
break;
}
throw new Exception("You aren't supposed to be using Composite Actions yet!");
default:
Trace.Info($"Ignore run property {runsKey}.");
break;
@@ -353,7 +402,9 @@ namespace GitHub.Runner.Worker
Arguments = argsToken,
EntryPoint = entrypointToken?.Value,
Environment = envToken,
Cleanup = postEntrypointToken?.Value,
Pre = preEntrypointToken?.Value,
InitCondition = preIfToken?.Value ?? "always()",
Post = postEntrypointToken?.Value,
CleanupCondition = postIfToken?.Value ?? "always()"
};
}
@@ -362,18 +413,35 @@ namespace GitHub.Runner.Worker
{
if (string.IsNullOrEmpty(mainToken?.Value))
{
throw new ArgumentNullException($"Entry javascript fils is not provided.");
throw new ArgumentNullException($"Entry javascript file is not provided.");
}
else
{
return new NodeJSActionExecutionData()
{
Script = mainToken.Value,
Cleanup = postToken?.Value,
Pre = preToken?.Value,
InitCondition = preIfToken?.Value ?? "always()",
Post = postToken?.Value,
CleanupCondition = postIfToken?.Value ?? "always()"
};
}
}
else if (string.Equals(usingToken.Value, "composite", StringComparison.OrdinalIgnoreCase) && !string.IsNullOrEmpty(Environment.GetEnvironmentVariable("TESTING_COMPOSITE_ACTIONS_ALPHA")))
{
if (stepsLoaded == null)
{
// TODO: Add a more helpful error message, including the file name, etc., to show the user that the failure comes from their yaml file
throw new ArgumentNullException($"No steps provided.");
}
else
{
return new CompositeActionExecutionData()
{
Steps = stepsLoaded,
};
}
}
else
{
throw new ArgumentOutOfRangeException($"'using: {usingToken.Value}' is not supported, use 'docker' or 'node12' instead.");
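The FileTable bookkeeping in `Load` above relies on the template context handing out 1-indexed file ids, so a brand-new file always receives an id of `FileTable.Count + 1`. A minimal, self-contained sketch of that check follows; the `GetFileId` helper and the action path are stand-ins for illustration, not the real `TemplateContext` API.
using System;
using System.Collections.Generic;
class FileTableSketch
{
    // Stand-in for TemplateContext.GetFileId (assumption for illustration):
    // returns the 1-based position of the file, appending it when new.
    static int GetFileId(List<string> table, string file)
    {
        var index = table.IndexOf(file);
        if (index < 0)
        {
            table.Add(file);
            index = table.Count - 1;
        }
        return index + 1; // 1-indexed
    }
    static void Main()
    {
        var templateFileTable = new List<string>();   // file table inside the template context
        var executionFileTable = new List<string>();  // stands in for ExecutionContext.FileTable
        var fileId = GetFileId(templateFileTable, "my-org/my-action/action.yml");
        // fileId is 1 and executionFileTable.Count is 0, so `fileId > Count`
        // means the manifest has not been recorded yet.
        if (fileId > executionFileTable.Count)
        {
            executionFileTable.Add("my-org/my-action/action.yml");
        }
        Console.WriteLine(string.Join(", ", executionFileTable)); // prints: my-org/my-action/action.yml
    }
}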

View File

@@ -0,0 +1,33 @@
using System;
using System.Runtime.Serialization;
namespace GitHub.Runner.Worker
{
public class ActionNotFoundException : Exception
{
public ActionNotFoundException(Uri actionUri)
: base(FormatMessage(actionUri))
{
}
public ActionNotFoundException(string message)
: base(message)
{
}
public ActionNotFoundException(string message, System.Exception inner)
: base(message, inner)
{
}
protected ActionNotFoundException(SerializationInfo info, StreamingContext context)
: base(info, context)
{
}
private static string FormatMessage(Uri actionUri)
{
return $"An action could not be found at the URI '{actionUri}'";
}
}
}

View File

@@ -18,6 +18,7 @@ namespace GitHub.Runner.Worker
{
public enum ActionRunStage
{
Pre,
Main,
Post,
}
@@ -26,7 +27,7 @@ namespace GitHub.Runner.Worker
public interface IActionRunner : IStep, IRunnerService
{
ActionRunStage Stage { get; set; }
Boolean TryEvaluateDisplayName(DictionaryContextData contextData, IExecutionContext context);
bool TryEvaluateDisplayName(DictionaryContextData contextData, IExecutionContext context);
Pipelines.ActionStep Action { get; set; }
}
@@ -81,20 +82,25 @@ namespace GitHub.Runner.Worker
ActionExecutionData handlerData = definition.Data?.Execution;
ArgUtil.NotNull(handlerData, nameof(handlerData));
if (handlerData.HasPre &&
Action.Reference is Pipelines.RepositoryPathReference repoAction &&
string.Equals(repoAction.RepositoryType, Pipelines.PipelineConstants.SelfAlias, StringComparison.OrdinalIgnoreCase))
{
ExecutionContext.Warning($"`pre` execution is not supported for local action from '{repoAction.Path}'");
}
// The action has post cleanup defined.
// We need to create a timeline record for it and add it to the step list that StepRunner is using.
if (handlerData.HasCleanup && Stage == ActionRunStage.Main)
if (handlerData.HasPost && (Stage == ActionRunStage.Pre || Stage == ActionRunStage.Main))
{
string postDisplayName = null;
if (this.DisplayName.StartsWith(PipelineTemplateConstants.RunDisplayPrefix))
string postDisplayName = $"Post {this.DisplayName}";
if (Stage == ActionRunStage.Pre &&
this.DisplayName.StartsWith("Pre ", StringComparison.OrdinalIgnoreCase))
{
postDisplayName = $"Post {this.DisplayName.Substring(PipelineTemplateConstants.RunDisplayPrefix.Length)}";
// Trim the leading `Pre ` from the display name.
// Otherwise, we will get `Post Pre xxx` as DisplayName for the Post step.
postDisplayName = $"Post {this.DisplayName.Substring("Pre ".Length)}";
}
else
{
postDisplayName = $"Post {this.DisplayName}";
}
var repositoryReference = Action.Reference as RepositoryPathReference;
var pathString = string.IsNullOrEmpty(repositoryReference.Path) ? string.Empty : $"/{repositoryReference.Path}";
var repoString = string.IsNullOrEmpty(repositoryReference.Ref) ? $"{repositoryReference.Name}{pathString}" :
@@ -108,7 +114,7 @@ namespace GitHub.Runner.Worker
actionRunner.Condition = handlerData.CleanupCondition;
actionRunner.DisplayName = postDisplayName;
ExecutionContext.RegisterPostJobStep($"{actionRunner.Action.Name}_post", actionRunner);
ExecutionContext.RegisterPostJobStep(actionRunner);
}
IStepHost stepHost = HostContext.CreateService<IDefaultStepHost>();
@@ -142,10 +148,12 @@ namespace GitHub.Runner.Worker
// Load the inputs.
ExecutionContext.Debug("Loading inputs");
var templateEvaluator = ExecutionContext.ToPipelineTemplateEvaluator();
var inputs = templateEvaluator.EvaluateStepInputs(Action.Inputs, ExecutionContext.ExpressionValues);
var inputs = templateEvaluator.EvaluateStepInputs(Action.Inputs, ExecutionContext.ExpressionValues, ExecutionContext.ExpressionFunctions);
var userInputs = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
foreach (KeyValuePair<string, string> input in inputs)
{
userInputs.Add(input.Key);
string message = "";
if (definition.Data?.Deprecated?.TryGetValue(input.Key, out message) == true)
{
@@ -153,26 +161,47 @@ namespace GitHub.Runner.Worker
}
}
var validInputs = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
if (handlerData.ExecutionType == ActionExecutionType.Container)
{
// Container actions always accept 'entryPoint' and 'args' as inputs.
// https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions#jobsjob_idstepswithargs
validInputs.Add("entryPoint");
validInputs.Add("args");
}
// Merge the default inputs from the definition
if (definition.Data?.Inputs != null)
{
var manifestManager = HostContext.GetService<IActionManifestManager>();
foreach (var input in (definition.Data?.Inputs))
foreach (var input in definition.Data.Inputs)
{
string key = input.Key.AssertString("action input name").Value;
validInputs.Add(key);
if (!inputs.ContainsKey(key))
{
var evaluateContext = new Dictionary<string, PipelineContextData>(StringComparer.OrdinalIgnoreCase);
foreach (var data in ExecutionContext.ExpressionValues)
{
evaluateContext[data.Key] = data.Value;
}
inputs[key] = manifestManager.EvaluateDefaultInput(ExecutionContext, key, input.Value, evaluateContext);
inputs[key] = manifestManager.EvaluateDefaultInput(ExecutionContext, key, input.Value);
}
}
}
// Validate inputs only for actions with action.yml
if (Action.Reference.Type == Pipelines.ActionSourceType.Repository)
{
var unexpectedInputs = new List<string>();
foreach (var input in userInputs)
{
if (!validInputs.Contains(input))
{
unexpectedInputs.Add(input);
}
}
if (unexpectedInputs.Count > 0)
{
ExecutionContext.Warning($"Unexpected input(s) '{string.Join("', '", unexpectedInputs)}', valid inputs are ['{string.Join("', '", validInputs)}']");
}
}
// Load the action environment.
ExecutionContext.Debug("Loading env");
var environment = new Dictionary<String, String>(VarUtil.EnvironmentVariableKeyComparer);
@@ -293,10 +322,14 @@ namespace GitHub.Runner.Worker
return displayName;
}
// Try evaluating fully
var templateEvaluator = context.ToPipelineTemplateEvaluator();
try
{
didFullyEvaluate = templateEvaluator.TryEvaluateStepDisplayName(tokenToParse, contextData, out displayName);
if (tokenToParse.CheckHasRequiredContext(contextData, context.ExpressionFunctions))
{
var templateEvaluator = context.ToPipelineTemplateEvaluator();
displayName = templateEvaluator.EvaluateStepDisplayName(tokenToParse, contextData, context.ExpressionFunctions);
didFullyEvaluate = true;
}
}
catch (TemplateValidationException e)
{

View File

@@ -61,6 +61,7 @@ namespace GitHub.Runner.Worker.Container
foreach (var volume in container.Volumes)
{
UserMountVolumes[volume] = volume;
MountVolumes.Add(new MountVolume(volume));
}
}

View File

@@ -17,7 +17,7 @@ namespace GitHub.Runner.Worker.Container
string DockerInstanceLabel { get; }
Task<DockerVersion> DockerVersion(IExecutionContext context);
Task<int> DockerPull(IExecutionContext context, string image);
Task<int> DockerBuild(IExecutionContext context, string workingDirectory, string dockerFile, string tag);
Task<int> DockerBuild(IExecutionContext context, string workingDirectory, string dockerFile, string dockerContext, string tag);
Task<string> DockerCreate(IExecutionContext context, ContainerInfo container);
Task<int> DockerRun(IExecutionContext context, ContainerInfo container, EventHandler<ProcessDataReceivedEventArgs> stdoutDataReceived, EventHandler<ProcessDataReceivedEventArgs> stderrDataReceived);
Task<int> DockerStart(IExecutionContext context, string containerId);
@@ -87,9 +87,9 @@ namespace GitHub.Runner.Worker.Container
return await ExecuteDockerCommandAsync(context, "pull", image, context.CancellationToken);
}
public async Task<int> DockerBuild(IExecutionContext context, string workingDirectory, string dockerFile, string tag)
public async Task<int> DockerBuild(IExecutionContext context, string workingDirectory, string dockerFile, string dockerContext, string tag)
{
return await ExecuteDockerCommandAsync(context, "build", $"-t {tag} \"{dockerFile}\"", workingDirectory, context.CancellationToken);
return await ExecuteDockerCommandAsync(context, "build", $"-t {tag} -f \"{dockerFile}\" \"{dockerContext}\"", workingDirectory, context.CancellationToken);
}
public async Task<string> DockerCreate(IExecutionContext context, ContainerInfo container)
@@ -130,6 +130,13 @@ namespace GitHub.Runner.Worker.Container
// Watermark for GitHub Action environment
dockerOptions.Add("-e GITHUB_ACTIONS=true");
// Set CI=true when nothing else has already set it.
// CI=true is commonly set by most CI providers.
if (!container.ContainerEnvironmentVariables.ContainsKey("CI"))
{
dockerOptions.Add("-e CI=true");
}
foreach (var volume in container.MountVolumes)
{
// replace `"` with `\"` and add `"{0}"` to all path.
@@ -189,6 +196,13 @@ namespace GitHub.Runner.Worker.Container
// Watermark for GitHub Action environment
dockerOptions.Add("-e GITHUB_ACTIONS=true");
// Set CI=true when nothing else has already set it.
// CI=true is commonly set by most CI providers.
if (!container.ContainerEnvironmentVariables.ContainsKey("CI"))
{
dockerOptions.Add("-e CI=true");
}
if (!string.IsNullOrEmpty(container.ContainerEntryPoint))
{
dockerOptions.Add($"--entrypoint \"{container.ContainerEntryPoint}\"");

View File

@@ -47,9 +47,9 @@ namespace GitHub.Runner.Worker
condition: $"{PipelineTemplateConstants.Always}()",
displayName: "Stop containers",
data: data);
executionContext.Debug($"Register post job cleanup for stopping/deleting containers.");
executionContext.RegisterPostJobStep(nameof(StopContainersAsync), postJobStep);
executionContext.RegisterPostJobStep(postJobStep);
// Check whether we are inside a container.
// Our container feature requires mapping the working directory from the host into the container.
@@ -180,6 +180,11 @@ namespace GitHub.Runner.Worker
foreach (var volume in container.UserMountVolumes)
{
Trace.Info($"User provided volume: {volume.Value}");
var mount = new MountVolume(volume.Value);
if (string.Equals(mount.SourceVolumePath, "/", StringComparison.OrdinalIgnoreCase))
{
executionContext.Warning($"Volume mount {volume.Value} is going to mount '/' into the container which may cause file ownership change in the entire file system and cause Actions Runner to lose permission to access the disk.");
}
}
// Pull down docker image with retry up to 3 times

View File

@@ -1,14 +1,15 @@
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using System.Web;
using GitHub.Runner.Worker.Container;
using GitHub.Services.WebApi;
using GitHub.DistributedTask.Expressions2;
using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.Pipelines.ObjectTemplating;
@@ -16,12 +17,11 @@ using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using GitHub.Runner.Worker.Container;
using GitHub.Services.WebApi;
using Newtonsoft.Json;
using System.Text;
using System.Collections;
using ObjectTemplating = GitHub.DistributedTask.ObjectTemplating;
using Pipelines = GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.Expressions2;
namespace GitHub.Runner.Worker
{
@@ -39,27 +39,31 @@ namespace GitHub.Runner.Worker
string ContextName { get; }
Task ForceCompleted { get; }
TaskResult? Result { get; set; }
TaskResult? Outcome { get; set; }
string ResultCode { get; set; }
TaskResult? CommandResult { get; set; }
CancellationToken CancellationToken { get; }
List<ServiceEndpoint> Endpoints { get; }
TaskOrchestrationPlanReference Plan { get; }
PlanFeatures Features { get; }
Variables Variables { get; }
Dictionary<string, string> IntraActionState { get; }
HashSet<string> OutputVariables { get; }
IDictionary<String, IDictionary<String, String>> JobDefaults { get; }
Dictionary<string, VariableValue> JobOutputs { get; }
IDictionary<String, String> EnvironmentVariables { get; }
IDictionary<String, ContextScope> Scopes { get; }
IList<String> FileTable { get; }
StepsContext StepsContext { get; }
DictionaryContextData ExpressionValues { get; }
IList<IFunctionInfo> ExpressionFunctions { get; }
List<string> PrependPath { get; }
ContainerInfo Container { get; set; }
List<ContainerInfo> ServiceContainers { get; }
JobContext JobContext { get; }
// Only job level ExecutionContext has JobSteps
Queue<IStep> JobSteps { get; }
List<IStep> JobSteps { get; }
// Only job level ExecutionContext has PostJobSteps
Stack<IStep> PostJobSteps { get; }
@@ -100,17 +104,18 @@ namespace GitHub.Runner.Worker
// others
void ForceTaskComplete();
void RegisterPostJobStep(string refName, IStep step);
void RegisterPostJobStep(IStep step);
void RegisterNestedStep(IStep step, DictionaryContextData inputsData, int location, Dictionary<string, string> envData);
}
public sealed class ExecutionContext : RunnerService, IExecutionContext
{
private const int _maxIssueCount = 10;
private const int _throttlingDelayReportThreshold = 10 * 1000; // Don't report throttling with less than 10 seconds delay
private readonly TimelineRecord _record = new TimelineRecord();
private readonly Dictionary<Guid, TimelineRecord> _detailRecords = new Dictionary<Guid, TimelineRecord>();
private readonly object _loggerLock = new object();
private readonly HashSet<string> _outputvariables = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
private readonly object _matchersLock = new object();
private event OnMatcherChanged _onMatcherChanged;
@@ -138,27 +143,32 @@ namespace GitHub.Runner.Worker
public Task ForceCompleted => _forceCompleted.Task;
public CancellationToken CancellationToken => _cancellationTokenSource.Token;
public List<ServiceEndpoint> Endpoints { get; private set; }
public TaskOrchestrationPlanReference Plan { get; private set; }
public Variables Variables { get; private set; }
public Dictionary<string, string> IntraActionState { get; private set; }
public HashSet<string> OutputVariables => _outputvariables;
public IDictionary<String, IDictionary<String, String>> JobDefaults { get; private set; }
public Dictionary<string, VariableValue> JobOutputs { get; private set; }
public IDictionary<String, String> EnvironmentVariables { get; private set; }
public IDictionary<String, ContextScope> Scopes { get; private set; }
public IList<String> FileTable { get; private set; }
public StepsContext StepsContext { get; private set; }
public DictionaryContextData ExpressionValues { get; } = new DictionaryContextData();
public IList<IFunctionInfo> ExpressionFunctions { get; } = new List<IFunctionInfo>();
public bool WriteDebug { get; private set; }
public List<string> PrependPath { get; private set; }
public ContainerInfo Container { get; set; }
public List<ContainerInfo> ServiceContainers { get; private set; }
// Only job level ExecutionContext has JobSteps
public Queue<IStep> JobSteps { get; private set; }
public List<IStep> JobSteps { get; private set; }
// Only job level ExecutionContext has PostJobSteps
public Stack<IStep> PostJobSteps { get; private set; }
public bool EchoOnActionCommand { get; set; }
// Only job level ExecutionContext has StepsWithPostRegistered
public HashSet<Guid> StepsWithPostRegistered { get; private set; }
public bool EchoOnActionCommand { get; set; }
public TaskResult? Result
{
@@ -172,6 +182,8 @@ namespace GitHub.Runner.Worker
}
}
public TaskResult? Outcome { get; set; }
public TaskResult? CommandResult { get; set; }
private string ContextType => _record.RecordType;
@@ -242,12 +254,47 @@ namespace GitHub.Runner.Worker
});
}
public void RegisterPostJobStep(string refName, IStep step)
public void RegisterPostJobStep(IStep step)
{
step.ExecutionContext = Root.CreatePostChild(step.DisplayName, refName, IntraActionState);
if (step is IActionRunner actionRunner && !Root.StepsWithPostRegistered.Add(actionRunner.Action.Id))
{
Trace.Info($"'post' of '{actionRunner.DisplayName}' already push to post step stack.");
return;
}
step.ExecutionContext = Root.CreatePostChild(step.DisplayName, IntraActionState);
Root.PostJobSteps.Push(step);
}
/// <summary>
/// Helper function used in CompositeActionHandler::RunAsync to
/// add a child node (i.e. a step) of the current job to Root.JobSteps at the given location.
/// </summary>
public void RegisterNestedStep(IStep step, DictionaryContextData inputsData, int location, Dictionary<string, string> envData)
{
// TODO: For UI purposes, look at figuring out how to condense steps in one node => maybe use the same previous GUID
var newGuid = Guid.NewGuid();
step.ExecutionContext = Root.CreateChild(newGuid, step.DisplayName, newGuid.ToString("N"), null, null);
step.ExecutionContext.ExpressionValues["inputs"] = inputsData;
// Add the composite action environment variables to each step.
// If the key already exists, we override it since the composite action env variables have higher precedence.
// Note that for each composite action step, its environment variables will be set in the StepRunner automatically.
// step.ExecutionContext.SetEnvironmentVariables(envData);
#if OS_WINDOWS
var envContext = new DictionaryContextData();
#else
var envContext = new CaseSensitiveDictionaryContextData();
#endif
foreach (var pair in envData)
{
envContext[pair.Key] = new StringContextData(pair.Value ?? string.Empty);
}
step.ExecutionContext.ExpressionValues["env"] = envContext;
Root.JobSteps.Insert(location, step);
}
public IExecutionContext CreateChild(Guid recordId, string displayName, string refName, string scopeName, string contextName, Dictionary<string, string> intraActionState = null, int? recordOrder = null)
{
Trace.Entering();
@@ -259,6 +306,7 @@ namespace GitHub.Runner.Worker
child.Features = Features;
child.Variables = Variables;
child.Endpoints = Endpoints;
child.Plan = Plan;
if (intraActionState == null)
{
child.IntraActionState = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
@@ -268,6 +316,7 @@ namespace GitHub.Runner.Worker
child.IntraActionState = intraActionState;
}
child.EnvironmentVariables = EnvironmentVariables;
child.JobDefaults = JobDefaults;
child.Scopes = Scopes;
child.FileTable = FileTable;
child.StepsContext = StepsContext;
@@ -275,6 +324,10 @@ namespace GitHub.Runner.Worker
{
child.ExpressionValues[pair.Key] = pair.Value;
}
foreach (var item in ExpressionFunctions)
{
child.ExpressionFunctions.Add(item);
}
child._cancellationTokenSource = new CancellationTokenSource();
child.WriteDebug = WriteDebug;
child._parentExecutionContext = this;
@@ -315,7 +368,7 @@ namespace GitHub.Runner.Worker
}
// report total delay caused by server throttling.
if (_totalThrottlingDelayInMilliseconds > 0)
if (_totalThrottlingDelayInMilliseconds > _throttlingDelayReportThreshold)
{
this.Warning($"The job has experienced {TimeSpan.FromMilliseconds(_totalThrottlingDelayInMilliseconds).TotalSeconds} seconds total delay caused by server throttling.");
}
@@ -343,10 +396,20 @@ namespace GitHub.Runner.Worker
}
}
_cancellationTokenSource?.Dispose();
if (Root != this)
{
// Only dispose the TokenSource for step-level ExecutionContext
_cancellationTokenSource?.Dispose();
}
_logger.End();
if (!string.IsNullOrEmpty(ContextName))
{
StepsContext.SetOutcome(ScopeName, ContextName, (Outcome ?? Result ?? TaskResult.Succeeded).ToActionResult());
StepsContext.SetConclusion(ScopeName, ContextName, (Result ?? TaskResult.Succeeded).ToActionResult());
}
return Result.Value;
}
@@ -545,7 +608,8 @@ namespace GitHub.Runner.Worker
_cancellationTokenSource = CancellationTokenSource.CreateLinkedTokenSource(token);
// Features
// Plan
Plan = message.Plan;
Features = PlanUtil.GetFeatures(message.Plan);
// Endpoints
@@ -557,6 +621,12 @@ namespace GitHub.Runner.Worker
// Environment variables shared across all actions
EnvironmentVariables = new Dictionary<string, string>(VarUtil.EnvironmentVariableKeyComparer);
// Job defaults shared across all actions
JobDefaults = new Dictionary<string, IDictionary<string, string>>(StringComparer.OrdinalIgnoreCase);
// Job Outputs
JobOutputs = new Dictionary<string, VariableValue>(StringComparer.OrdinalIgnoreCase);
// Service container info
ServiceContainers = new List<ContainerInfo>();
@@ -576,12 +646,6 @@ namespace GitHub.Runner.Worker
// File table
FileTable = new List<String>(message.FileTable ?? new string[0]);
// Expression functions
if (Variables.GetBoolean("System.HashFilesV2") == true)
{
ExpressionConstants.UpdateFunction<Handlers.HashFiles>("hashFiles", 1, byte.MaxValue);
}
// Expression values
if (message.ContextData?.Count > 0)
{
@@ -599,8 +663,13 @@ namespace GitHub.Runner.Worker
var githubAccessToken = new StringContextData(Variables.Get("system.github.token"));
var base64EncodedToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"x-access-token:{githubAccessToken}"));
HostContext.SecretMasker.AddValue(base64EncodedToken);
var githubJob = Variables.Get("system.github.job");
var githubContext = new GitHubContext();
githubContext["token"] = githubAccessToken;
if (!string.IsNullOrEmpty(githubJob))
{
githubContext["job"] = new StringContextData(githubJob);
}
var githubDictionary = ExpressionValues["github"].AssertDictionary("github");
foreach (var pair in githubDictionary)
{
@@ -620,11 +689,14 @@ namespace GitHub.Runner.Worker
PrependPath = new List<string>();
// JobSteps for job ExecutionContext
JobSteps = new Queue<IStep>();
JobSteps = new List<IStep>();
// PostJobSteps for job ExecutionContext
PostJobSteps = new Stack<IStep>();
// StepsWithPostRegistered for job ExecutionContext
StepsWithPostRegistered = new HashSet<Guid>();
// Job timeline record.
InitializeTimelineRecord(
timelineId: message.Timeline.Id,
@@ -736,7 +808,7 @@ namespace GitHub.Runner.Worker
var owners = config.Matchers.Select(x => $"'{x.Owner}'");
var joinedOwners = string.Join(", ", owners);
// todo: loc
this.Output($"Added matchers: {joinedOwners}. Problem matchers scan action output for known warning or error strings and report these inline.");
this.Debug($"Added matchers: {joinedOwners}. Problem matchers scan action output for known warning or error strings and report these inline.");
}
}
@@ -778,7 +850,7 @@ namespace GitHub.Runner.Worker
owners = removedMatchers.Select(x => $"'{x.Owner}'");
var joinedOwners = string.Join(", ", owners);
// todo: loc
this.Output($"Removed matchers: {joinedOwners}");
this.Debug($"Removed matchers: {joinedOwners}");
}
}
@@ -817,7 +889,8 @@ namespace GitHub.Runner.Worker
{
Interlocked.Add(ref _totalThrottlingDelayInMilliseconds, Convert.ToInt64(data.Delay.TotalMilliseconds));
if (!_throttlingReported)
if (!_throttlingReported &&
_totalThrottlingDelayInMilliseconds > _throttlingDelayReportThreshold)
{
this.Warning(string.Format("The job is currently being throttled by the server. You may experience delays in console line output, job status reporting, and action log uploads."));
@@ -825,7 +898,7 @@ namespace GitHub.Runner.Worker
}
}
private IExecutionContext CreatePostChild(string displayName, string refName, Dictionary<string, string> intraActionState)
private IExecutionContext CreatePostChild(string displayName, Dictionary<string, string> intraActionState)
{
if (!_expandedForPostJob)
{
@@ -834,7 +907,8 @@ namespace GitHub.Runner.Worker
_childTimelineRecordOrder = _childTimelineRecordOrder * 2;
}
return CreateChild(Guid.NewGuid(), displayName, refName, null, null, intraActionState, _childTimelineRecordOrder - Root.PostJobSteps.Count);
var newGuid = Guid.NewGuid();
return CreateChild(newGuid, displayName, newGuid.ToString("N"), null, null, intraActionState, _childTimelineRecordOrder - Root.PostJobSteps.Count);
}
}
@@ -893,11 +967,19 @@ namespace GitHub.Runner.Worker
}
}
public static PipelineTemplateEvaluator ToPipelineTemplateEvaluator(this IExecutionContext context)
public static IEnumerable<KeyValuePair<string, object>> ToExpressionState(this IExecutionContext context)
{
var templateTrace = context.ToTemplateTraceWriter();
var schema = new PipelineTemplateSchemaFactory().CreateSchema();
return new PipelineTemplateEvaluator(templateTrace, schema, context.FileTable);
return new[] { new KeyValuePair<string, object>(nameof(IExecutionContext), context) };
}
public static PipelineTemplateEvaluator ToPipelineTemplateEvaluator(this IExecutionContext context, ObjectTemplating.ITraceWriter traceWriter = null)
{
if (traceWriter == null)
{
traceWriter = context.ToTemplateTraceWriter();
}
var schema = PipelineTemplateSchemaFactory.GetSchema();
return new PipelineTemplateEvaluator(traceWriter, schema, context.FileTable);
}
public static ObjectTemplating.ITraceWriter ToTemplateTraceWriter(this IExecutionContext context)
@@ -912,6 +994,7 @@ namespace GitHub.Runner.Worker
internal TemplateTraceWriter(IExecutionContext executionContext)
{
ArgUtil.NotNull(executionContext, nameof(executionContext));
_executionContext = executionContext;
}

View File

@@ -1,162 +0,0 @@
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using GitHub.DistributedTask.Expressions2;
using GitHub.DistributedTask.Expressions2.Sdk;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using ObjectTemplating = GitHub.DistributedTask.ObjectTemplating;
using PipelineTemplateConstants = GitHub.DistributedTask.Pipelines.ObjectTemplating.PipelineTemplateConstants;
namespace GitHub.Runner.Worker
{
[ServiceLocator(Default = typeof(ExpressionManager))]
public interface IExpressionManager : IRunnerService
{
ConditionResult Evaluate(IExecutionContext context, string condition, bool hostTracingOnly = false);
}
public sealed class ExpressionManager : RunnerService, IExpressionManager
{
public ConditionResult Evaluate(IExecutionContext executionContext, string condition, bool hostTracingOnly = false)
{
ArgUtil.NotNull(executionContext, nameof(executionContext));
ConditionResult result = new ConditionResult();
var expressionTrace = new TraceWriter(Trace, hostTracingOnly ? null : executionContext);
var tree = Parse(executionContext, expressionTrace, condition);
var expressionResult = tree.Evaluate(expressionTrace, HostContext.SecretMasker, state: executionContext, options: null);
result.Value = expressionResult.IsTruthy;
result.Trace = expressionTrace.Trace;
return result;
}
private static IExpressionNode Parse(IExecutionContext executionContext, TraceWriter expressionTrace, string condition)
{
ArgUtil.NotNull(executionContext, nameof(executionContext));
if (string.IsNullOrWhiteSpace(condition))
{
condition = $"{PipelineTemplateConstants.Success}()";
}
var parser = new ExpressionParser();
var namedValues = executionContext.ExpressionValues.Keys.Select(x => new NamedValueInfo<ContextValueNode>(x)).ToArray();
var functions = new IFunctionInfo[]
{
new FunctionInfo<AlwaysNode>(name: Constants.Expressions.Always, minParameters: 0, maxParameters: 0),
new FunctionInfo<CancelledNode>(name: Constants.Expressions.Cancelled, minParameters: 0, maxParameters: 0),
new FunctionInfo<FailureNode>(name: Constants.Expressions.Failure, minParameters: 0, maxParameters: 0),
new FunctionInfo<SuccessNode>(name: Constants.Expressions.Success, minParameters: 0, maxParameters: 0),
};
return parser.CreateTree(condition, expressionTrace, namedValues, functions) ?? new SuccessNode();
}
private sealed class TraceWriter : DistributedTask.Expressions2.ITraceWriter
{
private readonly IExecutionContext _executionContext;
private readonly Tracing _trace;
private readonly StringBuilder _traceBuilder = new StringBuilder();
public string Trace => _traceBuilder.ToString();
public TraceWriter(Tracing trace, IExecutionContext executionContext)
{
ArgUtil.NotNull(trace, nameof(trace));
_trace = trace;
_executionContext = executionContext;
}
public void Info(string message)
{
_trace.Info(message);
_executionContext?.Debug(message);
_traceBuilder.AppendLine(message);
}
public void Verbose(string message)
{
_trace.Verbose(message);
_executionContext?.Debug(message);
}
}
private sealed class AlwaysNode : Function
{
protected override Object EvaluateCore(EvaluationContext context, out ResultMemory resultMemory)
{
resultMemory = null;
return true;
}
}
private sealed class CancelledNode : Function
{
protected sealed override object EvaluateCore(EvaluationContext evaluationContext, out ResultMemory resultMemory)
{
resultMemory = null;
var executionContext = evaluationContext.State as IExecutionContext;
ArgUtil.NotNull(executionContext, nameof(executionContext));
ActionResult jobStatus = executionContext.JobContext.Status ?? ActionResult.Success;
return jobStatus == ActionResult.Cancelled;
}
}
private sealed class FailureNode : Function
{
protected sealed override object EvaluateCore(EvaluationContext evaluationContext, out ResultMemory resultMemory)
{
resultMemory = null;
var executionContext = evaluationContext.State as IExecutionContext;
ArgUtil.NotNull(executionContext, nameof(executionContext));
ActionResult jobStatus = executionContext.JobContext.Status ?? ActionResult.Success;
return jobStatus == ActionResult.Failure;
}
}
private sealed class SuccessNode : Function
{
protected sealed override object EvaluateCore(EvaluationContext evaluationContext, out ResultMemory resultMemory)
{
resultMemory = null;
var executionContext = evaluationContext.State as IExecutionContext;
ArgUtil.NotNull(executionContext, nameof(executionContext));
ActionResult jobStatus = executionContext.JobContext.Status ?? ActionResult.Success;
return jobStatus == ActionResult.Success;
}
}
private sealed class ContextValueNode : NamedValue
{
protected override Object EvaluateCore(EvaluationContext evaluationContext, out ResultMemory resultMemory)
{
resultMemory = null;
var jobContext = evaluationContext.State as IExecutionContext;
ArgUtil.NotNull(jobContext, nameof(jobContext));
return jobContext.ExpressionValues[Name];
}
}
}
public class ConditionResult
{
public ConditionResult(bool value = false, string trace = null)
{
this.Value = value;
this.Trace = trace;
}
public bool Value { get; set; }
public string Trace { get; set; }
public static implicit operator ConditionResult(bool value)
{
return new ConditionResult(value);
}
}
}

View File

@@ -0,0 +1,25 @@
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using GitHub.DistributedTask.Expressions2;
using GitHub.DistributedTask.Expressions2.Sdk;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using ObjectTemplating = GitHub.DistributedTask.ObjectTemplating;
using PipelineTemplateConstants = GitHub.DistributedTask.Pipelines.ObjectTemplating.PipelineTemplateConstants;
namespace GitHub.Runner.Worker.Expressions
{
public sealed class AlwaysFunction : Function
{
protected override Object EvaluateCore(EvaluationContext context, out ResultMemory resultMemory)
{
resultMemory = null;
return true;
}
}
}

View File

@@ -0,0 +1,31 @@
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using GitHub.DistributedTask.Expressions2;
using GitHub.DistributedTask.Expressions2.Sdk;
using GitHub.DistributedTask.ObjectTemplating;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using ObjectTemplating = GitHub.DistributedTask.ObjectTemplating;
using PipelineTemplateConstants = GitHub.DistributedTask.Pipelines.ObjectTemplating.PipelineTemplateConstants;
namespace GitHub.Runner.Worker.Expressions
{
public sealed class CancelledFunction : Function
{
protected sealed override object EvaluateCore(EvaluationContext evaluationContext, out ResultMemory resultMemory)
{
resultMemory = null;
var templateContext = evaluationContext.State as TemplateContext;
ArgUtil.NotNull(templateContext, nameof(templateContext));
var executionContext = templateContext.State[nameof(IExecutionContext)] as IExecutionContext;
ArgUtil.NotNull(executionContext, nameof(executionContext));
ActionResult jobStatus = executionContext.JobContext.Status ?? ActionResult.Success;
return jobStatus == ActionResult.Cancelled;
}
}
}

View File

@@ -0,0 +1,31 @@
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using GitHub.DistributedTask.Expressions2;
using GitHub.DistributedTask.Expressions2.Sdk;
using GitHub.DistributedTask.ObjectTemplating;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using ObjectTemplating = GitHub.DistributedTask.ObjectTemplating;
using PipelineTemplateConstants = GitHub.DistributedTask.Pipelines.ObjectTemplating.PipelineTemplateConstants;
namespace GitHub.Runner.Worker.Expressions
{
public sealed class FailureFunction : Function
{
protected sealed override object EvaluateCore(EvaluationContext evaluationContext, out ResultMemory resultMemory)
{
resultMemory = null;
var templateContext = evaluationContext.State as TemplateContext;
ArgUtil.NotNull(templateContext, nameof(templateContext));
var executionContext = templateContext.State[nameof(IExecutionContext)] as IExecutionContext;
ArgUtil.NotNull(executionContext, nameof(executionContext));
ActionResult jobStatus = executionContext.JobContext.Status ?? ActionResult.Success;
return jobStatus == ActionResult.Failure;
}
}
}

View File

@@ -8,29 +8,12 @@ using System.Reflection;
using System.Threading;
using System.Collections.Generic;
namespace GitHub.Runner.Worker.Handlers
namespace GitHub.Runner.Worker.Expressions
{
public class FunctionTrace : ITraceWriter
public sealed class HashFilesFunction : Function
{
private GitHub.DistributedTask.Expressions2.ITraceWriter _trace;
private const int _hashFileTimeoutSeconds = 120;
public FunctionTrace(GitHub.DistributedTask.Expressions2.ITraceWriter trace)
{
_trace = trace;
}
public void Info(string message)
{
_trace.Info(message);
}
public void Verbose(string message)
{
_trace.Info(message);
}
}
public sealed class HashFiles : Function
{
protected sealed override Object EvaluateCore(
EvaluationContext context,
out ResultMemory resultMemory)
@@ -82,7 +65,7 @@ namespace GitHub.Runner.Worker.Handlers
string node = Path.Combine(runnerRoot, "externals", "node12", "bin", $"node{IOUtil.ExeExtension}");
string hashFilesScript = Path.Combine(binDir, "hashFiles");
var hashResult = string.Empty;
var p = new ProcessInvoker(new FunctionTrace(context.Trace));
var p = new ProcessInvoker(new HashFilesTrace(context.Trace));
p.ErrorDataReceived += ((_, data) =>
{
if (!string.IsNullOrEmpty(data.Data) && data.Data.StartsWith("__OUTPUT__") && data.Data.EndsWith("__OUTPUT__"))
@@ -108,19 +91,48 @@ namespace GitHub.Runner.Worker.Handlers
}
env["patterns"] = string.Join(Environment.NewLine, patterns);
int exitCode = p.ExecuteAsync(workingDirectory: githubWorkspace,
fileName: node,
arguments: $"\"{hashFilesScript.Replace("\"", "\\\"")}\"",
environment: env,
requireExitCodeZero: false,
cancellationToken: new CancellationTokenSource(TimeSpan.FromSeconds(120)).Token).GetAwaiter().GetResult();
if (exitCode != 0)
using (var tokenSource = new CancellationTokenSource(TimeSpan.FromSeconds(_hashFileTimeoutSeconds)))
{
throw new InvalidOperationException($"hashFiles('{ExpressionUtility.StringEscape(string.Join(", ", patterns))}') failed. Fail to hash files under directory '{githubWorkspace}'");
try
{
int exitCode = p.ExecuteAsync(workingDirectory: githubWorkspace,
fileName: node,
arguments: $"\"{hashFilesScript.Replace("\"", "\\\"")}\"",
environment: env,
requireExitCodeZero: false,
cancellationToken: tokenSource.Token).GetAwaiter().GetResult();
if (exitCode != 0)
{
throw new InvalidOperationException($"hashFiles('{ExpressionUtility.StringEscape(string.Join(", ", patterns))}') failed. Fail to hash files under directory '{githubWorkspace}'");
}
}
catch (OperationCanceledException) when (tokenSource.IsCancellationRequested)
{
throw new TimeoutException($"hashFiles('{ExpressionUtility.StringEscape(string.Join(", ", patterns))}') couldn't finish within {_hashFileTimeoutSeconds} seconds.");
}
return hashResult;
}
}
private sealed class HashFilesTrace : ITraceWriter
{
private GitHub.DistributedTask.Expressions2.ITraceWriter _trace;
public HashFilesTrace(GitHub.DistributedTask.Expressions2.ITraceWriter trace)
{
_trace = trace;
}
public void Info(string message)
{
_trace.Info(message);
}
return hashResult;
public void Verbose(string message)
{
_trace.Info(message);
}
}
}
}

View File

@@ -0,0 +1,31 @@
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using GitHub.DistributedTask.Expressions2;
using GitHub.DistributedTask.Expressions2.Sdk;
using GitHub.DistributedTask.ObjectTemplating;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using ObjectTemplating = GitHub.DistributedTask.ObjectTemplating;
using PipelineTemplateConstants = GitHub.DistributedTask.Pipelines.ObjectTemplating.PipelineTemplateConstants;
namespace GitHub.Runner.Worker.Expressions
{
public sealed class SuccessFunction : Function
{
protected sealed override object EvaluateCore(EvaluationContext evaluationContext, out ResultMemory resultMemory)
{
resultMemory = null;
var templateContext = evaluationContext.State as TemplateContext;
ArgUtil.NotNull(templateContext, nameof(templateContext));
var executionContext = templateContext.State[nameof(IExecutionContext)] as IExecutionContext;
ArgUtil.NotNull(executionContext, nameof(executionContext));
ActionResult jobStatus = executionContext.JobContext.Status ?? ActionResult.Success;
return jobStatus == ActionResult.Success;
}
}
}
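The status functions above (SuccessFunction, FailureFunction, CancelledFunction) all follow the same pattern: the execution context is exposed under the key nameof(IExecutionContext) (ToExpressionState, shown earlier, builds that key/value pair), and each function reads it back from the template context's State inside EvaluateCore to inspect the job status. A minimal sketch of just that lookup pattern, using simplified stand-in types rather than the real TemplateContext/Function SDK classes:
using System;
using System.Collections.Generic;
// Stand-ins (assumptions for illustration only) for the SDK types.
class FakeTemplateContext
{
    public Dictionary<string, object> State { get; } = new Dictionary<string, object>();
}
class FakeExecutionContext
{
    public string JobStatus { get; set; } = "success";
}
class StatusFunctionSketch
{
    // Mirrors the lookup used by SuccessFunction/FailureFunction/CancelledFunction:
    // pull the execution context out of the template context's State bag by key.
    static bool EvaluateSuccess(FakeTemplateContext templateContext)
    {
        var executionContext = (FakeExecutionContext)templateContext.State[nameof(FakeExecutionContext)];
        return executionContext.JobStatus == "success";
    }
    static void Main()
    {
        var templateContext = new FakeTemplateContext();
        // The runner-side extension does the equivalent of this for the real IExecutionContext.
        templateContext.State[nameof(FakeExecutionContext)] = new FakeExecutionContext();
        Console.WriteLine(EvaluateSuccess(templateContext)); // True
    }
}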

View File

@@ -10,14 +10,19 @@ namespace GitHub.Runner.Worker
{
"action",
"actor",
"api_url",
"base_ref",
"event_name",
"event_path",
"graphql_url",
"head_ref",
"job",
"ref",
"repository",
"repository_owner",
"run_id",
"run_number",
"server_url",
"sha",
"workflow",
"workspace",

View File

@@ -0,0 +1,96 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using Pipelines = GitHub.DistributedTask.Pipelines;
namespace GitHub.Runner.Worker.Handlers
{
[ServiceLocator(Default = typeof(CompositeActionHandler))]
public interface ICompositeActionHandler : IHandler
{
CompositeActionExecutionData Data { get; set; }
}
public sealed class CompositeActionHandler : Handler, ICompositeActionHandler
{
public CompositeActionExecutionData Data { get; set; }
public Task RunAsync(ActionRunStage stage)
{
// Validate args.
Trace.Entering();
ArgUtil.NotNull(ExecutionContext, nameof(ExecutionContext));
ArgUtil.NotNull(Inputs, nameof(Inputs));
var githubContext = ExecutionContext.ExpressionValues["github"] as GitHubContext;
ArgUtil.NotNull(githubContext, nameof(githubContext));
var tempDirectory = HostContext.GetDirectory(WellKnownDirectory.Temp);
// Resolve action steps
var actionSteps = Data.Steps;
// Create Context Data to reuse for each composite action step
var inputsData = new DictionaryContextData();
foreach (var i in Inputs)
{
inputsData[i.Key] = new StringContextData(i.Value);
}
// Add each composite action step to the front of the queue
int location = 0;
foreach (Pipelines.ActionStep aStep in actionSteps)
{
// Ex:
// runs:
// using: "composite"
// steps:
// - uses: example/test-composite@v2 (a)
// - run echo hello world (b)
// - run echo hello world 2 (c)
//
// ethanchewy/test-composite/action.yaml
// runs:
// using: "composite"
// steps:
// - run echo hello world 3 (d)
// - run echo hello world 4 (e)
//
// Steps are processed as follows:
// | a |
// | a | => | d |
// (Run step d)
// | a |
// | a | => | e |
// (Run step e)
// | a |
// (Run step a)
// | b |
// (Run step b)
// | c |
// (Run step c)
// Done.
var actionRunner = HostContext.CreateService<IActionRunner>();
actionRunner.Action = aStep;
actionRunner.Stage = stage;
actionRunner.Condition = aStep.Condition;
actionRunner.DisplayName = aStep.DisplayName;
ExecutionContext.RegisterNestedStep(actionRunner, inputsData, location, Environment);
location++;
}
return Task.CompletedTask;
}
}
}
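The ordering diagram in the comment block above boils down to one invariant: CompositeActionHandler inserts the expanded steps into the job's step list at an incrementing `location`, so they run ahead of the remaining workflow steps while keeping their own order. A minimal sketch of just that insertion pattern, with a plain List<string> standing in for Root.JobSteps:
using System;
using System.Collections.Generic;
class NestedStepOrderSketch
{
    static void Main()
    {
        // Job step list after composite step (a) has been dequeued: (b) and (c) are still pending.
        var jobSteps = new List<string> { "b", "c" };
        // Composite action (a) expands into steps (d) and (e).
        var compositeSteps = new[] { "d", "e" };
        int location = 0;
        foreach (var step in compositeSteps)
        {
            // Same pattern as Root.JobSteps.Insert(location, step) in RegisterNestedStep.
            jobSteps.Insert(location, step);
            location++;
        }
        Console.WriteLine(string.Join(", ", jobSteps)); // prints: d, e, b, c
    }
}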

View File

@@ -52,7 +52,12 @@ namespace GitHub.Runner.Worker.Handlers
ExecutionContext.Output($"Dockerfile for action: '{dockerFile}'.");
var imageName = $"{dockerManger.DockerInstanceLabel}:{ExecutionContext.Id.ToString("N")}";
var buildExitCode = await dockerManger.DockerBuild(ExecutionContext, ExecutionContext.GetGitHubContext("workspace"), Directory.GetParent(dockerFile).FullName, imageName);
var buildExitCode = await dockerManger.DockerBuild(
ExecutionContext,
ExecutionContext.GetGitHubContext("workspace"),
dockerFile,
Directory.GetParent(dockerFile).FullName,
imageName);
if (buildExitCode != 0)
{
throw new InvalidOperationException($"Docker build failed with exit code {buildExitCode}");
@@ -82,9 +87,13 @@ namespace GitHub.Runner.Worker.Handlers
container.ContainerEntryPoint = Inputs.GetValueOrDefault("entryPoint");
}
}
else if (stage == ActionRunStage.Pre)
{
container.ContainerEntryPoint = Data.Pre;
}
else if (stage == ActionRunStage.Post)
{
container.ContainerEntryPoint = Data.Cleanup;
container.ContainerEntryPoint = Data.Post;
}
// create inputs context for template evaluation
@@ -97,14 +106,14 @@ namespace GitHub.Runner.Worker.Handlers
}
}
var evaluateContext = new Dictionary<string, PipelineContextData>(StringComparer.OrdinalIgnoreCase);
evaluateContext["inputs"] = inputsContext;
var extraExpressionValues = new Dictionary<string, PipelineContextData>(StringComparer.OrdinalIgnoreCase);
extraExpressionValues["inputs"] = inputsContext;
var manifestManager = HostContext.GetService<IActionManifestManager>();
if (Data.Arguments != null)
{
container.ContainerEntryPointArgs = "";
var evaluatedArgs = manifestManager.EvaluateContainerArguments(ExecutionContext, Data.Arguments, evaluateContext);
var evaluatedArgs = manifestManager.EvaluateContainerArguments(ExecutionContext, Data.Arguments, extraExpressionValues);
foreach (var arg in evaluatedArgs)
{
if (!string.IsNullOrEmpty(arg))
@@ -124,7 +133,7 @@ namespace GitHub.Runner.Worker.Handlers
if (Data.Environment != null)
{
var evaluatedEnv = manifestManager.EvaluateContainerEnvironment(ExecutionContext, Data.Environment, evaluateContext);
var evaluatedEnv = manifestManager.EvaluateContainerEnvironment(ExecutionContext, Data.Environment, extraExpressionValues);
foreach (var env in evaluatedEnv)
{
if (!this.Environment.ContainsKey(env.Key))

View File

@@ -66,6 +66,11 @@ namespace GitHub.Runner.Worker.Handlers
handler = HostContext.CreateService<IRunnerPluginHandler>();
(handler as IRunnerPluginHandler).Data = data as PluginActionExecutionData;
}
else if (data.ExecutionType == ActionExecutionType.Composite)
{
handler = HostContext.CreateService<ICompositeActionHandler>();
(handler as ICompositeActionHandler).Data = data as CompositeActionExecutionData;
}
else
{
// This should never happen.

View File

@@ -60,9 +60,13 @@ namespace GitHub.Runner.Worker.Handlers
{
target = Data.Script;
}
else if (stage == ActionRunStage.Pre)
{
target = Data.Pre;
}
else if (stage == ActionRunStage.Post)
{
target = Data.Cleanup;
target = Data.Post;
}
ArgUtil.NotNullOrEmpty(target, nameof(target));

View File

@@ -352,15 +352,24 @@ namespace GitHub.Runner.Worker.Handlers
if (File.Exists(gitConfigPath))
{
// Check if the config contains the workflow repository url
var qualifiedRepository = _executionContext.GetGitHubContext("repository");
var configMatch = $"url = https://github.com/{qualifiedRepository}";
var serverUrl = _executionContext.GetGitHubContext("server_url");
serverUrl = !string.IsNullOrEmpty(serverUrl) ? serverUrl : "https://github.com";
var host = new Uri(serverUrl, UriKind.Absolute).Host;
var nameWithOwner = _executionContext.GetGitHubContext("repository");
var patterns = new[] {
$"url = {serverUrl}/{nameWithOwner}",
$"url = git@{host}:{nameWithOwner}.git",
};
var content = File.ReadAllText(gitConfigPath);
foreach (var line in content.Split("\n").Select(x => x.Trim()))
{
if (String.Equals(line, configMatch, StringComparison.OrdinalIgnoreCase))
foreach (var pattern in patterns)
{
repositoryPath = directoryPath;
break;
if (String.Equals(line, pattern, StringComparison.OrdinalIgnoreCase))
{
repositoryPath = directoryPath;
break;
}
}
}
}

View File

@@ -31,7 +31,7 @@ namespace GitHub.Runner.Worker.Handlers
}
else if (stage == ActionRunStage.Post)
{
plugin = Data.Cleanup;
plugin = Data.Post;
}
ArgUtil.NotNullOrEmpty(plugin, nameof(plugin));

View File

@@ -58,12 +58,21 @@ namespace GitHub.Runner.Worker.Handlers
string shellCommandPath = null;
bool validateShellOnHost = !(StepHost is ContainerStepHost);
string prependPath = string.Join(Path.PathSeparator.ToString(), ExecutionContext.PrependPath.Reverse<string>());
Inputs.TryGetValue("shell", out var shell);
string shell = null;
if (!Inputs.TryGetValue("shell", out shell) || string.IsNullOrEmpty(shell))
{
// TODO: figure out how defaults interact with template later
// for now, we won't check job.defaults if we are inside a template.
if (string.IsNullOrEmpty(ExecutionContext.ScopeName) && ExecutionContext.JobDefaults.TryGetValue("run", out var runDefaults))
{
runDefaults.TryGetValue("shell", out shell);
}
}
if (string.IsNullOrEmpty(shell))
{
#if OS_WINDOWS
shellCommand = "pwsh";
if(validateShellOnHost)
if (validateShellOnHost)
{
shellCommandPath = WhichUtil.Which(shellCommand, require: false, Trace, prependPath);
if (string.IsNullOrEmpty(shellCommandPath))
@@ -139,11 +148,36 @@ namespace GitHub.Runner.Worker.Handlers
Inputs.TryGetValue("script", out var contents);
contents = contents ?? string.Empty;
Inputs.TryGetValue("workingDirectory", out var workingDirectory);
string workingDirectory = null;
if (!Inputs.TryGetValue("workingDirectory", out workingDirectory))
{
// TODO: figure out how defaults interact with template later
// for now, we won't check job.defaults if we are inside a template.
if (string.IsNullOrEmpty(ExecutionContext.ScopeName) && ExecutionContext.JobDefaults.TryGetValue("run", out var runDefaults))
{
if (runDefaults.TryGetValue("working-directory", out workingDirectory))
{
ExecutionContext.Debug("Overwrite 'working-directory' base on job defaults.");
}
}
}
var workspaceDir = githubContext["workspace"] as StringContextData;
workingDirectory = Path.Combine(workspaceDir, workingDirectory ?? string.Empty);
Inputs.TryGetValue("shell", out var shell);
string shell = null;
if (!Inputs.TryGetValue("shell", out shell) || string.IsNullOrEmpty(shell))
{
// TODO: figure out how defaults interact with template later
// for now, we won't check job.defaults if we are inside a template.
if (string.IsNullOrEmpty(ExecutionContext.ScopeName) && ExecutionContext.JobDefaults.TryGetValue("run", out var runDefaults))
{
if (runDefaults.TryGetValue("shell", out shell))
{
ExecutionContext.Debug("Overwrite 'shell' base on job defaults.");
}
}
}
var isContainerStepHost = StepHost is ContainerStepHost;
string prependPath = string.Join(Path.PathSeparator.ToString(), ExecutionContext.PrependPath.Reverse<string>());
@@ -225,6 +259,16 @@ namespace GitHub.Runner.Worker.Handlers
// dump out the command
var fileName = isContainerStepHost ? shellCommand : commandPath;
#if OS_OSX
if (Environment.ContainsKey("DYLD_INSERT_LIBRARIES")) // We don't check `isContainerStepHost` because we don't support container on macOS
{
// launch `node macOSRunInvoker.js shell args` instead of `shell args` to avoid macOS SIP removing `DYLD_INSERT_LIBRARIES` when launching the process
string node12 = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Externals), "node12", "bin", $"node{IOUtil.ExeExtension}");
string macOSRunInvoker = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Bin), "macos-run-invoker.js");
arguments = $"\"{macOSRunInvoker.Replace("\"", "\\\"")}\" \"{fileName.Replace("\"", "\\\"")}\" {arguments}";
fileName = node12;
}
#endif
ExecutionContext.Debug($"{fileName} {arguments}");
using (var stdoutManager = new OutputManager(ExecutionContext, ActionCommandManager))

View File

@@ -110,9 +110,9 @@ namespace GitHub.Runner.Worker.Handlers
// try to resolve the path inside the container if the requested path is part of a mount volume
#if OS_WINDOWS
if (Container.MountVolumes.Exists(x => path.StartsWith(x.SourceVolumePath, StringComparison.OrdinalIgnoreCase)))
if (Container.MountVolumes.Exists(x => !string.IsNullOrEmpty(x.SourceVolumePath) && path.StartsWith(x.SourceVolumePath, StringComparison.OrdinalIgnoreCase)))
#else
if (Container.MountVolumes.Exists(x => path.StartsWith(x.SourceVolumePath)))
if (Container.MountVolumes.Exists(x => !string.IsNullOrEmpty(x.SourceVolumePath) && path.StartsWith(x.SourceVolumePath)))
#endif
{
return Container.TranslateToContainerPath(path);
@@ -149,14 +149,14 @@ namespace GitHub.Runner.Worker.Handlers
throw new NotSupportedException(msg);
}
nodeExternal = "node12_alpine";
executionContext.Output($"Container distribution is alpine. Running JavaScript Action with external tool: {nodeExternal}");
executionContext.Debug($"Container distribution is alpine. Running JavaScript Action with external tool: {nodeExternal}");
return nodeExternal;
}
}
}
// Optimistically use the default
nodeExternal = "node12";
executionContext.Output($"Running JavaScript Action with default external tool: {nodeExternal}");
executionContext.Debug($"Running JavaScript Action with default external tool: {nodeExternal}");
return nodeExternal;
}

View File

@@ -21,7 +21,7 @@ namespace GitHub.Runner.Worker
}
set
{
this["status"] = new StringContextData(value.ToString());
this["status"] = new StringContextData(value.ToString().ToLowerInvariant());
}
}

View File

@@ -1,10 +1,14 @@
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Runtime.Serialization;
using System.Threading.Tasks;
using GitHub.DistributedTask.Expressions2;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.Pipelines.ObjectTemplating;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
@@ -14,6 +18,16 @@ using Pipelines = GitHub.DistributedTask.Pipelines;
namespace GitHub.Runner.Worker
{
[DataContract]
public class SetupInfo
{
[DataMember]
public string Group { get; set; }
[DataMember]
public string Detail { get; set; }
}
[ServiceLocator(Default = typeof(JobExtension))]
public interface IJobExtension : IRunnerService
@@ -49,6 +63,58 @@ namespace GitHub.Runner.Worker
context.Start();
context.Debug($"Starting: Set up job");
context.Output($"Current runner version: '{BuildConstants.RunnerPackage.Version}'");
var setting = HostContext.GetService<IConfigurationStore>().GetSettings();
var credFile = HostContext.GetConfigFile(WellKnownConfigFile.Credentials);
if (File.Exists(credFile))
{
var credData = IOUtil.LoadObject<CredentialData>(credFile);
if (credData != null &&
credData.Data.TryGetValue("clientId", out var clientId))
{
// print out HostName for self-hosted runner
context.Output($"Runner name: '{setting.AgentName}'");
context.Output($"Machine name: '{Environment.MachineName}'");
}
}
var setupInfoFile = HostContext.GetConfigFile(WellKnownConfigFile.SetupInfo);
if (File.Exists(setupInfoFile))
{
Trace.Info($"Load machine setup info from {setupInfoFile}");
try
{
var setupInfo = IOUtil.LoadObject<List<SetupInfo>>(setupInfoFile);
if (setupInfo?.Count > 0)
{
foreach (var info in setupInfo)
{
if (!string.IsNullOrEmpty(info?.Detail))
{
var groupName = info.Group;
if (string.IsNullOrEmpty(groupName))
{
groupName = "Machine Setup Info";
}
context.Output($"##[group]{groupName}");
var multiLines = info.Detail.Replace("\r\n", "\n").TrimEnd('\n').Split('\n');
foreach (var line in multiLines)
{
context.Output(line);
}
context.Output("##[endgroup]");
}
}
}
}
catch (Exception ex)
{
context.Output($"Fail to load and print machine setup info: {ex.Message}");
Trace.Error(ex);
}
}
var repoFullName = context.GetGitHubContext("repository");
ArgUtil.NotNull(repoFullName, nameof(repoFullName));
context.Debug($"Primary repository: {repoFullName}");
@@ -76,12 +142,24 @@ namespace GitHub.Runner.Worker
context.SetRunnerContext("workspace", Path.Combine(_workDirectory, trackingConfig.PipelineDirectory));
context.SetGitHubContext("workspace", Path.Combine(_workDirectory, trackingConfig.WorkspaceDirectory));
// Temporary hack for GHES alpha
var configurationStore = HostContext.GetService<IConfigurationStore>();
var runnerSettings = configurationStore.GetSettings();
if (string.IsNullOrEmpty(context.GetGitHubContext("server_url")) && !runnerSettings.IsHostedServer && !string.IsNullOrEmpty(runnerSettings.GitHubUrl))
{
var url = new Uri(runnerSettings.GitHubUrl);
var portInfo = url.IsDefaultPort ? string.Empty : $":{url.Port.ToString(CultureInfo.InvariantCulture)}";
context.SetGitHubContext("server_url", $"{url.Scheme}://{url.Host}{portInfo}");
context.SetGitHubContext("api_url", $"{url.Scheme}://{url.Host}{portInfo}/api/v3");
context.SetGitHubContext("graphql_url", $"{url.Scheme}://{url.Host}{portInfo}/api/graphql");
}
// Evaluate the job-level environment variables
context.Debug("Evaluating job-level environment variables");
var templateEvaluator = context.ToPipelineTemplateEvaluator();
foreach (var token in message.EnvironmentVariables)
{
var environmentVariables = templateEvaluator.EvaluateStepEnvironment(token, jobContext.ExpressionValues, VarUtil.EnvironmentVariableKeyComparer);
var environmentVariables = templateEvaluator.EvaluateStepEnvironment(token, jobContext.ExpressionValues, jobContext.ExpressionFunctions, VarUtil.EnvironmentVariableKeyComparer);
foreach (var pair in environmentVariables)
{
context.EnvironmentVariables[pair.Key] = pair.Value ?? string.Empty;
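For the GHES fallback above, a short sketch of how server_url, api_url, and graphql_url are derived; the host and port are hypothetical.

    var url = new Uri("https://ghes.example.com:8443");
    var portInfo = url.IsDefaultPort ? string.Empty : $":{url.Port}";
    var serverUrl  = $"{url.Scheme}://{url.Host}{portInfo}";   // https://ghes.example.com:8443
    var apiUrl     = $"{serverUrl}/api/v3";                    // https://ghes.example.com:8443/api/v3
    var graphqlUrl = $"{serverUrl}/api/graphql";               // https://ghes.example.com:8443/api/graphql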
@@ -91,7 +169,7 @@ namespace GitHub.Runner.Worker
// Evaluate the job container
context.Debug("Evaluating job container");
var container = templateEvaluator.EvaluateJobContainer(message.JobContainer, jobContext.ExpressionValues);
var container = templateEvaluator.EvaluateJobContainer(message.JobContainer, jobContext.ExpressionValues, jobContext.ExpressionFunctions);
if (container != null)
{
jobContext.Container = new Container.ContainerInfo(HostContext, container);
@@ -99,7 +177,7 @@ namespace GitHub.Runner.Worker
// Evaluate the job service containers
context.Debug("Evaluating job service containers");
var serviceContainers = templateEvaluator.EvaluateJobServiceContainers(message.JobServiceContainers, jobContext.ExpressionValues);
var serviceContainers = templateEvaluator.EvaluateJobServiceContainers(message.JobServiceContainers, jobContext.ExpressionValues, jobContext.ExpressionFunctions);
if (serviceContainers?.Count > 0)
{
foreach (var pair in serviceContainers)
@@ -110,12 +188,32 @@ namespace GitHub.Runner.Worker
}
}
// Evaluate the job defaults
context.Debug("Evaluating job defaults");
foreach (var token in message.Defaults)
{
var defaults = token.AssertMapping("defaults");
if (defaults.Any(x => string.Equals(x.Key.AssertString("defaults key").Value, "run", StringComparison.OrdinalIgnoreCase)))
{
context.JobDefaults["run"] = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
var defaultsRun = defaults.First(x => string.Equals(x.Key.AssertString("defaults key").Value, "run", StringComparison.OrdinalIgnoreCase));
var jobDefaults = templateEvaluator.EvaluateJobDefaultsRun(defaultsRun.Value, jobContext.ExpressionValues, jobContext.ExpressionFunctions);
foreach (var pair in jobDefaults)
{
if (!string.IsNullOrEmpty(pair.Value))
{
context.JobDefaults["run"][pair.Key] = pair.Value;
}
}
}
}
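A sketch of the shape context.JobDefaults takes after the loop above, assuming a workflow that declares defaults.run with shell and working-directory (hypothetical values).

    // defaults:
    //   run:
    //     shell: bash
    //     working-directory: scripts
    var jobDefaults = new Dictionary<string, Dictionary<string, string>>
    {
        ["run"] = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
        {
            ["shell"] = "bash",
            ["working-directory"] = "scripts",
        }
    };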
// Build up 2 lists of steps, pre-job, job
// Download actions not already in the cache
Trace.Info("Downloading actions");
var actionManager = HostContext.GetService<IActionManager>();
var prepareSteps = await actionManager.PrepareActionsAsync(context, message.Steps);
preJobSteps.AddRange(prepareSteps);
var prepareResult = await actionManager.PrepareActionsAsync(context, message.Steps);
preJobSteps.AddRange(prepareResult.ContainerSetupSteps);
// Add start-container steps, record and stop-container steps
if (jobContext.Container != null || jobContext.ServiceContainers.Count > 0)
@@ -156,9 +254,23 @@ namespace GitHub.Runner.Worker
actionRunner.TryEvaluateDisplayName(contextData, context);
jobSteps.Add(actionRunner);
if (prepareResult.PreStepTracker.TryGetValue(step.Id, out var preStep))
{
Trace.Info($"Adding pre-{action.DisplayName}.");
preStep.TryEvaluateDisplayName(contextData, context);
preStep.DisplayName = $"Pre {preStep.DisplayName}";
preJobSteps.Add(preStep);
}
}
}
var intraActionStates = new Dictionary<Guid, Dictionary<string, string>>();
foreach (var preStep in prepareResult.PreStepTracker)
{
intraActionStates[preStep.Key] = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
}
// Create execution context for pre-job steps
foreach (var step in preJobSteps)
{
@@ -169,6 +281,12 @@ namespace GitHub.Runner.Worker
Guid stepId = Guid.NewGuid();
extensionStep.ExecutionContext = jobContext.CreateChild(stepId, extensionStep.DisplayName, null, null, stepId.ToString("N"));
}
else if (step is IActionRunner actionStep)
{
ArgUtil.NotNull(actionStep, step.DisplayName);
Guid stepId = Guid.NewGuid();
actionStep.ExecutionContext = jobContext.CreateChild(stepId, actionStep.DisplayName, stepId.ToString("N"), null, null, intraActionStates[actionStep.Action.Id]);
}
}
// Create execution context for job steps
@@ -177,7 +295,8 @@ namespace GitHub.Runner.Worker
if (step is IActionRunner actionStep)
{
ArgUtil.NotNull(actionStep, step.DisplayName);
actionStep.ExecutionContext = jobContext.CreateChild(actionStep.Action.Id, actionStep.DisplayName, actionStep.Action.Name, actionStep.Action.ScopeName, actionStep.Action.ContextName);
intraActionStates.TryGetValue(actionStep.Action.Id, out var intraActionState);
actionStep.ExecutionContext = jobContext.CreateChild(actionStep.Action.Id, actionStep.DisplayName, actionStep.Action.Name, actionStep.Action.ScopeName, actionStep.Action.ContextName, intraActionState);
}
}
@@ -242,6 +361,58 @@ namespace GitHub.Runner.Worker
context.Start();
context.Debug("Starting: Complete job");
// Evaluate job outputs
if (message.JobOutputs != null && message.JobOutputs.Type != TokenType.Null)
{
try
{
context.Output($"Evaluate and set job outputs");
// Populate env context for each step
Trace.Info("Initialize Env context for evaluating job outputs");
#if OS_WINDOWS
var envContext = new DictionaryContextData();
#else
var envContext = new CaseSensitiveDictionaryContextData();
#endif
context.ExpressionValues["env"] = envContext;
foreach (var pair in context.EnvironmentVariables)
{
envContext[pair.Key] = new StringContextData(pair.Value ?? string.Empty);
}
Trace.Info("Initialize steps context for evaluating job outputs");
context.ExpressionValues["steps"] = context.StepsContext.GetScope(context.ScopeName);
var templateEvaluator = context.ToPipelineTemplateEvaluator();
var outputs = templateEvaluator.EvaluateJobOutput(message.JobOutputs, context.ExpressionValues, context.ExpressionFunctions);
foreach (var output in outputs)
{
if (string.IsNullOrEmpty(output.Value))
{
context.Debug($"Skip output '{output.Key}' since it's empty");
continue;
}
if (!string.Equals(output.Value, HostContext.SecretMasker.MaskSecrets(output.Value)))
{
context.Warning($"Skip output '{output.Key}' since it may contain secret.");
continue;
}
context.Output($"Set output '{output.Key}'");
jobContext.JobOutputs[output.Key] = output.Value;
}
}
catch (Exception ex)
{
context.Result = TaskResult.Failed;
context.Error($"Fail to evaluate job outputs");
context.Error(ex);
jobContext.Result = TaskResultUtil.MergeTaskResults(jobContext.Result, TaskResult.Failed);
}
}
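A minimal sketch of the output filtering above, using a plain dictionary and a stand-in delegate for HostContext.SecretMasker.MaskSecrets.

    Func<string, string> maskSecrets = v => v.Replace("hunter2", "***");   // hypothetical masker
    var candidates = new Dictionary<string, string>
    {
        ["artifact"] = "build-42.zip",
        ["token"]    = "hunter2",
        ["empty"]    = "",
    };
    var jobOutputs = new Dictionary<string, string>();
    foreach (var output in candidates)
    {
        if (string.IsNullOrEmpty(output.Value)) continue;                        // skipped: empty
        if (!string.Equals(output.Value, maskSecrets(output.Value))) continue;   // skipped: would be masked
        jobOutputs[output.Key] = output.Value;                                   // only "artifact" survives
    }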
if (context.Variables.GetBoolean(Constants.Variables.Actions.RunnerDebug) ?? false)
{
Trace.Info("Support log upload starting.");

View File

@@ -5,21 +5,13 @@ using GitHub.Services.Common;
using GitHub.Services.WebApi;
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using System.Net.Http;
using System.Text;
using System.IO.Compression;
using System.Diagnostics;
using Newtonsoft.Json.Linq;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.ObjectTemplating;
namespace GitHub.Runner.Worker
{
@@ -122,13 +114,6 @@ namespace GitHub.Runner.Worker
_tempDirectoryManager = HostContext.GetService<ITempDirectoryManager>();
_tempDirectoryManager.InitializeTempDirectory(jobContext);
// // Expand container properties
// jobContext.Container?.ExpandProperties(jobContext.Variables);
// foreach (var sidecar in jobContext.SidecarContainers)
// {
// sidecar.ExpandProperties(jobContext.Variables);
// }
// Get the job extension.
Trace.Info("Getting job extension.");
IJobExtension jobExtension = HostContext.CreateService<IJobExtension>();
@@ -167,7 +152,7 @@ namespace GitHub.Runner.Worker
{
foreach (var step in jobSteps)
{
jobContext.JobSteps.Enqueue(step);
jobContext.JobSteps.Add(step);
}
await stepsRunner.RunAsync(jobContext);
@@ -231,7 +216,7 @@ namespace GitHub.Runner.Worker
}
Trace.Info("Raising job completed event.");
var jobCompletedEvent = new JobCompletedEvent(message.RequestId, message.JobId, result);
var jobCompletedEvent = new JobCompletedEvent(message.RequestId, message.JobId, result, jobContext.JobOutputs);
var completeJobRetryLimit = 5;
var exceptions = new List<Exception>();
@@ -254,6 +239,12 @@ namespace GitHub.Runner.Worker
Trace.Error(ex);
return TaskResult.Failed;
}
catch (TaskOrchestrationPlanTerminatedException ex)
{
Trace.Error($"TaskOrchestrationPlanTerminatedException received, while attempting to raise JobCompletedEvent for job {message.JobId}.");
Trace.Error(ex);
return TaskResult.Failed;
}
catch (Exception ex)
{
Trace.Error($"Catch exception while attempting to raise JobCompletedEvent for job {message.JobId}, job request {message.RequestId}.");

View File

@@ -56,13 +56,22 @@ namespace GitHub.Runner.Worker
}
}
public void SetResult(
public void SetConclusion(
string scopeName,
string stepName,
string result)
ActionResult conclusion)
{
var step = GetStep(scopeName, stepName);
step["result"] = new StringContextData(result);
step["conclusion"] = new StringContextData(conclusion.ToString().ToLowerInvariant());
}
public void SetOutcome(
string scopeName,
string stepName,
ActionResult outcome)
{
var step = GetStep(scopeName, stepName);
step["outcome"] = new StringContextData(outcome.ToString().ToLowerInvariant());
}
private DictionaryContextData GetStep(string scopeName, string stepName)
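How the outcome/conclusion split surfaces for a continue-on-error step, as a hedged sketch; `stepsContext` is a StepsContext instance, the step name is hypothetical, and ActionResult is assumed to include Success and Failure members.

    // A failing step with continue-on-error keeps its real result in `outcome`
    // while `conclusion` reflects the overridden, succeeded result.
    stepsContext.SetOutcome(scopeName: null, stepName: "build", outcome: ActionResult.Failure);
    stepsContext.SetConclusion(scopeName: null, stepName: "build", conclusion: ActionResult.Success);
    // In expressions: steps.build.outcome == 'failure', steps.build.conclusion == 'success'.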

View File

@@ -1,8 +1,6 @@
using GitHub.DistributedTask.WebApi;
using Pipelines = GitHub.DistributedTask.Pipelines;
using GitHub.Runner.Common.Util;
using System;
using System.Collections.Generic;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.Expressions2;
@@ -10,8 +8,13 @@ using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.Pipelines.ObjectTemplating;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using GitHub.Runner.Worker.Expressions;
using ObjectTemplating = GitHub.DistributedTask.ObjectTemplating;
using Pipelines = GitHub.DistributedTask.Pipelines;
namespace GitHub.Runner.Worker
{
@@ -56,18 +59,15 @@ namespace GitHub.Runner.Worker
checkPostJobActions = true;
while (jobContext.PostJobSteps.TryPop(out var postStep))
{
jobContext.JobSteps.Enqueue(postStep);
jobContext.JobSteps.Add(postStep);
}
continue;
}
var step = jobContext.JobSteps.Dequeue();
IStep nextStep = null;
if (jobContext.JobSteps.Count > 0)
{
nextStep = jobContext.JobSteps.Peek();
}
var step = jobContext.JobSteps[0];
jobContext.JobSteps.RemoveAt(0);
var nextStep = jobContext.JobSteps.Count > 0 ? jobContext.JobSteps[0] : null;
Trace.Info($"Processing step: DisplayName='{step.DisplayName}'");
ArgUtil.NotNull(step.ExecutionContext, nameof(step.ExecutionContext));
@@ -76,6 +76,13 @@ namespace GitHub.Runner.Worker
// Start
step.ExecutionContext.Start();
// Expression functions
step.ExecutionContext.ExpressionFunctions.Add(new FunctionInfo<AlwaysFunction>(PipelineTemplateConstants.Always, 0, 0));
step.ExecutionContext.ExpressionFunctions.Add(new FunctionInfo<CancelledFunction>(PipelineTemplateConstants.Cancelled, 0, 0));
step.ExecutionContext.ExpressionFunctions.Add(new FunctionInfo<FailureFunction>(PipelineTemplateConstants.Failure, 0, 0));
step.ExecutionContext.ExpressionFunctions.Add(new FunctionInfo<SuccessFunction>(PipelineTemplateConstants.Success, 0, 0));
step.ExecutionContext.ExpressionFunctions.Add(new FunctionInfo<HashFilesFunction>(PipelineTemplateConstants.HashFiles, 1, byte.MaxValue));
// Initialize scope
if (InitializeScope(step, scopeInputs))
{
@@ -86,129 +93,161 @@ namespace GitHub.Runner.Worker
#else
var envContext = new CaseSensitiveDictionaryContextData();
#endif
step.ExecutionContext.ExpressionValues["env"] = envContext;
// Global env
foreach (var pair in step.ExecutionContext.EnvironmentVariables)
{
envContext[pair.Key] = new StringContextData(pair.Value ?? string.Empty);
}
// Stomps over with outside step env
if (step.ExecutionContext.ExpressionValues.TryGetValue("env", out var envContextData))
{
#if OS_WINDOWS
var dict = envContextData as DictionaryContextData;
#else
var dict = envContextData as CaseSensitiveDictionaryContextData;
#endif
foreach (var pair in dict)
{
envContext[pair.Key] = pair.Value;
}
}
step.ExecutionContext.ExpressionValues["env"] = envContext;
bool evaluateStepEnvFailed = false;
if (step is IActionRunner actionStep)
{
// Set GITHUB_ACTION
step.ExecutionContext.SetGitHubContext("action", actionStep.Action.Name);
// Evaluate and merge action's env block to env context
var templateEvaluator = step.ExecutionContext.ToPipelineTemplateEvaluator();
var actionEnvironment = templateEvaluator.EvaluateStepEnvironment(actionStep.Action.Environment, step.ExecutionContext.ExpressionValues, VarUtil.EnvironmentVariableKeyComparer);
foreach (var env in actionEnvironment)
try
{
envContext[env.Key] = new StringContextData(env.Value ?? string.Empty);
}
}
var expressionManager = HostContext.GetService<IExpressionManager>();
try
{
// Register the job cancellation callback only if the job cancellation token has not been fired before this step runs
if (!jobContext.CancellationToken.IsCancellationRequested)
{
// Test the condition again. The job was canceled after the condition was originally evaluated.
jobCancelRegister = jobContext.CancellationToken.Register(() =>
// Evaluate and merge action's env block to env context
var templateEvaluator = step.ExecutionContext.ToPipelineTemplateEvaluator();
var actionEnvironment = templateEvaluator.EvaluateStepEnvironment(actionStep.Action.Environment, step.ExecutionContext.ExpressionValues, step.ExecutionContext.ExpressionFunctions, VarUtil.EnvironmentVariableKeyComparer);
foreach (var env in actionEnvironment)
{
// mark job as cancelled
jobContext.Result = TaskResult.Canceled;
jobContext.JobContext.Status = jobContext.Result?.ToActionResult();
step.ExecutionContext.Debug($"Re-evaluate condition on job cancellation for step: '{step.DisplayName}'.");
ConditionResult conditionReTestResult;
if (HostContext.RunnerShutdownToken.IsCancellationRequested)
{
step.ExecutionContext.Debug($"Skip Re-evaluate condition on runner shutdown.");
conditionReTestResult = false;
}
else
{
try
{
conditionReTestResult = expressionManager.Evaluate(step.ExecutionContext, step.Condition, hostTracingOnly: true);
}
catch (Exception ex)
{
// Cancel the step since we get exception while re-evaluate step condition.
Trace.Info("Caught exception from expression when re-test condition on job cancellation.");
step.ExecutionContext.Error(ex);
conditionReTestResult = false;
}
}
if (!conditionReTestResult.Value)
{
// Cancel the step.
Trace.Info("Cancel current running step.");
step.ExecutionContext.CancelToken();
}
});
}
else
{
if (jobContext.Result != TaskResult.Canceled)
{
// mark job as cancelled
jobContext.Result = TaskResult.Canceled;
jobContext.JobContext.Status = jobContext.Result?.ToActionResult();
envContext[env.Key] = new StringContextData(env.Value ?? string.Empty);
}
}
// Evaluate condition.
step.ExecutionContext.Debug($"Evaluating condition for step: '{step.DisplayName}'");
Exception conditionEvaluateError = null;
ConditionResult conditionResult;
if (HostContext.RunnerShutdownToken.IsCancellationRequested)
{
step.ExecutionContext.Debug($"Skip evaluate condition on runner shutdown.");
conditionResult = false;
}
else
{
try
{
conditionResult = expressionManager.Evaluate(step.ExecutionContext, step.Condition);
}
catch (Exception ex)
{
Trace.Info("Caught exception from expression.");
Trace.Error(ex);
conditionResult = false;
conditionEvaluateError = ex;
}
}
// no evaluate error but condition is false
if (!conditionResult.Value && conditionEvaluateError == null)
{
// Condition == false
Trace.Info("Skipping step due to condition evaluation.");
CompleteStep(step, nextStep, TaskResult.Skipped, resultCode: conditionResult.Trace);
}
else if (conditionEvaluateError != null)
catch (Exception ex)
{
// fail the step since there is an evaluate error.
step.ExecutionContext.Error(conditionEvaluateError);
Trace.Info("Caught exception from expression for step.env");
evaluateStepEnvFailed = true;
step.ExecutionContext.Error(ex);
CompleteStep(step, nextStep, TaskResult.Failed);
}
else
{
// Run the step.
await RunStepAsync(step, jobContext.CancellationToken);
CompleteStep(step, nextStep);
}
}
finally
if (!evaluateStepEnvFailed)
{
if (jobCancelRegister != null)
try
{
jobCancelRegister?.Dispose();
jobCancelRegister = null;
// Register the job cancellation callback only if the job cancellation token has not been fired before this step runs
if (!jobContext.CancellationToken.IsCancellationRequested)
{
// Test the condition again. The job was canceled after the condition was originally evaluated.
jobCancelRegister = jobContext.CancellationToken.Register(() =>
{
// mark job as cancelled
jobContext.Result = TaskResult.Canceled;
jobContext.JobContext.Status = jobContext.Result?.ToActionResult();
step.ExecutionContext.Debug($"Re-evaluate condition on job cancellation for step: '{step.DisplayName}'.");
var conditionReTestTraceWriter = new ConditionTraceWriter(Trace, null); // host tracing only
var conditionReTestResult = false;
if (HostContext.RunnerShutdownToken.IsCancellationRequested)
{
step.ExecutionContext.Debug($"Skip Re-evaluate condition on runner shutdown.");
}
else
{
try
{
var templateEvaluator = step.ExecutionContext.ToPipelineTemplateEvaluator(conditionReTestTraceWriter);
var condition = new BasicExpressionToken(null, null, null, step.Condition);
conditionReTestResult = templateEvaluator.EvaluateStepIf(condition, step.ExecutionContext.ExpressionValues, step.ExecutionContext.ExpressionFunctions, step.ExecutionContext.ToExpressionState());
}
catch (Exception ex)
{
// Cancel the step since we get exception while re-evaluate step condition.
Trace.Info("Caught exception from expression when re-test condition on job cancellation.");
step.ExecutionContext.Error(ex);
}
}
if (!conditionReTestResult)
{
// Cancel the step.
Trace.Info("Cancel current running step.");
step.ExecutionContext.CancelToken();
}
});
}
else
{
if (jobContext.Result != TaskResult.Canceled)
{
// mark job as cancelled
jobContext.Result = TaskResult.Canceled;
jobContext.JobContext.Status = jobContext.Result?.ToActionResult();
}
}
// Evaluate condition.
step.ExecutionContext.Debug($"Evaluating condition for step: '{step.DisplayName}'");
var conditionTraceWriter = new ConditionTraceWriter(Trace, step.ExecutionContext);
var conditionResult = false;
var conditionEvaluateError = default(Exception);
if (HostContext.RunnerShutdownToken.IsCancellationRequested)
{
step.ExecutionContext.Debug($"Skip evaluate condition on runner shutdown.");
}
else
{
try
{
var templateEvaluator = step.ExecutionContext.ToPipelineTemplateEvaluator(conditionTraceWriter);
var condition = new BasicExpressionToken(null, null, null, step.Condition);
conditionResult = templateEvaluator.EvaluateStepIf(condition, step.ExecutionContext.ExpressionValues, step.ExecutionContext.ExpressionFunctions, step.ExecutionContext.ToExpressionState());
}
catch (Exception ex)
{
Trace.Info("Caught exception from expression.");
Trace.Error(ex);
conditionEvaluateError = ex;
}
}
// no evaluate error but condition is false
if (!conditionResult && conditionEvaluateError == null)
{
// Condition == false
Trace.Info("Skipping step due to condition evaluation.");
CompleteStep(step, nextStep, TaskResult.Skipped, resultCode: conditionTraceWriter.Trace);
}
else if (conditionEvaluateError != null)
{
// fail the step since there is an evaluate error.
step.ExecutionContext.Error(conditionEvaluateError);
CompleteStep(step, nextStep, TaskResult.Failed);
}
else
{
// Run the step.
await RunStepAsync(step, jobContext.CancellationToken);
CompleteStep(step, nextStep);
}
}
finally
{
if (jobCancelRegister != null)
{
jobCancelRegister?.Dispose();
jobCancelRegister = null;
}
}
}
}
@@ -248,7 +287,7 @@ namespace GitHub.Runner.Worker
var templateEvaluator = step.ExecutionContext.ToPipelineTemplateEvaluator();
try
{
timeoutMinutes = templateEvaluator.EvaluateStepTimeout(step.Timeout, step.ExecutionContext.ExpressionValues);
timeoutMinutes = templateEvaluator.EvaluateStepTimeout(step.Timeout, step.ExecutionContext.ExpressionValues, step.ExecutionContext.ExpressionFunctions);
}
catch (Exception ex)
{
@@ -339,7 +378,7 @@ namespace GitHub.Runner.Worker
var continueOnError = false;
try
{
continueOnError = templateEvaluator.EvaluateStepContinueOnError(step.ContinueOnError, step.ExecutionContext.ExpressionValues);
continueOnError = templateEvaluator.EvaluateStepContinueOnError(step.ContinueOnError, step.ExecutionContext.ExpressionValues, step.ExecutionContext.ExpressionFunctions);
}
catch (Exception ex)
{
@@ -351,6 +390,7 @@ namespace GitHub.Runner.Worker
if (continueOnError)
{
step.ExecutionContext.Outcome = step.ExecutionContext.Result;
step.ExecutionContext.Result = TaskResult.Succeeded;
Trace.Info($"Updated step result (continue on error)");
}
@@ -386,12 +426,16 @@ namespace GitHub.Runner.Worker
scope = scopesToInitialize.Pop();
executionContext.Debug($"Initializing scope '{scope.Name}'");
executionContext.ExpressionValues["steps"] = stepsContext.GetScope(scope.ParentName);
executionContext.ExpressionValues["inputs"] = !String.IsNullOrEmpty(scope.ParentName) ? scopeInputs[scope.ParentName] : null;
// TODO: Fix this temporary workaround for Composite Actions
if (!executionContext.ExpressionValues.ContainsKey("inputs") && !string.IsNullOrEmpty(Environment.GetEnvironmentVariable("TESTING_COMPOSITE_ACTIONS_ALPHA")))
{
executionContext.ExpressionValues["inputs"] = !String.IsNullOrEmpty(scope.ParentName) ? scopeInputs[scope.ParentName] : null;
}
var templateEvaluator = executionContext.ToPipelineTemplateEvaluator();
var inputs = default(DictionaryContextData);
try
{
inputs = templateEvaluator.EvaluateStepScopeInputs(scope.Inputs, executionContext.ExpressionValues);
inputs = templateEvaluator.EvaluateStepScopeInputs(scope.Inputs, executionContext.ExpressionValues, executionContext.ExpressionFunctions);
}
catch (Exception ex)
{
@@ -409,7 +453,11 @@ namespace GitHub.Runner.Worker
// Setup expression values
var scopeName = executionContext.ScopeName;
executionContext.ExpressionValues["steps"] = stepsContext.GetScope(scopeName);
executionContext.ExpressionValues["inputs"] = string.IsNullOrEmpty(scopeName) ? null : scopeInputs[scopeName];
// TODO: Fix this temporary workaround for Composite Actions
if (!executionContext.ExpressionValues.ContainsKey("inputs") && !string.IsNullOrEmpty(Environment.GetEnvironmentVariable("TESTING_COMPOSITE_ACTIONS_ALPHA")))
{
executionContext.ExpressionValues["inputs"] = string.IsNullOrEmpty(scopeName) ? null : scopeInputs[scopeName];
}
return true;
}
@@ -447,7 +495,7 @@ namespace GitHub.Runner.Worker
var outputs = default(DictionaryContextData);
try
{
outputs = templateEvaluator.EvaluateStepScopeOutputs(scope.Outputs, executionContext.ExpressionValues);
outputs = templateEvaluator.EvaluateStepScopeOutputs(scope.Outputs, executionContext.ExpressionValues, executionContext.ExpressionFunctions);
}
catch (Exception ex)
{
@@ -475,5 +523,43 @@ namespace GitHub.Runner.Worker
executionContext.Complete(result, resultCode: resultCode);
}
private sealed class ConditionTraceWriter : ObjectTemplating::ITraceWriter
{
private readonly IExecutionContext _executionContext;
private readonly Tracing _trace;
private readonly StringBuilder _traceBuilder = new StringBuilder();
public string Trace => _traceBuilder.ToString();
public ConditionTraceWriter(Tracing trace, IExecutionContext executionContext)
{
ArgUtil.NotNull(trace, nameof(trace));
_trace = trace;
_executionContext = executionContext;
}
public void Error(string format, params Object[] args)
{
var message = StringUtil.Format(format, args);
_trace.Error(message);
_executionContext?.Debug(message);
}
public void Info(string format, params Object[] args)
{
var message = StringUtil.Format(format, args);
_trace.Info(message);
_executionContext?.Debug(message);
_traceBuilder.AppendLine(message);
}
public void Verbose(string format, params Object[] args)
{
var message = StringUtil.Format(format, args);
_trace.Verbose(message);
_executionContext?.Debug(message);
}
}
}
}
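Pulling the pieces above together, a sketch of one condition evaluation inside the step loop; the condition string is hypothetical, the calls mirror the code.

    var conditionTraceWriter = new ConditionTraceWriter(Trace, step.ExecutionContext);
    var templateEvaluator = step.ExecutionContext.ToPipelineTemplateEvaluator(conditionTraceWriter);
    var condition = new BasicExpressionToken(null, null, null, "success() && github.ref == 'refs/heads/main'");
    var run = templateEvaluator.EvaluateStepIf(condition,
                                               step.ExecutionContext.ExpressionValues,
                                               step.ExecutionContext.ExpressionFunctions,
                                               step.ExecutionContext.ToExpressionState());
    // conditionTraceWriter.Trace now carries the expression trace; when the step is skipped,
    // that trace is passed to CompleteStep as the result code.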

View File

@@ -40,7 +40,7 @@ namespace GitHub.Runner.Worker
// Validate args.
ArgUtil.NotNullOrEmpty(pipeIn, nameof(pipeIn));
ArgUtil.NotNullOrEmpty(pipeOut, nameof(pipeOut));
VssUtil.InitializeVssClientSettings(HostContext.UserAgent, HostContext.WebProxy);
VssUtil.InitializeVssClientSettings(HostContext.UserAgents, HostContext.WebProxy);
var jobRunner = HostContext.CreateService<IJobRunner>();
using (var channel = HostContext.CreateService<IProcessChannel>())

View File

@@ -32,7 +32,8 @@
"one-of": [
"container-runs",
"node12-runs",
"plugin-runs"
"plugin-runs",
"composite-runs"
]
},
"container-runs": {
@@ -43,6 +44,8 @@
"entrypoint": "non-empty-string",
"args": "container-runs-args",
"env": "container-runs-env",
"pre-entrypoint": "non-empty-string",
"pre-if": "non-empty-string",
"post-entrypoint": "non-empty-string",
"post-if": "non-empty-string"
}
@@ -67,6 +70,8 @@
"properties": {
"using": "non-empty-string",
"main": "non-empty-string",
"pre": "non-empty-string",
"pre-if": "non-empty-string",
"post": "non-empty-string",
"post-if": "non-empty-string"
}
@@ -79,6 +84,36 @@
}
}
},
"composite-runs": {
"mapping": {
"properties": {
"using": "non-empty-string",
"steps": "composite-steps"
}
}
},
"composite-steps": {
"context": [
"github",
"needs",
"strategy",
"matrix",
"secrets",
"steps",
"inputs",
"job",
"runner",
"env",
"always(0,0)",
"failure(0,0)",
"cancelled(0,0)",
"success(0,0)",
"hashFiles(1,255)"
],
"sequence": {
"item-type": "any"
}
},
"container-runs-context": {
"context": [
"inputs"
@@ -90,10 +125,9 @@
"github",
"strategy",
"matrix",
"steps",
"job",
"runner",
"env"
"hashFiles(1,255)"
],
"string": {}
},

View File

@@ -5,7 +5,7 @@ using GitHub.DistributedTask.Expressions2.Sdk.Functions;
namespace GitHub.DistributedTask.Expressions2
{
public static class ExpressionConstants
internal static class ExpressionConstants
{
static ExpressionConstants()
{
@@ -15,7 +15,7 @@ namespace GitHub.DistributedTask.Expressions2
AddFunction<Join>("join", 1, 2);
AddFunction<StartsWith>("startsWith", 2, 2);
AddFunction<ToJson>("toJson", 1, 1);
AddFunction<HashFiles>("hashFiles", 1, 1);
AddFunction<FromJson>("fromJson", 1, 1);
}
private static void AddFunction<T>(String name, Int32 minParameters, Int32 maxParameters)
@@ -24,12 +24,6 @@ namespace GitHub.DistributedTask.Expressions2
WellKnownFunctions.Add(name, new FunctionInfo<T>(name, minParameters, maxParameters));
}
public static void UpdateFunction<T>(String name, Int32 minParameters, Int32 maxParameters)
where T : Function, new()
{
WellKnownFunctions[name] = new FunctionInfo<T>(name, minParameters, maxParameters);
}
internal static readonly String False = "false";
internal static readonly String Infinity = "Infinity";
internal static readonly Int32 MaxDepth = 50;

View File

@@ -0,0 +1,24 @@
using System;
using System.IO;
using GitHub.DistributedTask.Pipelines.ContextData;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
namespace GitHub.DistributedTask.Expressions2.Sdk.Functions
{
internal sealed class FromJson : Function
{
protected sealed override Object EvaluateCore(
EvaluationContext context,
out ResultMemory resultMemory)
{
resultMemory = null;
var json = Parameters[0].Evaluate(context).ConvertToString();
using (var stringReader = new StringReader(json))
using (var jsonReader = new JsonTextReader(stringReader) { DateParseHandling = DateParseHandling.None, FloatParseHandling = FloatParseHandling.Double })
{
var token = JToken.ReadFrom(jsonReader);
return token.ToPipelineContextData();
}
}
    }
}
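A hedged sketch of what those reader settings mean in practice, using the same Newtonsoft calls; the JSON payload is hypothetical.

    // using System.IO; using Newtonsoft.Json; using Newtonsoft.Json.Linq;
    var json = "{\"os\": [\"ubuntu-latest\", \"windows-latest\"], \"when\": \"2020-07-08\"}";
    using (var stringReader = new StringReader(json))
    using (var jsonReader = new JsonTextReader(stringReader) { DateParseHandling = DateParseHandling.None, FloatParseHandling = FloatParseHandling.Double })
    {
        var token = JToken.ReadFrom(jsonReader);
        // DateParseHandling.None keeps "2020-07-08" as a plain string rather than a DateTime,
        // so an expression like fromJson(needs.setup.outputs.matrix).os[0] sees the raw values.
    }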

View File

@@ -1,122 +0,0 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Minimatch;
using System.IO;
using System.Security.Cryptography;
using GitHub.DistributedTask.Expressions2.Sdk;
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.Pipelines.ObjectTemplating;
namespace GitHub.DistributedTask.Expressions2.Sdk.Functions
{
internal sealed class HashFiles : Function
{
protected sealed override Object EvaluateCore(
EvaluationContext context,
out ResultMemory resultMemory)
{
resultMemory = null;
// hashFiles() only works on the runner and only works with files under GITHUB_WORKSPACE
// Since GITHUB_WORKSPACE is set by the runner, its presence is used to tell whether this code is running on the server or on the runner.
if (context.State is ObjectTemplating.TemplateContext templateContext &&
templateContext.ExpressionValues.TryGetValue(PipelineTemplateConstants.GitHub, out var githubContextData) &&
githubContextData is DictionaryContextData githubContext &&
githubContext.TryGetValue(PipelineTemplateConstants.Workspace, out var workspace) == true &&
workspace is StringContextData workspaceData)
{
string searchRoot = workspaceData.Value;
string pattern = Parameters[0].Evaluate(context).ConvertToString();
// Convert slashes on Windows
if (s_isWindows)
{
pattern = pattern.Replace('\\', '/');
}
// Root the pattern
if (!Path.IsPathRooted(pattern))
{
var patternRoot = s_isWindows ? searchRoot.Replace('\\', '/').TrimEnd('/') : searchRoot.TrimEnd('/');
pattern = string.Concat(patternRoot, "/", pattern);
}
// Get all files
context.Trace.Info($"Search root directory: '{searchRoot}'");
context.Trace.Info($"Search pattern: '{pattern}'");
var files = Directory.GetFiles(searchRoot, "*", SearchOption.AllDirectories)
.Select(x => s_isWindows ? x.Replace('\\', '/') : x)
.OrderBy(x => x, StringComparer.Ordinal)
.ToList();
if (files.Count == 0)
{
throw new ArgumentException($"hashFiles('{ExpressionUtility.StringEscape(pattern)}') failed. Directory '{searchRoot}' is empty");
}
else
{
context.Trace.Info($"Found {files.Count} files");
}
// Match
var matcher = new Minimatcher(pattern, s_minimatchOptions);
files = matcher.Filter(files)
.Select(x => s_isWindows ? x.Replace('/', '\\') : x)
.ToList();
if (files.Count == 0)
{
throw new ArgumentException($"hashFiles('{ExpressionUtility.StringEscape(pattern)}') failed. Search pattern '{pattern}' doesn't match any file under '{searchRoot}'");
}
else
{
context.Trace.Info($"{files.Count} matches to hash");
}
// Hash each file
List<byte> filesSha256 = new List<byte>();
foreach (var file in files)
{
context.Trace.Info($"Hash {file}");
using (SHA256 sha256hash = SHA256.Create())
{
using (var fileStream = File.OpenRead(file))
{
filesSha256.AddRange(sha256hash.ComputeHash(fileStream));
}
}
}
// Hash the hashes
using (SHA256 sha256hash = SHA256.Create())
{
var hashBytes = sha256hash.ComputeHash(filesSha256.ToArray());
StringBuilder hashString = new StringBuilder();
for (int i = 0; i < hashBytes.Length; i++)
{
hashString.Append(hashBytes[i].ToString("x2"));
}
var result = hashString.ToString();
context.Trace.Info($"Final hash result: '{result}'");
return result;
}
}
else
{
throw new InvalidOperationException("'hashfiles' expression function is only supported under runner context.");
}
}
private static readonly bool s_isWindows = Environment.OSVersion.Platform != PlatformID.Unix && Environment.OSVersion.Platform != PlatformID.MacOSX;
// Only support basic globbing (* ? and []) and globstar (**)
private static readonly Options s_minimatchOptions = new Options
{
Dot = true,
NoBrace = true,
NoCase = s_isWindows,
NoComment = true,
NoExt = true,
NoNegate = true,
};
}
}
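The removed file's hash-of-hashes scheme, kept here as a reference sketch (file names hypothetical; requires System.IO, System.Security.Cryptography, System.Collections.Generic); hashFiles itself now runs as the runner-side HashFilesFunction registered earlier in this diff.

    var files = new[] { "package-lock.json", "src/app/package-lock.json" };   // hypothetical matches
    var filesSha256 = new List<byte>();
    foreach (var file in files)
    {
        using (var sha256 = SHA256.Create())
        using (var stream = File.OpenRead(file))
        {
            filesSha256.AddRange(sha256.ComputeHash(stream));   // concatenate per-file digests
        }
    }
    using (var sha256 = SHA256.Create())
    {
        var final = BitConverter.ToString(sha256.ComputeHash(filesSha256.ToArray())).Replace("-", "").ToLowerInvariant();
        // `final` is the single hex digest hashFiles() returns for the matched set of files.
    }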

View File

@@ -82,7 +82,7 @@ namespace GitHub.DistributedTask.WebApi
httpMethod,
locationId,
routeValues: routeValues,
version: new ApiResourceVersion(5.1, 1),
version: new ApiResourceVersion(6.0, 2),
userState: userState,
cancellationToken: cancellationToken,
content: content);
@@ -109,7 +109,7 @@ namespace GitHub.DistributedTask.WebApi
httpMethod,
locationId,
routeValues: routeValues,
version: new ApiResourceVersion(5.1, 1),
version: new ApiResourceVersion(6.0, 2),
userState: userState,
cancellationToken: cancellationToken).ConfigureAwait(false))
{
@@ -164,7 +164,7 @@ namespace GitHub.DistributedTask.WebApi
httpMethod,
locationId,
routeValues: routeValues,
version: new ApiResourceVersion(5.1, 1),
version: new ApiResourceVersion(6.0, 2),
queryParameters: queryParams,
userState: userState,
cancellationToken: cancellationToken);
@@ -227,7 +227,7 @@ namespace GitHub.DistributedTask.WebApi
httpMethod,
locationId,
routeValues: routeValues,
version: new ApiResourceVersion(5.1, 1),
version: new ApiResourceVersion(6.0, 2),
queryParameters: queryParams,
userState: userState,
cancellationToken: cancellationToken);
@@ -257,7 +257,7 @@ namespace GitHub.DistributedTask.WebApi
httpMethod,
locationId,
routeValues: routeValues,
version: new ApiResourceVersion(5.1, 1),
version: new ApiResourceVersion(6.0, 2),
userState: userState,
cancellationToken: cancellationToken,
content: content);
@@ -287,7 +287,7 @@ namespace GitHub.DistributedTask.WebApi
httpMethod,
locationId,
routeValues: routeValues,
version: new ApiResourceVersion(5.1, 1),
version: new ApiResourceVersion(6.0, 2),
userState: userState,
cancellationToken: cancellationToken,
content: content);
@@ -779,5 +779,65 @@ namespace GitHub.DistributedTask.WebApi
userState: userState,
cancellationToken: cancellationToken);
}
/// <summary>
/// [Preview API]
/// </summary>
/// <param name="poolId"></param>
/// <param name="agentId"></param>
/// <param name="userState"></param>
/// <param name="cancellationToken">The cancellation token to cancel operation.</param>
public Task<String> GetAgentAuthUrlAsync(
int poolId,
int agentId,
object userState = null,
CancellationToken cancellationToken = default)
{
HttpMethod httpMethod = new HttpMethod("GET");
Guid locationId = new Guid("a82a119c-1e46-44b6-8d75-c82a79cf975b");
object routeValues = new { poolId = poolId, agentId = agentId };
return SendAsync<String>(
httpMethod,
locationId,
routeValues: routeValues,
version: new ApiResourceVersion(6.0, 1),
userState: userState,
cancellationToken: cancellationToken);
}
/// <summary>
/// [Preview API]
/// </summary>
/// <param name="poolId"></param>
/// <param name="agentId"></param>
/// <param name="error"></param>
/// <param name="userState"></param>
/// <param name="cancellationToken">The cancellation token to cancel operation.</param>
[EditorBrowsable(EditorBrowsableState.Never)]
public virtual async Task ReportAgentAuthUrlMigrationErrorAsync(
int poolId,
int agentId,
string error,
object userState = null,
CancellationToken cancellationToken = default)
{
HttpMethod httpMethod = new HttpMethod("POST");
Guid locationId = new Guid("a82a119c-1e46-44b6-8d75-c82a79cf975b");
object routeValues = new { poolId = poolId, agentId = agentId };
HttpContent content = new ObjectContent<string>(error, new VssJsonMediaTypeFormatter(true));
using (HttpResponseMessage response = await SendAsync(
httpMethod,
locationId,
routeValues: routeValues,
version: new ApiResourceVersion(6.0, 1),
userState: userState,
cancellationToken: cancellationToken,
content: content).ConfigureAwait(false))
{
return;
}
}
}
}

View File

@@ -317,5 +317,37 @@ namespace GitHub.DistributedTask.WebApi
userState: userState,
cancellationToken: cancellationToken);
}
/// <summary>
/// [Preview API] Resolves information required to download actions (URL, token) defined in an orchestration.
/// </summary>
/// <param name="scopeIdentifier">The project GUID to scope the request</param>
/// <param name="hubName">The name of the server hub: "build" for the Build server or "rm" for the Release Management server</param>
/// <param name="planId"></param>
/// <param name="actionReferenceList"></param>
/// <param name="userState"></param>
/// <param name="cancellationToken">The cancellation token to cancel operation.</param>
public virtual Task<ActionDownloadInfoCollection> ResolveActionDownloadInfoAsync(
Guid scopeIdentifier,
string hubName,
Guid planId,
ActionReferenceList actionReferenceList,
object userState = null,
CancellationToken cancellationToken = default)
{
HttpMethod httpMethod = new HttpMethod("POST");
Guid locationId = new Guid("27d7f831-88c1-4719-8ca1-6a061dad90eb");
object routeValues = new { scopeIdentifier = scopeIdentifier, hubName = hubName, planId = planId };
HttpContent content = new ObjectContent<ActionReferenceList>(actionReferenceList, new VssJsonMediaTypeFormatter(true));
return SendAsync<ActionDownloadInfoCollection>(
httpMethod,
locationId,
routeValues: routeValues,
version: new ApiResourceVersion(6.0, 1),
userState: userState,
cancellationToken: cancellationToken,
content: content);
}
}
}

View File

@@ -60,6 +60,20 @@ namespace GitHub.DistributedTask.Logging
return SecurityElement.Escape(value);
}
public static String TrimDoubleQuotes(String value)
{
var trimmed = string.Empty;
if (!string.IsNullOrEmpty(value) &&
value.Length > 8 &&
value.StartsWith('"') &&
value.EndsWith('"'))
{
trimmed = value.Substring(1, value.Length - 2);
}
return trimmed;
}
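Illustrative inputs and outputs for TrimDoubleQuotes above (assuming the containing static class is in scope):

    Console.WriteLine(TrimDoubleQuotes("\"averylongsecret\""));   // averylongsecret
    Console.WriteLine(TrimDoubleQuotes("\"short\""));             // ""  (the quoted value must be longer than 8 characters)
    Console.WriteLine(TrimDoubleQuotes("no quotes here"));        // ""  (not wrapped in double quotes)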
private static string Base64StringEscapeShift(String value, int shift)
{
var bytes = Encoding.UTF8.GetBytes(value);

View File

@@ -1,4 +1,5 @@
using System;
using System.Collections.Generic;
using System.Linq;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
@@ -22,10 +23,27 @@ namespace GitHub.DistributedTask.ObjectTemplating.Schema
{
var context = definition[i].Value.AssertSequence($"{TemplateConstants.Context}");
definition.RemoveAt(i);
Context = context
.Select(x => x.AssertString($"{TemplateConstants.Context} item").Value)
.Distinct()
.ToArray();
var readerContext = new HashSet<String>(StringComparer.OrdinalIgnoreCase);
var evaluatorContext = new HashSet<String>(StringComparer.OrdinalIgnoreCase);
foreach (TemplateToken item in context)
{
var itemStr = item.AssertString($"{TemplateConstants.Context} item").Value;
readerContext.Add(itemStr);
// Remove min/max parameter info
var paramIndex = itemStr.IndexOf('(');
if (paramIndex > 0)
{
evaluatorContext.Add(String.Concat(itemStr.Substring(0, paramIndex + 1), ")"));
}
else
{
evaluatorContext.Add(itemStr);
}
}
ReaderContext = readerContext.ToArray();
EvaluatorContext = evaluatorContext.ToArray();
}
else if (String.Equals(definitionKey.Value, TemplateConstants.Description, StringComparison.Ordinal))
{
@@ -40,7 +58,17 @@ namespace GitHub.DistributedTask.ObjectTemplating.Schema
internal abstract DefinitionType DefinitionType { get; }
internal String[] Context { get; private set; } = new String[0];
/// <summary>
/// Used by the template reader to determine allowed expression values and functions.
/// Also used by the template reader to validate function min/max parameters.
/// </summary>
internal String[] ReaderContext { get; private set; } = new String[0];
/// <summary>
/// Used by the template evaluator to determine allowed expression values and functions.
/// The min/max parameter info is omitted.
/// </summary>
internal String[] EvaluatorContext { get; private set; } = new String[0];
internal abstract void Validate(
TemplateSchema schema,
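Concretely, the split above turns a schema context entry that carries parameter info into two views; a sketch of the transformation for one entry:

    var itemStr = "hashFiles(1,255)";                 // as written in the schema's context list
    var paramIndex = itemStr.IndexOf('(');
    var evaluatorEntry = paramIndex > 0
        ? string.Concat(itemStr.Substring(0, paramIndex + 1), ")")
        : itemStr;
    // ReaderContext keeps "hashFiles(1,255)" so the reader can validate min/max parameters;
    // EvaluatorContext gets "hashFiles()"; plain entries such as "github" land in both unchanged.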

View File

@@ -30,8 +30,7 @@ namespace GitHub.DistributedTask.ObjectTemplating.Schema
foreach (var propertiesPair in properties)
{
var propertyName = propertiesPair.Key.AssertString($"{TemplateConstants.Definition} {TemplateConstants.Mapping} {TemplateConstants.Properties} key");
var propertyValue = propertiesPair.Value.AssertString($"{TemplateConstants.Definition} {TemplateConstants.Mapping} {TemplateConstants.Properties} value");
Properties.Add(propertyName.Value, new PropertyValue(propertyValue.Value));
Properties.Add(propertyName.Value, new PropertyValue(propertiesPair.Value));
}
break;
@@ -85,7 +84,7 @@ namespace GitHub.DistributedTask.ObjectTemplating.Schema
}
else
{
throw new ArgumentException($"Property '{TemplateConstants.LooseKeyType}' is defined but '{TemplateConstants.LooseValueType}' is not defined");
throw new ArgumentException($"Property '{TemplateConstants.LooseKeyType}' is defined but '{TemplateConstants.LooseValueType}' is not defined on '{name}'");
}
}
// Otherwise validate loose value type not be defined
@@ -95,16 +94,21 @@ namespace GitHub.DistributedTask.ObjectTemplating.Schema
}
// Lookup each property
foreach (var property in Properties.Values)
foreach (var property in Properties)
{
schema.GetDefinition(property.Type);
if (String.IsNullOrEmpty(property.Value.Type))
{
throw new ArgumentException($"Type not specified for the '{property.Key}' property on the '{name}' type");
}
schema.GetDefinition(property.Value.Type);
}
if (!String.IsNullOrEmpty(Inherits))
{
var inherited = schema.GetDefinition(Inherits);
if (inherited.Context.Length > 0)
if (inherited.ReaderContext.Length > 0)
{
throw new NotSupportedException($"Property '{TemplateConstants.Context}' is not supported on inherited definitions");
}

View File

@@ -62,7 +62,7 @@ namespace GitHub.DistributedTask.ObjectTemplating.Schema
{
var nestedDefinition = schema.GetDefinition(nestedType);
if (nestedDefinition.Context.Length > 0)
if (nestedDefinition.ReaderContext.Length > 0)
{
throw new ArgumentException($"'{name}' is a one-of definition and references another definition that defines context. This is currently not supported.");
}

View File

@@ -1,18 +1,40 @@
using System;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
namespace GitHub.DistributedTask.ObjectTemplating.Schema
{
internal sealed class PropertyValue
{
internal PropertyValue()
internal PropertyValue(TemplateToken token)
{
}
internal PropertyValue(String type)
{
Type = type;
if (token is StringToken stringToken)
{
Type = stringToken.Value;
}
else
{
var mapping = token.AssertMapping($"{TemplateConstants.MappingPropertyValue}");
foreach (var mappingPair in mapping)
{
var mappingKey = mappingPair.Key.AssertString($"{TemplateConstants.MappingPropertyValue} key");
switch (mappingKey.Value)
{
case TemplateConstants.Type:
Type = mappingPair.Value.AssertString($"{TemplateConstants.MappingPropertyValue} {TemplateConstants.Type}").Value;
break;
case TemplateConstants.Required:
Required = mappingPair.Value.AssertBoolean($"{TemplateConstants.MappingPropertyValue} {TemplateConstants.Required}").Value;
break;
default:
mappingKey.AssertUnexpectedValue($"{TemplateConstants.MappingPropertyValue} key");
break;
}
}
}
}
internal String Type { get; set; }
internal Boolean Required { get; set; }
}
}

View File

@@ -312,8 +312,8 @@ namespace GitHub.DistributedTask.ObjectTemplating.Schema
// template-schema
mappingDefinition = new MappingDefinition();
mappingDefinition.Properties.Add(TemplateConstants.Version, new PropertyValue(TemplateConstants.NonEmptyString));
mappingDefinition.Properties.Add(TemplateConstants.Definitions, new PropertyValue(TemplateConstants.Definitions));
mappingDefinition.Properties.Add(TemplateConstants.Version, new PropertyValue(new StringToken(null, null, null, TemplateConstants.NonEmptyString)));
mappingDefinition.Properties.Add(TemplateConstants.Definitions, new PropertyValue(new StringToken(null, null, null, TemplateConstants.Definitions)));
schema.Definitions.Add(TemplateConstants.TemplateSchema, mappingDefinition);
// definitions
@@ -335,9 +335,9 @@ namespace GitHub.DistributedTask.ObjectTemplating.Schema
// null-definition
mappingDefinition = new MappingDefinition();
mappingDefinition.Properties.Add(TemplateConstants.Description, new PropertyValue(TemplateConstants.String));
mappingDefinition.Properties.Add(TemplateConstants.Context, new PropertyValue(TemplateConstants.SequenceOfNonEmptyString));
mappingDefinition.Properties.Add(TemplateConstants.Null, new PropertyValue(TemplateConstants.NullDefinitionProperties));
mappingDefinition.Properties.Add(TemplateConstants.Description, new PropertyValue(new StringToken(null, null, null, TemplateConstants.String)));
mappingDefinition.Properties.Add(TemplateConstants.Context, new PropertyValue(new StringToken(null, null, null, TemplateConstants.SequenceOfNonEmptyString)));
mappingDefinition.Properties.Add(TemplateConstants.Null, new PropertyValue(new StringToken(null, null, null, TemplateConstants.NullDefinitionProperties)));
schema.Definitions.Add(TemplateConstants.NullDefinition, mappingDefinition);
// null-definition-properties
@@ -346,9 +346,9 @@ namespace GitHub.DistributedTask.ObjectTemplating.Schema
// boolean-definition
mappingDefinition = new MappingDefinition();
mappingDefinition.Properties.Add(TemplateConstants.Description, new PropertyValue(TemplateConstants.String));
mappingDefinition.Properties.Add(TemplateConstants.Context, new PropertyValue(TemplateConstants.SequenceOfNonEmptyString));
mappingDefinition.Properties.Add(TemplateConstants.Boolean, new PropertyValue(TemplateConstants.BooleanDefinitionProperties));
mappingDefinition.Properties.Add(TemplateConstants.Description, new PropertyValue(new StringToken(null, null, null, TemplateConstants.String)));
mappingDefinition.Properties.Add(TemplateConstants.Context, new PropertyValue(new StringToken(null, null, null, TemplateConstants.SequenceOfNonEmptyString)));
mappingDefinition.Properties.Add(TemplateConstants.Boolean, new PropertyValue(new StringToken(null, null, null, TemplateConstants.BooleanDefinitionProperties)));
schema.Definitions.Add(TemplateConstants.BooleanDefinition, mappingDefinition);
// boolean-definition-properties
@@ -357,9 +357,9 @@ namespace GitHub.DistributedTask.ObjectTemplating.Schema
// number-definition
mappingDefinition = new MappingDefinition();
mappingDefinition.Properties.Add(TemplateConstants.Description, new PropertyValue(TemplateConstants.String));
mappingDefinition.Properties.Add(TemplateConstants.Context, new PropertyValue(TemplateConstants.SequenceOfNonEmptyString));
mappingDefinition.Properties.Add(TemplateConstants.Number, new PropertyValue(TemplateConstants.NumberDefinitionProperties));
mappingDefinition.Properties.Add(TemplateConstants.Description, new PropertyValue(new StringToken(null, null, null, TemplateConstants.String)));
mappingDefinition.Properties.Add(TemplateConstants.Context, new PropertyValue(new StringToken(null, null, null, TemplateConstants.SequenceOfNonEmptyString)));
mappingDefinition.Properties.Add(TemplateConstants.Number, new PropertyValue(new StringToken(null, null, null, TemplateConstants.NumberDefinitionProperties)));
schema.Definitions.Add(TemplateConstants.NumberDefinition, mappingDefinition);
// number-definition-properties
@@ -368,55 +368,68 @@ namespace GitHub.DistributedTask.ObjectTemplating.Schema
// string-definition
mappingDefinition = new MappingDefinition();
mappingDefinition.Properties.Add(TemplateConstants.Description, new PropertyValue(TemplateConstants.String));
mappingDefinition.Properties.Add(TemplateConstants.Context, new PropertyValue(TemplateConstants.SequenceOfNonEmptyString));
mappingDefinition.Properties.Add(TemplateConstants.String, new PropertyValue(TemplateConstants.StringDefinitionProperties));
mappingDefinition.Properties.Add(TemplateConstants.Description, new PropertyValue(new StringToken(null, null, null, TemplateConstants.String)));
mappingDefinition.Properties.Add(TemplateConstants.Context, new PropertyValue(new StringToken(null, null, null, TemplateConstants.SequenceOfNonEmptyString)));
mappingDefinition.Properties.Add(TemplateConstants.String, new PropertyValue(new StringToken(null, null, null, TemplateConstants.StringDefinitionProperties)));
schema.Definitions.Add(TemplateConstants.StringDefinition, mappingDefinition);
// string-definition-properties
mappingDefinition = new MappingDefinition();
mappingDefinition.Properties.Add(TemplateConstants.Constant, new PropertyValue(TemplateConstants.NonEmptyString));
mappingDefinition.Properties.Add(TemplateConstants.IgnoreCase, new PropertyValue(TemplateConstants.Boolean));
mappingDefinition.Properties.Add(TemplateConstants.RequireNonEmpty, new PropertyValue(TemplateConstants.Boolean));
mappingDefinition.Properties.Add(TemplateConstants.Constant, new PropertyValue(new StringToken(null, null, null, TemplateConstants.NonEmptyString)));
mappingDefinition.Properties.Add(TemplateConstants.IgnoreCase, new PropertyValue(new StringToken(null, null, null,TemplateConstants.Boolean)));
mappingDefinition.Properties.Add(TemplateConstants.RequireNonEmpty, new PropertyValue(new StringToken(null, null, null, TemplateConstants.Boolean)));
schema.Definitions.Add(TemplateConstants.StringDefinitionProperties, mappingDefinition);
// sequence-definition
mappingDefinition = new MappingDefinition();
mappingDefinition.Properties.Add(TemplateConstants.Description, new PropertyValue(TemplateConstants.String));
mappingDefinition.Properties.Add(TemplateConstants.Context, new PropertyValue(TemplateConstants.SequenceOfNonEmptyString));
mappingDefinition.Properties.Add(TemplateConstants.Sequence, new PropertyValue(TemplateConstants.SequenceDefinitionProperties));
mappingDefinition.Properties.Add(TemplateConstants.Description, new PropertyValue(new StringToken(null, null, null, TemplateConstants.String)));
mappingDefinition.Properties.Add(TemplateConstants.Context, new PropertyValue(new StringToken(null, null, null, TemplateConstants.SequenceOfNonEmptyString)));
mappingDefinition.Properties.Add(TemplateConstants.Sequence, new PropertyValue(new StringToken(null, null, null, TemplateConstants.SequenceDefinitionProperties)));
schema.Definitions.Add(TemplateConstants.SequenceDefinition, mappingDefinition);
// sequence-definition-properties
mappingDefinition = new MappingDefinition();
mappingDefinition.Properties.Add(TemplateConstants.ItemType, new PropertyValue(TemplateConstants.NonEmptyString));
mappingDefinition.Properties.Add(TemplateConstants.ItemType, new PropertyValue(new StringToken(null, null, null, TemplateConstants.NonEmptyString)));
schema.Definitions.Add(TemplateConstants.SequenceDefinitionProperties, mappingDefinition);
// mapping-definition
mappingDefinition = new MappingDefinition();
mappingDefinition.Properties.Add(TemplateConstants.Description, new PropertyValue(TemplateConstants.String));
mappingDefinition.Properties.Add(TemplateConstants.Context, new PropertyValue(TemplateConstants.SequenceOfNonEmptyString));
mappingDefinition.Properties.Add(TemplateConstants.Mapping, new PropertyValue(TemplateConstants.MappingDefinitionProperties));
mappingDefinition.Properties.Add(TemplateConstants.Description, new PropertyValue(new StringToken(null, null, null, TemplateConstants.String)));
mappingDefinition.Properties.Add(TemplateConstants.Context, new PropertyValue(new StringToken(null, null, null, TemplateConstants.SequenceOfNonEmptyString)));
mappingDefinition.Properties.Add(TemplateConstants.Mapping, new PropertyValue(new StringToken(null, null, null, TemplateConstants.MappingDefinitionProperties)));
schema.Definitions.Add(TemplateConstants.MappingDefinition, mappingDefinition);
// mapping-definition-properties
mappingDefinition = new MappingDefinition();
mappingDefinition.Properties.Add(TemplateConstants.Properties, new PropertyValue(TemplateConstants.Properties));
mappingDefinition.Properties.Add(TemplateConstants.LooseKeyType, new PropertyValue(TemplateConstants.NonEmptyString));
mappingDefinition.Properties.Add(TemplateConstants.LooseValueType, new PropertyValue(TemplateConstants.NonEmptyString));
mappingDefinition.Properties.Add(TemplateConstants.Properties, new PropertyValue(new StringToken(null, null, null, TemplateConstants.Properties)));
mappingDefinition.Properties.Add(TemplateConstants.LooseKeyType, new PropertyValue(new StringToken(null, null, null, TemplateConstants.NonEmptyString)));
mappingDefinition.Properties.Add(TemplateConstants.LooseValueType, new PropertyValue(new StringToken(null, null, null, TemplateConstants.NonEmptyString)));
schema.Definitions.Add(TemplateConstants.MappingDefinitionProperties, mappingDefinition);
// properties
mappingDefinition = new MappingDefinition();
mappingDefinition.LooseKeyType = TemplateConstants.NonEmptyString;
mappingDefinition.LooseValueType = TemplateConstants.NonEmptyString;
mappingDefinition.LooseValueType = TemplateConstants.PropertyValue;
schema.Definitions.Add(TemplateConstants.Properties, mappingDefinition);
// property-value
oneOfDefinition = new OneOfDefinition();
oneOfDefinition.OneOf.Add(TemplateConstants.NonEmptyString);
oneOfDefinition.OneOf.Add(TemplateConstants.MappingPropertyValue);
schema.Definitions.Add(TemplateConstants.PropertyValue, oneOfDefinition);
// mapping-property-value
mappingDefinition = new MappingDefinition();
mappingDefinition.Properties.Add(TemplateConstants.Type, new PropertyValue(new StringToken(null, null, null, TemplateConstants.NonEmptyString)));
mappingDefinition.Properties.Add(TemplateConstants.Required, new PropertyValue(new StringToken(null, null, null, TemplateConstants.Boolean)));
schema.Definitions.Add(TemplateConstants.MappingPropertyValue, mappingDefinition);
// one-of-definition
mappingDefinition = new MappingDefinition();
mappingDefinition.Properties.Add(TemplateConstants.Description, new PropertyValue(TemplateConstants.String));
mappingDefinition.Properties.Add(TemplateConstants.Context, new PropertyValue(TemplateConstants.SequenceOfNonEmptyString));
mappingDefinition.Properties.Add(TemplateConstants.OneOf, new PropertyValue(TemplateConstants.SequenceOfNonEmptyString));
mappingDefinition.Properties.Add(TemplateConstants.Description, new PropertyValue(new StringToken(null, null, null, TemplateConstants.String)));
mappingDefinition.Properties.Add(TemplateConstants.Context, new PropertyValue(new StringToken(null, null, null, TemplateConstants.SequenceOfNonEmptyString)));
mappingDefinition.Properties.Add(TemplateConstants.OneOf, new PropertyValue(new StringToken(null, null, null, TemplateConstants.SequenceOfNonEmptyString)));
schema.Definitions.Add(TemplateConstants.OneOfDefinition, mappingDefinition);
// non-empty-string
@@ -477,4 +490,4 @@ namespace GitHub.DistributedTask.ObjectTemplating.Schema
private static readonly Regex s_definitionNameRegex = new Regex("^[a-zA-Z_][a-zA-Z0-9_-]*$", RegexOptions.Compiled);
private static TemplateSchema s_schema;
}
}
}

View File

@@ -22,9 +22,11 @@ namespace GitHub.DistributedTask.ObjectTemplating
internal const String ItemType = "item-type";
internal const String LooseKeyType = "loose-key-type";
internal const String LooseValueType = "loose-value-type";
internal const String MaxConstant = "MAX";
internal const String Mapping = "mapping";
internal const String MappingDefinition = "mapping-definition";
internal const String MappingDefinitionProperties = "mapping-definition-properties";
internal const String MappingPropertyValue = "mapping-property-value";
internal const String NonEmptyString = "non-empty-string";
internal const String Null = "null";
internal const String NullDefinition = "null-definition";
@@ -35,7 +37,9 @@ namespace GitHub.DistributedTask.ObjectTemplating
internal const String OneOf = "one-of";
internal const String OneOfDefinition = "one-of-definition";
internal const String OpenExpression = "${{";
internal const String PropertyValue = "property-value";
internal const String Properties = "properties";
internal const String Required = "required";
internal const String RequireNonEmpty = "require-non-empty";
internal const String Scalar = "scalar";
internal const String ScalarDefinition = "scalar-definition";
@@ -43,6 +47,7 @@ namespace GitHub.DistributedTask.ObjectTemplating
internal const String Sequence = "sequence";
internal const String SequenceDefinition = "sequence-definition";
internal const String SequenceDefinitionProperties = "sequence-definition-properties";
internal const String Type = "type";
internal const String SequenceOfNonEmptyString = "sequence-of-non-empty-string";
internal const String String = "string";
internal const String StringDefinition = "string-definition";

View File

@@ -47,7 +47,16 @@ namespace GitHub.DistributedTask.ObjectTemplating
var evaluator = new TemplateEvaluator(context, template, removeBytes);
try
{
var availableContext = new HashSet<String>(context.ExpressionValues.Keys);
var availableContext = new HashSet<String>(StringComparer.OrdinalIgnoreCase);
foreach (var key in context.ExpressionValues.Keys)
{
availableContext.Add(key);
}
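// Functions are recorded with a "()" suffix so they stay distinguishable from named values of
// the same name when the evaluator later checks whether all allowed context is available.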
foreach (var function in context.ExpressionFunctions)
{
availableContext.Add($"{function.Name}()");
}
var definitionInfo = new DefinitionInfo(context.Schema, type, availableContext);
result = evaluator.Evaluate(definitionInfo);
@@ -182,12 +191,14 @@ namespace GitHub.DistributedTask.ObjectTemplating
}
var keys = new HashSet<String>(StringComparer.OrdinalIgnoreCase);
var hasExpressionKey = false;
while (m_unraveler.AllowScalar(definition.Expand, out ScalarToken nextKeyScalar))
{
// Expression
if (nextKeyScalar is ExpressionToken)
{
hasExpressionKey = true;
var anyDefinition = new DefinitionInfo(definition, TemplateConstants.Any);
mapping.Add(nextKeyScalar, Evaluate(anyDefinition));
continue;
@@ -268,6 +279,19 @@ namespace GitHub.DistributedTask.ObjectTemplating
String listToDeDuplicate = String.Join(", ", nonDuplicates);
m_context.Error(mapping, TemplateStrings.UnableToDetermineOneOf(listToDeDuplicate));
}
else if (mappingDefinitions.Count == 1 && !hasExpressionKey)
{
foreach (var property in mappingDefinitions[0].Properties)
{
if (property.Value.Required)
{
if (!keys.Contains(property.Key))
{
m_context.Error(mapping, $"Required property is missing: {property.Key}");
}
}
}
}
m_unraveler.ReadMappingEnd();
}
@@ -378,14 +402,13 @@ namespace GitHub.DistributedTask.ObjectTemplating
Definition = m_schema.GetDefinition(name);
// Determine whether to expand
if (Definition.Context.Length > 0)
m_allowedContext = Definition.EvaluatorContext;
if (Definition.EvaluatorContext.Length > 0)
{
m_allowedContext = Definition.Context;
Expand = m_availableContext.IsSupersetOf(m_allowedContext);
}
else
{
m_allowedContext = new String[0];
Expand = false;
}
}
@@ -401,9 +424,9 @@ namespace GitHub.DistributedTask.ObjectTemplating
Definition = m_schema.GetDefinition(name);
// Determine whether to expand
if (Definition.Context.Length > 0)
if (Definition.EvaluatorContext.Length > 0)
{
m_allowedContext = new HashSet<String>(parent.m_allowedContext.Concat(Definition.Context)).ToArray();
m_allowedContext = new HashSet<String>(parent.m_allowedContext.Concat(Definition.EvaluatorContext), StringComparer.OrdinalIgnoreCase).ToArray();
Expand = m_availableContext.IsSupersetOf(m_allowedContext);
}
else

View File

@@ -49,6 +49,14 @@ namespace GitHub.DistributedTask.ObjectTemplating
m_errors = new List<TemplateValidationError>(errors ?? Enumerable.Empty<TemplateValidationError>());
}
public TemplateValidationException(
String message,
IEnumerable<TemplateValidationError> errors)
: this(message)
{
m_errors = new List<TemplateValidationError>(errors ?? Enumerable.Empty<TemplateValidationError>());
}
public TemplateValidationException(String message)
: base(message)
{

View File

@@ -178,14 +178,15 @@ namespace GitHub.DistributedTask.ObjectTemplating
}
var keys = new HashSet<String>(StringComparer.OrdinalIgnoreCase);
var hasExpressionKey = false;
while (m_objectReader.AllowLiteral(out LiteralToken rawLiteral))
{
var nextKeyScalar = ParseScalar(rawLiteral, definition.AllowedContext);
// Expression
if (nextKeyScalar is ExpressionToken)
{
hasExpressionKey = true;
// Legal
if (definition.AllowedContext.Length > 0)
{
@@ -280,7 +281,19 @@ namespace GitHub.DistributedTask.ObjectTemplating
String listToDeDuplicate = String.Join(", ", nonDuplicates);
m_context.Error(mapping, TemplateStrings.UnableToDetermineOneOf(listToDeDuplicate));
}
else if (mappingDefinitions.Count == 1 && !hasExpressionKey)
{
foreach (var property in mappingDefinitions[0].Properties)
{
if (property.Value.Required)
{
if (!keys.Contains(property.Key))
{
m_context.Error(mapping, $"Required property is missing: {property.Key}");
}
}
}
}
ExpectMappingEnd();
}
@@ -767,15 +780,8 @@ namespace GitHub.DistributedTask.ObjectTemplating
// Lookup the definition
Definition = m_schema.GetDefinition(name);
// Determine whether to expand
if (Definition.Context.Length > 0)
{
AllowedContext = Definition.Context;
}
else
{
AllowedContext = new String[0];
}
// Record allowed context
AllowedContext = Definition.ReaderContext;
}
public DefinitionInfo(
@@ -787,10 +793,10 @@ namespace GitHub.DistributedTask.ObjectTemplating
// Lookup the definition
Definition = m_schema.GetDefinition(name);
// Determine whether to expand
if (Definition.Context.Length > 0)
// Record allowed context
if (Definition.ReaderContext.Length > 0)
{
AllowedContext = new HashSet<String>(parent.AllowedContext.Concat(Definition.Context)).ToArray();
AllowedContext = new HashSet<String>(parent.AllowedContext.Concat(Definition.ReaderContext), StringComparer.OrdinalIgnoreCase).ToArray();
}
else
{

View File

@@ -2,6 +2,7 @@
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using System.Linq;
using System.Runtime.Serialization;
namespace GitHub.DistributedTask.ObjectTemplating
@@ -41,7 +42,7 @@ namespace GitHub.DistributedTask.ObjectTemplating
{
for (int i = 0; i < 50; i++)
{
String message = !String.IsNullOrEmpty(messagePrefix) ? $"{messagePrefix} {ex.Message}" : ex.Message;
String message = !String.IsNullOrEmpty(messagePrefix) ? $"{messagePrefix} {ex.Message}" : ex.ToString();
Add(new TemplateValidationError(message));
if (ex.InnerException == null)
{
@@ -88,6 +89,23 @@ namespace GitHub.DistributedTask.ObjectTemplating
}
}
/// <summary>
/// Throws a <see cref="TemplateValidationException" /> if any errors have been recorded.
/// </summary>
/// <param name="prefix">The error message prefix</param>
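/// <example>
/// Usage sketch (hypothetical caller, not part of this change): errors.Check("The template is not valid.");
/// throws a TemplateValidationException whose message is the trimmed prefix followed by the
/// comma-joined error messages.
/// </example>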
public void Check(String prefix)
{
if (String.IsNullOrEmpty(prefix))
{
this.Check();
}
else if (m_errors.Count > 0)
{
var message = $"{prefix.Trim()} {String.Join(",", m_errors.Select(e => e.Message))}";
throw new TemplateValidationException(message, m_errors);
}
}
public void Clear()
{
m_errors.Clear();

View File

@@ -1,7 +1,9 @@
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Linq;
using System.Globalization;
using System.Runtime.Serialization;
using System.Text.RegularExpressions;
using GitHub.DistributedTask.Expressions2;
using GitHub.DistributedTask.Expressions2.Sdk;
using GitHub.Services.WebApi.Internal;
@@ -35,11 +37,29 @@ namespace GitHub.DistributedTask.ObjectTemplating.Tokens
String[] allowedContext,
out Exception ex)
{
// Create dummy allowed contexts
INamedValueInfo[] namedValues = null;
// Create dummy named values and functions
var namedValues = new List<INamedValueInfo>();
var functions = new List<IFunctionInfo>();
if (allowedContext?.Length > 0)
{
namedValues = allowedContext.Select(x => new NamedValueInfo<ContextValueNode>(x)).ToArray();
foreach (var contextItem in allowedContext)
{
var match = s_function.Match(contextItem);
if (match.Success)
{
var functionName = match.Groups[1].Value;
var minParameters = Int32.Parse(match.Groups[2].Value, NumberStyles.None, CultureInfo.InvariantCulture);
var maxParametersRaw = match.Groups[3].Value;
var maxParameters = String.Equals(maxParametersRaw, TemplateConstants.MaxConstant, StringComparison.Ordinal)
? Int32.MaxValue
: Int32.Parse(maxParametersRaw, NumberStyles.None, CultureInfo.InvariantCulture);
functions.Add(new FunctionInfo<DummyFunction>(functionName, minParameters, maxParameters));
}
else
{
namedValues.Add(new NamedValueInfo<ContextValueNode>(contextItem));
}
}
}
// Parse
@@ -47,7 +67,7 @@ namespace GitHub.DistributedTask.ObjectTemplating.Tokens
ExpressionNode root = null;
try
{
root = new ExpressionParser().CreateTree(expression, null, namedValues, null) as ExpressionNode;
root = new ExpressionParser().CreateTree(expression, null, namedValues, functions) as ExpressionNode;
result = true;
ex = null;
@@ -60,5 +80,18 @@ namespace GitHub.DistributedTask.ObjectTemplating.Tokens
return result;
}
private sealed class DummyFunction : Function
{
protected override Object EvaluateCore(
EvaluationContext context,
out ResultMemory resultMemory)
{
resultMemory = null;
return null;
}
}
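// An allowed-context entry shaped like "name(minParameters,maxParameters)" (for example, an
// entry such as "hashFiles(1,MAX)") is registered as a function; "MAX" maps to Int32.MaxValue
// above. Entries that do not match this pattern are treated as named values.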
private static readonly Regex s_function = new Regex(@"^([a-zA-Z0-9_]+)\(([0-9]+),([0-9]+|MAX)\)$", RegexOptions.Compiled);
}
}

View File

@@ -1,5 +1,8 @@
using System;
using System.Collections.Generic;
using System.Linq;
using GitHub.DistributedTask.Expressions2;
using GitHub.DistributedTask.Expressions2.Sdk;
namespace GitHub.DistributedTask.ObjectTemplating.Tokens
{
@@ -106,6 +109,43 @@ namespace GitHub.DistributedTask.ObjectTemplating.Tokens
throw new ArgumentException($"Error while reading '{objectDescription}'. Unexpected value '{literal.ToString()}'");
}
/// <summary>
/// Traverses the token and checks whether all required expression values
/// and functions are provided.
/// </summary>
public static bool CheckHasRequiredContext(
this TemplateToken token,
IReadOnlyObject expressionValues,
IList<IFunctionInfo> expressionFunctions)
{
var expressionTokens = token.Traverse()
.OfType<BasicExpressionToken>()
.ToArray();
var parser = new ExpressionParser();
foreach (var expressionToken in expressionTokens)
{
var tree = parser.ValidateSyntax(expressionToken.Expression, null);
foreach (var node in tree.Traverse())
{
if (node is NamedValue namedValue)
{
if (expressionValues?.Keys.Any(x => string.Equals(x, namedValue.Name, StringComparison.OrdinalIgnoreCase)) != true)
{
return false;
}
}
else if (node is Function function &&
!ExpressionConstants.WellKnownFunctions.ContainsKey(function.Name) &&
expressionFunctions?.Any(x => string.Equals(x.Name, function.Name, StringComparison.OrdinalIgnoreCase)) != true)
{
return false;
}
}
}
return true;
}
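// Illustrative expectation (assumption, not asserted by this change): a token containing
// "${{ matrix.os }}" fails the check unless expressionValues exposes a "matrix" key, while
// functions found in ExpressionConstants.WellKnownFunctions never cause a false result.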
/// <summary>
/// Returns all tokens (depth first)
/// </summary>

View File

@@ -40,7 +40,9 @@ namespace GitHub.DistributedTask.Pipelines
WorkspaceOptions workspaceOptions,
IEnumerable<JobStep> steps,
IEnumerable<ContextScope> scopes,
IList<String> fileTable)
IList<String> fileTable,
TemplateToken jobOutputs,
IList<TemplateToken> defaults)
{
this.MessageType = JobRequestMessageTypes.PipelineAgentJobRequest;
this.Plan = plan;
@@ -52,6 +54,7 @@ namespace GitHub.DistributedTask.Pipelines
this.Timeline = timeline;
this.Resources = jobResources;
this.Workspace = workspaceOptions;
this.JobOutputs = jobOutputs;
m_variables = new Dictionary<String, VariableValue>(variables, StringComparer.OrdinalIgnoreCase);
m_maskHints = new List<MaskHint>(maskHints);
@@ -67,6 +70,11 @@ namespace GitHub.DistributedTask.Pipelines
m_environmentVariables = new List<TemplateToken>(environmentVariables);
}
if (defaults?.Count > 0)
{
m_defaults = new List<TemplateToken>(defaults);
}
this.ContextData = new Dictionary<String, PipelineContextData>(StringComparer.OrdinalIgnoreCase);
if (contextData?.Count > 0)
{
@@ -138,6 +146,13 @@ namespace GitHub.DistributedTask.Pipelines
private set;
}
[DataMember(EmitDefaultValue = false)]
public TemplateToken JobOutputs
{
get;
private set;
}
[DataMember]
public Int64 RequestId
{
@@ -204,6 +219,21 @@ namespace GitHub.DistributedTask.Pipelines
}
}
/// <summary>
/// Gets the hierarchy of defaults to overlay, last wins.
/// </summary>
public IList<TemplateToken> Defaults
{
get
{
if (m_defaults == null)
{
m_defaults = new List<TemplateToken>();
}
return m_defaults;
}
}
/// <summary>
/// Gets the collection of variables associated with the current context.
/// </summary>
@@ -243,6 +273,9 @@ namespace GitHub.DistributedTask.Pipelines
}
}
/// <summary>
/// Gets the table of files used when parsing the pipeline (e.g. YAML files)
/// </summary>
public IList<String> FileTable
{
get
@@ -363,6 +396,11 @@ namespace GitHub.DistributedTask.Pipelines
m_environmentVariables = null;
}
if (m_defaults?.Count == 0)
{
m_defaults = null;
}
if (m_fileTable?.Count == 0)
{
m_fileTable = null;
@@ -397,6 +435,9 @@ namespace GitHub.DistributedTask.Pipelines
[DataMember(Name = "EnvironmentVariables", EmitDefaultValue = false)]
private List<TemplateToken> m_environmentVariables;
[DataMember(Name = "Defaults", EmitDefaultValue = false)]
private List<TemplateToken> m_defaults;
[DataMember(Name = "FileTable", EmitDefaultValue = false)]
private List<String> m_fileTable;

View File

@@ -42,7 +42,12 @@ namespace GitHub.DistributedTask.Pipelines.ContextData
var floored = Math.Floor(m_value);
if (m_value == floored && m_value <= (Double)Int32.MaxValue && m_value >= (Double)Int32.MinValue)
{
Int32 flooredInt = (Int32)floored;
var flooredInt = (Int32)floored;
return (JToken)flooredInt;
}
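// Whole-number doubles that overflow Int32 but still fit in Int64 are emitted as 64-bit
// integer JTokens; anything else is handled by the final else branch.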
else if (m_value == floored && m_value <= (Double)Int64.MaxValue && m_value >= (Double)Int64.MinValue)
{
var flooredInt = (Int64)floored;
return (JToken)flooredInt;
}
else

View File

@@ -8,12 +8,14 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
{
public const String Always = "always";
public const String BooleanStepsContext = "boolean-steps-context";
public const String BooleanStrategyContext = "boolean-strategy-context";
public const String CancelTimeoutMinutes = "cancel-timeout-minutes";
public const String Cancelled = "cancelled";
public const String Checkout = "checkout";
public const String Clean = "clean";
public const String Container = "container";
public const String ContinueOnError = "continue-on-error";
public const String Defaults = "defaults";
public const String Env = "env";
public const String Event = "event";
public const String EventPattern = "github.event";
@@ -23,13 +25,18 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
public const String FetchDepth = "fetch-depth";
public const String GeneratedId = "generated-id";
public const String GitHub = "github";
public const String HashFiles = "hashFiles";
public const String Id = "id";
public const String If = "if";
public const String Image = "image";
public const String Include = "include";
public const String Inputs = "inputs";
public const String Job = "job";
public const String JobDefaultsRun = "job-defaults-run";
public const String JobIfResult = "job-if-result";
public const String JobOutputs = "job-outputs";
public const String Jobs = "jobs";
public const String Labels = "labels";
public const String Lfs = "lfs";
public const String Matrix = "matrix";
public const String MaxParallel = "max-parallel";
@@ -56,7 +63,9 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
public const String Shell = "shell";
public const String Skipped = "skipped";
public const String StepEnv = "step-env";
public const String StepIfResult = "step-if-result";
public const String Steps = "steps";
public const String StepsInTemplate = "steps-in-template";
public const String StepsScopeInputs = "steps-scope-inputs";
public const String StepsScopeOutputs = "steps-scope-outputs";
public const String StepsTemplateRoot = "steps-template-root";

View File

@@ -16,6 +16,19 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
{
internal static class PipelineTemplateConverter
{
internal static Boolean ConvertToIfResult(
TemplateContext context,
TemplateToken ifResult)
{
var expression = ifResult.Traverse().FirstOrDefault(x => x is ExpressionToken);
if (expression != null)
{
throw new ArgumentException($"Unexpected type '{expression.GetType().Name}' encountered while reading 'if'.");
}
var evaluationResult = EvaluationResult.CreateIntermediateResult(null, ifResult);
return evaluationResult.IsTruthy;
}
internal static Boolean? ConvertToStepContinueOnError(
TemplateContext context,
TemplateToken token,
@@ -250,5 +263,351 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
return result;
}
// Note: this was originally List<Step>, but it was changed to List<ActionStep> so that the "Inputs" attribute can be used
internal static List<ActionStep> ConvertToSteps(
TemplateContext context,
TemplateToken steps)
{
var stepsSequence = steps.AssertSequence($"job {PipelineTemplateConstants.Steps}");
var result = new List<ActionStep>();
foreach (var stepsItem in stepsSequence)
{
var step = ConvertToStep(context, stepsItem);
if (step != null) // A null step means an error occurred during step conversion; it will have been recorded in context.Errors
{
if (step.Enabled)
{
result.Add(step);
}
}
}
return result;
}
private static ActionStep ConvertToStep(
TemplateContext context,
TemplateToken stepsItem)
{
var step = stepsItem.AssertMapping($"{PipelineTemplateConstants.Steps} item");
var continueOnError = default(ScalarToken);
var env = default(TemplateToken);
var id = default(StringToken);
var ifCondition = default(String);
var ifToken = default(ScalarToken);
var name = default(ScalarToken);
var run = default(ScalarToken);
var scope = default(StringToken);
var timeoutMinutes = default(ScalarToken);
var uses = default(StringToken);
var with = default(TemplateToken);
var workingDir = default(ScalarToken);
var path = default(ScalarToken);
var clean = default(ScalarToken);
var fetchDepth = default(ScalarToken);
var lfs = default(ScalarToken);
var submodules = default(ScalarToken);
var shell = default(ScalarToken);
foreach (var stepProperty in step)
{
var propertyName = stepProperty.Key.AssertString($"{PipelineTemplateConstants.Steps} item key");
switch (propertyName.Value)
{
case PipelineTemplateConstants.Clean:
clean = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Clean}");
break;
case PipelineTemplateConstants.ContinueOnError:
ConvertToStepContinueOnError(context, stepProperty.Value, allowExpressions: true); // Validate early if possible
continueOnError = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} {PipelineTemplateConstants.ContinueOnError}");
break;
case PipelineTemplateConstants.Env:
ConvertToStepEnvironment(context, stepProperty.Value, StringComparer.Ordinal, allowExpressions: true); // Validate early if possible
env = stepProperty.Value;
break;
case PipelineTemplateConstants.FetchDepth:
fetchDepth = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.FetchDepth}");
break;
case PipelineTemplateConstants.Id:
id = stepProperty.Value.AssertString($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Id}");
if (!NameValidation.IsValid(id.Value, true))
{
context.Error(id, $"Step id {id.Value} is invalid. Ids must start with a letter or '_' and contain only alphanumeric characters, '-', or '_'");
}
break;
case PipelineTemplateConstants.If:
ifToken = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.If}");
break;
case PipelineTemplateConstants.Lfs:
lfs = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Lfs}");
break;
case PipelineTemplateConstants.Name:
name = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Name}");
break;
case PipelineTemplateConstants.Path:
path = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Path}");
break;
case PipelineTemplateConstants.Run:
run = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Run}");
break;
case PipelineTemplateConstants.Shell:
shell = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Shell}");
break;
case PipelineTemplateConstants.Scope:
scope = stepProperty.Value.AssertString($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Scope}");
break;
case PipelineTemplateConstants.Submodules:
submodules = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Submodules}");
break;
case PipelineTemplateConstants.TimeoutMinutes:
ConvertToStepTimeout(context, stepProperty.Value, allowExpressions: true); // Validate early if possible
timeoutMinutes = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.TimeoutMinutes}");
break;
case PipelineTemplateConstants.Uses:
uses = stepProperty.Value.AssertString($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Uses}");
break;
case PipelineTemplateConstants.With:
ConvertToStepInputs(context, stepProperty.Value, allowExpressions: true); // Validate early if possible
with = stepProperty.Value;
break;
case PipelineTemplateConstants.WorkingDirectory:
workingDir = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.WorkingDirectory}");
break;
default:
propertyName.AssertUnexpectedValue($"{PipelineTemplateConstants.Steps} item key"); // throws
break;
}
}
// Fixup the if-condition
var isDefaultScope = String.IsNullOrEmpty(scope?.Value);
ifCondition = ConvertToIfCondition(context, ifToken, false, isDefaultScope);
if (run != null)
{
var result = new ActionStep
{
ScopeName = scope?.Value,
ContextName = id?.Value,
ContinueOnError = continueOnError,
DisplayNameToken = name,
Condition = ifCondition,
TimeoutInMinutes = timeoutMinutes,
Environment = env,
Reference = new ScriptReference(),
};
var inputs = new MappingToken(null, null, null);
inputs.Add(new StringToken(null, null, null, PipelineConstants.ScriptStepInputs.Script), run);
if (workingDir != null)
{
inputs.Add(new StringToken(null, null, null, PipelineConstants.ScriptStepInputs.WorkingDirectory), workingDir);
}
if (shell != null)
{
inputs.Add(new StringToken(null, null, null, PipelineConstants.ScriptStepInputs.Shell), shell);
}
result.Inputs = inputs;
return result;
}
else
{
uses.AssertString($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Uses}");
var result = new ActionStep
{
ScopeName = scope?.Value,
ContextName = id?.Value,
ContinueOnError = continueOnError,
DisplayNameToken = name,
Condition = ifCondition,
TimeoutInMinutes = timeoutMinutes,
Inputs = with,
Environment = env,
};
if (uses.Value.StartsWith("docker://", StringComparison.Ordinal))
{
var image = uses.Value.Substring("docker://".Length);
result.Reference = new ContainerRegistryReference { Image = image };
}
else if (uses.Value.StartsWith("./") || uses.Value.StartsWith(".\\"))
{
result.Reference = new RepositoryPathReference
{
RepositoryType = PipelineConstants.SelfAlias,
Path = uses.Value
};
}
else
{
var usesSegments = uses.Value.Split('@');
var pathSegments = usesSegments[0].Split(new[] { '/', '\\' }, StringSplitOptions.RemoveEmptyEntries);
var gitRef = usesSegments.Length == 2 ? usesSegments[1] : String.Empty;
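// e.g. "actions/checkout@v2" => repository "actions/checkout", ref "v2";
// "octo-org/octo-repo/path/to/action@main" => repository "octo-org/octo-repo",
// path "path/to/action", ref "main".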
if (usesSegments.Length != 2 ||
pathSegments.Length < 2 ||
String.IsNullOrEmpty(pathSegments[0]) ||
String.IsNullOrEmpty(pathSegments[1]) ||
String.IsNullOrEmpty(gitRef))
{
// todo: loc
context.Error(uses, $"Expected format {{org}}/{{repo}}[/path]@ref. Actual '{uses.Value}'");
}
else
{
var repositoryName = $"{pathSegments[0]}/{pathSegments[1]}";
var directoryPath = pathSegments.Length > 2 ? String.Join("/", pathSegments.Skip(2)) : String.Empty;
result.Reference = new RepositoryPathReference
{
RepositoryType = RepositoryTypes.GitHub,
Name = repositoryName,
Ref = gitRef,
Path = directoryPath,
};
}
}
return result;
}
}
/// <summary>
/// When empty, default to "success()".
/// When a status function is not referenced, format as "success() &amp;&amp; &lt;CONDITION&gt;".
/// </summary>
private static String ConvertToIfCondition(
TemplateContext context,
TemplateToken token,
Boolean isJob,
Boolean isDefaultScope)
{
String condition;
if (token is null)
{
condition = null;
}
else if (token is BasicExpressionToken expressionToken)
{
condition = expressionToken.Expression;
}
else
{
var stringToken = token.AssertString($"{(isJob ? "job" : "step")} {PipelineTemplateConstants.If}");
condition = stringToken.Value;
}
if (String.IsNullOrWhiteSpace(condition))
{
return $"{PipelineTemplateConstants.Success}()";
}
var expressionParser = new ExpressionParser();
var functions = default(IFunctionInfo[]);
var namedValues = default(INamedValueInfo[]);
if (isJob)
{
namedValues = s_jobIfNamedValues;
// TODO: refactor into separate functions
// functions = PhaseCondition.FunctionInfo;
}
else
{
namedValues = isDefaultScope ? s_stepNamedValues : s_stepInTemplateNamedValues;
functions = s_stepConditionFunctions;
}
var node = default(ExpressionNode);
try
{
node = expressionParser.CreateTree(condition, null, namedValues, functions) as ExpressionNode;
}
catch (Exception ex)
{
context.Error(token, ex);
return null;
}
if (node == null)
{
return $"{PipelineTemplateConstants.Success}()";
}
var hasStatusFunction = node.Traverse().Any(x =>
{
if (x is Function function)
{
return String.Equals(function.Name, PipelineTemplateConstants.Always, StringComparison.OrdinalIgnoreCase) ||
String.Equals(function.Name, PipelineTemplateConstants.Cancelled, StringComparison.OrdinalIgnoreCase) ||
String.Equals(function.Name, PipelineTemplateConstants.Failure, StringComparison.OrdinalIgnoreCase) ||
String.Equals(function.Name, PipelineTemplateConstants.Success, StringComparison.OrdinalIgnoreCase);
}
return false;
});
return hasStatusFunction ? condition : $"{PipelineTemplateConstants.Success}() && ({condition})";
}
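// Examples of the fix-up above: an empty "if" yields "success()";
// "github.ref == 'refs/heads/main'" becomes "success() && (github.ref == 'refs/heads/main')";
// "always()" is returned unchanged because it already references a status function.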
private static readonly INamedValueInfo[] s_jobIfNamedValues = new INamedValueInfo[]
{
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.GitHub),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Needs),
};
private static readonly INamedValueInfo[] s_stepNamedValues = new INamedValueInfo[]
{
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Strategy),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Matrix),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Steps),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.GitHub),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Job),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Runner),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Env),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Needs),
};
private static readonly INamedValueInfo[] s_stepInTemplateNamedValues = new INamedValueInfo[]
{
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Strategy),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Matrix),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Steps),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Inputs),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.GitHub),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Job),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Runner),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Env),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Needs),
};
private static readonly IFunctionInfo[] s_stepConditionFunctions = new IFunctionInfo[]
{
new FunctionInfo<NoOperation>(PipelineTemplateConstants.Always, 0, 0),
new FunctionInfo<NoOperation>(PipelineTemplateConstants.Cancelled, 0, 0),
new FunctionInfo<NoOperation>(PipelineTemplateConstants.Failure, 0, 0),
new FunctionInfo<NoOperation>(PipelineTemplateConstants.Success, 0, 0),
new FunctionInfo<NoOperation>(PipelineTemplateConstants.HashFiles, 1, Byte.MaxValue),
};
}
}

View File

@@ -4,7 +4,7 @@ using System.ComponentModel;
using System.Linq;
using System.Threading;
using GitHub.DistributedTask.Expressions2;
using GitHub.DistributedTask.Expressions2.Sdk;
using GitHub.DistributedTask.Expressions2.Sdk.Functions;
using GitHub.DistributedTask.ObjectTemplating;
using GitHub.DistributedTask.ObjectTemplating.Schema;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
@@ -14,6 +14,9 @@ using ITraceWriter = GitHub.DistributedTask.ObjectTemplating.ITraceWriter;
namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
{
/// <summary>
/// Evaluates parts of the workflow DOM. For example, a job strategy or step inputs.
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
public class PipelineTemplateEvaluator
{
@@ -50,13 +53,14 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
public DictionaryContextData EvaluateStepScopeInputs(
TemplateToken token,
DictionaryContextData contextData)
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions)
{
var result = default(DictionaryContextData);
if (token != null && token.Type != TokenType.Null)
{
var context = CreateContext(contextData);
var context = CreateContext(contextData, expressionFunctions);
try
{
token = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.StepsScopeInputs, token, 0, null, omitHeader: true);
@@ -76,13 +80,14 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
public DictionaryContextData EvaluateStepScopeOutputs(
TemplateToken token,
DictionaryContextData contextData)
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions)
{
var result = default(DictionaryContextData);
if (token != null && token.Type != TokenType.Null)
{
var context = CreateContext(contextData);
var context = CreateContext(contextData, expressionFunctions);
try
{
token = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.StepsScopeOutputs, token, 0, null, omitHeader: true);
@@ -102,13 +107,14 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
public Boolean EvaluateStepContinueOnError(
TemplateToken token,
DictionaryContextData contextData)
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions)
{
var result = default(Boolean?);
if (token != null && token.Type != TokenType.Null)
{
var context = CreateContext(contextData);
var context = CreateContext(contextData, expressionFunctions);
try
{
token = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.BooleanStepsContext, token, 0, null, omitHeader: true);
@@ -126,16 +132,69 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
return result ?? false;
}
public String EvaluateStepDisplayName(
TemplateToken token,
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions)
{
var result = default(String);
if (token != null && token.Type != TokenType.Null)
{
var context = CreateContext(contextData, expressionFunctions);
try
{
token = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.StringStepsContext, token, 0, null, omitHeader: true);
context.Errors.Check();
result = PipelineTemplateConverter.ConvertToStepDisplayName(context, token);
}
catch (Exception ex) when (!(ex is TemplateValidationException))
{
context.Errors.Add(ex);
}
context.Errors.Check();
}
return result;
}
public List<ActionStep> LoadCompositeSteps(
TemplateToken token)
{
var result = default(List<ActionStep>);
if (token != null && token.Type != TokenType.Null)
{
var context = CreateContext(null, null, setMissingContext: false);
// TODO: we might want to have a bool to prevent it from filling in missing context with dummy variables
try
{
token = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.StepsInTemplate, token, 0, null, omitHeader: true);
context.Errors.Check();
result = PipelineTemplateConverter.ConvertToSteps(context, token);
}
catch (Exception ex) when (!(ex is TemplateValidationException))
{
context.Errors.Add(ex);
}
context.Errors.Check();
}
return result;
}
public Dictionary<String, String> EvaluateStepEnvironment(
TemplateToken token,
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions,
StringComparer keyComparer)
{
var result = default(Dictionary<String, String>);
if (token != null && token.Type != TokenType.Null)
{
var context = CreateContext(contextData);
var context = CreateContext(contextData, expressionFunctions);
try
{
token = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.StepEnv, token, 0, null, omitHeader: true);
@@ -153,15 +212,44 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
return result ?? new Dictionary<String, String>(keyComparer);
}
public Boolean EvaluateStepIf(
TemplateToken token,
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions,
IEnumerable<KeyValuePair<String, Object>> expressionState)
{
var result = default(Boolean?);
if (token != null && token.Type != TokenType.Null)
{
var context = CreateContext(contextData, expressionFunctions, expressionState);
try
{
token = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.StepIfResult, token, 0, null, omitHeader: true);
context.Errors.Check();
result = PipelineTemplateConverter.ConvertToIfResult(context, token);
}
catch (Exception ex) when (!(ex is TemplateValidationException))
{
context.Errors.Add(ex);
}
context.Errors.Check();
}
return result ?? throw new InvalidOperationException("Step if cannot be null");
}
public Dictionary<String, String> EvaluateStepInputs(
TemplateToken token,
DictionaryContextData contextData)
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions)
{
var result = default(Dictionary<String, String>);
if (token != null && token.Type != TokenType.Null)
{
var context = CreateContext(contextData);
var context = CreateContext(contextData, expressionFunctions);
try
{
token = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.StepWith, token, 0, null, omitHeader: true);
@@ -181,13 +269,14 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
public Int32 EvaluateStepTimeout(
TemplateToken token,
DictionaryContextData contextData)
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions)
{
var result = default(Int32?);
if (token != null && token.Type != TokenType.Null)
{
var context = CreateContext(contextData);
var context = CreateContext(contextData, expressionFunctions);
try
{
token = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.NumberStepsContext, token, 0, null, omitHeader: true);
@@ -207,13 +296,14 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
public JobContainer EvaluateJobContainer(
TemplateToken token,
DictionaryContextData contextData)
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions)
{
var result = default(JobContainer);
if (token != null && token.Type != TokenType.Null)
{
var context = CreateContext(contextData);
var context = CreateContext(contextData, expressionFunctions);
try
{
token = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.Container, token, 0, null, omitHeader: true);
@@ -231,15 +321,90 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
return result;
}
public Dictionary<String, String> EvaluateJobOutput(
TemplateToken token,
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions)
{
var result = default(Dictionary<String, String>);
if (token != null && token.Type != TokenType.Null)
{
var context = CreateContext(contextData, expressionFunctions);
try
{
token = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.JobOutputs, token, 0, null, omitHeader: true);
context.Errors.Check();
result = new Dictionary<String, String>(StringComparer.OrdinalIgnoreCase);
var mapping = token.AssertMapping("outputs");
foreach (var pair in mapping)
{
// Literal key
var key = pair.Key.AssertString("output key");
// Literal value
var value = pair.Value.AssertString("output value");
result[key.Value] = value.Value;
}
}
catch (Exception ex) when (!(ex is TemplateValidationException))
{
context.Errors.Add(ex);
}
context.Errors.Check();
}
return result;
}
public Dictionary<String, String> EvaluateJobDefaultsRun(
TemplateToken token,
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions)
{
var result = default(Dictionary<String, String>);
if (token != null && token.Type != TokenType.Null)
{
var context = CreateContext(contextData, expressionFunctions);
try
{
token = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.JobDefaultsRun, token, 0, null, omitHeader: true);
context.Errors.Check();
result = new Dictionary<String, String>(StringComparer.OrdinalIgnoreCase);
var mapping = token.AssertMapping("defaults run");
foreach (var pair in mapping)
{
// Literal key
var key = pair.Key.AssertString("defaults run key");
// Literal value
var value = pair.Value.AssertString("defaults run value");
result[key.Value] = value.Value;
}
}
catch (Exception ex) when (!(ex is TemplateValidationException))
{
context.Errors.Add(ex);
}
context.Errors.Check();
}
return result;
}
public IList<KeyValuePair<String, JobContainer>> EvaluateJobServiceContainers(
TemplateToken token,
DictionaryContextData contextData)
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions)
{
var result = default(List<KeyValuePair<String, JobContainer>>);
if (token != null && token.Type != TokenType.Null)
{
var context = CreateContext(contextData);
var context = CreateContext(contextData, expressionFunctions);
try
{
token = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.Services, token, 0, null, omitHeader: true);
@@ -257,62 +422,11 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
return result;
}
public Boolean TryEvaluateStepDisplayName(
TemplateToken token,
private TemplateContext CreateContext(
DictionaryContextData contextData,
out String stepName)
{
stepName = default(String);
var context = CreateContext(contextData);
if (token != null && token.Type != TokenType.Null)
{
// Only evaluate basic expressions when context is available for all of the named values and functions;
// otherwise return and let the caller use a default name
if (token is BasicExpressionToken expressionToken)
{
ExpressionNode root = null;
try
{
root = new ExpressionParser().ValidateSyntax(expressionToken.Expression, null) as ExpressionNode;
}
catch (Exception exception)
{
context.Errors.Add(exception);
context.Errors.Check();
}
foreach (var node in root.Traverse())
{
if (node is NamedValue namedValue && !contextData.ContainsKey(namedValue.Name))
{
return false;
}
else if (node is Function function &&
!context.ExpressionFunctions.Any(item => String.Equals(item.Name, function.Name)) &&
!ExpressionConstants.WellKnownFunctions.ContainsKey(function.Name))
{
return false;
}
}
}
try
{
token = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.StringStepsContext, token, 0, null, omitHeader: true);
context.Errors.Check();
stepName = PipelineTemplateConverter.ConvertToStepDisplayName(context, token);
}
catch (Exception ex) when (!(ex is TemplateValidationException))
{
context.Errors.Add(ex);
}
context.Errors.Check();
}
return true;
}
private TemplateContext CreateContext(DictionaryContextData contextData)
IList<IFunctionInfo> expressionFunctions,
IEnumerable<KeyValuePair<String, Object>> expressionState = null,
bool setMissingContext = true)
{
var result = new TemplateContext
{
@@ -335,7 +449,7 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
}
}
// Add named context
// Add named values
if (contextData != null)
{
foreach (var pair in contextData)
@@ -344,12 +458,47 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
}
}
// Compat for new agent against old server
foreach (var name in s_contextNames)
// Add functions
var functionNames = new HashSet<String>(StringComparer.OrdinalIgnoreCase);
if (expressionFunctions?.Count > 0)
{
if (!result.ExpressionValues.ContainsKey(name))
foreach (var function in expressionFunctions)
{
result.ExpressionValues[name] = null;
result.ExpressionFunctions.Add(function);
functionNames.Add(function.Name);
}
}
// Add missing expression values and expression functions.
// This solves the following problems:
// - Compat for new agent against old server (new contexts not sent down in job message)
// - Evaluating early when all referenced contexts are available, even though all allowed
// contexts may not yet be available. For example, evaluating step display name can often
// be performed early.
if (setMissingContext)
{
foreach (var name in s_expressionValueNames)
{
if (!result.ExpressionValues.ContainsKey(name))
{
result.ExpressionValues[name] = null;
}
}
foreach (var name in s_expressionFunctionNames)
{
if (!functionNames.Contains(name))
{
result.ExpressionFunctions.Add(new FunctionInfo<NoOperation>(name, 0, Int32.MaxValue));
}
}
}
// Add state
if (expressionState != null)
{
foreach (var pair in expressionState)
{
result.State[pair.Key] = pair.Value;
}
}
@@ -359,11 +508,13 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
private readonly ITraceWriter m_trace;
private readonly TemplateSchema m_schema;
private readonly IList<String> m_fileTable;
private readonly String[] s_contextNames = new[]
private readonly String[] s_expressionValueNames = new[]
{
PipelineTemplateConstants.GitHub,
PipelineTemplateConstants.Needs,
PipelineTemplateConstants.Strategy,
PipelineTemplateConstants.Matrix,
PipelineTemplateConstants.Needs,
PipelineTemplateConstants.Secrets,
PipelineTemplateConstants.Steps,
PipelineTemplateConstants.Inputs,
@@ -371,5 +522,13 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
PipelineTemplateConstants.Runner,
PipelineTemplateConstants.Env,
};
private readonly String[] s_expressionFunctionNames = new[]
{
PipelineTemplateConstants.Always,
PipelineTemplateConstants.Cancelled,
PipelineTemplateConstants.Failure,
PipelineTemplateConstants.HashFiles,
PipelineTemplateConstants.Success,
};
}
}

View File

@@ -2,25 +2,35 @@
using System.ComponentModel;
using System.IO;
using System.Reflection;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.ObjectTemplating.Schema;
namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
{
[EditorBrowsable(EditorBrowsableState.Never)]
public sealed class PipelineTemplateSchemaFactory
public static class PipelineTemplateSchemaFactory
{
public TemplateSchema CreateSchema()
public static TemplateSchema GetSchema()
{
var assembly = Assembly.GetExecutingAssembly();
var json = default(String);
using (var stream = assembly.GetManifestResourceStream("GitHub.DistributedTask.Pipelines.ObjectTemplating.workflow-v1.0.json"))
using (var streamReader = new StreamReader(stream))
if (s_schema == null)
{
json = streamReader.ReadToEnd();
var assembly = Assembly.GetExecutingAssembly();
var json = default(String);
using (var stream = assembly.GetManifestResourceStream("GitHub.DistributedTask.Pipelines.ObjectTemplating.workflow-v1.0.json"))
using (var streamReader = new StreamReader(stream))
{
json = streamReader.ReadToEnd();
}
var objectReader = new JsonObjectReader(null, json);
var schema = TemplateSchema.Load(objectReader);
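// Only the first thread to finish loading publishes its schema; any later thread's copy is
// discarded and the already-published s_schema is returned below.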
Interlocked.CompareExchange(ref s_schema, schema, null);
}
var objectReader = new JsonObjectReader(null, json);
return TemplateSchema.Load(objectReader);
return s_schema;
}
private static TemplateSchema s_schema;
}
}

View File

@@ -1,4 +1,4 @@
using System;
using System;
using System.Globalization;
using System.IO;
using System.Linq;
@@ -12,7 +12,7 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
/// <summary>
/// Converts a YAML file into a TemplateToken
/// </summary>
public sealed class YamlObjectReader : IObjectReader
internal sealed class YamlObjectReader : IObjectReader
{
internal YamlObjectReader(
Int32? fileId,

Some files were not shown because too many files have changed in this diff.