Compare commits

..

52 Commits

Author SHA1 Message Date
Marek Mahut
dcda342ecc use /usr/bin/env to find bash in scripts (#314) 2021-04-22 17:45:33 -04:00
Tingluo Huang
a711bd9494 add workflow_dispatch 2020-07-28 14:52:38 -04:00
Ethan Chiu
5e0cde8649 Composite Actions UI (#578)
* Composite Action Run Steps

* Env Flow => Able to get env variables and overwrite current env variables => but it doesn't 'stick'

* clean up

* Clean up trace messages + add Trace debug in ActionManager

* Add debugging message

* Optimize runtime of code

* Change String to string

* Add comma to Composite

* Change JobSteps to a List, Change Register Step function name

* Add TODO, remove unn. content

* Remove unnecessary code

* Fix unit tests

* Fix env format

* Remove comment

* Remove TODO message for context

* Add verbose trace logs which are only viewable by devs

* Initial Start for FileTable stuff

* Progress towards passing FileTable or FileID or FileName

* Sort usings in Composite Action Handler

* Change 0 to location

* Update context variables in composite action yaml

* Add helpful error message for null steps

* Pass fileID to all children token of root action token

* Change confusing term context => templateContext, Eliminate _fileTable and only use ExecutionContext.FileTable + update this table when need be

* Remove unnessary FileID attribute from CompositeActionExecutionData

* Clean up file path for error message

* Remove todo

* Initial start/framework for output handling

* Outline different class vs Handler approach

* Remove InitializeScope

* Remove InitializeScope

* Fix Workflow Step Env overiding Parent Env

* First Approach for Attaching ID + Group ID to each Composite Action Step

* Add GroupID to the ActionDefinitionData

* starting foundation for handling clean up outputs step

* Pass outputs data to each composite action step to enable set-output functionality

* Create ScopeName for whole composite action.

This will enable us to add to the StepsContext[ScopeName] for the composite action which will allow us to use all these outputs in the cleanup step

* Hook up composite output step to handler => tmmrw implement composite output handler

* Add post composite action step to cleanup outputs => triggers composite output cleanup handler

* Fix Outputs Token handling start. Add individual step scope names.

* Set up Scope Name and Context Name naming system{

* Figured out how to pass Parent Execution Context to clean up step

* Figured out how to pass Parent Execution Context and scope names to
clean up step

* Add GetOutput function for StepsContext

* Generate child scope name correctly if parent scope name is null

* Simplify InitializeScope()

* Outputs are set correctly and able to get all final outputs in handler

* Parse through Action Outputs

* Fix null ScopeName + ContextName in CompositeOutputHandler

* Shift over handling of Action Outputs to output handler

* First attempt to fix null retrievals for output variables

* Basic Support for Outputs Done.

* Clean up pt.1

* Refactor outputs to avoid using Action Reference

* Clean up code

* Clean up part 2

* Add clarifying comments for the output handler

* Remove TODO

* Remove env in composite action scope

* Clean up

* Revert back

* revert back

* add back envToken

* Remove unnecessary code

* Add file length check

* Clean up

* Fix logging issue

* Figure out how to handle set-env edge cases

* formatting

* fix unit tests

* Fix windows unit test syntax error

* Fix period

* Sanity check for fileTable add + remove unn. code

* revert back

* Add back line break

* Fix null errors

* Address situation if FileTable is null + add sanity check for adding file to fileTable

* add line

* Revert

* Fix unit tests to instantiate a FileTable

* Fix logic for trimming manifestfile path

* Add null check

* Revert

* Revert

* revert

* spacing

* Add filetable to testing file, remove ? since we know filetable should never be non null

* Fix Throw logic

* Clarify template outputs token

* Add another type support for outputs to avoid container unit tests errors

* Add mapping for parity

* Build support for new outputs format

* Build support for new outputs format

* Refactor to avoid duplication of action yaml for workflow yaml

* revert

* revert

* revert

* spacing
2020-07-13 17:55:15 -04:00
Ethan Chiu
cb2b323781 Composite Run Steps Outputs (#568)
* Composite Action Run Steps

* Env Flow => Able to get env variables and overwrite current env variables => but it doesn't 'stick'

* clean up

* Clean up trace messages + add Trace debug in ActionManager

* Add debugging message

* Optimize runtime of code

* Change String to string

* Add comma to Composite

* Change JobSteps to a List, Change Register Step function name

* Add TODO, remove unn. content

* Remove unnecessary code

* Fix unit tests

* Fix env format

* Remove comment

* Remove TODO message for context

* Add verbose trace logs which are only viewable by devs

* Initial Start for FileTable stuff

* Progress towards passing FileTable or FileID or FileName

* Sort usings in Composite Action Handler

* Change 0 to location

* Update context variables in composite action yaml

* Add helpful error message for null steps

* Pass fileID to all children token of root action token

* Change confusing term context => templateContext, Eliminate _fileTable and only use ExecutionContext.FileTable + update this table when need be

* Remove unnessary FileID attribute from CompositeActionExecutionData

* Clean up file path for error message

* Remove todo

* Initial start/framework for output handling

* Outline different class vs Handler approach

* Remove InitializeScope

* Remove InitializeScope

* Fix Workflow Step Env overiding Parent Env

* First Approach for Attaching ID + Group ID to each Composite Action Step

* Add GroupID to the ActionDefinitionData

* starting foundation for handling clean up outputs step

* Pass outputs data to each composite action step to enable set-output functionality

* Create ScopeName for whole composite action.

This will enable us to add to the StepsContext[ScopeName] for the composite action which will allow us to use all these outputs in the cleanup step

* Hook up composite output step to handler => tmmrw implement composite output handler

* Add post composite action step to cleanup outputs => triggers composite output cleanup handler

* Fix Outputs Token handling start. Add individual step scope names.

* Set up Scope Name and Context Name naming system{

* Figured out how to pass Parent Execution Context to clean up step

* Figured out how to pass Parent Execution Context and scope names to
clean up step

* Add GetOutput function for StepsContext

* Generate child scope name correctly if parent scope name is null

* Simplify InitializeScope()

* Outputs are set correctly and able to get all final outputs in handler

* Parse through Action Outputs

* Fix null ScopeName + ContextName in CompositeOutputHandler

* Shift over handling of Action Outputs to output handler

* First attempt to fix null retrievals for output variables

* Basic Support for Outputs Done.

* Clean up pt.1

* Refactor outputs to avoid using Action Reference

* Clean up code

* Clean up part 2

* Add clarifying comments for the output handler

* Remove TODO

* Remove env in composite action scope

* Clean up

* Revert back

* revert back

* add back envToken

* Remove unnecessary code

* Add file length check

* Clean up

* Figure out how to handle set-env edge cases

* formatting

* fix unit tests

* Fix windows unit test syntax error

* Fix period

* Sanity check for fileTable add + remove unn. code

* revert back

* Add back line break

* Fix null errors

* Address situation if FileTable is null + add sanity check for adding file to fileTable

* add line

* Revert

* Fix unit tests to instantiate a FileTable

* Fix logic for trimming manifestfile path

* Add null check

* Revert

* Revert

* revert

* spacing

* Add filetable to testing file, remove ? since we know filetable should never be non null

* Fix Throw logic

* Clarify template outputs token

* Add another type support for outputs to avoid container unit tests errors

* Add mapping for parity

* Build support for new outputs format

* Refactor to avoid duplication of action yaml for workflow yaml

* Move SDK work in ActionManifestManager, Condense Code

* Defer runs evaluation till after for loop to ensure order doesn't matter

* Fix logic error in setting scope and context names

* Add Regex + Add Child Context name null resolution

* move private function to bottom of class
2020-07-13 17:23:19 -04:00
Ethan Chiu
6c3958f365 Composite Run Steps ADR (#554)
* start

* Inputs + Outputs

* Clarify docs

* Finish Environment

* Add if condition

* Clarify language

* Update 0549-composite-run-steps.md

* timeout-minutes

* Finish

* add relevant example

* Fix syntax

* fix env example

* fix yaml syntax

* Update 0549-composite-run-steps.md

* Update file names, add more relevant example if condition

* Add note to continue-on-error

* Apply changes to If Condition

* bolding

* Update 0549-composite-run-steps.md

* Update 0549-composite-run-steps.md

* Update 0549-composite-run-steps.md

* Update 0549-composite-run-steps.md

* Syntax support + spacing

* Add guiding principles.

* Update 0549-composite-run-steps.md

* Reverse order.

* Update 0549-composite-run-steps.md

* change from job to step

* Update 0549-composite-run-steps.md

* Update 0549-composite-run-steps.md

* Update 0549-composite-run-steps.md

* Add Secrets

* Update 0549-composite-run-steps.md

* Update 0549-composite-run-steps.md

* Fix output example

* Fix output example

* Fix action examples to use using.

* fix output variable name

* update workingDir + env

* Defaults + continue-on-error

* Update Outputs Section

* Eliminate Env

* Secrets

* Update timeout-minutes

* Update 0549-composite-run-steps.md

* Update 0549-composite-run-steps.md

* Fix example.

* Remove TODOs

* Update 0549-composite-run-steps.md

* Update 0549-composite-run-steps.md
2020-07-13 12:30:31 -04:00
Ethan Chiu
9d7bd4706b Improve Error Messaging for Actions by Using ExecutionContext's FileTable as Single Source of Truth and by Passing FileID to All Children Tokens. (#564)
* Composite Action Run Steps

* Env Flow => Able to get env variables and overwrite current env variables => but it doesn't 'stick'

* clean up

* Clean up trace messages + add Trace debug in ActionManager

* Add debugging message

* Optimize runtime of code

* Change String to string

* Add comma to Composite

* Change JobSteps to a List, Change Register Step function name

* Add TODO, remove unn. content

* Remove unnecessary code

* Fix unit tests

* Fix env format

* Remove comment

* Remove TODO message for context

* Add verbose trace logs which are only viewable by devs

* Initial Start for FileTable stuff

* Progress towards passing FileTable or FileID or FileName

* Sort usings in Composite Action Handler

* Change 0 to location

* Update context variables in composite action yaml

* Add helpful error message for null steps

* Pass fileID to all children token of root action token

* Change confusing term context => templateContext, Eliminate _fileTable and only use ExecutionContext.FileTable + update this table when need be

* Remove unnessary FileID attribute from CompositeActionExecutionData

* Clean up file path for error message

* Remove todo

* Fix Workflow Step Env overiding Parent Env

* Remove env in composite action scope

* Clean up

* Revert back

* revert back

* add back envToken

* Remove unnecessary code

* Add file length check

* Clean up

* Figure out how to handle set-env edge cases

* formatting

* fix unit tests

* Fix windows unit test syntax error

* Fix period

* Sanity check for fileTable add + remove unn. code

* revert back

* Add back line break

* Fix null errors

* Address situation if FileTable is null + add sanity check for adding file to fileTable

* add line

* Revert

* Fix unit tests to instantiate a FileTable

* Fix logic for trimming manifestfile path

* Add null check

* Add filetable to testing file, remove ? since we know filetable should never be non null
2020-07-08 17:15:16 -04:00
Ethan Chiu
5822a38c39 Add bash command for running custom runner (#569) 2020-07-08 11:20:38 -04:00
Ethan Chiu
d42c9da2d7 Composite Actions: Support Env Flow (#557)
* Composite Action Run Steps

* Env Flow => Able to get env variables and overwrite current env variables => but it doesn't 'stick'

* clean up

* Clean up trace messages + add Trace debug in ActionManager

* Add debugging message

* Optimize runtime of code

* Change String to string

* Add comma to Composite

* Change JobSteps to a List, Change Register Step function name

* Add TODO, remove unn. content

* Remove unnecessary code

* Fix unit tests

* Fix env format

* Remove comment

* Remove TODO message for context

* Add verbose trace logs which are only viewable by devs

* Sort usings in Composite Action Handler

* Change 0 to location

* Update context variables in composite action yaml

* Add helpful error message for null steps

* Fix Workflow Step Env overiding Parent Env

* Remove env in composite action scope

* Clean up

* Revert back

* revert back

* add back envToken

* Remove unnecessary code

* Figure out how to handle set-env edge cases

* formatting

* fix unit tests

* Fix windows unit test syntax error
2020-07-08 10:16:51 -04:00
eric sciple
121deedeb5 Fix trailing '.0' for Int64 values (#572) 2020-06-30 17:25:47 -04:00
Ethan Chiu
a0942ed345 Composite Actions Support for Multiple Run Steps (#549)
* Composite Action Run Steps

* Clean up trace messages + add Trace debug in ActionManager

* Change String to string

* Add comma to Composite

* Change JobSteps to a List, Change Register Step function name

* Add TODO, remove unn. content

* Remove unnecessary code

* Fix unit tests

* Add verbose trace logs which are only viewable by devs

* Sort usings in Composite Action Handler

* Change 0 to location

* Update context variables in composite action yaml

* Add helpful error message for null steps
2020-06-23 15:35:32 -04:00
TingluoHuang
7cef9a27ca release 2.267.0 runner. 2020-06-23 14:05:28 -04:00
Tingluo Huang
df7e16954e print runner and machine name to log. (#539) 2020-06-23 13:57:37 -04:00
eric sciple
4e7d27a53c remove temporary logic when resolving action download info (#550) 2020-06-15 13:13:47 -04:00
Lokesh Gopu
89d1418e48 Update exception message (#540) 2020-06-11 17:25:50 -04:00
Tingluo Huang
e728b8594d fix race condition. (#538) 2020-06-11 16:17:24 -04:00
Tingluo Huang
de4490d06d Restore SELinux context on service file when SELinux is enabled (#525) 2020-06-11 15:40:09 -04:00
Tingluo Huang
2e800f857e Skip search $PATH on command with fully qualified path (#526) 2020-06-11 13:52:42 -04:00
Tingluo Huang
312c7668a8 Fix DataContract with Token service (#532) 2020-06-11 12:11:35 -04:00
Tingluo Huang
eaf39bb058 add libicu66 for Ubuntu 20.04 (#535) 2020-06-11 12:11:13 -04:00
eric sciple
5815819f24 Resolve action download info (#515) 2020-06-09 08:53:28 -04:00
Ethan Chiu
1aea046932 Add substep for developer flow for clarity (#523) 2020-06-08 10:47:58 -04:00
Ethan Chiu
eda463601c Update Links and Language to Git + VSCode (#522) 2020-06-08 10:19:17 -04:00
Nick Fields
f994ae0542 Reduce input validation warnings (#506)
* Only raise a single warning for unexpected inputs

* Update invalid input test to raise single warning
2020-06-05 23:09:14 -04:00
Tingluo Huang
3c5aef791c Fix null ref exception in SecretMasker caused by hashfiles timeout. (#516) 2020-06-05 23:02:10 -04:00
Tingluo Huang
c4626d0c3a Remove SPS/Token migration code. Remove GHES url manipulate code. (#513)
* Remove SPS/Token migration code. Remove GHES url manipulate code.

* feedback.
2020-06-03 23:24:53 -04:00
eric sciple
416a7ac4b8 prepare to switch to service resolves archive download info (#508) 2020-06-02 17:21:50 -04:00
TingluoHuang
11435857e4 prepare 2.263.0 runner release. 2020-05-21 15:49:10 -04:00
Tingluo Huang
6f260012a3 Fix inputs validation warning, fix post step display name, fix worker crash due to error in step.env (#490) 2020-05-21 11:09:50 -04:00
eric sciple
4fc87ddfc6 fix problem matcher for GHES (#488) 2020-05-19 16:15:03 -04:00
eric sciple
b45c1b9440 switch GITHUB_URL to GITHUB_SERVER_URL (#482) 2020-05-18 13:02:30 -04:00
Quan TRAN
73307c0a30 jq returns "null" if the field does not exist (#478) 2020-05-14 11:09:20 -04:00
Tingluo Huang
cd8e4ddba1 Validate inputs only for repo action, no warning for small delay. (#476)
* validate inputs only for repo action, no warning for small delay.

* l0
2020-05-12 16:09:13 -04:00
Tingluo Huang
abf59bdcb6 Fix configure as service with runner name has space. (#474) 2020-05-12 16:08:50 -04:00
Brian Cristante
09cf59c1e0 Use an env var to point to an Actions Service dev instance (#468)
* Use an environment variable for the testing backdoor

* Make the env var a boolean

We'll still have to pass the URL on the command line

* Empty commit to rerun CI
2020-05-11 17:14:02 -04:00
TingluoHuang
7a65236022 prepare 2.262.0 runner release. 2020-05-11 15:05:59 -04:00
David Kale
462b5117c8 docker build using -f instead of implied default (#471)
* pass -f to docker build

* Wrong place

* build path

* Also pass docker context path

* Tidy up format

* PR Feedback
2020-05-11 13:57:31 -04:00
Tingluo Huang
6922f3cb86 sps/token migration tweak, ActionResult casing. (#462) 2020-05-11 12:36:35 -04:00
Tingluo Huang
911135e66c add help info for '--labels' (#472) 2020-05-11 12:36:16 -04:00
PJ Quirk
01c9a8a8af Remove community actions munging and add dotcom fallback for downloading actions (#469)
* Fallback to dotcom rather than munged community actions

* Encapsulate the link and the auth details

* Rename the method to be clearer

* Remove BOM
2020-05-11 12:23:02 -04:00
eric sciple
33d2d2c328 update checkout@v1 for GHES (#470) 2020-05-11 11:41:16 -04:00
Justin Hutchings
a246b3b29d Add CodeQL Analysis workflow (#459)
* Add CodeQL Analysis workflow

* Fix path

* Add manual build step

Import manual build step from build.yml
2020-05-07 11:17:34 -04:00
Brian Cristante
c7768d4a7b Adapt create-latest-svc.sh to work with GHES (#452)
* Add README

* Adapt create-latest-svc to work with GHES

* Fix inverted conditions
2020-04-27 23:53:53 -04:00
Tingluo Huang
70729fb3c4 Help trace worker crash in Kusto. (#450)
* Help trace worker crash in Kusto.

* more

* feedback.
2020-04-27 23:44:17 -04:00
Tingluo Huang
1470a3b6e2 better error when runner removed from service. (#441) 2020-04-27 23:02:00 -04:00
eric sciple
2fadf430e4 post-alpha fixes for github.url github.api_url and github.graphql_url (#451) 2020-04-24 13:38:59 -04:00
PJ Quirk
f798f5606b Use the API_URL and munge action URLs for GHES (#437)
* First pass at logic for GHES, not all correct

* Need to mock out file downloading

* Allowed for mocking of HTTP responses

* Added test for builtin GHES action download

* More tests

* Don't retry on action 404

* Remove commented out code

* Add a using statement back, because Windows

* Make windows happy again

* Another windows fix

* Always delete the cache since it isn't fully implemented

* Use RunnerService base class

* Add examples, update URL path

* Remove forceDotCom

* Fix a bug

* Remove a test that's no longer relevant

* PR feedback

* Add missing return

* More trace info

* Use the new agreed-upon format

* Use the auth token since we're hitting GHES directly

* Fixing tests on windows

* Fixed one more test
2020-04-23 17:02:13 -04:00
Tingluo Huang
3f7a01af93 add secret masker for trimming double qoutes. (#440) 2020-04-21 22:07:55 -04:00
Tingluo Huang
d5c54f9819 Raise warning when action input does not match action.yml. (#429) 2020-04-21 19:42:53 -04:00
Mathias LANG
9f78ad3b34 Make release notes code blocks copy-paste-able (#430)
The release notes used C-style comments (`//`) instead of shell-style (`#`),
causing the code snippet to not be easily copy-paste-able.

Additionally, the code fence for Windows had the extra `powershell` added,
as well as a comment to direct users towards `powershell` over `cmd`.
2020-04-19 22:11:44 -04:00
Jan Pazdziora
97883c8cd5 Fix spelling of RHEL and CentOS. (#436) 2020-04-19 16:53:06 -04:00
Tingluo Huang
c5fa9fb062 Print node version in debug instead of output. (#433) 2020-04-17 11:53:51 -04:00
Bryan MacFarlane
b2dcdc21dc Sample scripts to automate scaleable runners (#427) 2020-04-17 11:08:45 -04:00
81 changed files with 4526 additions and 2001 deletions


@@ -1,6 +1,7 @@
name: Runner CI
on:
+  workflow_dispatch:
  push:
    branches:
    - master

.github/workflows/codeql.yml (new file)

@@ -0,0 +1,35 @@
name: "Code Scanning - Action"
on:
push:
schedule:
- cron: '0 0 * * 0'
jobs:
CodeQL-Build:
strategy:
fail-fast: false
# CodeQL runs on ubuntu-latest, windows-latest, and macos-latest
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v2
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v1
# Override language selection by uncommenting this and choosing your languages
# with:
# languages: go, javascript, csharp, python, cpp, java
- name: Manual build
run : |
./dev.sh layout Release linux-x64
working-directory: src
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v1


@@ -0,0 +1,275 @@
# ADR 054x: Composite Run Steps
**Date**: 2020-06-17
**Status**: Proposed
**Relevant PR**: https://github.com/actions/runner/pull/549
## Context
Customers want to be able to compose actions from other actions (ex: https://github.com/actions/runner/issues/438).
An important step towards meeting this goal is to build functionality into actions so that users can simply execute any number of steps.
## Guiding Principles
We don't want the workflow author to need to know the internal workings of the composite action (for example, `default.shell` and `default.workingDir` should not be inherited from the workflow file into the action file). When deciding how to design certain parts of composite run steps, we want to think one logical step away from the consumer.
A composite action is treated as **one** individual job step (aka encapsulation).
## Decision
**In this ADR, we only support running multiple run steps in an Action.** In doing so, we build in support for mapping and flowing the inputs, outputs, and env variables (ex: all nested steps should have access to their parents' input variables, and nested steps can overwrite those input variables).
## Steps
Example `workflow.yml`
```yaml
jobs:
build:
runs-on: self-hosted
steps:
- id: step1
uses: actions/setup-python@v1
- id: step2
uses: actions/setup-node@v2
- uses: actions/checkout@v2
- uses: user/composite@v1
- name: workflow step 1
run: echo hello world 3
- name: workflow step 2
run: echo hello world 4
```
Example `user/composite/action.yml`
```yaml
runs:
using: "composite"
steps:
- run: pip install -r requirements.txt
- run: npm install
```
Example Output
```
[npm installation output]
[pip requirements output]
echo hello world 3
echo hello world 4
```
We add a token called "composite" which allows our Runner code to process composite actions. By invoking "using: composite", our Runner code then processes the "steps" attribute, converts this template code to a list of steps, and finally runs each run step sequentially. If any step fails and there are no `if` conditions defined, the whole composite action job fails.
## Inputs
Example `workflow.yml`:
```yaml
steps:
- id: foo
uses: user/composite@v1
with:
your_name: "Octocat"
```
Example `user/composite/action.yml`:
```yaml
inputs:
your_name:
description: 'Your name'
default: 'Ethan'
runs:
using: "composite"
steps:
- run: echo hello ${{ inputs.your_name }}
```
Example Output:
```
hello Octocat
```
Each input variable in the composite action is only viewable in its own scope.
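To make the scoping concrete, here is a minimal sketch (not one of the ADR's own examples, assuming the inputs example above) showing that every step inside the composite action can read `inputs.your_name`, while steps in the calling `workflow.yml` cannot:
```yaml
runs:
  using: "composite"
  steps:
    # Both steps resolve the input because they live inside the action's scope.
    - run: echo "step 1 sees ${{ inputs.your_name }}"
    - run: echo "step 2 sees ${{ inputs.your_name }}"
    # A step in the calling workflow.yml has no `inputs.your_name`;
    # the input is scoped to this action file only.
```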
## Outputs
Example `workflow.yml`:
```yaml
...
steps:
- id: foo
uses: user/composite@v1
- run: echo random-number ${{ steps.foo.outputs.random-number }}
```
Example `user/composite/action.yml`:
```yaml
outputs:
random-number:
description: "Random number"
value: ${{ steps.random-number-generator.outputs.random-id }}
runs:
using: "composite"
steps:
- id: random-number-generator
run: echo "::set-output name=random-number::$(echo $RANDOM)"
```
Example Output:
```
::set-output name=my-output::43243
random-number 43243
```
Each of the output variables from the composite action is viewable from the workflow file that uses the composite action. In other words, each child action's outputs are viewable only by its parent, using dot notation (ex: `steps.foo.outputs.random-number`).
Moreover, output ids are only accessible within the scope where they are defined. Note that in the example above, the `workflow.yml` file should not have access to the step-level output id (i.e. `random-id`). We do this because we don't want to require the workflow author to know the internal workings of the composite action.
## Context
Similar to the workflow file, the composite action has access to the [same context objects](https://help.github.com/en/actions/reference/context-and-expression-syntax-for-github-actions#contexts) (ex: `github`, `env`, `strategy`).
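As a minimal sketch (assuming the composite syntax above), a composite run step can reference those contexts directly:
```yaml
runs:
  using: "composite"
  steps:
    # `github` here is the same context object the calling workflow sees.
    - run: echo "Running for ${{ github.repository }} on ${{ github.ref }}"
```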
## Environment
Inside the composite action, you'll only be able to use `::set-env::` to set environment variables, just like you can with other actions.
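A minimal sketch of that, using the `::set-env::` workflow command available when this ADR was written (since deprecated):
```yaml
runs:
  using: "composite"
  steps:
    # Registers FOO in the env context for subsequent steps,
    # the same way any other action would.
    - run: echo "::set-env name=FOO::bar"
    - run: echo "FOO is now $FOO"
```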
## Secrets
**Note** : This feature will be focused on in a future ADR.
We'll pass the secrets from the composite action's parents (ex: the workflow file) to the composite action. Secrets can be created in the composite action with the `secrets` context. In the action's yaml, we'll automatically mask the secret.
## If Condition
Example `workflow.yml`:
```yaml
steps:
- run: exit 1
- uses: user/composite@v1 # <--- this will run, as it's marked as always running
if: always()
```
Example `user/composite/action.yml`:
```yaml
runs:
using: "composite"
steps:
- run: echo "just succeeding"
- run: echo "I will run, as my current scope is succeeding"
if: success()
- run: exit 1
- run: echo "I will not run, as my current scope is now failing"
```
See the paragraph below for a rudimentary approach (thank you to @cybojenix for the idea, example, and explanation for this approach):
The `if` statement in the parent (in the example above, the `workflow.yml`) determines whether or not we should run the composite action. So, our composite action will run since the `if` condition for running the composite action is `always()`.
**Note that the if condition on the parent does not propagate to the rest of its children, though.**
The child action (in this example, the `action.yml`) starts with a clean slate (in other words, no imposed if conditions). Similar to the logic above, `echo "I will run, as my current scope is succeeding"` will run since its `if` condition checks that the previous steps **within this composite action** have not failed. `run: echo "I will not run, as my current scope is now failing"` will not run since the previous step resulted in an error, and by default a step's `if` expression is `success()` when no `if` condition is set.
What if a step has `cancelled()`? We do the opposite of our approach above if `cancelled()` is used for any of our composite run steps: we will cancel any step that has this condition if the workflow is cancelled at all.
## Timeout-minutes
Example `workflow.yml`:
```yaml
steps:
- id: bar
uses: user/test@v1
timeout-minutes: 50
```
Example `user/composite/action.yml`:
```yaml
runs:
using: "composite"
steps:
- id: foo1
run: echo test 1
timeout-minutes: 10
- id: foo2
run: echo test 2
- id: foo3
run: echo test 3
timeout-minutes: 10
```
A composite action in its entirety is a job. You can set `timeout-minutes` both for the whole composite action and for its steps, as long as the sum of the `timeout-minutes` for each composite action step that has the attribute is less than or equal to the `timeout-minutes` for the composite action. There is no default `timeout-minutes` for an individual composite action step.
If the time taken by any of the steps, individually or in combination, exceeds the whole composite action's `timeout-minutes` attribute, the whole job will fail (1). If an individual step exceeds its own `timeout-minutes` attribute but the total time used including this step is below the overall composite action `timeout-minutes`, the individual step will fail but the rest of the steps will run based on their own `timeout-minutes` attribute (they will still abide by condition (1), though).
For reference, in the example above, if the composite step `foo1` takes 11 minutes to run, that step will fail, but the remaining steps, `foo2` and `foo3`, will proceed as long as their total runtime together with the failed `foo1` step is less than the composite action's `timeout-minutes` (50 minutes). If the composite step `foo2` takes 51 minutes to run, it will cause the whole composite action job to fail.
The rationale behind this is that users can configure their steps with the `if` condition to conditionally set how steps rely on each other. Due to the additional capabilities that are offered by combining `timeout-minutes` and/or `if`, we wanted the `timeout-minutes` condition to be as dumb as possible and not affect other steps.
[Usage limits still apply](https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions?query=if%28%29#usage-limits)
## Continue-on-error
Example `workflow.yml`:
```yaml
steps:
- run: exit 1
- id: bar
uses: user/test@v1
continue-on-error: false
- id: foo
run: echo "Hello World" <------- This step will not run
```
Example `user/composite/action.yml`:
```yaml
runs:
using: "composite"
steps:
- run: exit 1
continue-on-error: true
- run: echo "Hello World 2" <----- This step will run
```
If any of the steps in the composite action fail and `continue-on-error` is set to `false` for the whole composite action step in the workflow file, then the workflow steps below it will not run. On the flip side, if `continue-on-error` is set to `true` for the whole composite action step in the workflow file, the next job step will still run.
The composite action's own steps follow the same logic. In this example, `"Hello World 2"` will be output because the previous step has `continue-on-error` set to `true`, even though that previous step errored.
## Defaults
The composite action author will be required to set the `shell` and `workingDir` of the composite action. Moreover, the composite action author will be able to explicitly set the shell for each composite run step. The workflow author will not have the ability to change these attributes.
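A sketch of what that could look like in the action file; the attribute spellings `shell` and `working-directory` are assumptions borrowed from existing workflow syntax rather than names fixed by this ADR:
```yaml
runs:
  using: "composite"
  steps:
    # The action author pins the shell and working directory per step;
    # the calling workflow cannot override them.
    - run: npm install
      shell: bash
      working-directory: ./app
    - run: ./build.ps1
      shell: pwsh
      working-directory: ./app
```
This keeps the encapsulation principle above: the workflow author never needs to know which shell or directory the action's steps expect.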
## Visualizing Composite Action in the GitHub Actions UI
We want all the composite action's steps to be condensed into the original composite action node.
Here is a visual representation of the [first example](#Steps):
```yaml
| composite_action_node |
| echo hello world 1 |
| echo hello world 2 |
| echo hello world 3 |
| echo hello world 4 |
```
## Conclusion
This ADR lays the framework for eventually supporting nested composite actions within composite actions. It allows users to run multiple run steps within a GitHub composite action, with support for inputs, outputs, environment, and context in any step, as well as the `if`, `timeout-minutes`, and `continue-on-error` attributes for each composite action step.


@@ -23,7 +23,7 @@ An ADR is an Architectural Decision Record. This allows consensus on the direct
### Required Dev Dependencies
-![Win](res/win_sm.png) Git for Windows [Install Here](https://git-scm.com/downloads) (needed for dev sh script)
+![Win](res/win_sm.png) ![*nix](res/linux_sm.png) Git for Windows and Linux [Install Here](https://git-scm.com/downloads) (needed for dev sh script)
### To Build, Test, Layout
@@ -43,6 +43,7 @@ Sample developer flow:
```bash
git clone https://github.com/actions/runner
+cd runner
cd ./src
./dev.(sh/cmd) layout # the runner that built from source is in {root}/_layout
<make code changes>
@@ -50,10 +51,23 @@ cd ./src
./dev.(sh/cmd) test # run all unit tests before git commit/push
```
+View logs:
+```bash
+cd runner/_layout/_diag
+ls
+cat (Runner/Worker)_TIMESTAMP.log # view your log file
+```
+Run Runner:
+```bash
+cd runner/_layout
+./run.sh # run your custom runner
+```
### Editors
[Using Visual Studio Code](https://code.visualstudio.com/)
-[Using Visual Studio 2019](https://www.visualstudio.com/vs/)
+[Using Visual Studio](https://code.visualstudio.com/docs)
### Styling


@@ -40,7 +40,7 @@ Debian based OS (Debian, Ubuntu, Linux Mint)
- libssl1.1, libssl1.0.2 or libssl1.0.0
- libicu63, libicu60, libicu57 or libicu55
-Fedora based OS (Fedora, Redhat, Centos, Oracle Linux 7)
+Fedora based OS (Fedora, Red Hat Enterprise Linux, CentOS, Oracle Linux 7)
- lttng-ust
- openssl-libs


@@ -1,20 +1,29 @@
## Features
-- Runner support for GHES Alpha (#381 #386 #390 #393 $401)
-- Allow secrets context in Container.env (#388)
+- Resolve action download info from server (#508, #515, #550)
+- Print runner and machine name to log. (#539)
## Bugs
-- Raise warning when volume mount root. (#413)
-- Fix typo (#394)
+- Reduce input validation warnings (#506)
+- Fix null ref exception in SecretMasker caused by `hashfiles` timeout. (#516)
+- Add libicu66 to `./installDependencies.sh` for Ubuntu 20.04 (#535)
+- Fix DataContract with Token service (#532)
+- Skip search $PATH on command with fully qualified path (#526)
+- Restore SELinux context on service file when SELinux is enabled (#525)
## Misc
-- N/A
+- Remove SPS/Token migration code. Remove GHES url manipulate code. (#513)
+- Add sub-step for developer flow for clarity (#523)
+- Update Links and Language to Git + VSCode (#522)
+- Update runner configuration exception message (#540)
## Windows x64
-We recommend configuring the runner in a root folder of the Windows drive (e.g. "C:\actions-runner"). This will help avoid issues related to service identity folder permissions and long file path restrictions on Windows
+We recommend configuring the runner in a root folder of the Windows drive (e.g. "C:\actions-runner"). This will help avoid issues related to service identity folder permissions and long file path restrictions on Windows.
-```
-// Create a folder under the drive root
+The following snipped needs to be run on `powershell`:
+``` powershell
+# Create a folder under the drive root
mkdir \actions-runner ; cd \actions-runner
-// Download the latest runner package
+# Download the latest runner package
Invoke-WebRequest -Uri https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-win-x64-<RUNNER_VERSION>.zip -OutFile actions-runner-win-x64-<RUNNER_VERSION>.zip
-// Extract the installer
+# Extract the installer
Add-Type -AssemblyName System.IO.Compression.FileSystem ;
[System.IO.Compression.ZipFile]::ExtractToDirectory("$PWD\actions-runner-win-x64-<RUNNER_VERSION>.zip", "$PWD")
```
@@ -22,44 +31,44 @@ Add-Type -AssemblyName System.IO.Compression.FileSystem ;
## OSX
``` bash
-// Create a folder
+# Create a folder
mkdir actions-runner && cd actions-runner
-// Download the latest runner package
+# Download the latest runner package
curl -O -L https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-osx-x64-<RUNNER_VERSION>.tar.gz
-// Extract the installer
+# Extract the installer
tar xzf ./actions-runner-osx-x64-<RUNNER_VERSION>.tar.gz
```
## Linux x64
``` bash
-// Create a folder
+# Create a folder
mkdir actions-runner && cd actions-runner
-// Download the latest runner package
+# Download the latest runner package
curl -O -L https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-linux-x64-<RUNNER_VERSION>.tar.gz
-// Extract the installer
+# Extract the installer
tar xzf ./actions-runner-linux-x64-<RUNNER_VERSION>.tar.gz
```
## Linux arm64 (Pre-release)
``` bash
-// Create a folder
+# Create a folder
mkdir actions-runner && cd actions-runner
-// Download the latest runner package
+# Download the latest runner package
curl -O -L https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-linux-arm64-<RUNNER_VERSION>.tar.gz
-// Extract the installer
+# Extract the installer
tar xzf ./actions-runner-linux-arm64-<RUNNER_VERSION>.tar.gz
```
## Linux arm (Pre-release)
``` bash
-// Create a folder
+# Create a folder
mkdir actions-runner && cd actions-runner
-// Download the latest runner package
+# Download the latest runner package
curl -O -L https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-linux-arm-<RUNNER_VERSION>.tar.gz
-// Extract the installer
+# Extract the installer
tar xzf ./actions-runner-linux-arm-<RUNNER_VERSION>.tar.gz
```


@@ -1 +1 @@
-2.168.0
+<Update to ./src/runnerversion when creating release>

scripts/README.md (new file)

@@ -0,0 +1,4 @@
# Sample scripts for self-hosted runners
Here are some examples to work from if you'd like to automate your use of self-hosted runners.
See the docs [here](../docs/automate.md).


@@ -7,27 +7,29 @@ set -e
# Configures as a service
#
# Examples:
-# RUNNER_CFG_PAT=<yourPAT> ./create-latest-svc.sh myuser/myrepo
-# RUNNER_CFG_PAT=<yourPAT> ./create-latest-svc.sh myorg
+# RUNNER_CFG_PAT=<yourPAT> ./create-latest-svc.sh myuser/myrepo my.ghe.deployment.net
+# RUNNER_CFG_PAT=<yourPAT> ./create-latest-svc.sh myorg my.ghe.deployment.net
#
# Usage:
# export RUNNER_CFG_PAT=<yourPAT>
-# ./create-latest-svc scope [name] [user]
+# ./create-latest-svc scope [ghe_domain] [name] [user]
+#
+# scope       required  repo (:owner/:repo) or org (:organization)
+# ghe_domain  optional  the fully qualified domain name of your GitHub Enterprise Server deployment
+# name        optional  defaults to hostname
+# user        optional  user svc will run as. defaults to current
#
-# scope       required  repo (:owner/:repo) or org (:organization)
-# name        optional  defaults to hostname
-# user        optional  user svc will run as. defaults to current
-#
# Notes:
# PATS over envvars are more secure
# Should be used on VMs and not containers
# Works on OSX and Linux
# Assumes x64 arch
#
runner_scope=${1}
-runner_name=${2:-$(hostname)}
-svc_user=${3:-$USER}
+ghe_hostname=${2}
+runner_name=${3:-$(hostname)}
+svc_user=${4:-$USER}
echo "Configuring runner @ ${runner_scope}"
sudo echo
@@ -51,9 +53,9 @@ which curl || fatal "curl required. Please install in PATH with apt-get, brew,
which jq || fatal "jq required. Please install in PATH with apt-get, brew, etc"
# bail early if there's already a runner there. also sudo early
if [ -d ./runner ]; then
    fatal "Runner already exists. Use a different directory or delete ./runner"
fi
sudo -u ${svc_user} mkdir runner
@@ -66,15 +68,20 @@ sudo -u ${svc_user} mkdir runner
echo
echo "Generating a registration token..."
-# if the scope has a slash, it's an repo runner
-base_api_url="https://api.github.com/orgs"
-if [[ "$runner_scope" == *\/* ]]; then
-    base_api_url="https://api.github.com/repos"
+base_api_url="https://api.github.com"
+if [ -n "${ghe_hostname}" ]; then
+    base_api_url="https://${ghe_hostname}/api/v3"
fi
-export RUNNER_TOKEN=$(curl -s -X POST ${base_api_url}/${runner_scope}/actions/runners/registration-token -H "accept: application/vnd.github.everest-preview+json" -H "authorization: token ${RUNNER_CFG_PAT}" | jq -r '.token')
+# if the scope has a slash, it's a repo runner
+orgs_or_repos="orgs"
+if [[ "$runner_scope" == *\/* ]]; then
+    orgs_or_repos="repos"
+fi
-if [ -z "$RUNNER_TOKEN" ]; then fatal "Failed to get a token"; fi
+export RUNNER_TOKEN=$(curl -s -X POST ${base_api_url}/${orgs_or_repos}/${runner_scope}/actions/runners/registration-token -H "accept: application/vnd.github.everest-preview+json" -H "authorization: token ${RUNNER_CFG_PAT}" | jq -r '.token')
+if [ "null" == "$RUNNER_TOKEN" -o -z "$RUNNER_TOKEN" ]; then fatal "Failed to get a token"; fi
#---------------------------------------
# Download latest released and extract
@@ -82,6 +89,7 @@ if [ -z "$RUNNER_TOKEN" ]; then fatal "Failed to get a token"; fi
echo
echo "Downloading latest runner ..."
+# For the GHES Alpha, download the runner from github.com
latest_version_label=$(curl -s -X GET 'https://api.github.com/repos/actions/runner/releases/latest' | jq -r '.tag_name')
latest_version=$(echo ${latest_version_label:1})
runner_file="actions-runner-${runner_plat}-x64-${latest_version}.tar.gz"
@@ -116,6 +124,10 @@ pushd ./runner
# Unattend config
#---------------------------------------
runner_url="https://github.com/${runner_scope}"
+if [ -n "${ghe_hostname}" ]; then
+    runner_url="https://${ghe_hostname}/${runner_scope}"
+fi
echo
echo "Configuring ${runner_name} @ $runner_url"
echo "./config.sh --unattended --url $runner_url --token *** --name $runner_name"
@@ -127,9 +139,9 @@ sudo -E -u ${svc_user} ./config.sh --unattended --url $runner_url --token $RUNNE
echo
echo "Configuring as a service ..."
prefix=""
if [ "${runner_plat}" == "linux" ]; then
    prefix="sudo "
fi
${prefix}./svc.sh install ${svc_user}
${prefix}./svc.sh start


@@ -154,7 +154,16 @@ function Invoke-With-Retry([ScriptBlock]$ScriptBlock, [int]$MaxAttempts = 3, [in
function Get-Machine-Architecture() {
    Say-Invocation $MyInvocation
-   # possible values: amd64, x64, x86, arm64, arm
+   # On PS x86, PROCESSOR_ARCHITECTURE reports x86 even on x64 systems.
+   # To get the correct architecture, we need to use PROCESSOR_ARCHITEW6432.
+   # PS x64 doesn't define this, so we fall back to PROCESSOR_ARCHITECTURE.
+   # Possible values: amd64, x64, x86, arm64, arm
+   if( $ENV:PROCESSOR_ARCHITEW6432 -ne $null )
+   {
+       return $ENV:PROCESSOR_ARCHITEW6432
+   }
    return $ENV:PROCESSOR_ARCHITECTURE
}
@@ -684,3 +693,196 @@ Prepend-Sdk-InstallRoot-To-Path -InstallRoot $InstallRoot -BinFolderRelativePath
Say "Installation finished" Say "Installation finished"
exit 0 exit 0
+# SIG # Begin signature block
+#   [base64-encoded Authenticode signature block]
+# SIG # End signature block

View File

@@ -1,4 +1,4 @@
#!/bin/bash #!/usr/bin/env bash
PACKAGERUNTIME=$1 PACKAGERUNTIME=$1
PRECACHE=$2 PRECACHE=$2

View File

@@ -1,6 +1,7 @@
#!/bin/bash #!/usr/bin/env bash
SVC_NAME="{{SvcNameVar}}" SVC_NAME="{{SvcNameVar}}"
SVC_NAME=${SVC_NAME// /_}
SVC_DESCRIPTION="{{SvcDescription}}" SVC_DESCRIPTION="{{SvcDescription}}"
user_id=`id -u` user_id=`id -u`

View File

@@ -1,4 +1,4 @@
#!/bin/bash #!/usr/bin/env bash
user_id=`id -u` user_id=`id -u`
@@ -9,7 +9,7 @@ fi
# Determine OS type # Determine OS type
# Debian based OS (Debian, Ubuntu, Linux Mint) has /etc/debian_version # Debian based OS (Debian, Ubuntu, Linux Mint) has /etc/debian_version
# Fedora based OS (Fedora, Redhat, Centos, Oracle Linux 7) has /etc/redhat-release # Fedora based OS (Fedora, Red Hat Enterprise Linux, CentOS, Oracle Linux 7) has /etc/redhat-release
# SUSE based OS (OpenSUSE, SUSE Enterprise) has ID_LIKE=suse in /etc/os-release # SUSE based OS (OpenSUSE, SUSE Enterprise) has ID_LIKE=suse in /etc/os-release
function print_errormessage() function print_errormessage()
@@ -70,8 +70,8 @@ then
exit 1 exit 1
fi fi
# libicu version prefer: libicu63 -> libicu60 -> libicu57 -> libicu55 -> libicu52 # libicu version prefer: libicu66 -> libicu63 -> libicu60 -> libicu57 -> libicu55 -> libicu52
apt install -y libicu63 || apt install -y libicu60 || apt install -y libicu57 || apt install -y libicu55 || apt install -y libicu52 apt install -y libicu66 || apt install -y libicu63 || apt install -y libicu60 || apt install -y libicu57 || apt install -y libicu55 || apt install -y libicu52
if [ $? -ne 0 ] if [ $? -ne 0 ]
then then
echo "'apt' failed with exit code '$?'" echo "'apt' failed with exit code '$?'"
@@ -99,8 +99,8 @@ then
exit 1 exit 1
fi fi
# libicu version prefer: libicu63 -> libicu60 -> libicu57 -> libicu55 -> libicu52 # libicu version prefer: libicu66 -> libicu63 -> libicu60 -> libicu57 -> libicu55 -> libicu52
apt-get install -y libicu63 || apt-get install -y libicu60 || apt install -y libicu57 || apt install -y libicu55 || apt install -y libicu52 apt-get install -y libicu66 || apt-get install -y libicu63 || apt-get install -y libicu60 || apt install -y libicu57 || apt install -y libicu55 || apt install -y libicu52
if [ $? -ne 0 ] if [ $? -ne 0 ]
then then
echo "'apt-get' failed with exit code '$?'" echo "'apt-get' failed with exit code '$?'"
@@ -116,12 +116,12 @@ then
elif [ -e /etc/redhat-release ] elif [ -e /etc/redhat-release ]
then then
echo "The current OS is Fedora based" echo "The current OS is Fedora based"
echo "--------Redhat Version--------" echo "--Fedora/RHEL/CentOS Version--"
cat /etc/redhat-release cat /etc/redhat-release
echo "------------------------------" echo "------------------------------"
# use dnf on fedora # use dnf on fedora
# use yum on centos and redhat # use yum on centos and rhel
if [ -e /etc/fedora-release ] if [ -e /etc/fedora-release ]
then then
command -v dnf command -v dnf
@@ -191,7 +191,7 @@ then
redhatRelease=$(</etc/redhat-release) redhatRelease=$(</etc/redhat-release)
if [[ $redhatRelease == "CentOS release 6."* || $redhatRelease == "Red Hat Enterprise Linux Server release 6."* ]] if [[ $redhatRelease == "CentOS release 6."* || $redhatRelease == "Red Hat Enterprise Linux Server release 6."* ]]
then then
echo "The current OS is Red Hat Enterprise Linux 6 or Centos 6" echo "The current OS is Red Hat Enterprise Linux 6 or CentOS 6"
# Install known dependencies, as a best effort. # Install known dependencies, as a best effort.
# The remaining dependencies are covered by the GitHub doc that will be shown by `print_rhel6message` # The remaining dependencies are covered by the GitHub doc that will be shown by `print_rhel6message`

View File

@@ -1,4 +1,4 @@
#!/bin/bash #!/usr/bin/env bash
# convert SIGTERM signal to SIGINT # convert SIGTERM signal to SIGINT
# for more info on how to propagate SIGTERM to a child process see: http://veithen.github.io/2014/11/16/sigterm-propagation.html # for more info on how to propagate SIGTERM to a child process see: http://veithen.github.io/2014/11/16/sigterm-propagation.html

View File

@@ -1,6 +1,7 @@
#!/bin/bash #!/usr/bin/env bash
SVC_NAME="{{SvcNameVar}}" SVC_NAME="{{SvcNameVar}}"
SVC_NAME=${SVC_NAME// /_}
SVC_DESCRIPTION="{{SvcDescription}}" SVC_DESCRIPTION="{{SvcDescription}}"
SVC_CMD=$1 SVC_CMD=$1
@@ -62,12 +63,25 @@ function install()
sed "s/{{User}}/${run_as_user}/g; s/{{Description}}/$(echo ${SVC_DESCRIPTION} | sed -e 's/[\/&]/\\&/g')/g; s/{{RunnerRoot}}/$(echo ${RUNNER_ROOT} | sed -e 's/[\/&]/\\&/g')/g;" "${TEMPLATE_PATH}" > "${TEMP_PATH}" || failed "failed to create replacement temp file" sed "s/{{User}}/${run_as_user}/g; s/{{Description}}/$(echo ${SVC_DESCRIPTION} | sed -e 's/[\/&]/\\&/g')/g; s/{{RunnerRoot}}/$(echo ${RUNNER_ROOT} | sed -e 's/[\/&]/\\&/g')/g;" "${TEMPLATE_PATH}" > "${TEMP_PATH}" || failed "failed to create replacement temp file"
mv "${TEMP_PATH}" "${UNIT_PATH}" || failed "failed to copy unit file" mv "${TEMP_PATH}" "${UNIT_PATH}" || failed "failed to copy unit file"
# Recent Fedora-based Linux distributions (CentOS/RHEL) have SELinux enabled by default
# We need to restore the security context on the unit file we added, otherwise systemd has no access to it.
command -v getenforce > /dev/null
if [ $? -eq 0 ]
then
selinuxEnabled=$(getenforce)
if [[ $selinuxEnabled == "Enforcing" ]]
then
# SELinux is enforcing, so restore the SELinux context on the service file
restorecon -r -v "${UNIT_PATH}" || failed "failed to restore SELinux context on ${UNIT_PATH}"
fi
fi
# unit file should not be executable and world writable # unit file should not be executable and world writable
chmod 664 ${UNIT_PATH} || failed "failed to set permissions on ${UNIT_PATH}" chmod 664 "${UNIT_PATH}" || failed "failed to set permissions on ${UNIT_PATH}"
systemctl daemon-reload || failed "failed to reload daemons" systemctl daemon-reload || failed "failed to reload daemons"
# Since we started with sudo, runsvc.sh will be owned by root. Change this to current login user. # Since we started with sudo, runsvc.sh will be owned by root. Change this to current login user.
cp ./bin/runsvc.sh ./runsvc.sh || failed "failed to copy runsvc.sh" cp ./bin/runsvc.sh ./runsvc.sh || failed "failed to copy runsvc.sh"
chown ${run_as_uid}:${run_as_gid} ./runsvc.sh || failed "failed to set owner for runsvc.sh" chown ${run_as_uid}:${run_as_gid} ./runsvc.sh || failed "failed to set owner for runsvc.sh"
chmod 755 ./runsvc.sh || failed "failed to set permission for runsvc.sh" chmod 755 ./runsvc.sh || failed "failed to set permission for runsvc.sh"

View File

@@ -1,4 +1,4 @@
#!/bin/bash #!/usr/bin/env bash
# runner will replace key words in the template and generate a batch script to run. # runner will replace key words in the template and generate a batch script to run.
# Keywords: # Keywords:

View File

@@ -1,4 +1,4 @@
#!/bin/bash #!/usr/bin/env bash
user_id=`id -u` user_id=`id -u`

View File

@@ -1,4 +1,4 @@
#!/bin/bash #!/usr/bin/env bash
varCheckList=( varCheckList=(
'LANG' 'LANG'

View File

@@ -1,4 +1,4 @@
#!/bin/bash #!/usr/bin/env bash
# Validate not sudo # Validate not sudo
user_id=`id -u` user_id=`id -u`

View File

@@ -108,9 +108,9 @@ namespace GitHub.Runner.Common
CredentialData GetMigratedCredentials(); CredentialData GetMigratedCredentials();
RunnerSettings GetSettings(); RunnerSettings GetSettings();
void SaveCredential(CredentialData credential); void SaveCredential(CredentialData credential);
void SaveMigratedCredential(CredentialData credential);
void SaveSettings(RunnerSettings settings); void SaveSettings(RunnerSettings settings);
void DeleteCredential(); void DeleteCredential();
void DeleteMigratedCredential();
void DeleteSettings(); void DeleteSettings();
} }
@@ -232,21 +232,6 @@ namespace GitHub.Runner.Common
File.SetAttributes(_credFilePath, File.GetAttributes(_credFilePath) | FileAttributes.Hidden); File.SetAttributes(_credFilePath, File.GetAttributes(_credFilePath) | FileAttributes.Hidden);
} }
public void SaveMigratedCredential(CredentialData credential)
{
Trace.Info("Saving {0} migrated credential @ {1}", credential.Scheme, _migratedCredFilePath);
if (File.Exists(_migratedCredFilePath))
{
// Delete existing credential file first, since the file is hidden and not able to overwrite.
Trace.Info("Delete exist runner migrated credential file.");
IOUtil.DeleteFile(_migratedCredFilePath);
}
IOUtil.SaveObject(credential, _migratedCredFilePath);
Trace.Info("Migrated Credentials Saved.");
File.SetAttributes(_migratedCredFilePath, File.GetAttributes(_migratedCredFilePath) | FileAttributes.Hidden);
}
public void SaveSettings(RunnerSettings settings) public void SaveSettings(RunnerSettings settings)
{ {
Trace.Info("Saving runner settings."); Trace.Info("Saving runner settings.");
@@ -268,6 +253,11 @@ namespace GitHub.Runner.Common
IOUtil.Delete(_migratedCredFilePath, default(CancellationToken)); IOUtil.Delete(_migratedCredFilePath, default(CancellationToken));
} }
public void DeleteMigratedCredential()
{
IOUtil.Delete(_migratedCredFilePath, default(CancellationToken));
}
public void DeleteSettings() public void DeleteSettings()
{ {
IOUtil.Delete(_configFilePath, default(CancellationToken)); IOUtil.Delete(_configFilePath, default(CancellationToken));

View File

@@ -137,6 +137,9 @@ namespace GitHub.Runner.Common
public const int RunnerUpdating = 3; public const int RunnerUpdating = 3;
public const int RunOnceRunnerUpdating = 4; public const int RunOnceRunnerUpdating = 4;
} }
public static readonly string InternalTelemetryIssueDataKey = "_internal_telemetry";
public static readonly string WorkerCrash = "WORKER_CRASH";
} }
public static class RunnerEvent public static class RunnerEvent

View File

@@ -1,19 +1,18 @@
using GitHub.Runner.Common.Util; using System;
using System;
using System.Collections.Concurrent; using System.Collections.Concurrent;
using System.Collections.Generic; using System.Collections.Generic;
using System.Diagnostics;
using System.Diagnostics.Tracing;
using System.Globalization; using System.Globalization;
using System.IO; using System.IO;
using System.Linq; using System.Linq;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Reflection; using System.Reflection;
using System.Runtime.Loader; using System.Runtime.Loader;
using System.Threading; using System.Threading;
using System.Threading.Tasks; using System.Threading.Tasks;
using System.Diagnostics;
using System.Net.Http;
using System.Diagnostics.Tracing;
using GitHub.DistributedTask.Logging; using GitHub.DistributedTask.Logging;
using System.Net.Http.Headers;
using GitHub.Runner.Sdk; using GitHub.Runner.Sdk;
namespace GitHub.Runner.Common namespace GitHub.Runner.Common
@@ -89,6 +88,7 @@ namespace GitHub.Runner.Common
this.SecretMasker.AddValueEncoder(ValueEncoders.JsonStringEscape); this.SecretMasker.AddValueEncoder(ValueEncoders.JsonStringEscape);
this.SecretMasker.AddValueEncoder(ValueEncoders.UriDataEscape); this.SecretMasker.AddValueEncoder(ValueEncoders.UriDataEscape);
this.SecretMasker.AddValueEncoder(ValueEncoders.XmlDataEscape); this.SecretMasker.AddValueEncoder(ValueEncoders.XmlDataEscape);
this.SecretMasker.AddValueEncoder(ValueEncoders.TrimDoubleQuotes);
// Create the trace manager. // Create the trace manager.
if (string.IsNullOrEmpty(logFile)) if (string.IsNullOrEmpty(logFile))
@@ -614,9 +614,8 @@ namespace GitHub.Runner.Common
{ {
public static HttpClientHandler CreateHttpClientHandler(this IHostContext context) public static HttpClientHandler CreateHttpClientHandler(this IHostContext context)
{ {
HttpClientHandler clientHandler = new HttpClientHandler(); var handlerFactory = context.GetService<IHttpClientHandlerFactory>();
clientHandler.Proxy = context.WebProxy; return handlerFactory.CreateClientHandler(context.WebProxy);
return clientHandler;
} }
} }
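
Among the changes above, the secret masker gains a ValueEncoders.TrimDoubleQuotes encoder so that a secret registered with surrounding quotes is still masked when it later appears unquoted in logs. A minimal sketch of that idea, using a hypothetical stand-alone masker rather than the runner's ISecretMasker API:

    using System;
    using System.Collections.Generic;
    using System.Linq;

    static class TrimDoubleQuotesDemo
    {
        // Value encoder: given a registered secret, produce the variant that
        // should also be masked (here, the secret with surrounding quotes removed).
        static string TrimDoubleQuotes(string value) =>
            value.Length >= 2 && value.StartsWith("\"") && value.EndsWith("\"")
                ? value.Substring(1, value.Length - 2)
                : value;

        static string Mask(string line, IEnumerable<string> secrets)
        {
            // Mask both the raw secret and its quote-trimmed variant.
            foreach (var variant in secrets
                .SelectMany(s => new[] { s, TrimDoubleQuotes(s) })
                .Where(v => !string.IsNullOrEmpty(v))
                .Distinct())
            {
                line = line.Replace(variant, "***");
            }
            return line;
        }

        static void Main()
        {
            var secrets = new[] { "\"hunter2\"" }; // the secret was registered with quotes
            Console.WriteLine(Mask("token=hunter2 used", secrets)); // token=*** used
        }
    }
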

View File

@@ -0,0 +1,19 @@
using System.Net.Http;
using GitHub.Runner.Sdk;
namespace GitHub.Runner.Common
{
[ServiceLocator(Default = typeof(HttpClientHandlerFactory))]
public interface IHttpClientHandlerFactory : IRunnerService
{
HttpClientHandler CreateClientHandler(RunnerWebProxy webProxy);
}
public class HttpClientHandlerFactory : RunnerService, IHttpClientHandlerFactory
{
public HttpClientHandler CreateClientHandler(RunnerWebProxy webProxy)
{
return new HttpClientHandler() { Proxy = webProxy };
}
}
}
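
Moving HttpClientHandler creation behind the IHttpClientHandlerFactory service shown above makes proxy handling swappable, for example in tests. A rough sketch of how a test double could plug in; the interface is re-declared locally with IWebProxy because the runner's RunnerWebProxy and ServiceLocator plumbing are omitted:

    using System;
    using System.Net;
    using System.Net.Http;

    // Local stand-ins for illustration only; the real runner types differ.
    interface IHttpClientHandlerFactory
    {
        HttpClientHandler CreateClientHandler(IWebProxy webProxy);
    }

    sealed class DefaultHandlerFactory : IHttpClientHandlerFactory
    {
        public HttpClientHandler CreateClientHandler(IWebProxy webProxy) =>
            new HttpClientHandler() { Proxy = webProxy };
    }

    // A test double that records the proxy it was asked to use.
    sealed class RecordingHandlerFactory : IHttpClientHandlerFactory
    {
        public IWebProxy LastProxy { get; private set; }

        public HttpClientHandler CreateClientHandler(IWebProxy webProxy)
        {
            LastProxy = webProxy;
            return new HttpClientHandler(); // no proxy, suitable for offline tests
        }
    }

    static class Program
    {
        static void Main()
        {
            var factory = new RecordingHandlerFactory();
            using var handler = factory.CreateClientHandler(new WebProxy("http://proxy:8080"));
            using var client = new HttpClient(handler);
            Console.WriteLine(factory.LastProxy != null); // True
        }
    }
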

View File

@@ -22,6 +22,7 @@ namespace GitHub.Runner.Common
Task<List<TimelineRecord>> UpdateTimelineRecordsAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, IEnumerable<TimelineRecord> records, CancellationToken cancellationToken); Task<List<TimelineRecord>> UpdateTimelineRecordsAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, IEnumerable<TimelineRecord> records, CancellationToken cancellationToken);
Task RaisePlanEventAsync<T>(Guid scopeIdentifier, string hubName, Guid planId, T eventData, CancellationToken cancellationToken) where T : JobEvent; Task RaisePlanEventAsync<T>(Guid scopeIdentifier, string hubName, Guid planId, T eventData, CancellationToken cancellationToken) where T : JobEvent;
Task<Timeline> GetTimelineAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, CancellationToken cancellationToken); Task<Timeline> GetTimelineAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, CancellationToken cancellationToken);
Task<ActionDownloadInfoCollection> ResolveActionDownloadInfoAsync(Guid scopeIdentifier, string hubName, Guid planId, ActionReferenceList actions, CancellationToken cancellationToken);
} }
public sealed class JobServer : RunnerService, IJobServer public sealed class JobServer : RunnerService, IJobServer
@@ -113,5 +114,14 @@ namespace GitHub.Runner.Common
CheckConnection(); CheckConnection();
return _taskClient.GetTimelineAsync(scopeIdentifier, hubName, planId, timelineId, includeRecords: true, cancellationToken: cancellationToken); return _taskClient.GetTimelineAsync(scopeIdentifier, hubName, planId, timelineId, includeRecords: true, cancellationToken: cancellationToken);
} }
//-----------------------------------------------------------------
// Action download info
//-----------------------------------------------------------------
public Task<ActionDownloadInfoCollection> ResolveActionDownloadInfoAsync(Guid scopeIdentifier, string hubName, Guid planId, ActionReferenceList actions, CancellationToken cancellationToken)
{
CheckConnection();
return _taskClient.ResolveActionDownloadInfoAsync(scopeIdentifier, hubName, planId, actions, cancellationToken: cancellationToken);
}
} }
} }

View File

@@ -50,10 +50,6 @@ namespace GitHub.Runner.Common
// agent update // agent update
Task<TaskAgent> UpdateAgentUpdateStateAsync(int agentPoolId, int agentId, string currentState); Task<TaskAgent> UpdateAgentUpdateStateAsync(int agentPoolId, int agentId, string currentState);
// runner authorization url
Task<string> GetRunnerAuthUrlAsync(int runnerPoolId, int runnerId);
Task ReportRunnerAuthUrlErrorAsync(int runnerPoolId, int runnerId, string error);
} }
public sealed class RunnerServer : RunnerService, IRunnerServer public sealed class RunnerServer : RunnerService, IRunnerServer

View File

@@ -96,13 +96,14 @@ namespace GitHub.Runner.Common
Trace.Info($"WRITE: {message}"); Trace.Info($"WRITE: {message}");
if (!Silent) if (!Silent)
{ {
if(colorCode != null) if (colorCode != null)
{ {
Console.ForegroundColor = colorCode.Value; Console.ForegroundColor = colorCode.Value;
Console.Write(message); Console.Write(message);
Console.ResetColor(); Console.ResetColor();
} }
else { else
{
Console.Write(message); Console.Write(message);
} }
} }
@@ -120,13 +121,14 @@ namespace GitHub.Runner.Common
Trace.Info($"WRITE LINE: {line}"); Trace.Info($"WRITE LINE: {line}");
if (!Silent) if (!Silent)
{ {
if(colorCode != null) if (colorCode != null)
{ {
Console.ForegroundColor = colorCode.Value; Console.ForegroundColor = colorCode.Value;
Console.WriteLine(line); Console.WriteLine(line);
Console.ResetColor(); Console.ResetColor();
} }
else { else
{
Console.WriteLine(line); Console.WriteLine(line);
} }
} }

View File

@@ -92,9 +92,11 @@ namespace GitHub.Runner.Listener.Configuration
_term.WriteSection("Authentication"); _term.WriteSection("Authentication");
while (true) while (true)
{ {
// Get the URL // When testing against a dev deployment of Actions Service, set this environment variable
var useDevActionsServiceUrl = Environment.GetEnvironmentVariable("USE_DEV_ACTIONS_SERVICE_URL");
var inputUrl = command.GetUrl(); var inputUrl = command.GetUrl();
if (inputUrl.Contains("codedev.ms", StringComparison.OrdinalIgnoreCase)) if (inputUrl.Contains("codedev.ms", StringComparison.OrdinalIgnoreCase)
|| useDevActionsServiceUrl != null)
{ {
runnerSettings.ServerUrl = inputUrl; runnerSettings.ServerUrl = inputUrl;
// Get the credentials // Get the credentials
@@ -117,6 +119,19 @@ namespace GitHub.Runner.Listener.Configuration
// Determine the service deployment type based on connection data. (Hosted/OnPremises) // Determine the service deployment type based on connection data. (Hosted/OnPremises)
runnerSettings.IsHostedServer = runnerSettings.GitHubUrl == null || IsHostedServer(new UriBuilder(runnerSettings.GitHubUrl)); runnerSettings.IsHostedServer = runnerSettings.GitHubUrl == null || IsHostedServer(new UriBuilder(runnerSettings.GitHubUrl));
// Fail configuration when the Actions service URL and the GHES URL have different hosts
if (!runnerSettings.IsHostedServer)
{
// Example actionsServerUrl is https://my-ghes/_services/pipelines/[...]
// Example githubServerUrl is https://my-ghes
var actionsServerUrl = new Uri(runnerSettings.ServerUrl);
var githubServerUrl = new Uri(runnerSettings.GitHubUrl);
if (!string.Equals(actionsServerUrl.Authority, githubServerUrl.Authority, StringComparison.OrdinalIgnoreCase))
{
throw new InvalidOperationException($"GitHub Actions is not properly configured in GHES. GHES url: {runnerSettings.GitHubUrl}, Actions url: {runnerSettings.ServerUrl}.");
}
}
// Validate can connect. // Validate can connect.
await _runnerServer.ConnectAsync(new Uri(runnerSettings.ServerUrl), creds); await _runnerServer.ConnectAsync(new Uri(runnerSettings.ServerUrl), creds);
@@ -195,7 +210,7 @@ namespace GitHub.Runner.Listener.Configuration
else if (command.Unattended) else if (command.Unattended)
{ {
// if not replace and it is unattended config. // if not replace and it is unattended config.
throw new TaskAgentExistsException($"Pool {runnerSettings.PoolId} already contains a runner with name {runnerSettings.AgentName}."); throw new TaskAgentExistsException($"A runner exists with the same name {runnerSettings.AgentName}.");
} }
} }
else else
@@ -219,36 +234,11 @@ namespace GitHub.Runner.Listener.Configuration
// Add Agent Id to settings // Add Agent Id to settings
runnerSettings.AgentId = agent.Id; runnerSettings.AgentId = agent.Id;
// respect the serverUrl resolve by server.
// in case of agent configured using collection url instead of account url.
string agentServerUrl;
if (agent.Properties.TryGetValidatedValue<string>("ServerUrl", out agentServerUrl) &&
!string.IsNullOrEmpty(agentServerUrl))
{
Trace.Info($"Agent server url resolve by server: '{agentServerUrl}'.");
// we need make sure the Schema/Host/Port component of the url remain the same.
UriBuilder inputServerUrl = new UriBuilder(runnerSettings.ServerUrl);
UriBuilder serverReturnedServerUrl = new UriBuilder(agentServerUrl);
if (Uri.Compare(inputServerUrl.Uri, serverReturnedServerUrl.Uri, UriComponents.SchemeAndServer, UriFormat.Unescaped, StringComparison.OrdinalIgnoreCase) != 0)
{
inputServerUrl.Path = serverReturnedServerUrl.Path;
Trace.Info($"Replace server returned url's scheme://host:port component with user input server url's scheme://host:port: '{inputServerUrl.Uri.AbsoluteUri}'.");
runnerSettings.ServerUrl = inputServerUrl.Uri.AbsoluteUri;
}
else
{
runnerSettings.ServerUrl = agentServerUrl;
}
}
// See if the server supports our OAuth key exchange for credentials // See if the server supports our OAuth key exchange for credentials
if (agent.Authorization != null && if (agent.Authorization != null &&
agent.Authorization.ClientId != Guid.Empty && agent.Authorization.ClientId != Guid.Empty &&
agent.Authorization.AuthorizationUrl != null) agent.Authorization.AuthorizationUrl != null)
{ {
UriBuilder configServerUrl = new UriBuilder(runnerSettings.ServerUrl);
UriBuilder oauthEndpointUrlBuilder = new UriBuilder(agent.Authorization.AuthorizationUrl);
var credentialData = new CredentialData var credentialData = new CredentialData
{ {
Scheme = Constants.Configuration.OAuth, Scheme = Constants.Configuration.OAuth,
@@ -256,7 +246,6 @@ namespace GitHub.Runner.Listener.Configuration
{ {
{ "clientId", agent.Authorization.ClientId.ToString("D") }, { "clientId", agent.Authorization.ClientId.ToString("D") },
{ "authorizationUrl", agent.Authorization.AuthorizationUrl.AbsoluteUri }, { "authorizationUrl", agent.Authorization.AuthorizationUrl.AbsoluteUri },
{ "oauthEndpointUrl", oauthEndpointUrlBuilder.Uri.AbsoluteUri },
}, },
}; };
@@ -462,7 +451,7 @@ namespace GitHub.Runner.Listener.Configuration
// update should replace the existing labels // update should replace the existing labels
agent.Version = BuildConstants.RunnerPackage.Version; agent.Version = BuildConstants.RunnerPackage.Version;
agent.OSDescription = RuntimeInformation.OSDescription; agent.OSDescription = RuntimeInformation.OSDescription;
agent.Labels.Clear(); agent.Labels.Clear();
agent.Labels.Add(new AgentLabel("self-hosted", LabelType.System)); agent.Labels.Add(new AgentLabel("self-hosted", LabelType.System));
@@ -473,7 +462,7 @@ namespace GitHub.Runner.Listener.Configuration
{ {
agent.Labels.Add(new AgentLabel(userLabel, LabelType.User)); agent.Labels.Add(new AgentLabel(userLabel, LabelType.User));
} }
return agent; return agent;
} }
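
The configuration change above refuses to register a runner when the Actions service URL and the GHES URL resolve to different hosts, comparing Uri.Authority case-insensitively. The same check in isolation, with made-up URLs:

    using System;

    static class GhesUrlCheck
    {
        static void RequireSameAuthority(string actionsUrl, string githubUrl)
        {
            // Authority is host[:port], so "https://my-ghes/_services/pipelines/x"
            // and "https://my-ghes" both yield "my-ghes".
            var actions = new Uri(actionsUrl);
            var github = new Uri(githubUrl);
            if (!string.Equals(actions.Authority, github.Authority, StringComparison.OrdinalIgnoreCase))
            {
                throw new InvalidOperationException(
                    $"GitHub Actions is not properly configured in GHES. GHES url: {githubUrl}, Actions url: {actionsUrl}.");
            }
        }

        static void Main()
        {
            RequireSameAuthority("https://my-ghes/_services/pipelines/abc", "https://my-ghes"); // ok
            try
            {
                RequireSameAuthority("https://other-host/_services/pipelines/abc", "https://my-ghes"); // throws
            }
            catch (InvalidOperationException ex)
            {
                Console.WriteLine(ex.Message);
            }
        }
    }
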

View File

@@ -13,7 +13,7 @@ namespace GitHub.Runner.Listener.Configuration
public interface ICredentialManager : IRunnerService public interface ICredentialManager : IRunnerService
{ {
ICredentialProvider GetCredentialProvider(string credType); ICredentialProvider GetCredentialProvider(string credType);
VssCredentials LoadCredentials(bool preferMigrated = true); VssCredentials LoadCredentials();
} }
public class CredentialManager : RunnerService, ICredentialManager public class CredentialManager : RunnerService, ICredentialManager
@@ -40,7 +40,7 @@ namespace GitHub.Runner.Listener.Configuration
return creds; return creds;
} }
public VssCredentials LoadCredentials(bool preferMigrated = true) public VssCredentials LoadCredentials()
{ {
IConfigurationStore store = HostContext.GetService<IConfigurationStore>(); IConfigurationStore store = HostContext.GetService<IConfigurationStore>();
@@ -50,14 +50,16 @@ namespace GitHub.Runner.Listener.Configuration
} }
CredentialData credData = store.GetCredentials(); CredentialData credData = store.GetCredentials();
var migratedCred = store.GetMigratedCredentials();
if (preferMigrated) if (migratedCred != null)
{ {
var migratedCred = store.GetMigratedCredentials(); credData = migratedCred;
if (migratedCred != null)
{ // Re-write .credentials with Token URL
credData = migratedCred; store.SaveCredential(credData);
}
// Delete .credentials_migrated
store.DeleteMigratedCredential();
} }
ICredentialProvider credProv = GetCredentialProvider(credData.Scheme); ICredentialProvider credProv = GetCredentialProvider(credData.Scheme);
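
The CredentialManager hunk above replaces "prefer the migrated credentials at load time" with a one-shot promotion: if a migrated credentials file exists, its contents are written back to .credentials and the migrated file is deleted. A rough file-level sketch of that promote-then-delete step, using plain files and made-up paths instead of the runner's IConfigurationStore:

    using System;
    using System.IO;

    static class CredentialPromotion
    {
        // Promote the migrated credentials file over the original, then remove it.
        static void PromoteMigratedCredentials(string credFile, string migratedCredFile)
        {
            if (!File.Exists(migratedCredFile))
            {
                return; // nothing to promote
            }

            var migrated = File.ReadAllText(migratedCredFile);

            // The runner stores these as hidden files; clear attributes before
            // overwriting so a hidden .credentials file can be replaced.
            if (File.Exists(credFile))
            {
                File.SetAttributes(credFile, FileAttributes.Normal);
            }
            File.WriteAllText(credFile, migrated);
            File.Delete(migratedCredFile);
        }

        static void Main()
        {
            var dir = Directory.CreateDirectory(Path.Combine(Path.GetTempPath(), "cred-demo")).FullName;
            var cred = Path.Combine(dir, ".credentials");
            var migrated = Path.Combine(dir, ".credentials_migrated");
            File.WriteAllText(cred, "{ \"scheme\": \"OAuth\", \"authorizationUrl\": \"old\" }");
            File.WriteAllText(migrated, "{ \"scheme\": \"OAuth\", \"authorizationUrl\": \"new\" }");

            PromoteMigratedCredentials(cred, migrated);
            Console.WriteLine(File.ReadAllText(cred)); // new contents
            Console.WriteLine(File.Exists(migrated));  // False
        }
    }
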

View File

@@ -858,7 +858,6 @@ namespace GitHub.Runner.Listener
} }
} }
// TODO: We need send detailInfo back to DT in order to add an issue for the job
private async Task CompleteJobRequestAsync(int poolId, Pipelines.AgentJobRequestMessage message, Guid lockToken, TaskResult result, string detailInfo = null) private async Task CompleteJobRequestAsync(int poolId, Pipelines.AgentJobRequestMessage message, Guid lockToken, TaskResult result, string detailInfo = null)
{ {
Trace.Entering(); Trace.Entering();
@@ -952,8 +951,10 @@ namespace GitHub.Runner.Listener
ArgUtil.NotNull(timeline, nameof(timeline)); ArgUtil.NotNull(timeline, nameof(timeline));
TimelineRecord jobRecord = timeline.Records.FirstOrDefault(x => x.Id == message.JobId && x.RecordType == "Job"); TimelineRecord jobRecord = timeline.Records.FirstOrDefault(x => x.Id == message.JobId && x.RecordType == "Job");
ArgUtil.NotNull(jobRecord, nameof(jobRecord)); ArgUtil.NotNull(jobRecord, nameof(jobRecord));
var unhandledExceptionIssue = new Issue() { Type = IssueType.Error, Message = errorMessage };
unhandledExceptionIssue.Data[Constants.Runner.InternalTelemetryIssueDataKey] = Constants.Runner.WorkerCrash;
jobRecord.ErrorCount++; jobRecord.ErrorCount++;
jobRecord.Issues.Add(new Issue() { Type = IssueType.Error, Message = errorMessage }); jobRecord.Issues.Add(unhandledExceptionIssue);
await jobServer.UpdateTimelineRecordsAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, new TimelineRecord[] { jobRecord }, CancellationToken.None); await jobServer.UpdateTimelineRecordsAsync(message.Plan.ScopeIdentifier, message.Plan.PlanType, message.Plan.PlanId, message.Timeline.Id, new TimelineRecord[] { jobRecord }, CancellationToken.None);
} }
catch (Exception ex) catch (Exception ex)

View File

@@ -13,10 +13,7 @@ using System.Diagnostics;
using System.Runtime.InteropServices; using System.Runtime.InteropServices;
using GitHub.Runner.Common; using GitHub.Runner.Common;
using GitHub.Runner.Sdk; using GitHub.Runner.Sdk;
using GitHub.Services.WebApi;
using System.Runtime.CompilerServices;
[assembly: InternalsVisibleTo("Test")]
namespace GitHub.Runner.Listener namespace GitHub.Runner.Listener
{ {
[ServiceLocator(Default = typeof(MessageListener))] [ServiceLocator(Default = typeof(MessageListener))]
@@ -35,30 +32,18 @@ namespace GitHub.Runner.Listener
private ITerminal _term; private ITerminal _term;
private IRunnerServer _runnerServer; private IRunnerServer _runnerServer;
private TaskAgentSession _session; private TaskAgentSession _session;
private ICredentialManager _credMgr;
private IConfigurationStore _configStore;
private TimeSpan _getNextMessageRetryInterval; private TimeSpan _getNextMessageRetryInterval;
private readonly TimeSpan _sessionCreationRetryInterval = TimeSpan.FromSeconds(30); private readonly TimeSpan _sessionCreationRetryInterval = TimeSpan.FromSeconds(30);
private readonly TimeSpan _sessionConflictRetryLimit = TimeSpan.FromMinutes(4); private readonly TimeSpan _sessionConflictRetryLimit = TimeSpan.FromMinutes(4);
private readonly TimeSpan _clockSkewRetryLimit = TimeSpan.FromMinutes(30); private readonly TimeSpan _clockSkewRetryLimit = TimeSpan.FromMinutes(30);
private readonly Dictionary<string, int> _sessionCreationExceptionTracker = new Dictionary<string, int>(); private readonly Dictionary<string, int> _sessionCreationExceptionTracker = new Dictionary<string, int>();
// Whether load credentials from .credentials_migrated file
internal bool _useMigratedCredentials;
// need to check auth url if there is only .credentials and auth schema is OAuth
internal bool _needToCheckAuthorizationUrlUpdate;
internal Task<VssCredentials> _authorizationUrlMigrationBackgroundTask;
internal Task _authorizationUrlRollbackReattemptDelayBackgroundTask;
public override void Initialize(IHostContext hostContext) public override void Initialize(IHostContext hostContext)
{ {
base.Initialize(hostContext); base.Initialize(hostContext);
_term = HostContext.GetService<ITerminal>(); _term = HostContext.GetService<ITerminal>();
_runnerServer = HostContext.GetService<IRunnerServer>(); _runnerServer = HostContext.GetService<IRunnerServer>();
_credMgr = HostContext.GetService<ICredentialManager>();
_configStore = HostContext.GetService<IConfigurationStore>();
} }
public async Task<Boolean> CreateSessionAsync(CancellationToken token) public async Task<Boolean> CreateSessionAsync(CancellationToken token)
@@ -73,8 +58,8 @@ namespace GitHub.Runner.Listener
// Create connection. // Create connection.
Trace.Info("Loading Credentials"); Trace.Info("Loading Credentials");
_useMigratedCredentials = !StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_SPSAUTHURL")); var credMgr = HostContext.GetService<ICredentialManager>();
VssCredentials creds = _credMgr.LoadCredentials(_useMigratedCredentials); VssCredentials creds = credMgr.LoadCredentials();
var agent = new TaskAgentReference var agent = new TaskAgentReference
{ {
@@ -89,17 +74,6 @@ namespace GitHub.Runner.Listener
string errorMessage = string.Empty; string errorMessage = string.Empty;
bool encounteringError = false; bool encounteringError = false;
var originalCreds = _configStore.GetCredentials();
var migratedCreds = _configStore.GetMigratedCredentials();
if (migratedCreds == null)
{
_useMigratedCredentials = false;
if (originalCreds.Scheme == Constants.Configuration.OAuth)
{
_needToCheckAuthorizationUrlUpdate = true;
}
}
while (true) while (true)
{ {
token.ThrowIfCancellationRequested(); token.ThrowIfCancellationRequested();
@@ -127,12 +101,6 @@ namespace GitHub.Runner.Listener
encounteringError = false; encounteringError = false;
} }
if (_needToCheckAuthorizationUrlUpdate)
{
// start background task try to get new authorization url
_authorizationUrlMigrationBackgroundTask = GetNewOAuthAuthorizationSetting(token);
}
return true; return true;
} }
catch (OperationCanceledException) when (token.IsCancellationRequested) catch (OperationCanceledException) when (token.IsCancellationRequested)
@@ -150,25 +118,26 @@ namespace GitHub.Runner.Listener
Trace.Error("Catch exception during create session."); Trace.Error("Catch exception during create session.");
Trace.Error(ex); Trace.Error(ex);
if (!IsSessionCreationExceptionRetriable(ex)) if (ex is VssOAuthTokenRequestException && creds.Federated is VssOAuthCredential vssOAuthCred)
{ {
if (_useMigratedCredentials) // Check whether we get 401 because the runner registration already removed by the service.
// If the runner registration get deleted, we can't exchange oauth token.
Trace.Error("Test oauth app registration.");
var oauthTokenProvider = new VssOAuthTokenProvider(vssOAuthCred, new Uri(serverUrl));
var authError = await oauthTokenProvider.ValidateCredentialAsync(token);
if (string.Equals(authError, "invalid_client", StringComparison.OrdinalIgnoreCase))
{ {
// migrated credentials might cause lose permission during permission check, _term.WriteError("Failed to create a session. The runner registration has been deleted from the server, please re-configure.");
// we will force to use original credential and try again
_useMigratedCredentials = false;
var reattemptBackoff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromHours(24), TimeSpan.FromHours(36));
_authorizationUrlRollbackReattemptDelayBackgroundTask = HostContext.Delay(reattemptBackoff, token); // retry migrated creds in 24-36 hours.
creds = _credMgr.LoadCredentials(false);
Trace.Error("Fallback to original credentials and try again.");
}
else
{
_term.WriteError($"Failed to create session. {ex.Message}");
return false; return false;
} }
} }
if (!IsSessionCreationExceptionRetriable(ex))
{
_term.WriteError($"Failed to create session. {ex.Message}");
return false;
}
if (!encounteringError) //print the message only on the first error if (!encounteringError) //print the message only on the first error
{ {
_term.WriteError($"{DateTime.UtcNow:u}: Runner connect error: {ex.Message}. Retrying until reconnected."); _term.WriteError($"{DateTime.UtcNow:u}: Runner connect error: {ex.Message}. Retrying until reconnected.");
@@ -227,51 +196,6 @@ namespace GitHub.Runner.Listener
encounteringError = false; encounteringError = false;
continuousError = 0; continuousError = 0;
} }
if (_needToCheckAuthorizationUrlUpdate &&
_authorizationUrlMigrationBackgroundTask?.IsCompleted == true)
{
if (HostContext.GetService<IJobDispatcher>().Busy ||
HostContext.GetService<ISelfUpdater>().Busy)
{
Trace.Info("Job or runner updates in progress, update credentials next time.");
}
else
{
try
{
var newCred = await _authorizationUrlMigrationBackgroundTask;
await _runnerServer.ConnectAsync(new Uri(_settings.ServerUrl), newCred);
Trace.Info("Updated connection to use migrated credential for next GetMessage call.");
_useMigratedCredentials = true;
_authorizationUrlMigrationBackgroundTask = null;
_needToCheckAuthorizationUrlUpdate = false;
}
catch (Exception ex)
{
Trace.Error("Fail to refresh connection with new authorization url.");
Trace.Error(ex);
}
}
}
if (_authorizationUrlRollbackReattemptDelayBackgroundTask?.IsCompleted == true)
{
try
{
// we rolled back to use original creds about 2 days before, now it's a good time to try migrated creds again.
Trace.Info("Re-attempt to use migrated credential");
var migratedCreds = _credMgr.LoadCredentials();
await _runnerServer.ConnectAsync(new Uri(_settings.ServerUrl), migratedCreds);
_useMigratedCredentials = true;
_authorizationUrlRollbackReattemptDelayBackgroundTask = null;
}
catch (Exception ex)
{
Trace.Error("Fail to refresh connection with new authorization url on rollback reattempt.");
Trace.Error(ex);
}
}
} }
catch (OperationCanceledException) when (token.IsCancellationRequested) catch (OperationCanceledException) when (token.IsCancellationRequested)
{ {
@@ -295,21 +219,7 @@ namespace GitHub.Runner.Listener
} }
else if (!IsGetNextMessageExceptionRetriable(ex)) else if (!IsGetNextMessageExceptionRetriable(ex))
{ {
if (_useMigratedCredentials) throw;
{
// migrated credentials might cause lose permission during permission check,
// we will force to use original credential and try again
_useMigratedCredentials = false;
var reattemptBackoff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromHours(24), TimeSpan.FromHours(36));
_authorizationUrlRollbackReattemptDelayBackgroundTask = HostContext.Delay(reattemptBackoff, token); // retry migrated creds in 24-36 hours.
var originalCreds = _credMgr.LoadCredentials(false);
await _runnerServer.ConnectAsync(new Uri(_settings.ServerUrl), originalCreds);
Trace.Error("Fallback to original credentials and try again.");
}
else
{
throw;
}
} }
else else
{ {
@@ -501,80 +411,5 @@ namespace GitHub.Runner.Listener
return true; return true;
} }
} }
private async Task<VssCredentials> GetNewOAuthAuthorizationSetting(CancellationToken token)
{
Trace.Info("Start checking oauth authorization url update.");
while (true)
{
var backoff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromMinutes(30), TimeSpan.FromMinutes(45));
await HostContext.Delay(backoff, token);
try
{
var migratedAuthorizationUrl = await _runnerServer.GetRunnerAuthUrlAsync(_settings.PoolId, _settings.AgentId);
if (!string.IsNullOrEmpty(migratedAuthorizationUrl))
{
var credData = _configStore.GetCredentials();
var clientId = credData.Data.GetValueOrDefault("clientId", null);
var currentAuthorizationUrl = credData.Data.GetValueOrDefault("authorizationUrl", null);
Trace.Info($"Current authorization url: {currentAuthorizationUrl}, new authorization url: {migratedAuthorizationUrl}");
if (string.Equals(currentAuthorizationUrl, migratedAuthorizationUrl, StringComparison.OrdinalIgnoreCase))
{
// We don't need to update credentials.
Trace.Info("No needs to update authorization url");
await Task.Delay(TimeSpan.FromMilliseconds(-1), token);
}
var keyManager = HostContext.GetService<IRSAKeyManager>();
var signingCredentials = VssSigningCredentials.Create(() => keyManager.GetKey());
var migratedClientCredential = new VssOAuthJwtBearerClientCredential(clientId, migratedAuthorizationUrl, signingCredentials);
var migratedRunnerCredential = new VssOAuthCredential(new Uri(migratedAuthorizationUrl, UriKind.Absolute), VssOAuthGrant.ClientCredentials, migratedClientCredential);
Trace.Info("Try connect service with Token Service OAuth endpoint.");
var runnerServer = HostContext.CreateService<IRunnerServer>();
await runnerServer.ConnectAsync(new Uri(_settings.ServerUrl), migratedRunnerCredential);
await runnerServer.GetAgentPoolsAsync();
Trace.Info($"Successfully connected service with new authorization url.");
var migratedCredData = new CredentialData
{
Scheme = Constants.Configuration.OAuth,
Data =
{
{ "clientId", clientId },
{ "authorizationUrl", migratedAuthorizationUrl },
{ "oauthEndpointUrl", migratedAuthorizationUrl },
},
};
_configStore.SaveMigratedCredential(migratedCredData);
return migratedRunnerCredential;
}
else
{
Trace.Verbose("No authorization url updates");
}
}
catch (Exception ex)
{
Trace.Error("Fail to get/test new authorization url.");
Trace.Error(ex);
try
{
await _runnerServer.ReportRunnerAuthUrlErrorAsync(_settings.PoolId, _settings.AgentId, ex.ToString());
}
catch (Exception e)
{
// best effort
Trace.Error("Fail to report the migration error");
Trace.Error(e);
}
}
}
}
} }
} }
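
With the authorization-url migration machinery removed, the session-creation path above boils down to a loop that fails fast on non-retriable errors (including a deleted runner registration) and otherwise retries after a delay. A generic sketch of that loop shape, with HttpRequestException/TimeoutException standing in for the runner's retriability check:

    using System;
    using System.Net.Http;
    using System.Threading;
    using System.Threading.Tasks;

    static class SessionRetryLoop
    {
        static async Task<bool> CreateWithRetryAsync(
            Func<CancellationToken, Task> createSession,
            TimeSpan retryInterval,
            CancellationToken token)
        {
            var encounteringError = false;
            while (true)
            {
                token.ThrowIfCancellationRequested();
                try
                {
                    await createSession(token);
                    return true;
                }
                catch (OperationCanceledException) when (token.IsCancellationRequested)
                {
                    throw;
                }
                catch (Exception ex)
                {
                    // Treat transient transport failures as retriable; anything else is fatal.
                    var retriable = ex is HttpRequestException || ex is TimeoutException;
                    if (!retriable)
                    {
                        Console.Error.WriteLine($"Failed to create session. {ex.Message}");
                        return false;
                    }
                    if (!encounteringError) // print the message only on the first error
                    {
                        Console.Error.WriteLine($"{DateTime.UtcNow:u}: connect error: {ex.Message}. Retrying.");
                        encounteringError = true;
                    }
                    await Task.Delay(retryInterval, token);
                }
            }
        }

        static async Task Main()
        {
            var attempts = 0;
            var ok = await CreateWithRetryAsync(
                _ => ++attempts < 3 ? throw new TimeoutException("transient") : Task.CompletedTask,
                TimeSpan.FromMilliseconds(10),
                CancellationToken.None);
            Console.WriteLine($"ok={ok} after {attempts} attempts");
        }
    }
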

View File

@@ -102,7 +102,9 @@ namespace GitHub.Runner.Listener
IRunner runner = context.GetService<IRunner>(); IRunner runner = context.GetService<IRunner>();
try try
{ {
return await runner.ExecuteCommand(command); var returnCode = await runner.ExecuteCommand(command);
trace.Info($"Runner execution has finished with return code {returnCode}");
return returnCode;
} }
catch (OperationCanceledException) when (context.RunnerShutdownToken.IsCancellationRequested) catch (OperationCanceledException) when (context.RunnerShutdownToken.IsCancellationRequested)
{ {

View File

@@ -466,6 +466,7 @@ Config Options:
--url string Repository to add the runner to. Required if unattended --url string Repository to add the runner to. Required if unattended
--token string Registration token. Required if unattended --token string Registration token. Required if unattended
--name string Name of the runner to configure (default {Environment.MachineName ?? "myrunner"}) --name string Name of the runner to configure (default {Environment.MachineName ?? "myrunner"})
--labels string Extra labels in addition to the default: 'self-hosted,{Constants.Runner.Platform},{Constants.Runner.PlatformArchitecture}'
--work string Relative runner work directory (default {Constants.Path.WorkDirectory}) --work string Relative runner work directory (default {Constants.Path.WorkDirectory})
--replace Replace any existing runner with the same name (default false)"); --replace Replace any existing runner with the same name (default false)");
#if OS_WINDOWS #if OS_WINDOWS
@@ -478,7 +479,9 @@ Examples:
Configure a runner non-interactively: Configure a runner non-interactively:
.{separator}config.{ext} --unattended --url <url> --token <token> .{separator}config.{ext} --unattended --url <url> --token <token>
Configure a runner non-interactively, replacing any existing runner with the same name: Configure a runner non-interactively, replacing any existing runner with the same name:
.{separator}config.{ext} --unattended --url <url> --token <token> --replace [--name <name>]"); .{separator}config.{ext} --unattended --url <url> --token <token> --replace [--name <name>]
Configure a runner non-interactively with three extra labels:
.{separator}config.{ext} --unattended --url <url> --token <token> --labels L1,L2,L3");
#if OS_WINDOWS #if OS_WINDOWS
_term.WriteLine($@" Configure a runner to run as a service:"); _term.WriteLine($@" Configure a runner to run as a service:");
_term.WriteLine($@" .{separator}config.{ext} --url <url> --token <token> --runasservice"); _term.WriteLine($@" .{separator}config.{ext} --url <url> --token <token> --runasservice");

View File

@@ -80,7 +80,12 @@ namespace GitHub.Runner.Plugins.Repository.v1_0
// Validate args. // Validate args.
ArgUtil.NotNull(executionContext, nameof(executionContext)); ArgUtil.NotNull(executionContext, nameof(executionContext));
executionContext.Output($"Syncing repository: {repoFullName}"); executionContext.Output($"Syncing repository: {repoFullName}");
Uri repositoryUrl = new Uri($"https://github.com/{repoFullName}");
// Repository URL
var githubUrl = executionContext.GetGitHubContext("server_url");
var githubUri = new Uri(!string.IsNullOrEmpty(githubUrl) ? githubUrl : "https://github.com");
var portInfo = githubUri.IsDefaultPort ? string.Empty : $":{githubUri.Port}";
Uri repositoryUrl = new Uri($"{githubUri.Scheme}://{githubUri.Host}{portInfo}/{repoFullName}");
if (!repositoryUrl.IsAbsoluteUri) if (!repositoryUrl.IsAbsoluteUri)
{ {
throw new InvalidOperationException("Repository url need to be an absolute uri."); throw new InvalidOperationException("Repository url need to be an absolute uri.");
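
The checkout plugin above now derives the repository URL from the github.server_url context instead of hard-coding https://github.com, keeping any non-default port. The same scheme/host/port handling in isolation (URLs are examples):

    using System;

    static class RepoUrlBuilder
    {
        static Uri BuildRepositoryUrl(string serverUrl, string repoFullName)
        {
            var githubUri = new Uri(string.IsNullOrEmpty(serverUrl) ? "https://github.com" : serverUrl);
            var portInfo = githubUri.IsDefaultPort ? string.Empty : $":{githubUri.Port}";
            return new Uri($"{githubUri.Scheme}://{githubUri.Host}{portInfo}/{repoFullName}");
        }

        static void Main()
        {
            Console.WriteLine(BuildRepositoryUrl(null, "actions/runner"));                // https://github.com/actions/runner
            Console.WriteLine(BuildRepositoryUrl("https://ghes.local:8443", "org/repo")); // https://ghes.local:8443/org/repo
        }
    }
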

View File

@@ -318,7 +318,12 @@ namespace GitHub.Runner.Sdk
} }
} }
using (var registration = cancellationToken.Register(async () => await CancelAndKillProcessTree(killProcessOnCancel))) var cancellationFinished = new TaskCompletionSource<bool>();
using (var registration = cancellationToken.Register(async () =>
{
await CancelAndKillProcessTree(killProcessOnCancel);
cancellationFinished.TrySetResult(true);
}))
{ {
Trace.Info($"Process started with process id {_proc.Id}, waiting for process exit."); Trace.Info($"Process started with process id {_proc.Id}, waiting for process exit.");
while (true) while (true)
@@ -341,6 +346,13 @@ namespace GitHub.Runner.Sdk
// data buffers one last time before returning // data buffers one last time before returning
ProcessOutput(); ProcessOutput();
if (cancellationToken.IsCancellationRequested)
{
// Ensure cancellation also finish on the cancellationToken.Register thread.
await cancellationFinished.Task;
Trace.Info($"Process Cancellation finished.");
}
Trace.Info($"Finished process {_proc.Id} with exit code {_proc.ExitCode}, and elapsed time {_stopWatch.Elapsed}."); Trace.Info($"Finished process {_proc.Id} with exit code {_proc.ExitCode}, and elapsed time {_stopWatch.Elapsed}.");
} }
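
The ProcessInvoker change above closes a race: the CancellationToken.Register callback is async, so the method could previously return while CancelAndKillProcessTree was still running; awaiting a TaskCompletionSource set at the end of the callback prevents that. A self-contained sketch of the pattern, with a stand-in cleanup routine in place of the real process-tree kill:

    using System;
    using System.Threading;
    using System.Threading.Tasks;

    static class CancellationCompletionDemo
    {
        static async Task RunAsync(CancellationToken cancellationToken)
        {
            var cancellationFinished = new TaskCompletionSource<bool>(
                TaskCreationOptions.RunContinuationsAsynchronously);

            using (cancellationToken.Register(async () =>
            {
                // Stand-in for CancelAndKillProcessTree: asynchronous cleanup work.
                await Task.Delay(200);
                Console.WriteLine("cleanup finished");
                cancellationFinished.TrySetResult(true);
            }))
            {
                try
                {
                    await Task.Delay(Timeout.Infinite, cancellationToken); // the "process" running
                }
                catch (OperationCanceledException)
                {
                }

                if (cancellationToken.IsCancellationRequested)
                {
                    // Without this await the method could return while cleanup is still running.
                    await cancellationFinished.Task;
                }
                Console.WriteLine("RunAsync returning");
            }
        }

        static async Task Main()
        {
            using var cts = new CancellationTokenSource(TimeSpan.FromMilliseconds(100));
            await RunAsync(cts.Token);
        }
    }
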

View File

@@ -11,6 +11,11 @@ namespace GitHub.Runner.Sdk
{ {
ArgUtil.NotNullOrEmpty(command, nameof(command)); ArgUtil.NotNullOrEmpty(command, nameof(command));
trace?.Info($"Which: '{command}'"); trace?.Info($"Which: '{command}'");
if (Path.IsPathFullyQualified(command) && File.Exists(command))
{
trace?.Info($"Fully qualified path: '{command}'");
return command;
}
string path = Environment.GetEnvironmentVariable(PathUtil.PathVariable); string path = Environment.GetEnvironmentVariable(PathUtil.PathVariable);
if (string.IsNullOrEmpty(path)) if (string.IsNullOrEmpty(path))
{ {
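
The WhichUtil change above returns immediately when the caller passes a fully qualified path to an existing file, instead of searching PATH. A trimmed-down version of that lookup (no tracing, no Windows extension probing):

    using System;
    using System.IO;
    using System.Linq;

    static class Which
    {
        static string Find(string command)
        {
            // If the command is already a fully qualified path to an existing file, use it as-is.
            if (Path.IsPathFullyQualified(command) && File.Exists(command))
            {
                return command;
            }

            var path = Environment.GetEnvironmentVariable("PATH") ?? string.Empty;
            return path.Split(Path.PathSeparator)
                       .Where(dir => !string.IsNullOrEmpty(dir))
                       .Select(dir => Path.Combine(dir, command))
                       .FirstOrDefault(File.Exists);
        }

        static void Main()
        {
            Console.WriteLine(Find("/bin/ls") ?? "not found"); // fully qualified hit on Linux
            Console.WriteLine(Find("ls") ?? "not found");      // PATH search
        }
    }
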

View File

@@ -486,7 +486,10 @@ namespace GitHub.Runner.Worker
foreach (var property in command.Properties) foreach (var property in command.Properties)
{ {
issue.Data[property.Key] = property.Value; if (!string.Equals(property.Key, Constants.Runner.InternalTelemetryIssueDataKey, StringComparison.OrdinalIgnoreCase))
{
issue.Data[property.Key] = property.Value;
}
} }
context.AddIssue(issue); context.AddIssue(issue);
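
Together with the new InternalTelemetryIssueDataKey and WorkerCrash constants, the worker change above prevents a workflow command from injecting the reserved telemetry key into an issue's data; only the runner itself sets it, for example when tagging a worker crash. A compact sketch of that reserve-and-filter idea with local stand-in types:

    using System;
    using System.Collections.Generic;

    static class TelemetryKeyDemo
    {
        const string InternalTelemetryIssueDataKey = "_internal_telemetry";
        const string WorkerCrash = "WORKER_CRASH";

        sealed class Issue
        {
            public string Message;
            public Dictionary<string, string> Data { get; } =
                new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
        }

        // Runner-side: tag an internally raised issue so the service can treat it as telemetry.
        static Issue CreateWorkerCrashIssue(string errorMessage)
        {
            var issue = new Issue { Message = errorMessage };
            issue.Data[InternalTelemetryIssueDataKey] = WorkerCrash;
            return issue;
        }

        // Command-side: copy user-supplied properties but drop the reserved key.
        static Issue CreateIssueFromCommand(string message, IDictionary<string, string> properties)
        {
            var issue = new Issue { Message = message };
            foreach (var property in properties)
            {
                if (!string.Equals(property.Key, InternalTelemetryIssueDataKey, StringComparison.OrdinalIgnoreCase))
                {
                    issue.Data[property.Key] = property.Value;
                }
            }
            return issue;
        }

        static void Main()
        {
            var crash = CreateWorkerCrashIssue("worker exited unexpectedly");
            var fromCommand = CreateIssueFromCommand("boom",
                new Dictionary<string, string> { ["file"] = "a.cs", ["_internal_telemetry"] = "spoofed" });
            Console.WriteLine(crash.Data.Count);       // 1
            Console.WriteLine(fromCommand.Data.Count); // 1 (reserved key dropped)
        }
    }
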

View File

@@ -1,21 +1,20 @@
using System; using System;
using System.Collections.Generic; using System.Collections.Generic;
using System.IO; using System.IO;
using System.IO.Compression; using System.IO.Compression;
using System.Linq; using System.Linq;
using System.Threading; using System.Net;
using System.Threading.Tasks;
using System.Net.Http; using System.Net.Http;
using System.Net.Http.Headers; using System.Net.Http.Headers;
using System.Text; using System.Text;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.ObjectTemplating.Tokens; using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common; using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk; using GitHub.Runner.Sdk;
using GitHub.Runner.Worker.Container; using GitHub.Runner.Worker.Container;
using GitHub.Services.Common; using GitHub.Services.Common;
using Newtonsoft.Json; using WebApi = GitHub.DistributedTask.WebApi;
using Pipelines = GitHub.DistributedTask.Pipelines; using Pipelines = GitHub.DistributedTask.Pipelines;
using PipelineTemplateConstants = GitHub.DistributedTask.Pipelines.ObjectTemplating.PipelineTemplateConstants; using PipelineTemplateConstants = GitHub.DistributedTask.Pipelines.ObjectTemplating.PipelineTemplateConstants;
@@ -48,7 +47,7 @@ namespace GitHub.Runner.Worker
//81920 is the default used by System.IO.Stream.CopyTo and is under the large object heap threshold (85k). //81920 is the default used by System.IO.Stream.CopyTo and is under the large object heap threshold (85k).
private const int _defaultCopyBufferSize = 81920; private const int _defaultCopyBufferSize = 81920;
private const string _dotcomApiUrl = "https://api.github.com";
private readonly Dictionary<Guid, ContainerInfo> _cachedActionContainers = new Dictionary<Guid, ContainerInfo>(); private readonly Dictionary<Guid, ContainerInfo> _cachedActionContainers = new Dictionary<Guid, ContainerInfo>();
public Dictionary<Guid, ContainerInfo> CachedActionContainers => _cachedActionContainers; public Dictionary<Guid, ContainerInfo> CachedActionContainers => _cachedActionContainers;
@@ -73,13 +72,12 @@ namespace GitHub.Runner.Worker
} }
// Clear the cache (for self-hosted runners) // Clear the cache (for self-hosted runners)
// Note, temporarily avoid this step for the on-premises product, to avoid rate limiting. IOUtil.DeleteDirectory(HostContext.GetDirectory(WellKnownDirectory.Actions), executionContext.CancellationToken);
var configurationStore = HostContext.GetService<IConfigurationStore>();
var isHostedServer = configurationStore.GetSettings().IsHostedServer; // todo: Remove when feature flag DistributedTask.NewActionMetadata is removed
if (isHostedServer) var newActionMetadata = executionContext.Variables.GetBoolean("DistributedTask.NewActionMetadata") ?? false;
{
IOUtil.DeleteDirectory(HostContext.GetDirectory(WellKnownDirectory.Actions), executionContext.CancellationToken); var repositoryActions = new List<Pipelines.ActionStep>();
}
foreach (var action in actions) foreach (var action in actions)
{ {
@@ -98,7 +96,8 @@ namespace GitHub.Runner.Worker
Trace.Info($"Action {action.Name} ({action.Id}) needs to pull image '{containerReference.Image}'"); Trace.Info($"Action {action.Name} ({action.Id}) needs to pull image '{containerReference.Image}'");
imagesToPull[containerReference.Image].Add(action.Id); imagesToPull[containerReference.Image].Add(action.Id);
} }
else if (action.Reference.Type == Pipelines.ActionSourceType.Repository) // todo: Remove when feature flag DistributedTask.NewActionMetadata is removed
else if (action.Reference.Type == Pipelines.ActionSourceType.Repository && !newActionMetadata)
{ {
// only download the repository archive // only download the repository archive
await DownloadRepositoryActionAsync(executionContext, action); await DownloadRepositoryActionAsync(executionContext, action);
@@ -132,6 +131,81 @@ namespace GitHub.Runner.Worker
} }
} }
var repoAction = action.Reference as Pipelines.RepositoryPathReference;
if (repoAction.RepositoryType != Pipelines.PipelineConstants.SelfAlias)
{
var definition = LoadAction(executionContext, action);
if (definition.Data.Execution.HasPre)
{
var actionRunner = HostContext.CreateService<IActionRunner>();
actionRunner.Action = action;
actionRunner.Stage = ActionRunStage.Pre;
actionRunner.Condition = definition.Data.Execution.InitCondition;
Trace.Info($"Add 'pre' execution for {action.Id}");
preStepTracker[action.Id] = actionRunner;
}
}
}
else if (action.Reference.Type == Pipelines.ActionSourceType.Repository && newActionMetadata)
{
repositoryActions.Add(action);
}
}
if (repositoryActions.Count > 0)
{
// Get the download info
var downloadInfos = await GetDownloadInfoAsync(executionContext, repositoryActions);
// Download each action
foreach (var action in repositoryActions)
{
var lookupKey = GetDownloadInfoLookupKey(action);
if (string.IsNullOrEmpty(lookupKey))
{
continue;
}
if (!downloadInfos.TryGetValue(lookupKey, out var downloadInfo))
{
throw new Exception($"Missing download info for {lookupKey}");
}
await DownloadRepositoryActionAsync(executionContext, downloadInfo);
}
// More preparation based on content in the repository (action.yml)
foreach (var action in repositoryActions)
{
var setupInfo = PrepareRepositoryActionAsync(executionContext, action);
if (setupInfo != null)
{
if (!string.IsNullOrEmpty(setupInfo.Image))
{
if (!imagesToPull.ContainsKey(setupInfo.Image))
{
imagesToPull[setupInfo.Image] = new List<Guid>();
}
Trace.Info($"Action {action.Name} ({action.Id}) from repository '{setupInfo.ActionRepository}' needs to pull image '{setupInfo.Image}'");
imagesToPull[setupInfo.Image].Add(action.Id);
}
else
{
ArgUtil.NotNullOrEmpty(setupInfo.ActionRepository, nameof(setupInfo.ActionRepository));
if (!imagesToBuild.ContainsKey(setupInfo.ActionRepository))
{
imagesToBuild[setupInfo.ActionRepository] = new List<Guid>();
}
Trace.Info($"Action {action.Name} ({action.Id}) from repository '{setupInfo.ActionRepository}' needs to build image '{setupInfo.Dockerfile}'");
imagesToBuild[setupInfo.ActionRepository].Add(action.Id);
imagesToBuildInfo[setupInfo.ActionRepository] = setupInfo;
}
}
var repoAction = action.Reference as Pipelines.RepositoryPathReference; var repoAction = action.Reference as Pipelines.RepositoryPathReference;
if (repoAction.RepositoryType != Pipelines.PipelineConstants.SelfAlias) if (repoAction.RepositoryType != Pipelines.PipelineConstants.SelfAlias)
{ {
@@ -321,6 +395,14 @@ namespace GitHub.Runner.Worker
Trace.Info($"Action cleanup plugin: {plugin.PluginTypeName}."); Trace.Info($"Action cleanup plugin: {plugin.PluginTypeName}.");
} }
} }
else if (definition.Data.Execution.ExecutionType == ActionExecutionType.Composite && !string.IsNullOrEmpty(Environment.GetEnvironmentVariable("TESTING_COMPOSITE_ACTIONS_ALPHA")))
{
var compositeAction = definition.Data.Execution as CompositeActionExecutionData;
Trace.Info($"Load {compositeAction.Steps?.Count ?? 0} action steps.");
Trace.Verbose($"Details: {StringUtil.ConvertToJson(compositeAction?.Steps)}");
Trace.Info($"Load: {compositeAction.Outputs?.Count ?? 0} number of outputs");
Trace.Info($"Details: {StringUtil.ConvertToJson(compositeAction?.Outputs)}");
}
else else
{ {
throw new NotSupportedException(definition.Data.Execution.ExecutionType.ToString()); throw new NotSupportedException(definition.Data.Execution.ExecutionType.ToString());
@@ -438,7 +520,12 @@ namespace GitHub.Runner.Worker
var imageName = $"{dockerManger.DockerInstanceLabel}:{Guid.NewGuid().ToString("N")}"; var imageName = $"{dockerManger.DockerInstanceLabel}:{Guid.NewGuid().ToString("N")}";
while (retryCount < 3) while (retryCount < 3)
{ {
buildExitCode = await dockerManger.DockerBuild(executionContext, setupInfo.Container.WorkingDirectory, Directory.GetParent(setupInfo.Container.Dockerfile).FullName, imageName); buildExitCode = await dockerManger.DockerBuild(
executionContext,
setupInfo.Container.WorkingDirectory,
setupInfo.Container.Dockerfile,
Directory.GetParent(setupInfo.Container.Dockerfile).FullName,
imageName);
if (buildExitCode == 0) if (buildExitCode == 0)
{ {
break; break;
@@ -467,6 +554,80 @@ namespace GitHub.Runner.Worker
} }
} }
// This implementation is temporary and will be replaced with a REST API call to the service to resolve the action download info
private async Task<IDictionary<string, WebApi.ActionDownloadInfo>> GetDownloadInfoAsync(IExecutionContext executionContext, List<Pipelines.ActionStep> actions)
{
executionContext.Output("Getting action download info");
// Convert to action reference
var actionReferences = actions
.GroupBy(x => GetDownloadInfoLookupKey(x))
.Where(x => !string.IsNullOrEmpty(x.Key))
.Select(x =>
{
var action = x.First();
var repositoryReference = action.Reference as Pipelines.RepositoryPathReference;
ArgUtil.NotNull(repositoryReference, nameof(repositoryReference));
return new WebApi.ActionReference
{
NameWithOwner = repositoryReference.Name,
Ref = repositoryReference.Ref,
};
})
.ToList();
// Nothing to resolve?
if (actionReferences.Count == 0)
{
return new Dictionary<string, WebApi.ActionDownloadInfo>();
}
// Resolve download info
var jobServer = HostContext.GetService<IJobServer>();
var actionDownloadInfos = default(WebApi.ActionDownloadInfoCollection);
for (var attempt = 1; attempt <= 3; attempt++)
{
try
{
actionDownloadInfos = await jobServer.ResolveActionDownloadInfoAsync(executionContext.Plan.ScopeIdentifier, executionContext.Plan.PlanType, executionContext.Plan.PlanId, new WebApi.ActionReferenceList { Actions = actionReferences }, executionContext.CancellationToken);
break;
}
catch (Exception ex) when (attempt < 3)
{
executionContext.Output($"Failed to resolve action download info. Error: {ex.Message}");
executionContext.Debug(ex.ToString());
if (String.IsNullOrEmpty(Environment.GetEnvironmentVariable("_GITHUB_ACTION_DOWNLOAD_NO_BACKOFF")))
{
var backoff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(10), TimeSpan.FromSeconds(30));
executionContext.Output($"Retrying in {backoff.TotalSeconds} seconds");
await Task.Delay(backoff);
}
}
}
ArgUtil.NotNull(actionDownloadInfos, nameof(actionDownloadInfos));
ArgUtil.NotNull(actionDownloadInfos.Actions, nameof(actionDownloadInfos.Actions));
var apiUrl = GetApiUrl(executionContext);
var defaultAccessToken = executionContext.GetGitHubContext("token");
var configurationStore = HostContext.GetService<IConfigurationStore>();
var runnerSettings = configurationStore.GetSettings();
foreach (var actionDownloadInfo in actionDownloadInfos.Actions.Values)
{
// Add secret
HostContext.SecretMasker.AddValue(actionDownloadInfo.Authentication?.Token);
// Default auth token
if (string.IsNullOrEmpty(actionDownloadInfo.Authentication?.Token))
{
actionDownloadInfo.Authentication = new WebApi.ActionDownloadAuthentication { Token = defaultAccessToken };
}
}
return actionDownloadInfos.Actions;
}
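
The new GetDownloadInfoAsync above calls ResolveActionDownloadInfoAsync up to three times, sleeping a random backoff between attempts unless _GITHUB_ACTION_DOWNLOAD_NO_BACKOFF is set. A reduced sketch of that retry shape, with a local GetRandomBackoff standing in for BackoffTimerHelper.GetRandomBackoff and a delegate standing in for the service call:

    using System;
    using System.Threading.Tasks;

    static class ResolveRetryDemo
    {
        static readonly Random _random = new Random();

        static TimeSpan GetRandomBackoff(TimeSpan min, TimeSpan max) =>
            TimeSpan.FromMilliseconds(_random.Next((int)min.TotalMilliseconds, (int)max.TotalMilliseconds));

        static async Task<T> ResolveWithRetryAsync<T>(
            Func<Task<T>> resolve, TimeSpan minBackoff, TimeSpan maxBackoff, int maxAttempts = 3)
        {
            for (var attempt = 1; ; attempt++)
            {
                try
                {
                    return await resolve();
                }
                catch (Exception ex) when (attempt < maxAttempts)
                {
                    var backoff = GetRandomBackoff(minBackoff, maxBackoff);
                    Console.WriteLine($"Failed to resolve action download info. Error: {ex.Message}");
                    Console.WriteLine($"Retrying in {backoff.TotalSeconds:F1} seconds");
                    await Task.Delay(backoff);
                }
            }
        }

        static async Task Main()
        {
            var calls = 0;
            var result = await ResolveWithRetryAsync(() =>
            {
                calls++;
                if (calls < 2)
                {
                    throw new InvalidOperationException("transient failure");
                }
                return Task.FromResult("resolved");
            }, TimeSpan.FromMilliseconds(10), TimeSpan.FromMilliseconds(50));
            Console.WriteLine($"{result} after {calls} calls");
        }
    }
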
// todo: Remove when feature flag DistributedTask.NewActionMetadata is removed
private async Task DownloadRepositoryActionAsync(IExecutionContext executionContext, Pipelines.ActionStep repositoryAction) private async Task DownloadRepositoryActionAsync(IExecutionContext executionContext, Pipelines.ActionStep repositoryAction)
{ {
Trace.Entering(); Trace.Entering();
@@ -490,7 +651,7 @@ namespace GitHub.Runner.Worker
ArgUtil.NotNullOrEmpty(repositoryReference.Ref, nameof(repositoryReference.Ref)); ArgUtil.NotNullOrEmpty(repositoryReference.Ref, nameof(repositoryReference.Ref));
string destDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Actions), repositoryReference.Name.Replace(Path.AltDirectorySeparatorChar, Path.DirectorySeparatorChar), repositoryReference.Ref); string destDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Actions), repositoryReference.Name.Replace(Path.AltDirectorySeparatorChar, Path.DirectorySeparatorChar), repositoryReference.Ref);
string watermarkFile = destDirectory + ".completed"; string watermarkFile = GetWatermarkFilePath(destDirectory);
if (File.Exists(watermarkFile)) if (File.Exists(watermarkFile))
{ {
executionContext.Debug($"Action '{repositoryReference.Name}@{repositoryReference.Ref}' already downloaded at '{destDirectory}'."); executionContext.Debug($"Action '{repositoryReference.Name}@{repositoryReference.Ref}' already downloaded at '{destDirectory}'.");
@@ -504,27 +665,116 @@ namespace GitHub.Runner.Worker
executionContext.Output($"Download action repository '{repositoryReference.Name}@{repositoryReference.Ref}'");
}
- #if OS_WINDOWS
- string archiveLink = $"https://api.github.com/repos/{repositoryReference.Name}/zipball/{repositoryReference.Ref}";
- #else
- string archiveLink = $"https://api.github.com/repos/{repositoryReference.Name}/tarball/{repositoryReference.Ref}";
- #endif
- Trace.Info($"Download archive '{archiveLink}' to '{destDirectory}'.");
var configurationStore = HostContext.GetService<IConfigurationStore>();
var isHostedServer = configurationStore.GetSettings().IsHostedServer;
if (isHostedServer)
{
string apiUrl = GetApiUrl(executionContext);
string archiveLink = BuildLinkToActionArchive(apiUrl, repositoryReference.Name, repositoryReference.Ref);
var downloadDetails = new ActionDownloadDetails(archiveLink, ConfigureAuthorizationFromContext);
await DownloadRepositoryActionAsync(executionContext, downloadDetails, null, destDirectory);
return;
}
else
{
string apiUrl = GetApiUrl(executionContext);
// URLs to try:
var downloadAttempts = new List<ActionDownloadDetails> {
// A built-in action or an action the user has created, on their GHES instance
// Example: https://my-ghes/api/v3/repos/my-org/my-action/tarball/v1
new ActionDownloadDetails(
BuildLinkToActionArchive(apiUrl, repositoryReference.Name, repositoryReference.Ref),
ConfigureAuthorizationFromContext),
// The same action, on GitHub.com
// Example: https://api.github.com/repos/my-org/my-action/tarball/v1
new ActionDownloadDetails(
BuildLinkToActionArchive(_dotcomApiUrl, repositoryReference.Name, repositoryReference.Ref),
configureAuthorization: (e,h) => { /* no authorization for dotcom */ })
};
foreach (var downloadAttempt in downloadAttempts)
{
try
{
await DownloadRepositoryActionAsync(executionContext, downloadAttempt, null, destDirectory);
return;
}
catch (ActionNotFoundException)
{
Trace.Info($"Failed to find the action '{repositoryReference.Name}' at ref '{repositoryReference.Ref}' at {downloadAttempt.ArchiveLink}");
continue;
}
}
throw new ActionNotFoundException($"Failed to find the action '{repositoryReference.Name}' at ref '{repositoryReference.Ref}'. Paths attempted: {string.Join(", ", downloadAttempts.Select(d => d.ArchiveLink))}");
}
}
private async Task DownloadRepositoryActionAsync(IExecutionContext executionContext, WebApi.ActionDownloadInfo downloadInfo)
{
Trace.Entering();
ArgUtil.NotNull(executionContext, nameof(executionContext));
ArgUtil.NotNull(downloadInfo, nameof(downloadInfo));
ArgUtil.NotNullOrEmpty(downloadInfo.NameWithOwner, nameof(downloadInfo.NameWithOwner));
ArgUtil.NotNullOrEmpty(downloadInfo.Ref, nameof(downloadInfo.Ref));
string destDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Actions), downloadInfo.NameWithOwner.Replace(Path.AltDirectorySeparatorChar, Path.DirectorySeparatorChar), downloadInfo.Ref);
string watermarkFile = GetWatermarkFilePath(destDirectory);
if (File.Exists(watermarkFile))
{
executionContext.Debug($"Action '{downloadInfo.NameWithOwner}@{downloadInfo.Ref}' already downloaded at '{destDirectory}'.");
return;
}
else
{
// make sure we get a clean folder ready to use.
IOUtil.DeleteDirectory(destDirectory, executionContext.CancellationToken);
Directory.CreateDirectory(destDirectory);
executionContext.Output($"Download action repository '{downloadInfo.NameWithOwner}@{downloadInfo.Ref}'");
}
await DownloadRepositoryActionAsync(executionContext, null, downloadInfo, destDirectory);
}
private string GetApiUrl(IExecutionContext executionContext)
{
string apiUrl = executionContext.GetGitHubContext("api_url");
if (!string.IsNullOrEmpty(apiUrl))
{
return apiUrl;
}
// Once the api_url is set for hosted, we can remove this fallback (it doesn't make sense for GHES)
return _dotcomApiUrl;
}
private static string BuildLinkToActionArchive(string apiUrl, string repository, string @ref)
{
#if OS_WINDOWS
return $"{apiUrl}/repos/{repository}/zipball/{@ref}";
#else
return $"{apiUrl}/repos/{repository}/tarball/{@ref}";
#endif
}
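BuildLinkToActionArchive simply appends the archive route to whichever API base it is handed, so the same helper serves both GitHub.com and a GHES instance; only the archive format differs per OS. A rough sketch of the resulting URL shapes (the base URLs and repositories below are illustrative examples, not values from this diff):

using System;

class ArchiveLinkSketch
{
    // Mirrors the shape of BuildLinkToActionArchive; "zipball" is used when built for Windows.
    static string BuildLink(string apiUrl, string repository, string @ref, bool windows) =>
        windows
            ? $"{apiUrl}/repos/{repository}/zipball/{@ref}"
            : $"{apiUrl}/repos/{repository}/tarball/{@ref}";

    static void Main()
    {
        // Example inputs only.
        Console.WriteLine(BuildLink("https://api.github.com", "actions/checkout", "v2", windows: false));
        // -> https://api.github.com/repos/actions/checkout/tarball/v2
        Console.WriteLine(BuildLink("https://my-ghes/api/v3", "my-org/my-action", "v1", windows: true));
        // -> https://my-ghes/api/v3/repos/my-org/my-action/zipball/v1
    }
}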
// todo: Remove the parameter "actionDownloadDetails" when feature flag DistributedTask.NewActionMetadata is removed
private async Task DownloadRepositoryActionAsync(IExecutionContext executionContext, ActionDownloadDetails actionDownloadDetails, WebApi.ActionDownloadInfo downloadInfo, string destDirectory)
{
//download and extract action in a temp folder and rename it on success
string tempDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Actions), "_temp_" + Guid.NewGuid());
Directory.CreateDirectory(tempDirectory);
#if OS_WINDOWS
string archiveFile = Path.Combine(tempDirectory, $"{Guid.NewGuid()}.zip");
string link = downloadInfo?.ZipballUrl ?? actionDownloadDetails.ArchiveLink;
#else
string archiveFile = Path.Combine(tempDirectory, $"{Guid.NewGuid()}.tar.gz");
string link = downloadInfo?.TarballUrl ?? actionDownloadDetails.ArchiveLink;
#endif
- Trace.Info($"Save archive '{archiveLink}' into {archiveFile}.");
Trace.Info($"Save archive '{link}' into {archiveFile}.");
try
{
int retryCount = 0;
// Allow up to 20 * 60s for any action to be downloaded from github graph.
@@ -541,64 +791,67 @@ namespace GitHub.Runner.Worker
using (var httpClientHandler = HostContext.CreateHttpClientHandler())
using (var httpClient = new HttpClient(httpClientHandler))
{
- var configurationStore = HostContext.GetService<IConfigurationStore>();
- var isHostedServer = configurationStore.GetSettings().IsHostedServer;
- if (isHostedServer)
- {
- var authToken = Environment.GetEnvironmentVariable("_GITHUB_ACTION_TOKEN");
- if (string.IsNullOrEmpty(authToken))
- {
- // TODO: Deprecate the PREVIEW_ACTION_TOKEN
- authToken = executionContext.Variables.Get("PREVIEW_ACTION_TOKEN");
- }
- if (!string.IsNullOrEmpty(authToken))
- {
- HostContext.SecretMasker.AddValue(authToken);
- var base64EncodingToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"PAT:{authToken}"));
- httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Basic", base64EncodingToken);
- }
- else
- {
- var accessToken = executionContext.GetGitHubContext("token");
- var base64EncodingToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"x-access-token:{accessToken}"));
- httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Basic", base64EncodingToken);
- }
- }
- else
- {
- // Intentionally empty. Temporary for GHES alpha release, download from dotcom unauthenticated.
- }
// Legacy
if (downloadInfo == null)
{
actionDownloadDetails.ConfigureAuthorization(executionContext, httpClient);
}
// FF DistributedTask.NewActionMetadata
else
{
httpClient.DefaultRequestHeaders.Authorization = CreateAuthHeader(downloadInfo.Authentication?.Token);
}
httpClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents);
- using (var result = await httpClient.GetStreamAsync(archiveLink))
using (var response = await httpClient.GetAsync(link))
{
if (response.IsSuccessStatusCode)
{
using (var result = await response.Content.ReadAsStreamAsync())
{
await result.CopyToAsync(fs, _defaultCopyBufferSize, actionDownloadCancellation.Token);
await fs.FlushAsync(actionDownloadCancellation.Token);
// download succeed, break out the retry loop.
break;
}
}
else if (response.StatusCode == HttpStatusCode.NotFound)
{
// It doesn't make sense to retry in this case, so just stop
throw new ActionNotFoundException(new Uri(link));
}
else
{
// Something else bad happened, let's go to our retry logic
response.EnsureSuccessStatusCode();
}
}
}
}
catch (OperationCanceledException) when (executionContext.CancellationToken.IsCancellationRequested)
{
- Trace.Info($"Action download has been cancelled.");
Trace.Info("Action download has been cancelled.");
throw;
}
catch (ActionNotFoundException)
{
Trace.Info($"The action at '{link}' does not exist");
throw;
}
catch (Exception ex) when (retryCount < 2)
{
retryCount++;
- Trace.Error($"Fail to download archive '{archiveLink}' -- Attempt: {retryCount}");
Trace.Error($"Fail to download archive '{link}' -- Attempt: {retryCount}");
Trace.Error(ex);
if (actionDownloadTimeout.Token.IsCancellationRequested)
{
// action download didn't finish within timeout
- executionContext.Warning($"Action '{archiveLink}' didn't finish download within {timeoutSeconds} seconds.");
executionContext.Warning($"Action '{link}' didn't finish download within {timeoutSeconds} seconds.");
}
else
{
- executionContext.Warning($"Failed to download action '{archiveLink}'. Error {ex.Message}");
executionContext.Warning($"Failed to download action '{link}'. Error: {ex.Message}");
}
}
}
@@ -612,7 +865,7 @@ namespace GitHub.Runner.Worker
}
ArgUtil.NotNullOrEmpty(archiveFile, nameof(archiveFile));
- executionContext.Debug($"Download '{archiveLink}' to '{archiveFile}'");
executionContext.Debug($"Download '{link}' to '{archiveFile}'");
var stagingDirectory = Path.Combine(tempDirectory, "_staging");
Directory.CreateDirectory(stagingDirectory);
@@ -662,6 +915,7 @@ namespace GitHub.Runner.Worker
}
Trace.Verbose("Create watermark file indicate action download succeed.");
string watermarkFile = GetWatermarkFilePath(destDirectory);
File.WriteAllText(watermarkFile, DateTime.UtcNow.ToString());
executionContext.Debug($"Archive '{archiveFile}' has been unzipped into '{destDirectory}'.");
@@ -686,6 +940,32 @@ namespace GitHub.Runner.Worker
}
}
// todo: Remove when feature flag DistributedTask.NewActionMetadata is removed
private void ConfigureAuthorizationFromContext(IExecutionContext executionContext, HttpClient httpClient)
{
var authToken = Environment.GetEnvironmentVariable("_GITHUB_ACTION_TOKEN");
if (string.IsNullOrEmpty(authToken))
{
// TODO: Deprecate the PREVIEW_ACTION_TOKEN
authToken = executionContext.Variables.Get("PREVIEW_ACTION_TOKEN");
}
if (!string.IsNullOrEmpty(authToken))
{
HostContext.SecretMasker.AddValue(authToken);
var base64EncodingToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"PAT:{authToken}"));
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Basic", base64EncodingToken);
}
else
{
var accessToken = executionContext.GetGitHubContext("token");
var base64EncodingToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"x-access-token:{accessToken}"));
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Basic", base64EncodingToken);
}
}
private string GetWatermarkFilePath(string directory) => directory + ".completed";
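GetWatermarkFilePath centralizes the "<dest>.completed" marker used by both download paths: the marker is checked before downloading and written only after a successful extraction, so a crashed or cancelled download is simply retried from scratch on the next run. A minimal sketch of that idea (the paths here are hypothetical):

using System;
using System.IO;

class WatermarkSketch
{
    static string GetWatermarkFilePath(string directory) => directory + ".completed";

    static void Main()
    {
        var destDirectory = Path.Combine(Path.GetTempPath(), "actions", "my-org", "my-action", "v1");
        var watermarkFile = GetWatermarkFilePath(destDirectory);

        if (File.Exists(watermarkFile))
        {
            Console.WriteLine("Already downloaded, skip.");
            return;
        }

        // ... download and extract into destDirectory here ...
        Directory.CreateDirectory(destDirectory);

        // Only after everything succeeded, stamp the watermark so later runs can skip the download.
        File.WriteAllText(watermarkFile, DateTime.UtcNow.ToString());
        Console.WriteLine($"Marked '{destDirectory}' as completed.");
    }
}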
private ActionContainer PrepareRepositoryActionAsync(IExecutionContext executionContext, Pipelines.ActionStep repositoryAction)
{
var repositoryReference = repositoryAction.Reference as Pipelines.RepositoryPathReference;
@@ -766,6 +1046,11 @@ namespace GitHub.Runner.Worker
Trace.Info($"Action plugin: {(actionDefinitionData.Execution as PluginActionExecutionData).Plugin}, no more preparation.");
return null;
}
else if (actionDefinitionData.Execution.ExecutionType == ActionExecutionType.Composite && !string.IsNullOrEmpty(Environment.GetEnvironmentVariable("TESTING_COMPOSITE_ACTIONS_ALPHA")))
{
Trace.Info($"Action composite: {(actionDefinitionData.Execution as CompositeActionExecutionData).Steps}, no more preparation.");
return null;
}
else
{
throw new NotSupportedException(actionDefinitionData.Execution.ExecutionType.ToString());
@@ -791,6 +1076,64 @@ namespace GitHub.Runner.Worker
throw new InvalidOperationException($"Can't find 'action.yml', 'action.yaml' or 'Dockerfile' under '{fullPath}'. Did you forget to run actions/checkout before running your local action?");
}
}
private static string GetDownloadInfoLookupKey(Pipelines.ActionStep action)
{
if (action.Reference.Type != Pipelines.ActionSourceType.Repository)
{
return null;
}
var repositoryReference = action.Reference as Pipelines.RepositoryPathReference;
ArgUtil.NotNull(repositoryReference, nameof(repositoryReference));
if (string.Equals(repositoryReference.RepositoryType, Pipelines.PipelineConstants.SelfAlias, StringComparison.OrdinalIgnoreCase))
{
return null;
}
if (!string.Equals(repositoryReference.RepositoryType, Pipelines.RepositoryTypes.GitHub, StringComparison.OrdinalIgnoreCase))
{
throw new NotSupportedException(repositoryReference.RepositoryType);
}
ArgUtil.NotNullOrEmpty(repositoryReference.Name, nameof(repositoryReference.Name));
ArgUtil.NotNullOrEmpty(repositoryReference.Ref, nameof(repositoryReference.Ref));
return $"{repositoryReference.Name}@{repositoryReference.Ref}";
}
private static string GetDownloadInfoLookupKey(WebApi.ActionDownloadInfo info)
{
ArgUtil.NotNullOrEmpty(info.NameWithOwner, nameof(info.NameWithOwner));
ArgUtil.NotNullOrEmpty(info.Ref, nameof(info.Ref));
return $"{info.NameWithOwner}@{info.Ref}";
}
private AuthenticationHeaderValue CreateAuthHeader(string token)
{
if (string.IsNullOrEmpty(token))
{
return null;
}
var base64EncodingToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"x-access-token:{token}"));
HostContext.SecretMasker.AddValue(base64EncodingToken);
return new AuthenticationHeaderValue("Basic", base64EncodingToken);
}
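CreateAuthHeader wraps the per-action token in the same HTTP Basic scheme the legacy path used, with "x-access-token" as the user name, and it registers the encoded value with the secret masker so it never appears in logs. A small sketch of just the encoding step (the token value is obviously fake, and the masking call is omitted):

using System;
using System.Net.Http.Headers;
using System.Text;

class AuthHeaderSketch
{
    static AuthenticationHeaderValue CreateAuthHeader(string token)
    {
        if (string.IsNullOrEmpty(token))
        {
            return null;
        }
        // Basic auth with "x-access-token" as the user and the token as the password.
        var encoded = Convert.ToBase64String(Encoding.UTF8.GetBytes($"x-access-token:{token}"));
        return new AuthenticationHeaderValue("Basic", encoded);
    }

    static void Main()
    {
        var header = CreateAuthHeader("example-token"); // placeholder value
        Console.WriteLine(header); // prints something like: Basic eC1hY2Nlc3MtdG9rZW46...
    }
}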
// todo: Remove when feature flag DistributedTask.NewActionMetadata is removed
private class ActionDownloadDetails
{
public string ArchiveLink { get; }
public Action<IExecutionContext, HttpClient> ConfigureAuthorization { get; }
public ActionDownloadDetails(string archiveLink, Action<IExecutionContext, HttpClient> configureAuthorization)
{
ArchiveLink = archiveLink;
ConfigureAuthorization = configureAuthorization;
}
}
}
public sealed class Definition
@@ -818,6 +1161,7 @@ namespace GitHub.Runner.Worker
NodeJS,
Plugin,
Script,
Composite,
}
public sealed class ContainerActionExecutionData : ActionExecutionData
@@ -874,6 +1218,15 @@ namespace GitHub.Runner.Worker
public override bool HasPost => false;
}
public sealed class CompositeActionExecutionData : ActionExecutionData
{
public override ActionExecutionType ExecutionType => ActionExecutionType.Composite;
public override bool HasPre => false;
public override bool HasPost => false;
public List<Pipelines.ActionStep> Steps { get; set; }
public MappingToken Outputs { get; set; }
}
public abstract class ActionExecutionData
{
private string _initCondition = $"{Constants.Expressions.Always}()";
@@ -931,4 +1284,3 @@ namespace GitHub.Runner.Worker
public string ActionRepository { get; set; }
}
}

View File

@@ -14,6 +14,7 @@ using YamlDotNet.Core;
using YamlDotNet.Core.Events;
using System.Globalization;
using System.Linq;
using Pipelines = GitHub.DistributedTask.Pipelines;
namespace GitHub.Runner.Worker
{
@@ -22,18 +23,20 @@ namespace GitHub.Runner.Worker
{
ActionDefinitionData Load(IExecutionContext executionContext, string manifestFile);
DictionaryContextData EvaluateCompositeOutputs(IExecutionContext executionContext, TemplateToken token, IDictionary<string, PipelineContextData> extraExpressionValues);
List<string> EvaluateContainerArguments(IExecutionContext executionContext, SequenceToken token, IDictionary<string, PipelineContextData> extraExpressionValues);
Dictionary<string, string> EvaluateContainerEnvironment(IExecutionContext executionContext, MappingToken token, IDictionary<string, PipelineContextData> extraExpressionValues);
string EvaluateDefaultInput(IExecutionContext executionContext, string inputName, TemplateToken token);
void SetAllCompositeOutputs(IExecutionContext parentExecutionContext, DictionaryContextData actionOutputs);
}
public sealed class ActionManifestManager : RunnerService, IActionManifestManager
{
private TemplateSchema _actionManifestSchema;
- private IReadOnlyList<String> _fileTable;
public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);
@@ -54,25 +57,45 @@ namespace GitHub.Runner.Worker
public ActionDefinitionData Load(IExecutionContext executionContext, string manifestFile)
{
- var context = CreateContext(executionContext);
var templateContext = CreateContext(executionContext);
ActionDefinitionData actionDefinition = new ActionDefinitionData();
// Clean up file name real quick
// Instead of using Regex which can be computationally expensive,
// we can just remove the # of characters from the fileName according to the length of the basePath
string basePath = HostContext.GetDirectory(WellKnownDirectory.Actions);
string fileRelativePath = manifestFile;
if (manifestFile.Contains(basePath))
{
fileRelativePath = manifestFile.Remove(0, basePath.Length + 1);
}
try
{
var token = default(TemplateToken);
// Get the file ID
- var fileId = context.GetFileId(manifestFile);
var fileId = templateContext.GetFileId(fileRelativePath);
- _fileTable = context.GetFileTable();
// Add this file to the FileTable in executionContext if it hasn't been added already
// we use > since fileID is 1 indexed
if (fileId > executionContext.FileTable.Count)
{
executionContext.FileTable.Add(fileRelativePath);
}
// Read the file
var fileContent = File.ReadAllText(manifestFile);
using (var stringReader = new StringReader(fileContent))
{
- var yamlObjectReader = new YamlObjectReader(null, stringReader);
- token = TemplateReader.Read(context, "action-root", yamlObjectReader, fileId, out _);
var yamlObjectReader = new YamlObjectReader(fileId, stringReader);
token = TemplateReader.Read(templateContext, "action-root", yamlObjectReader, fileId, out _);
}
var actionMapping = token.AssertMapping("action manifest root");
var actionOutputs = default(MappingToken);
var actionRunValueToken = default(TemplateToken);
foreach (var actionPair in actionMapping)
{
var propertyName = actionPair.Key.AssertString($"action.yml property key");
@@ -83,44 +106,61 @@ namespace GitHub.Runner.Worker
actionDefinition.Name = actionPair.Value.AssertString("name").Value;
break;
case "outputs":
if (!string.IsNullOrEmpty(Environment.GetEnvironmentVariable("TESTING_COMPOSITE_ACTIONS_ALPHA")))
{
actionOutputs = actionPair.Value.AssertMapping("outputs");
break;
}
Trace.Info($"Ignore action property outputs. Outputs for a whole action is not supported yet.");
break;
case "description":
actionDefinition.Description = actionPair.Value.AssertString("description").Value;
break;
case "inputs":
- ConvertInputs(context, actionPair.Value, actionDefinition);
ConvertInputs(templateContext, actionPair.Value, actionDefinition);
break;
case "runs":
- actionDefinition.Execution = ConvertRuns(context, actionPair.Value);
// Defer runs token evaluation to after for loop to ensure that order of outputs doesn't matter.
actionRunValueToken = actionPair.Value;
break;
default:
Trace.Info($"Ignore action property {propertyName}.");
break;
}
}
// Evaluate Runs Last
if (actionRunValueToken != null)
{
actionDefinition.Execution = ConvertRuns(executionContext, templateContext, actionRunValueToken, actionOutputs);
}
}
catch (Exception ex)
{
Trace.Error(ex);
- context.Errors.Add(ex);
templateContext.Errors.Add(ex);
}
- if (context.Errors.Count > 0)
if (templateContext.Errors.Count > 0)
{
- foreach (var error in context.Errors)
foreach (var error in templateContext.Errors)
{
Trace.Error($"Action.yml load error: {error.Message}");
executionContext.Error(error.Message);
}
- throw new ArgumentException($"Fail to load {manifestFile}");
throw new ArgumentException($"Fail to load {fileRelativePath}");
}
if (actionDefinition.Execution == null)
{
executionContext.Debug($"Loaded action.yml file: {StringUtil.ConvertToJson(actionDefinition)}");
- throw new ArgumentException($"Top level 'runs:' section is required for {manifestFile}");
throw new ArgumentException($"Top level 'runs:' section is required for {fileRelativePath}");
}
else
{
@@ -130,6 +170,61 @@ namespace GitHub.Runner.Worker
return actionDefinition;
}
public void SetAllCompositeOutputs(
IExecutionContext parentExecutionContext,
DictionaryContextData actionOutputs)
{
// Each pair is structured like this
// We ignore "description" for now
// {
// "the-output-name": {
// "description": "",
// "value": "the value"
// },
// ...
// }
foreach (var pair in actionOutputs)
{
var outputsName = pair.Key;
var outputsAttributes = pair.Value as DictionaryContextData;
outputsAttributes.TryGetValue("value", out var val);
var outputsValue = val as StringContextData;
// Set output in the whole composite scope.
if (!String.IsNullOrEmpty(outputsName) && !String.IsNullOrEmpty(outputsValue))
{
parentExecutionContext.SetOutput(outputsName, outputsValue, out _);
}
}
}
public DictionaryContextData EvaluateCompositeOutputs(
IExecutionContext executionContext,
TemplateToken token,
IDictionary<string, PipelineContextData> extraExpressionValues)
{
var result = default(DictionaryContextData);
if (token != null)
{
var context = CreateContext(executionContext, extraExpressionValues);
try
{
token = TemplateEvaluator.Evaluate(context, "outputs", token, 0, null, omitHeader: true);
context.Errors.Check();
result = token.ToContextData().AssertDictionary("composite outputs");
}
catch (Exception ex) when (!(ex is TemplateValidationException))
{
context.Errors.Add(ex);
}
context.Errors.Check();
}
return result ?? new DictionaryContextData();
}
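EvaluateCompositeOutputs and SetAllCompositeOutputs together turn the action-level outputs mapping into values on the parent scope: the mapping is evaluated against the steps context, then each entry's "value" is written via SetOutput. A rough sketch of the data shape they work with, using plain dictionaries instead of the runner's context types (the output name and value below are made up):

using System;
using System.Collections.Generic;

class CompositeOutputsSketch
{
    static void Main()
    {
        // Shape of the evaluated outputs token described in the comment above:
        // output name -> { "description": ..., "value": ... }; only "value" is consumed.
        var actionOutputs = new Dictionary<string, Dictionary<string, string>>
        {
            ["random-number"] = new Dictionary<string, string>
            {
                ["description"] = "A random number",
                ["value"] = "43243"   // in the runner this comes from the steps context of the composite scope
            }
        };

        foreach (var pair in actionOutputs)
        {
            if (pair.Value.TryGetValue("value", out var value) && !string.IsNullOrEmpty(value))
            {
                // Stand-in for parentExecutionContext.SetOutput(name, value, out _).
                Console.WriteLine($"set-output {pair.Key}={value}");
            }
        }
    }
}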
public List<string> EvaluateContainerArguments(
IExecutionContext executionContext,
SequenceToken token,
@@ -281,21 +376,20 @@ namespace GitHub.Runner.Worker
result.ExpressionFunctions.Add(item);
}
- // Add the file table
- if (_fileTable?.Count > 0)
- {
- for (var i = 0 ; i < _fileTable.Count ; i++)
- {
- result.GetFileId(_fileTable[i]);
- }
- }
// Add the file table from the Execution Context
for (var i = 0; i < executionContext.FileTable.Count; i++)
{
result.GetFileId(executionContext.FileTable[i]);
}
return result;
}
private ActionExecutionData ConvertRuns(
IExecutionContext executionContext,
TemplateContext context,
- TemplateToken inputsToken)
TemplateToken inputsToken,
MappingToken outputs = null)
{
var runsMapping = inputsToken.AssertMapping("runs");
var usingToken = default(StringToken);
@@ -311,6 +405,8 @@ namespace GitHub.Runner.Worker
var postToken = default(StringToken);
var postEntrypointToken = default(StringToken);
var postIfToken = default(StringToken);
var stepsLoaded = default(List<Pipelines.ActionStep>);
foreach (var run in runsMapping)
{
var runsKey = run.Key.AssertString("runs key").Value;
@@ -355,6 +451,15 @@ namespace GitHub.Runner.Worker
case "pre-if":
preIfToken = run.Value.AssertString("pre-if");
break;
case "steps":
if (!string.IsNullOrEmpty(Environment.GetEnvironmentVariable("TESTING_COMPOSITE_ACTIONS_ALPHA")))
{
var steps = run.Value.AssertSequence("steps");
var evaluator = executionContext.ToPipelineTemplateEvaluator();
stepsLoaded = evaluator.LoadCompositeSteps(steps);
break;
}
throw new Exception("You aren't supposed to be using Composite Actions yet!");
default:
Trace.Info($"Ignore run property {runsKey}.");
break;
@@ -402,6 +507,22 @@ namespace GitHub.Runner.Worker
};
}
}
else if (string.Equals(usingToken.Value, "composite", StringComparison.OrdinalIgnoreCase) && !string.IsNullOrEmpty(Environment.GetEnvironmentVariable("TESTING_COMPOSITE_ACTIONS_ALPHA")))
{
if (stepsLoaded == null)
{
// TODO: Add a more helpful error message + including file name, etc. to show user that it's because of their yaml file
throw new ArgumentNullException($"No steps provided.");
}
else
{
return new CompositeActionExecutionData()
{
Steps = stepsLoaded,
Outputs = outputs
};
}
}
else
{
throw new ArgumentOutOfRangeException($"'using: {usingToken.Value}' is not supported, use 'docker' or 'node12' instead.");

View File

@@ -0,0 +1,33 @@
using System;
using System.Runtime.Serialization;
namespace GitHub.Runner.Worker
{
public class ActionNotFoundException : Exception
{
public ActionNotFoundException(Uri actionUri)
: base(FormatMessage(actionUri))
{
}
public ActionNotFoundException(string message)
: base(message)
{
}
public ActionNotFoundException(string message, System.Exception inner)
: base(message, inner)
{
}
protected ActionNotFoundException(SerializationInfo info, StreamingContext context)
: base(info, context)
{
}
private static string FormatMessage(Uri actionUri)
{
return $"An action could not be found at the URI '{actionUri}'";
}
}
}
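This new exception exists mainly so the download loop can distinguish a 404 (move on to the next candidate URL) from transient failures (retry the same URL). A minimal usage sketch, with a hypothetical TryDownloadAsync and a locally defined exception type standing in for the runner's:

using System;
using System.Threading.Tasks;

class ActionNotFoundSketch
{
    // Local stand-in for the runner's ActionNotFoundException.
    class NotFoundSketchException : Exception
    {
        public NotFoundSketchException(string message) : base(message) { }
    }

    // Hypothetical stand-in for DownloadRepositoryActionAsync; always reports "not found" here.
    static Task TryDownloadAsync(Uri archiveLink) =>
        throw new NotFoundSketchException($"An action could not be found at the URI '{archiveLink}'");

    static async Task Main()
    {
        var candidates = new[]
        {
            new Uri("https://my-ghes/api/v3/repos/my-org/my-action/tarball/v1"), // example GHES URL
            new Uri("https://api.github.com/repos/my-org/my-action/tarball/v1"), // example dotcom fallback
        };
        foreach (var link in candidates)
        {
            try
            {
                await TryDownloadAsync(link);
                return;
            }
            catch (NotFoundSketchException)
            {
                Console.WriteLine($"Not found at {link}, trying next candidate.");
            }
        }
        Console.WriteLine("Action not found at any candidate URL.");
    }
}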

View File

@@ -94,6 +94,13 @@ namespace GitHub.Runner.Worker
if (handlerData.HasPost && (Stage == ActionRunStage.Pre || Stage == ActionRunStage.Main))
{
string postDisplayName = $"Post {this.DisplayName}";
if (Stage == ActionRunStage.Pre &&
this.DisplayName.StartsWith("Pre ", StringComparison.OrdinalIgnoreCase))
{
// Trim the leading `Pre ` from the display name.
// Otherwise, we will get `Post Pre xxx` as DisplayName for the Post step.
postDisplayName = $"Post {this.DisplayName.Substring("Pre ".Length)}";
}
var repositoryReference = Action.Reference as RepositoryPathReference;
var pathString = string.IsNullOrEmpty(repositoryReference.Path) ? string.Empty : $"/{repositoryReference.Path}";
var repoString = string.IsNullOrEmpty(repositoryReference.Ref) ? $"{repositoryReference.Name}{pathString}" :
@@ -143,8 +150,10 @@ namespace GitHub.Runner.Worker
var templateEvaluator = ExecutionContext.ToPipelineTemplateEvaluator();
var inputs = templateEvaluator.EvaluateStepInputs(Action.Inputs, ExecutionContext.ExpressionValues, ExecutionContext.ExpressionFunctions);
var userInputs = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
foreach (KeyValuePair<string, string> input in inputs)
{
userInputs.Add(input.Key);
string message = "";
if (definition.Data?.Deprecated?.TryGetValue(input.Key, out message) == true)
{
@@ -152,13 +161,22 @@ namespace GitHub.Runner.Worker
}
}
var validInputs = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
if (handlerData.ExecutionType == ActionExecutionType.Container)
{
// container action always accept 'entryPoint' and 'args' as inputs
// https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions#jobsjob_idstepswithargs
validInputs.Add("entryPoint");
validInputs.Add("args");
}
// Merge the default inputs from the definition
if (definition.Data?.Inputs != null)
{
var manifestManager = HostContext.GetService<IActionManifestManager>();
- foreach (var input in (definition.Data?.Inputs))
foreach (var input in definition.Data.Inputs)
{
string key = input.Key.AssertString("action input name").Value;
validInputs.Add(key);
if (!inputs.ContainsKey(key))
{
inputs[key] = manifestManager.EvaluateDefaultInput(ExecutionContext, key, input.Value);
@@ -166,6 +184,24 @@ namespace GitHub.Runner.Worker
}
}
// Validate inputs only for actions with action.yml
if (Action.Reference.Type == Pipelines.ActionSourceType.Repository)
{
var unexpectedInputs = new List<string>();
foreach (var input in userInputs)
{
if (!validInputs.Contains(input))
{
unexpectedInputs.Add(input);
}
}
if (unexpectedInputs.Count > 0)
{
ExecutionContext.Warning($"Unexpected input(s) '{string.Join("', '", unexpectedInputs)}', valid inputs are ['{string.Join("', '", validInputs)}']");
}
}
// Load the action environment.
ExecutionContext.Debug("Loading env");
var environment = new Dictionary<String, String>(VarUtil.EnvironmentVariableKeyComparer);

View File

@@ -17,7 +17,7 @@ namespace GitHub.Runner.Worker.Container
string DockerInstanceLabel { get; }
Task<DockerVersion> DockerVersion(IExecutionContext context);
Task<int> DockerPull(IExecutionContext context, string image);
- Task<int> DockerBuild(IExecutionContext context, string workingDirectory, string dockerFile, string tag);
Task<int> DockerBuild(IExecutionContext context, string workingDirectory, string dockerFile, string dockerContext, string tag);
Task<string> DockerCreate(IExecutionContext context, ContainerInfo container);
Task<int> DockerRun(IExecutionContext context, ContainerInfo container, EventHandler<ProcessDataReceivedEventArgs> stdoutDataReceived, EventHandler<ProcessDataReceivedEventArgs> stderrDataReceived);
Task<int> DockerStart(IExecutionContext context, string containerId);
@@ -87,9 +87,9 @@ namespace GitHub.Runner.Worker.Container
return await ExecuteDockerCommandAsync(context, "pull", image, context.CancellationToken);
}
- public async Task<int> DockerBuild(IExecutionContext context, string workingDirectory, string dockerFile, string tag)
public async Task<int> DockerBuild(IExecutionContext context, string workingDirectory, string dockerFile, string dockerContext, string tag)
{
- return await ExecuteDockerCommandAsync(context, "build", $"-t {tag} \"{dockerFile}\"", workingDirectory, context.CancellationToken);
return await ExecuteDockerCommandAsync(context, "build", $"-t {tag} -f \"{dockerFile}\" \"{dockerContext}\"", workingDirectory, context.CancellationToken);
}
public async Task<string> DockerCreate(IExecutionContext context, ContainerInfo container)

View File

@@ -6,6 +6,7 @@ using System.Globalization;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading;
using System.Threading.Tasks;
using System.Web;
@@ -44,6 +45,7 @@ namespace GitHub.Runner.Worker
TaskResult? CommandResult { get; set; }
CancellationToken CancellationToken { get; }
List<ServiceEndpoint> Endpoints { get; }
TaskOrchestrationPlanReference Plan { get; }
PlanFeatures Features { get; }
Variables Variables { get; }
@@ -51,7 +53,6 @@ namespace GitHub.Runner.Worker
IDictionary<String, IDictionary<String, String>> JobDefaults { get; }
Dictionary<string, VariableValue> JobOutputs { get; }
IDictionary<String, String> EnvironmentVariables { get; }
- IDictionary<String, ContextScope> Scopes { get; }
IList<String> FileTable { get; }
StepsContext StepsContext { get; }
DictionaryContextData ExpressionValues { get; }
@@ -62,17 +63,19 @@ namespace GitHub.Runner.Worker
JobContext JobContext { get; }
// Only job level ExecutionContext has JobSteps
- Queue<IStep> JobSteps { get; }
List<IStep> JobSteps { get; }
// Only job level ExecutionContext has PostJobSteps
Stack<IStep> PostJobSteps { get; }
bool EchoOnActionCommand { get; set; }
IExecutionContext FinalizeContext { get; set; }
// Initialize
void InitializeJob(Pipelines.AgentJobRequestMessage message, CancellationToken token);
void CancelToken();
- IExecutionContext CreateChild(Guid recordId, string displayName, string refName, string scopeName, string contextName, Dictionary<string, string> intraActionState = null, int? recordOrder = null);
IExecutionContext CreateChild(Guid recordId, string displayName, string refName, string scopeName, string contextName, Dictionary<string, string> intraActionState = null, int? recordOrder = null, IPagingLogger logger = null);
// logging
bool WriteDebug { get; }
@@ -104,11 +107,13 @@ namespace GitHub.Runner.Worker
// others
void ForceTaskComplete();
void RegisterPostJobStep(IStep step);
IStep RegisterNestedStep(IActionRunner step, DictionaryContextData inputsData, int location, Dictionary<string, string> envData, bool cleanUp = false);
}
public sealed class ExecutionContext : RunnerService, IExecutionContext
{
private const int _maxIssueCount = 10;
private const int _throttlingDelayReportThreshold = 10 * 1000; // Don't report throttling with less than 10 seconds delay
private readonly TimelineRecord _record = new TimelineRecord();
private readonly Dictionary<Guid, TimelineRecord> _detailRecords = new Dictionary<Guid, TimelineRecord>();
@@ -117,6 +122,9 @@ namespace GitHub.Runner.Worker
private event OnMatcherChanged _onMatcherChanged;
// Regex used for checking if ScopeName meets the condition that shows that its id is null.
private readonly static Regex _generatedContextNamePattern = new Regex("^__[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$", RegexOptions.Compiled | RegexOptions.CultureInvariant | RegexOptions.IgnoreCase);
private IssueMatcherConfig[] _matchers;
private IPagingLogger _logger;
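The _generatedContextNamePattern added in this hunk matches the placeholder context names that RegisterNestedStep generates as "__" plus a GUID, so steps that never had a user-visible id don't publish entries into the steps context. A quick sketch of what the pattern does and does not match:

using System;
using System.Text.RegularExpressions;

class GeneratedContextNameSketch
{
    static readonly Regex GeneratedContextNamePattern =
        new Regex("^__[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$",
                  RegexOptions.Compiled | RegexOptions.CultureInvariant | RegexOptions.IgnoreCase);

    static void Main()
    {
        var generated = $"__{Guid.NewGuid()}";          // e.g. __3f2504e0-4f89-41d3-9a0c-0305e82c3301
        Console.WriteLine(GeneratedContextNamePattern.IsMatch(generated));   // True
        Console.WriteLine(GeneratedContextNamePattern.IsMatch("my_step"));   // False: a user-provided id
    }
}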
@@ -140,12 +148,12 @@ namespace GitHub.Runner.Worker
public Task ForceCompleted => _forceCompleted.Task;
public CancellationToken CancellationToken => _cancellationTokenSource.Token;
public List<ServiceEndpoint> Endpoints { get; private set; }
public TaskOrchestrationPlanReference Plan { get; private set; }
public Variables Variables { get; private set; }
public Dictionary<string, string> IntraActionState { get; private set; }
public IDictionary<String, IDictionary<String, String>> JobDefaults { get; private set; }
public Dictionary<string, VariableValue> JobOutputs { get; private set; }
public IDictionary<String, String> EnvironmentVariables { get; private set; }
- public IDictionary<String, ContextScope> Scopes { get; private set; }
public IList<String> FileTable { get; private set; }
public StepsContext StepsContext { get; private set; }
public DictionaryContextData ExpressionValues { get; } = new DictionaryContextData();
@@ -156,7 +164,7 @@ namespace GitHub.Runner.Worker
public List<ContainerInfo> ServiceContainers { get; private set; }
// Only job level ExecutionContext has JobSteps
- public Queue<IStep> JobSteps { get; private set; }
public List<IStep> JobSteps { get; private set; }
// Only job level ExecutionContext has PostJobSteps
public Stack<IStep> PostJobSteps { get; private set; }
@@ -166,6 +174,7 @@ namespace GitHub.Runner.Worker
public bool EchoOnActionCommand { get; set; }
public IExecutionContext FinalizeContext { get; set; }
public TaskResult? Result
{
@@ -263,7 +272,55 @@ namespace GitHub.Runner.Worker
Root.PostJobSteps.Push(step);
}
- public IExecutionContext CreateChild(Guid recordId, string displayName, string refName, string scopeName, string contextName, Dictionary<string, string> intraActionState = null, int? recordOrder = null)
/// <summary>
/// Helper function used in CompositeActionHandler::RunAsync to
/// add a child node, aka a step, to the current job to the Root.JobSteps based on the location.
/// </summary>
public IStep RegisterNestedStep(
IActionRunner step,
DictionaryContextData inputsData,
int location,
Dictionary<string, string> envData,
bool cleanUp = false)
{
// If the context name is empty and the scope name is empty, we would generate a unique scope name for this child in the following format:
// "__<GUID>"
var safeContextName = !string.IsNullOrEmpty(ContextName) ? ContextName : $"__{Guid.NewGuid()}";
// Set Scope Name. Note, for our design, we consider each step in a composite action to have the same scope
// This makes it much simpler to handle their outputs at the end of the Composite Action
var childScopeName = !string.IsNullOrEmpty(ScopeName) ? $"{ScopeName}.{safeContextName}" : safeContextName;
var childContextName = !string.IsNullOrEmpty(step.Action.ContextName) ? step.Action.ContextName : $"__{Guid.NewGuid()}";
step.ExecutionContext = Root.CreateChild(_record.Id, step.DisplayName, _record.Id.ToString("N"), childScopeName, childContextName, logger: _logger);
step.ExecutionContext.ExpressionValues["inputs"] = inputsData;
// Set Parent Attribute for Clean Up Step
if (cleanUp)
{
step.ExecutionContext.FinalizeContext = this;
}
// Add the composite action environment variables to each step.
#if OS_WINDOWS
var envContext = new DictionaryContextData();
#else
var envContext = new CaseSensitiveDictionaryContextData();
#endif
foreach (var pair in envData)
{
envContext[pair.Key] = new StringContextData(pair.Value ?? string.Empty);
}
step.ExecutionContext.ExpressionValues["env"] = envContext;
Root.JobSteps.Insert(location, step);
return step;
}
public IExecutionContext CreateChild(Guid recordId, string displayName, string refName, string scopeName, string contextName, Dictionary<string, string> intraActionState = null, int? recordOrder = null, IPagingLogger logger = null)
{
Trace.Entering();
@@ -274,6 +331,7 @@ namespace GitHub.Runner.Worker
child.Features = Features;
child.Variables = Variables;
child.Endpoints = Endpoints;
child.Plan = Plan;
if (intraActionState == null)
{
child.IntraActionState = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
@@ -284,7 +342,6 @@ namespace GitHub.Runner.Worker
}
child.EnvironmentVariables = EnvironmentVariables;
child.JobDefaults = JobDefaults;
- child.Scopes = Scopes;
child.FileTable = FileTable;
child.StepsContext = StepsContext;
foreach (var pair in ExpressionValues)
@@ -311,9 +368,15 @@ namespace GitHub.Runner.Worker
{
child.InitializeTimelineRecord(_mainTimelineId, recordId, _record.Id, ExecutionContextType.Task, displayName, refName, ++_childTimelineRecordOrder);
}
if (logger != null)
{
child._logger = logger;
}
else
{
child._logger = HostContext.CreateService<IPagingLogger>();
child._logger.Setup(_mainTimelineId, recordId);
}
return child;
}
@@ -335,7 +398,7 @@ namespace GitHub.Runner.Worker
}
// report total delay caused by server throttling.
- if (_totalThrottlingDelayInMilliseconds > 0)
if (_totalThrottlingDelayInMilliseconds > _throttlingDelayReportThreshold)
{
this.Warning($"The job has experienced {TimeSpan.FromMilliseconds(_totalThrottlingDelayInMilliseconds).TotalSeconds} seconds total delay caused by server throttling.");
}
@@ -363,14 +426,18 @@ namespace GitHub.Runner.Worker
}
}
- _cancellationTokenSource?.Dispose();
if (Root != this)
{
// only dispose TokenSource for step level ExecutionContext
_cancellationTokenSource?.Dispose();
}
_logger.End();
if (!string.IsNullOrEmpty(ContextName))
{
- StepsContext.SetOutcome(ScopeName, ContextName, (Outcome ?? Result ?? TaskResult.Succeeded).ToActionResult().ToString());
- StepsContext.SetConclusion(ScopeName, ContextName, (Result ?? TaskResult.Succeeded).ToActionResult().ToString());
StepsContext.SetOutcome(ScopeName, ContextName, (Outcome ?? Result ?? TaskResult.Succeeded).ToActionResult());
StepsContext.SetConclusion(ScopeName, ContextName, (Result ?? TaskResult.Succeeded).ToActionResult());
}
return Result.Value;
@@ -429,7 +496,8 @@ namespace GitHub.Runner.Worker
{
ArgUtil.NotNullOrEmpty(name, nameof(name));
- if (String.IsNullOrEmpty(ContextName))
// if the ContextName follows the __GUID format which is set as the default value for ContextName if null for Composite Actions.
if (String.IsNullOrEmpty(ContextName) || _generatedContextNamePattern.IsMatch(ContextName))
{
reference = null;
return;
@@ -571,7 +639,8 @@ namespace GitHub.Runner.Worker
_cancellationTokenSource = CancellationTokenSource.CreateLinkedTokenSource(token);
- // Features
// Plan
Plan = message.Plan;
Features = PlanUtil.GetFeatures(message.Plan);
// Endpoints
@@ -595,16 +664,6 @@ namespace GitHub.Runner.Worker
// Steps context (StepsRunner manages adding the scoped steps context)
StepsContext = new StepsContext();
- // Scopes
- Scopes = new Dictionary<String, ContextScope>(StringComparer.OrdinalIgnoreCase);
- if (message.Scopes?.Count > 0)
- {
- foreach (var scope in message.Scopes)
- {
- Scopes[scope.Name] = scope;
- }
- }
// File table
FileTable = new List<String>(message.FileTable ?? new string[0]);
@@ -651,7 +710,7 @@ namespace GitHub.Runner.Worker
PrependPath = new List<string>();
// JobSteps for job ExecutionContext
- JobSteps = new Queue<IStep>();
JobSteps = new List<IStep>();
// PostJobSteps for job ExecutionContext
PostJobSteps = new Stack<IStep>();
@@ -851,7 +910,8 @@ namespace GitHub.Runner.Worker
{
Interlocked.Add(ref _totalThrottlingDelayInMilliseconds, Convert.ToInt64(data.Delay.TotalMilliseconds));
- if (!_throttlingReported)
if (!_throttlingReported &&
_totalThrottlingDelayInMilliseconds > _throttlingDelayReportThreshold)
{
this.Warning(string.Format("The job is currently being throttled by the server. You may experience delays in console line output, job status reporting, and action log uploads."));

View File

@@ -12,6 +12,8 @@ namespace GitHub.Runner.Worker.Expressions
{
public sealed class HashFilesFunction : Function
{
private const int _hashFileTimeoutSeconds = 120;
protected sealed override Object EvaluateCore(
EvaluationContext context,
out ResultMemory resultMemory)
@@ -89,19 +91,29 @@ namespace GitHub.Runner.Worker.Expressions
}
env["patterns"] = string.Join(Environment.NewLine, patterns);
- int exitCode = p.ExecuteAsync(workingDirectory: githubWorkspace,
- fileName: node,
- arguments: $"\"{hashFilesScript.Replace("\"", "\\\"")}\"",
- environment: env,
- requireExitCodeZero: false,
- cancellationToken: new CancellationTokenSource(TimeSpan.FromSeconds(120)).Token).GetAwaiter().GetResult();
- if (exitCode != 0)
- {
- throw new InvalidOperationException($"hashFiles('{ExpressionUtility.StringEscape(string.Join(", ", patterns))}') failed. Fail to hash files under directory '{githubWorkspace}'");
- }
- return hashResult;
using (var tokenSource = new CancellationTokenSource(TimeSpan.FromSeconds(_hashFileTimeoutSeconds)))
{
try
{
int exitCode = p.ExecuteAsync(workingDirectory: githubWorkspace,
fileName: node,
arguments: $"\"{hashFilesScript.Replace("\"", "\\\"")}\"",
environment: env,
requireExitCodeZero: false,
cancellationToken: tokenSource.Token).GetAwaiter().GetResult();
if (exitCode != 0)
{
throw new InvalidOperationException($"hashFiles('{ExpressionUtility.StringEscape(string.Join(", ", patterns))}') failed. Fail to hash files under directory '{githubWorkspace}'");
}
}
catch (OperationCanceledException) when (tokenSource.IsCancellationRequested)
{
throw new TimeoutException($"hashFiles('{ExpressionUtility.StringEscape(string.Join(", ", patterns))}') couldn't finish within {_hashFileTimeoutSeconds} seconds.");
}
return hashResult;
}
}
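The change above bounds hashFiles with a 120-second CancellationTokenSource and rewrites a cancellation triggered by that timeout as a TimeoutException with a readable message. The same pattern in isolation, with a hypothetical DoHashingAsync standing in for the node process invocation:

using System;
using System.Threading;
using System.Threading.Tasks;

class TimeoutSketch
{
    // Hypothetical stand-in for running the hashFiles node script.
    static async Task DoHashingAsync(CancellationToken token)
    {
        await Task.Delay(TimeSpan.FromSeconds(1), token); // pretend work
    }

    static async Task Main()
    {
        const int timeoutSeconds = 120;
        using (var tokenSource = new CancellationTokenSource(TimeSpan.FromSeconds(timeoutSeconds)))
        {
            try
            {
                await DoHashingAsync(tokenSource.Token);
            }
            catch (OperationCanceledException) when (tokenSource.IsCancellationRequested)
            {
                // Only a timeout-driven cancellation is rewritten; other cancellations bubble up unchanged.
                throw new TimeoutException($"hashFiles couldn't finish within {timeoutSeconds} seconds.");
            }
        }
    }
}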
private sealed class HashFilesTrace : ITraceWriter

View File

@@ -10,10 +10,11 @@ namespace GitHub.Runner.Worker
{
"action",
"actor",
- "api_url", // temp for GHES alpha release
"api_url",
"base_ref",
"event_name",
"event_path",
"graphql_url",
"head_ref",
"job",
"ref",
@@ -21,8 +22,8 @@
"repository_owner",
"run_id",
"run_number",
"server_url",
"sha",
- "url", // temp for GHES alpha release
"workflow",
"workspace",
};

View File

@@ -0,0 +1,117 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using Pipelines = GitHub.DistributedTask.Pipelines;
namespace GitHub.Runner.Worker.Handlers
{
[ServiceLocator(Default = typeof(CompositeActionHandler))]
public interface ICompositeActionHandler : IHandler
{
CompositeActionExecutionData Data { get; set; }
}
public sealed class CompositeActionHandler : Handler, ICompositeActionHandler
{
public CompositeActionExecutionData Data { get; set; }
public Task RunAsync(ActionRunStage stage)
{
// Validate args.
Trace.Entering();
ArgUtil.NotNull(ExecutionContext, nameof(ExecutionContext));
ArgUtil.NotNull(Inputs, nameof(Inputs));
var githubContext = ExecutionContext.ExpressionValues["github"] as GitHubContext;
ArgUtil.NotNull(githubContext, nameof(githubContext));
var tempDirectory = HostContext.GetDirectory(WellKnownDirectory.Temp);
// Resolve action steps
var actionSteps = Data.Steps;
// Create Context Data to reuse for each composite action step
var inputsData = new DictionaryContextData();
foreach (var i in Inputs)
{
inputsData[i.Key] = new StringContextData(i.Value);
}
// Add each composite action step to the front of the queue
int location = 0;
foreach (Pipelines.ActionStep aStep in actionSteps)
{
// Ex:
// runs:
// using: "composite"
// steps:
// - uses: example/test-composite@v2 (a)
// - run echo hello world (b)
// - run echo hello world 2 (c)
//
// ethanchewy/test-composite/action.yaml
// runs:
// using: "composite"
// steps:
// - run echo hello world 3 (d)
// - run echo hello world 4 (e)
//
// Steps processed as follows:
// | a |
// | a | => | d |
// (Run step d)
// | a |
// | a | => | e |
// (Run step e)
// | a |
// (Run step a)
// | b |
// (Run step b)
// | c |
// (Run step c)
// Done.
var actionRunner = HostContext.CreateService<IActionRunner>();
actionRunner.Action = aStep;
actionRunner.Stage = stage;
actionRunner.Condition = aStep.Condition;
var step = ExecutionContext.RegisterNestedStep(actionRunner, inputsData, location, Environment);
InitializeScope(step);
location++;
}
// Create a step that handles all the composite action steps' outputs
Pipelines.ActionStep cleanOutputsStep = new Pipelines.ActionStep();
cleanOutputsStep.ContextName = ExecutionContext.ContextName;
// Use the same reference type as our composite steps.
cleanOutputsStep.Reference = Action;
var actionRunner2 = HostContext.CreateService<IActionRunner>();
actionRunner2.Action = cleanOutputsStep;
actionRunner2.Stage = ActionRunStage.Main;
actionRunner2.Condition = "always()";
ExecutionContext.RegisterNestedStep(actionRunner2, inputsData, location, Environment, true);
return Task.CompletedTask;
}
private void InitializeScope(IStep step)
{
var stepsContext = step.ExecutionContext.StepsContext;
var scopeName = step.ExecutionContext.ScopeName;
step.ExecutionContext.ExpressionValues["steps"] = stepsContext.GetScope(scopeName);
}
}
}
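The queueing diagram in the comment above comes down to inserting each expanded composite step at an increasing index right after the current step, so nested steps run next while keeping their original order, and the outputs/cleanup step is appended after them. A toy illustration with a plain list of step names (the names are made up):

using System;
using System.Collections.Generic;

class CompositeInsertionSketch
{
    static void Main()
    {
        // Remaining job steps while composite step "a" is being expanded.
        var jobSteps = new List<string> { "b", "c" };

        // Steps defined inside the composite action, in file order.
        var compositeSteps = new[] { "d", "e" };

        int location = 0;
        foreach (var step in compositeSteps)
        {
            jobSteps.Insert(location, step); // mirrors Root.JobSteps.Insert(location, step)
            location++;
        }
        // A trailing cleanup/outputs step goes in last, after the expanded steps.
        jobSteps.Insert(location, "outputs-cleanup");

        Console.WriteLine(string.Join(" | ", jobSteps)); // d | e | outputs-cleanup | b | c
    }
}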

View File

@@ -0,0 +1,53 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using GitHub.DistributedTask.ObjectTemplating.Schema;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using Pipelines = GitHub.DistributedTask.Pipelines;
namespace GitHub.Runner.Worker.Handlers
{
[ServiceLocator(Default = typeof(CompositeActionOutputHandler))]
public interface ICompositeActionOutputHandler : IHandler
{
CompositeActionExecutionData Data { get; set; }
}
public sealed class CompositeActionOutputHandler : Handler, ICompositeActionOutputHandler
{
public CompositeActionExecutionData Data { get; set; }
public Task RunAsync(ActionRunStage stage)
{
// Evaluate the mapped outputs value
if (Data.Outputs != null)
{
// Evaluate the outputs in the steps context to easily retrieve the values
var actionManifestManager = HostContext.GetService<IActionManifestManager>();
// Format ExpressionValues to Dictionary<string, PipelineContextData>
var evaluateContext = new Dictionary<string, PipelineContextData>(StringComparer.OrdinalIgnoreCase);
foreach (var pair in ExecutionContext.ExpressionValues)
{
evaluateContext[pair.Key] = pair.Value;
}
// Get the evaluated composite outputs' values mapped to the output names
DictionaryContextData actionOutputs = actionManifestManager.EvaluateCompositeOutputs(ExecutionContext, Data.Outputs, evaluateContext);
// Set the outputs for the outputs object in the whole composite action
actionManifestManager.SetAllCompositeOutputs(ExecutionContext.FinalizeContext, actionOutputs);
}
return Task.CompletedTask;
}
}
}
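
Conceptually, this handler resolves each declared output's value expression against the outputs the composite steps recorded, then surfaces the results on the parent context. A simplified sketch with plain dictionaries (hypothetical shapes, not the runner's types or evaluator):

using System;
using System.Collections.Generic;

static class CompositeOutputSketch
{
    static void Main()
    {
        // Outputs recorded by the composite action's own steps (scoped steps context).
        var stepsContext = new Dictionary<string, string>
        {
            ["random-id.outputs.number"] = "43",
        };

        // Declared action outputs: output name -> value expression.
        var declared = new Dictionary<string, string>
        {
            ["number"] = "steps.random-id.outputs.number",
        };

        // "Evaluate" each value expression with a trivial lookup, standing in for
        // the template evaluator, and expose the result as the action's output.
        var actionOutputs = new Dictionary<string, string>();
        foreach (var pair in declared)
        {
            var key = pair.Value.StartsWith("steps.") ? pair.Value.Substring("steps.".Length) : pair.Value;
            actionOutputs[pair.Key] = stepsContext.TryGetValue(key, out var value) ? value : string.Empty;
        }

        Console.WriteLine(actionOutputs["number"]); // 43
    }
}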

View File

@@ -52,7 +52,12 @@ namespace GitHub.Runner.Worker.Handlers
ExecutionContext.Output($"Dockerfile for action: '{dockerFile}'."); ExecutionContext.Output($"Dockerfile for action: '{dockerFile}'.");
var imageName = $"{dockerManger.DockerInstanceLabel}:{ExecutionContext.Id.ToString("N")}"; var imageName = $"{dockerManger.DockerInstanceLabel}:{ExecutionContext.Id.ToString("N")}";
var buildExitCode = await dockerManger.DockerBuild(ExecutionContext, ExecutionContext.GetGitHubContext("workspace"), Directory.GetParent(dockerFile).FullName, imageName); var buildExitCode = await dockerManger.DockerBuild(
ExecutionContext,
ExecutionContext.GetGitHubContext("workspace"),
dockerFile,
Directory.GetParent(dockerFile).FullName,
imageName);
if (buildExitCode != 0) if (buildExitCode != 0)
{ {
throw new InvalidOperationException($"Docker build failed with exit code {buildExitCode}"); throw new InvalidOperationException($"Docker build failed with exit code {buildExitCode}");

View File

@@ -66,6 +66,19 @@ namespace GitHub.Runner.Worker.Handlers
handler = HostContext.CreateService<IRunnerPluginHandler>(); handler = HostContext.CreateService<IRunnerPluginHandler>();
(handler as IRunnerPluginHandler).Data = data as PluginActionExecutionData; (handler as IRunnerPluginHandler).Data = data as PluginActionExecutionData;
} }
else if (data.ExecutionType == ActionExecutionType.Composite)
{
if (executionContext.FinalizeContext == null)
{
handler = HostContext.CreateService<ICompositeActionHandler>();
(handler as ICompositeActionHandler).Data = data as CompositeActionExecutionData;
}
else
{
handler = HostContext.CreateService<ICompositeActionOutputHandler>();
(handler as ICompositeActionOutputHandler).Data = data as CompositeActionExecutionData;
}
}
else else
{ {
// This should never happen. // This should never happen.

View File

@@ -352,15 +352,24 @@ namespace GitHub.Runner.Worker.Handlers
if (File.Exists(gitConfigPath)) if (File.Exists(gitConfigPath))
{ {
// Check if the config contains the workflow repository url // Check if the config contains the workflow repository url
var qualifiedRepository = _executionContext.GetGitHubContext("repository"); var serverUrl = _executionContext.GetGitHubContext("server_url");
var configMatch = $"url = https://github.com/{qualifiedRepository}"; serverUrl = !string.IsNullOrEmpty(serverUrl) ? serverUrl : "https://github.com";
var host = new Uri(serverUrl, UriKind.Absolute).Host;
var nameWithOwner = _executionContext.GetGitHubContext("repository");
var patterns = new[] {
$"url = {serverUrl}/{nameWithOwner}",
$"url = git@{host}:{nameWithOwner}.git",
};
var content = File.ReadAllText(gitConfigPath); var content = File.ReadAllText(gitConfigPath);
foreach (var line in content.Split("\n").Select(x => x.Trim())) foreach (var line in content.Split("\n").Select(x => x.Trim()))
{ {
if (String.Equals(line, configMatch, StringComparison.OrdinalIgnoreCase)) foreach (var pattern in patterns)
{ {
repositoryPath = directoryPath; if (String.Equals(line, pattern, StringComparison.OrdinalIgnoreCase))
break; {
repositoryPath = directoryPath;
break;
}
} }
} }
} }
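
The hunk above now recognizes both HTTPS and SSH remotes when deciding whether a directory holds the workflow repository. A small sketch of the check with assumed sample values (not the runner's exact code):

using System;
using System.Linq;

static class GitConfigMatchSketch
{
    static void Main()
    {
        // github.server_url, falling back to https://github.com when empty.
        var serverUrl = "https://github.com";
        var host = new Uri(serverUrl, UriKind.Absolute).Host; // github.com
        var nameWithOwner = "my-org/workflow-repo";

        var patterns = new[]
        {
            $"url = {serverUrl}/{nameWithOwner}",
            $"url = git@{host}:{nameWithOwner}.git",
        };

        var gitConfig = "[remote \"origin\"]\n\turl = git@github.com:my-org/workflow-repo.git";
        var isWorkflowRepo = gitConfig
            .Split('\n')
            .Select(line => line.Trim())
            .Any(line => patterns.Any(p => string.Equals(line, p, StringComparison.OrdinalIgnoreCase)));

        Console.WriteLine(isWorkflowRepo); // True
    }
}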

View File

@@ -149,14 +149,14 @@ namespace GitHub.Runner.Worker.Handlers
throw new NotSupportedException(msg); throw new NotSupportedException(msg);
} }
nodeExternal = "node12_alpine"; nodeExternal = "node12_alpine";
executionContext.Output($"Container distribution is alpine. Running JavaScript Action with external tool: {nodeExternal}"); executionContext.Debug($"Container distribution is alpine. Running JavaScript Action with external tool: {nodeExternal}");
return nodeExternal; return nodeExternal;
} }
} }
} }
// Optimistically use the default // Optimistically use the default
nodeExternal = "node12"; nodeExternal = "node12";
executionContext.Output($"Running JavaScript Action with default external tool: {nodeExternal}"); executionContext.Debug($"Running JavaScript Action with default external tool: {nodeExternal}");
return nodeExternal; return nodeExternal;
} }

View File

@@ -21,7 +21,7 @@ namespace GitHub.Runner.Worker
} }
set set
{ {
this["status"] = new StringContextData(value.ToString()); this["status"] = new StringContextData(value.ToString().ToLowerInvariant());
} }
} }

View File

@@ -64,6 +64,20 @@ namespace GitHub.Runner.Worker
context.Debug($"Starting: Set up job"); context.Debug($"Starting: Set up job");
context.Output($"Current runner version: '{BuildConstants.RunnerPackage.Version}'"); context.Output($"Current runner version: '{BuildConstants.RunnerPackage.Version}'");
var setting = HostContext.GetService<IConfigurationStore>().GetSettings();
var credFile = HostContext.GetConfigFile(WellKnownConfigFile.Credentials);
if (File.Exists(credFile))
{
var credData = IOUtil.LoadObject<CredentialData>(credFile);
if (credData != null &&
credData.Data.TryGetValue("clientId", out var clientId))
{
// Print the runner name and machine name for self-hosted runners
context.Output($"Runner name: '{setting.AgentName}'");
context.Output($"Machine name: '{Environment.MachineName}'");
}
}
var setupInfoFile = HostContext.GetConfigFile(WellKnownConfigFile.SetupInfo); var setupInfoFile = HostContext.GetConfigFile(WellKnownConfigFile.SetupInfo);
if (File.Exists(setupInfoFile)) if (File.Exists(setupInfoFile))
{ {
@@ -131,12 +145,13 @@ namespace GitHub.Runner.Worker
// Temporary hack for GHES alpha // Temporary hack for GHES alpha
var configurationStore = HostContext.GetService<IConfigurationStore>(); var configurationStore = HostContext.GetService<IConfigurationStore>();
var runnerSettings = configurationStore.GetSettings(); var runnerSettings = configurationStore.GetSettings();
if (!runnerSettings.IsHostedServer && !string.IsNullOrEmpty(runnerSettings.GitHubUrl)) if (string.IsNullOrEmpty(context.GetGitHubContext("server_url")) && !runnerSettings.IsHostedServer && !string.IsNullOrEmpty(runnerSettings.GitHubUrl))
{ {
var url = new Uri(runnerSettings.GitHubUrl); var url = new Uri(runnerSettings.GitHubUrl);
var portInfo = url.IsDefaultPort ? string.Empty : $":{url.Port.ToString(CultureInfo.InvariantCulture)}"; var portInfo = url.IsDefaultPort ? string.Empty : $":{url.Port.ToString(CultureInfo.InvariantCulture)}";
context.SetGitHubContext("url", $"{url.Scheme}://{url.Host}{portInfo}"); context.SetGitHubContext("server_url", $"{url.Scheme}://{url.Host}{portInfo}");
context.SetGitHubContext("api_url", $"{url.Scheme}://{url.Host}{portInfo}/api/v3"); context.SetGitHubContext("api_url", $"{url.Scheme}://{url.Host}{portInfo}/api/v3");
context.SetGitHubContext("graphql_url", $"{url.Scheme}://{url.Host}{portInfo}/api/graphql");
} }
// Evaluate the job-level environment variables // Evaluate the job-level environment variables
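
For GHES, server_url, api_url, and graphql_url are all derived from the configured GitHub URL. A sketch of that derivation with an assumed GHES address:

using System;
using System.Globalization;

static class GhesUrlSketch
{
    static void Main()
    {
        // Assumed GHES configuration URL (runnerSettings.GitHubUrl).
        var url = new Uri("https://ghes.example.com:8443/my-org/my-repo");
        var portInfo = url.IsDefaultPort ? string.Empty : $":{url.Port.ToString(CultureInfo.InvariantCulture)}";
        var serverUrl = $"{url.Scheme}://{url.Host}{portInfo}";

        Console.WriteLine(serverUrl);                  // server_url:  https://ghes.example.com:8443
        Console.WriteLine($"{serverUrl}/api/v3");      // api_url:     https://ghes.example.com:8443/api/v3
        Console.WriteLine($"{serverUrl}/api/graphql"); // graphql_url: https://ghes.example.com:8443/api/graphql
    }
}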

View File

@@ -5,21 +5,13 @@ using GitHub.Services.Common;
using GitHub.Services.WebApi; using GitHub.Services.WebApi;
using System; using System;
using System.Collections.Generic; using System.Collections.Generic;
using System.Globalization;
using System.IO; using System.IO;
using System.Linq; using System.Linq;
using System.Threading; using System.Threading;
using System.Threading.Tasks; using System.Threading.Tasks;
using System.Net.Http; using System.Net.Http;
using System.Text;
using System.IO.Compression;
using System.Diagnostics;
using Newtonsoft.Json.Linq;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.Runner.Common; using GitHub.Runner.Common;
using GitHub.Runner.Sdk; using GitHub.Runner.Sdk;
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.ObjectTemplating;
namespace GitHub.Runner.Worker namespace GitHub.Runner.Worker
{ {
@@ -122,13 +114,6 @@ namespace GitHub.Runner.Worker
_tempDirectoryManager = HostContext.GetService<ITempDirectoryManager>(); _tempDirectoryManager = HostContext.GetService<ITempDirectoryManager>();
_tempDirectoryManager.InitializeTempDirectory(jobContext); _tempDirectoryManager.InitializeTempDirectory(jobContext);
// // Expand container properties
// jobContext.Container?.ExpandProperties(jobContext.Variables);
// foreach (var sidecar in jobContext.SidecarContainers)
// {
// sidecar.ExpandProperties(jobContext.Variables);
// }
// Get the job extension. // Get the job extension.
Trace.Info("Getting job extension."); Trace.Info("Getting job extension.");
IJobExtension jobExtension = HostContext.CreateService<IJobExtension>(); IJobExtension jobExtension = HostContext.CreateService<IJobExtension>();
@@ -167,7 +152,7 @@ namespace GitHub.Runner.Worker
{ {
foreach (var step in jobSteps) foreach (var step in jobSteps)
{ {
jobContext.JobSteps.Enqueue(step); jobContext.JobSteps.Add(step);
} }
await stepsRunner.RunAsync(jobContext); await stepsRunner.RunAsync(jobContext);
@@ -254,6 +239,12 @@ namespace GitHub.Runner.Worker
Trace.Error(ex); Trace.Error(ex);
return TaskResult.Failed; return TaskResult.Failed;
} }
catch (TaskOrchestrationPlanTerminatedException ex)
{
Trace.Error($"TaskOrchestrationPlanTerminatedException received, while attempting to raise JobCompletedEvent for job {message.JobId}.");
Trace.Error(ex);
return TaskResult.Failed;
}
catch (Exception ex) catch (Exception ex)
{ {
Trace.Error($"Catch exception while attempting to raise JobCompletedEvent for job {message.JobId}, job request {message.RequestId}."); Trace.Error($"Catch exception while attempting to raise JobCompletedEvent for job {message.JobId}, job request {message.RequestId}.");

View File

@@ -59,19 +59,19 @@ namespace GitHub.Runner.Worker
public void SetConclusion( public void SetConclusion(
string scopeName, string scopeName,
string stepName, string stepName,
string conclusion) ActionResult conclusion)
{ {
var step = GetStep(scopeName, stepName); var step = GetStep(scopeName, stepName);
step["conclusion"] = new StringContextData(conclusion); step["conclusion"] = new StringContextData(conclusion.ToString().ToLowerInvariant());
} }
public void SetOutcome( public void SetOutcome(
string scopeName, string scopeName,
string stepName, string stepName,
string outcome) ActionResult outcome)
{ {
var step = GetStep(scopeName, stepName); var step = GetStep(scopeName, stepName);
step["outcome"] = new StringContextData(outcome); step["outcome"] = new StringContextData(outcome.ToString().ToLowerInvariant());
} }
private DictionaryContextData GetStep(string scopeName, string stepName) private DictionaryContextData GetStep(string scopeName, string stepName)
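
Storing conclusion and outcome in lowercase keeps them consistent with the literals used in workflow expressions (for example steps.<id>.conclusion == 'failure'). A tiny sketch of the conversion, using a stand-in for the ActionResult enum:

using System;

static class ActionResultCasingSketch
{
    // Stand-in for the runner's ActionResult enum.
    enum ActionResult { Success, Failure, Cancelled, Skipped }

    static void Main()
    {
        var conclusion = ActionResult.Failure;
        // Matches the lowercase literals used in workflow expressions.
        Console.WriteLine(conclusion.ToString().ToLowerInvariant()); // failure
    }
}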

View File

@@ -59,14 +59,14 @@ namespace GitHub.Runner.Worker
checkPostJobActions = true; checkPostJobActions = true;
while (jobContext.PostJobSteps.TryPop(out var postStep)) while (jobContext.PostJobSteps.TryPop(out var postStep))
{ {
jobContext.JobSteps.Enqueue(postStep); jobContext.JobSteps.Add(postStep);
} }
continue; continue;
} }
var step = jobContext.JobSteps.Dequeue(); var step = jobContext.JobSteps[0];
var nextStep = jobContext.JobSteps.Count > 0 ? jobContext.JobSteps.Peek() : null; jobContext.JobSteps.RemoveAt(0);
Trace.Info($"Processing step: DisplayName='{step.DisplayName}'"); Trace.Info($"Processing step: DisplayName='{step.DisplayName}'");
ArgUtil.NotNull(step.ExecutionContext, nameof(step.ExecutionContext)); ArgUtil.NotNull(step.ExecutionContext, nameof(step.ExecutionContext));
@@ -82,27 +82,46 @@ namespace GitHub.Runner.Worker
step.ExecutionContext.ExpressionFunctions.Add(new FunctionInfo<SuccessFunction>(PipelineTemplateConstants.Success, 0, 0)); step.ExecutionContext.ExpressionFunctions.Add(new FunctionInfo<SuccessFunction>(PipelineTemplateConstants.Success, 0, 0));
step.ExecutionContext.ExpressionFunctions.Add(new FunctionInfo<HashFilesFunction>(PipelineTemplateConstants.HashFiles, 1, byte.MaxValue)); step.ExecutionContext.ExpressionFunctions.Add(new FunctionInfo<HashFilesFunction>(PipelineTemplateConstants.HashFiles, 1, byte.MaxValue));
// Initialize scope step.ExecutionContext.ExpressionValues["steps"] = step.ExecutionContext.StepsContext.GetScope(step.ExecutionContext.ScopeName);
if (InitializeScope(step, scopeInputs))
{ // Populate env context for each step
// Populate env context for each step Trace.Info("Initialize Env context for step");
Trace.Info("Initialize Env context for step");
#if OS_WINDOWS #if OS_WINDOWS
var envContext = new DictionaryContextData(); var envContext = new DictionaryContextData();
#else #else
var envContext = new CaseSensitiveDictionaryContextData(); var envContext = new CaseSensitiveDictionaryContextData();
#endif #endif
step.ExecutionContext.ExpressionValues["env"] = envContext;
foreach (var pair in step.ExecutionContext.EnvironmentVariables) // Global env
foreach (var pair in step.ExecutionContext.EnvironmentVariables)
{
envContext[pair.Key] = new StringContextData(pair.Value ?? string.Empty);
}
// Env from the enclosing step (e.g. a composite parent) overrides the global env
if (step.ExecutionContext.ExpressionValues.TryGetValue("env", out var envContextData))
{
#if OS_WINDOWS
var dict = envContextData as DictionaryContextData;
#else
var dict = envContextData as CaseSensitiveDictionaryContextData;
#endif
foreach (var pair in dict)
{ {
envContext[pair.Key] = new StringContextData(pair.Value ?? string.Empty); envContext[pair.Key] = pair.Value;
} }
}
if (step is IActionRunner actionStep) step.ExecutionContext.ExpressionValues["env"] = envContext;
bool evaluateStepEnvFailed = false;
if (step is IActionRunner actionStep)
{
// Set GITHUB_ACTION
step.ExecutionContext.SetGitHubContext("action", actionStep.Action.Name);
try
{ {
// Set GITHUB_ACTION
step.ExecutionContext.SetGitHubContext("action", actionStep.Action.Name);
// Evaluate and merge action's env block to env context // Evaluate and merge action's env block to env context
var templateEvaluator = step.ExecutionContext.ToPipelineTemplateEvaluator(); var templateEvaluator = step.ExecutionContext.ToPipelineTemplateEvaluator();
var actionEnvironment = templateEvaluator.EvaluateStepEnvironment(actionStep.Action.Environment, step.ExecutionContext.ExpressionValues, step.ExecutionContext.ExpressionFunctions, VarUtil.EnvironmentVariableKeyComparer); var actionEnvironment = templateEvaluator.EvaluateStepEnvironment(actionStep.Action.Environment, step.ExecutionContext.ExpressionValues, step.ExecutionContext.ExpressionFunctions, VarUtil.EnvironmentVariableKeyComparer);
@@ -111,7 +130,18 @@ namespace GitHub.Runner.Worker
envContext[env.Key] = new StringContextData(env.Value ?? string.Empty); envContext[env.Key] = new StringContextData(env.Value ?? string.Empty);
} }
} }
catch (Exception ex)
{
// fail the step since there is an evaluate error.
Trace.Info("Caught exception from expression for step.env");
evaluateStepEnvFailed = true;
step.ExecutionContext.Error(ex);
CompleteStep(step, TaskResult.Failed);
}
}
if (!evaluateStepEnvFailed)
{
try try
{ {
// Register job cancellation call back only if job cancellation token not been fire before each step run // Register job cancellation call back only if job cancellation token not been fire before each step run
@@ -195,19 +225,19 @@ namespace GitHub.Runner.Worker
{ {
// Condition == false // Condition == false
Trace.Info("Skipping step due to condition evaluation."); Trace.Info("Skipping step due to condition evaluation.");
CompleteStep(step, nextStep, TaskResult.Skipped, resultCode: conditionTraceWriter.Trace); CompleteStep(step, TaskResult.Skipped, resultCode: conditionTraceWriter.Trace);
} }
else if (conditionEvaluateError != null) else if (conditionEvaluateError != null)
{ {
// fail the step since there is an evaluate error. // fail the step since there is an evaluate error.
step.ExecutionContext.Error(conditionEvaluateError); step.ExecutionContext.Error(conditionEvaluateError);
CompleteStep(step, nextStep, TaskResult.Failed); CompleteStep(step, TaskResult.Failed);
} }
else else
{ {
// Run the step. // Run the step.
await RunStepAsync(step, jobContext.CancellationToken); await RunStepAsync(step, jobContext.CancellationToken);
CompleteStep(step, nextStep); CompleteStep(step);
} }
} }
finally finally
@@ -369,117 +399,9 @@ namespace GitHub.Runner.Worker
step.ExecutionContext.Debug($"Finishing: {step.DisplayName}"); step.ExecutionContext.Debug($"Finishing: {step.DisplayName}");
} }
private bool InitializeScope(IStep step, Dictionary<string, PipelineContextData> scopeInputs) private void CompleteStep(IStep step, TaskResult? result = null, string resultCode = null)
{ {
var executionContext = step.ExecutionContext; var executionContext = step.ExecutionContext;
var stepsContext = executionContext.StepsContext;
if (!string.IsNullOrEmpty(executionContext.ScopeName))
{
// Gather uninitialized current and ancestor scopes
var scope = executionContext.Scopes[executionContext.ScopeName];
var scopesToInitialize = default(Stack<ContextScope>);
while (scope != null && !scopeInputs.ContainsKey(scope.Name))
{
if (scopesToInitialize == null)
{
scopesToInitialize = new Stack<ContextScope>();
}
scopesToInitialize.Push(scope);
scope = string.IsNullOrEmpty(scope.ParentName) ? null : executionContext.Scopes[scope.ParentName];
}
// Initialize current and ancestor scopes
while (scopesToInitialize?.Count > 0)
{
scope = scopesToInitialize.Pop();
executionContext.Debug($"Initializing scope '{scope.Name}'");
executionContext.ExpressionValues["steps"] = stepsContext.GetScope(scope.ParentName);
executionContext.ExpressionValues["inputs"] = !String.IsNullOrEmpty(scope.ParentName) ? scopeInputs[scope.ParentName] : null;
var templateEvaluator = executionContext.ToPipelineTemplateEvaluator();
var inputs = default(DictionaryContextData);
try
{
inputs = templateEvaluator.EvaluateStepScopeInputs(scope.Inputs, executionContext.ExpressionValues, executionContext.ExpressionFunctions);
}
catch (Exception ex)
{
Trace.Info($"Caught exception from initialize scope '{scope.Name}'");
Trace.Error(ex);
executionContext.Error(ex);
executionContext.Complete(TaskResult.Failed);
return false;
}
scopeInputs[scope.Name] = inputs;
}
}
// Setup expression values
var scopeName = executionContext.ScopeName;
executionContext.ExpressionValues["steps"] = stepsContext.GetScope(scopeName);
executionContext.ExpressionValues["inputs"] = string.IsNullOrEmpty(scopeName) ? null : scopeInputs[scopeName];
return true;
}
private void CompleteStep(IStep step, IStep nextStep, TaskResult? result = null, string resultCode = null)
{
var executionContext = step.ExecutionContext;
if (!string.IsNullOrEmpty(executionContext.ScopeName))
{
// Gather current and ancestor scopes to finalize
var scope = executionContext.Scopes[executionContext.ScopeName];
var scopesToFinalize = default(Queue<ContextScope>);
var nextStepScopeName = nextStep?.ExecutionContext.ScopeName;
while (scope != null &&
!string.Equals(nextStepScopeName, scope.Name, StringComparison.OrdinalIgnoreCase) &&
!(nextStepScopeName ?? string.Empty).StartsWith($"{scope.Name}.", StringComparison.OrdinalIgnoreCase))
{
if (scopesToFinalize == null)
{
scopesToFinalize = new Queue<ContextScope>();
}
scopesToFinalize.Enqueue(scope);
scope = string.IsNullOrEmpty(scope.ParentName) ? null : executionContext.Scopes[scope.ParentName];
}
// Finalize current and ancestor scopes
var stepsContext = step.ExecutionContext.StepsContext;
while (scopesToFinalize?.Count > 0)
{
scope = scopesToFinalize.Dequeue();
executionContext.Debug($"Finalizing scope '{scope.Name}'");
executionContext.ExpressionValues["steps"] = stepsContext.GetScope(scope.Name);
executionContext.ExpressionValues["inputs"] = null;
var templateEvaluator = executionContext.ToPipelineTemplateEvaluator();
var outputs = default(DictionaryContextData);
try
{
outputs = templateEvaluator.EvaluateStepScopeOutputs(scope.Outputs, executionContext.ExpressionValues, executionContext.ExpressionFunctions);
}
catch (Exception ex)
{
Trace.Info($"Caught exception from finalize scope '{scope.Name}'");
Trace.Error(ex);
executionContext.Error(ex);
executionContext.Complete(TaskResult.Failed);
return;
}
if (outputs?.Count > 0)
{
var parentScopeName = scope.ParentName;
var contextName = scope.ContextName;
foreach (var pair in outputs)
{
var outputName = pair.Key;
var outputValue = pair.Value.ToString();
stepsContext.SetOutput(parentScopeName, contextName, outputName, outputValue, out var reference);
executionContext.Debug($"{reference}='{outputValue}'");
}
}
}
}
executionContext.Complete(result, resultCode: resultCode); executionContext.Complete(result, resultCode: resultCode);
} }
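
The env context for each step is now built in layers: job-level variables first, then any env inherited from the enclosing step (for example a composite parent), then the step's own env block evaluated last. A simplified sketch of that precedence with a plain dictionary (OrdinalIgnoreCase mirrors the Windows path; other platforms use a case-sensitive dictionary):

using System;
using System.Collections.Generic;

static class StepEnvLayeringSketch
{
    static void Main()
    {
        var env = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);

        // 1. Job-level environment variables.
        env["NODE_ENV"] = "production";
        env["FOO"] = "job";

        // 2. Env carried in from the enclosing step overrides the job env.
        env["FOO"] = "outer-step";

        // 3. The step's own env block overrides everything else.
        env["FOO"] = "step";

        Console.WriteLine(env["FOO"]);      // step
        Console.WriteLine(env["NODE_ENV"]); // production
    }
}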

View File

@@ -7,7 +7,8 @@
"name": "string", "name": "string",
"description": "string", "description": "string",
"inputs": "inputs", "inputs": "inputs",
"runs": "runs" "runs": "runs",
"outputs": "outputs"
}, },
"loose-key-type": "non-empty-string", "loose-key-type": "non-empty-string",
"loose-value-type": "any" "loose-value-type": "any"
@@ -28,11 +29,26 @@
"loose-value-type": "any" "loose-value-type": "any"
} }
}, },
"outputs": {
"mapping": {
"loose-key-type": "non-empty-string",
"loose-value-type": "outputs-attributes"
}
},
"outputs-attributes": {
"mapping": {
"properties": {
"description": "string",
"value": "output-value"
}
}
},
"runs": { "runs": {
"one-of": [ "one-of": [
"container-runs", "container-runs",
"node12-runs", "node12-runs",
"plugin-runs" "plugin-runs",
"composite-runs"
] ]
}, },
"container-runs": { "container-runs": {
@@ -83,12 +99,49 @@
} }
} }
}, },
"composite-runs": {
"mapping": {
"properties": {
"using": "non-empty-string",
"steps": "composite-steps"
}
}
},
"composite-steps": {
"context": [
"github",
"strategy",
"matrix",
"steps",
"inputs",
"job",
"runner",
"env",
"hashFiles(1,255)"
],
"sequence": {
"item-type": "any"
}
},
"container-runs-context": { "container-runs-context": {
"context": [ "context": [
"inputs" "inputs"
], ],
"string": {} "string": {}
}, },
"output-value": {
"context": [
"github",
"strategy",
"matrix",
"steps",
"inputs",
"job",
"runner",
"env"
],
"string": {}
},
"input-default-context": { "input-default-context": {
"context": [ "context": [
"github", "github",

View File

@@ -317,5 +317,37 @@ namespace GitHub.DistributedTask.WebApi
userState: userState, userState: userState,
cancellationToken: cancellationToken); cancellationToken: cancellationToken);
} }
/// <summary>
/// [Preview API] Resolves information required to download actions (URL, token) defined in an orchestration.
/// </summary>
/// <param name="scopeIdentifier">The project GUID to scope the request</param>
/// <param name="hubName">The name of the server hub: "build" for the Build server or "rm" for the Release Management server</param>
/// <param name="planId"></param>
/// <param name="actionReferenceList"></param>
/// <param name="userState"></param>
/// <param name="cancellationToken">The cancellation token to cancel operation.</param>
public virtual Task<ActionDownloadInfoCollection> ResolveActionDownloadInfoAsync(
Guid scopeIdentifier,
string hubName,
Guid planId,
ActionReferenceList actionReferenceList,
object userState = null,
CancellationToken cancellationToken = default)
{
HttpMethod httpMethod = new HttpMethod("POST");
Guid locationId = new Guid("27d7f831-88c1-4719-8ca1-6a061dad90eb");
object routeValues = new { scopeIdentifier = scopeIdentifier, hubName = hubName, planId = planId };
HttpContent content = new ObjectContent<ActionReferenceList>(actionReferenceList, new VssJsonMediaTypeFormatter(true));
return SendAsync<ActionDownloadInfoCollection>(
httpMethod,
locationId,
routeValues: routeValues,
version: new ApiResourceVersion(6.0, 1),
userState: userState,
cancellationToken: cancellationToken,
content: content);
}
} }
} }
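
A rough sketch of the request body this endpoint expects: a list of action references, each a name-with-owner plus a ref, answered by a collection of download info (see ActionDownloadInfo later in this diff). The exact wire casing here is an assumption for illustration.

using System;
using System.Text.Json;

static class ActionDownloadInfoShapeSketch
{
    static void Main()
    {
        // Illustrative request payload for ResolveActionDownloadInfoAsync.
        var request = new
        {
            actions = new[]
            {
                new { nameWithOwner = "actions/checkout", @ref = "v2" },
            },
        };

        // {"actions":[{"nameWithOwner":"actions/checkout","ref":"v2"}]}
        Console.WriteLine(JsonSerializer.Serialize(request));
    }
}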

View File

@@ -60,6 +60,20 @@ namespace GitHub.DistributedTask.Logging
return SecurityElement.Escape(value); return SecurityElement.Escape(value);
} }
public static String TrimDoubleQuotes(String value)
{
var trimmed = string.Empty;
if (!string.IsNullOrEmpty(value) &&
value.Length > 8 &&
value.StartsWith('"') &&
value.EndsWith('"'))
{
trimmed = value.Substring(1, value.Length - 2);
}
return trimmed;
}
private static string Base64StringEscapeShift(String value, int shift) private static string Base64StringEscapeShift(String value, int shift)
{ {
var bytes = Encoding.UTF8.GetBytes(value); var bytes = Encoding.UTF8.GetBytes(value);
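
The new helper only unwraps values that are longer than eight characters and fully enclosed in double quotes; anything else yields an empty string, so no extra secret gets registered for short or unquoted values. A quick illustration (the helper is re-implemented locally to keep the example self-contained):

using System;

static class TrimDoubleQuotesSketch
{
    // Same logic as the helper added above.
    static string TrimDoubleQuotes(string value)
    {
        var trimmed = string.Empty;
        if (!string.IsNullOrEmpty(value) &&
            value.Length > 8 &&
            value.StartsWith('"') &&
            value.EndsWith('"'))
        {
            trimmed = value.Substring(1, value.Length - 2);
        }
        return trimmed;
    }

    static void Main()
    {
        Console.WriteLine(TrimDoubleQuotes("\"Password123!!\""));             // Password123!!
        Console.WriteLine(TrimDoubleQuotes("\"short\"") == string.Empty);     // True (too short to unwrap)
        Console.WriteLine(TrimDoubleQuotes("Password123!!") == string.Empty); // True (not quoted)
    }
}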

View File

@@ -42,7 +42,12 @@ namespace GitHub.DistributedTask.Pipelines.ContextData
var floored = Math.Floor(m_value); var floored = Math.Floor(m_value);
if (m_value == floored && m_value <= (Double)Int32.MaxValue && m_value >= (Double)Int32.MinValue) if (m_value == floored && m_value <= (Double)Int32.MaxValue && m_value >= (Double)Int32.MinValue)
{ {
Int32 flooredInt = (Int32)floored; var flooredInt = (Int32)floored;
return (JToken)flooredInt;
}
else if (m_value == floored && m_value <= (Double)Int64.MaxValue && m_value >= (Double)Int64.MinValue)
{
var flooredInt = (Int64)floored;
return (JToken)flooredInt; return (JToken)flooredInt;
} }
else else
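
The added branch keeps whole numbers beyond the Int32 range (large IDs, timestamps) as integers instead of falling through to the floating-point representation. A small illustration of the boundary:

using System;

static class WholeNumberJsonSketch
{
    static void Main()
    {
        double value = 5_000_000_000d; // whole number, larger than Int32.MaxValue
        var floored = Math.Floor(value);

        if (value == floored && value <= (double)int.MaxValue && value >= (double)int.MinValue)
        {
            Console.WriteLine((int)floored);
        }
        else if (value == floored && value <= (double)long.MaxValue && value >= (double)long.MinValue)
        {
            // This is the branch added above: emit a 64-bit integer.
            Console.WriteLine((long)floored); // 5000000000
        }
        else
        {
            Console.WriteLine(value); // falls back to the double representation
        }
    }
}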

View File

@@ -65,6 +65,7 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
public const String StepEnv = "step-env"; public const String StepEnv = "step-env";
public const String StepIfResult = "step-if-result"; public const String StepIfResult = "step-if-result";
public const String Steps = "steps"; public const String Steps = "steps";
public const String StepsInTemplate = "steps-in-template";
public const String StepsScopeInputs = "steps-scope-inputs"; public const String StepsScopeInputs = "steps-scope-inputs";
public const String StepsScopeOutputs = "steps-scope-outputs"; public const String StepsScopeOutputs = "steps-scope-outputs";
public const String StepsTemplateRoot = "steps-template-root"; public const String StepsTemplateRoot = "steps-template-root";

View File

@@ -29,7 +29,6 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
var evaluationResult = EvaluationResult.CreateIntermediateResult(null, ifResult); var evaluationResult = EvaluationResult.CreateIntermediateResult(null, ifResult);
return evaluationResult.IsTruthy; return evaluationResult.IsTruthy;
} }
internal static Boolean? ConvertToStepContinueOnError( internal static Boolean? ConvertToStepContinueOnError(
TemplateContext context, TemplateContext context,
TemplateToken token, TemplateToken token,
@@ -264,5 +263,351 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
return result; return result;
} }
// Note: this was originally List<Step>, but it was changed to List<ActionStep> so the "Inputs" attribute can be used
internal static List<ActionStep> ConvertToSteps(
TemplateContext context,
TemplateToken steps)
{
var stepsSequence = steps.AssertSequence($"job {PipelineTemplateConstants.Steps}");
var result = new List<ActionStep>();
foreach (var stepsItem in stepsSequence)
{
var step = ConvertToStep(context, stepsItem);
if (step != null) // step == null means an error occurred during step conversion; the error is recorded in context.Errors
{
if (step.Enabled)
{
result.Add(step);
}
}
}
return result;
}
private static ActionStep ConvertToStep(
TemplateContext context,
TemplateToken stepsItem)
{
var step = stepsItem.AssertMapping($"{PipelineTemplateConstants.Steps} item");
var continueOnError = default(ScalarToken);
var env = default(TemplateToken);
var id = default(StringToken);
var ifCondition = default(String);
var ifToken = default(ScalarToken);
var name = default(ScalarToken);
var run = default(ScalarToken);
var scope = default(StringToken);
var timeoutMinutes = default(ScalarToken);
var uses = default(StringToken);
var with = default(TemplateToken);
var workingDir = default(ScalarToken);
var path = default(ScalarToken);
var clean = default(ScalarToken);
var fetchDepth = default(ScalarToken);
var lfs = default(ScalarToken);
var submodules = default(ScalarToken);
var shell = default(ScalarToken);
foreach (var stepProperty in step)
{
var propertyName = stepProperty.Key.AssertString($"{PipelineTemplateConstants.Steps} item key");
switch (propertyName.Value)
{
case PipelineTemplateConstants.Clean:
clean = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Clean}");
break;
case PipelineTemplateConstants.ContinueOnError:
ConvertToStepContinueOnError(context, stepProperty.Value, allowExpressions: true); // Validate early if possible
continueOnError = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} {PipelineTemplateConstants.ContinueOnError}");
break;
case PipelineTemplateConstants.Env:
ConvertToStepEnvironment(context, stepProperty.Value, StringComparer.Ordinal, allowExpressions: true); // Validate early if possible
env = stepProperty.Value;
break;
case PipelineTemplateConstants.FetchDepth:
fetchDepth = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.FetchDepth}");
break;
case PipelineTemplateConstants.Id:
id = stepProperty.Value.AssertString($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Id}");
if (!NameValidation.IsValid(id.Value, true))
{
context.Error(id, $"Step id {id.Value} is invalid. Ids must start with a letter or '_' and contain only alphanumeric characters, '-', or '_'");
}
break;
case PipelineTemplateConstants.If:
ifToken = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.If}");
break;
case PipelineTemplateConstants.Lfs:
lfs = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Lfs}");
break;
case PipelineTemplateConstants.Name:
name = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Name}");
break;
case PipelineTemplateConstants.Path:
path = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Path}");
break;
case PipelineTemplateConstants.Run:
run = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Run}");
break;
case PipelineTemplateConstants.Shell:
shell = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Shell}");
break;
case PipelineTemplateConstants.Scope:
scope = stepProperty.Value.AssertString($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Scope}");
break;
case PipelineTemplateConstants.Submodules:
submodules = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Submodules}");
break;
case PipelineTemplateConstants.TimeoutMinutes:
ConvertToStepTimeout(context, stepProperty.Value, allowExpressions: true); // Validate early if possible
timeoutMinutes = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.TimeoutMinutes}");
break;
case PipelineTemplateConstants.Uses:
uses = stepProperty.Value.AssertString($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Uses}");
break;
case PipelineTemplateConstants.With:
ConvertToStepInputs(context, stepProperty.Value, allowExpressions: true); // Validate early if possible
with = stepProperty.Value;
break;
case PipelineTemplateConstants.WorkingDirectory:
workingDir = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.WorkingDirectory}");
break;
default:
propertyName.AssertUnexpectedValue($"{PipelineTemplateConstants.Steps} item key"); // throws
break;
}
}
// Fixup the if-condition
var isDefaultScope = String.IsNullOrEmpty(scope?.Value);
ifCondition = ConvertToIfCondition(context, ifToken, false, isDefaultScope);
if (run != null)
{
var result = new ActionStep
{
ScopeName = scope?.Value,
ContextName = id?.Value,
ContinueOnError = continueOnError,
DisplayNameToken = name,
Condition = ifCondition,
TimeoutInMinutes = timeoutMinutes,
Environment = env,
Reference = new ScriptReference(),
};
var inputs = new MappingToken(null, null, null);
inputs.Add(new StringToken(null, null, null, PipelineConstants.ScriptStepInputs.Script), run);
if (workingDir != null)
{
inputs.Add(new StringToken(null, null, null, PipelineConstants.ScriptStepInputs.WorkingDirectory), workingDir);
}
if (shell != null)
{
inputs.Add(new StringToken(null, null, null, PipelineConstants.ScriptStepInputs.Shell), shell);
}
result.Inputs = inputs;
return result;
}
else
{
uses.AssertString($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Uses}");
var result = new ActionStep
{
ScopeName = scope?.Value,
ContextName = id?.Value,
ContinueOnError = continueOnError,
DisplayNameToken = name,
Condition = ifCondition,
TimeoutInMinutes = timeoutMinutes,
Inputs = with,
Environment = env,
};
if (uses.Value.StartsWith("docker://", StringComparison.Ordinal))
{
var image = uses.Value.Substring("docker://".Length);
result.Reference = new ContainerRegistryReference { Image = image };
}
else if (uses.Value.StartsWith("./") || uses.Value.StartsWith(".\\"))
{
result.Reference = new RepositoryPathReference
{
RepositoryType = PipelineConstants.SelfAlias,
Path = uses.Value
};
}
else
{
var usesSegments = uses.Value.Split('@');
var pathSegments = usesSegments[0].Split(new[] { '/', '\\' }, StringSplitOptions.RemoveEmptyEntries);
var gitRef = usesSegments.Length == 2 ? usesSegments[1] : String.Empty;
if (usesSegments.Length != 2 ||
pathSegments.Length < 2 ||
String.IsNullOrEmpty(pathSegments[0]) ||
String.IsNullOrEmpty(pathSegments[1]) ||
String.IsNullOrEmpty(gitRef))
{
// todo: loc
context.Error(uses, $"Expected format {{org}}/{{repo}}[/path]@ref. Actual '{uses.Value}'");
}
else
{
var repositoryName = $"{pathSegments[0]}/{pathSegments[1]}";
var directoryPath = pathSegments.Length > 2 ? String.Join("/", pathSegments.Skip(2)) : String.Empty;
result.Reference = new RepositoryPathReference
{
RepositoryType = RepositoryTypes.GitHub,
Name = repositoryName,
Ref = gitRef,
Path = directoryPath,
};
}
}
return result;
}
}
/// <summary>
/// When empty, default to "success()".
/// When a status function is not referenced, format as "success() &amp;&amp; &lt;CONDITION&gt;".
/// </summary>
private static String ConvertToIfCondition(
TemplateContext context,
TemplateToken token,
Boolean isJob,
Boolean isDefaultScope)
{
String condition;
if (token is null)
{
condition = null;
}
else if (token is BasicExpressionToken expressionToken)
{
condition = expressionToken.Expression;
}
else
{
var stringToken = token.AssertString($"{(isJob ? "job" : "step")} {PipelineTemplateConstants.If}");
condition = stringToken.Value;
}
if (String.IsNullOrWhiteSpace(condition))
{
return $"{PipelineTemplateConstants.Success}()";
}
var expressionParser = new ExpressionParser();
var functions = default(IFunctionInfo[]);
var namedValues = default(INamedValueInfo[]);
if (isJob)
{
namedValues = s_jobIfNamedValues;
// TODO: refactor into separate functions
// functions = PhaseCondition.FunctionInfo;
}
else
{
namedValues = isDefaultScope ? s_stepNamedValues : s_stepInTemplateNamedValues;
functions = s_stepConditionFunctions;
}
var node = default(ExpressionNode);
try
{
node = expressionParser.CreateTree(condition, null, namedValues, functions) as ExpressionNode;
}
catch (Exception ex)
{
context.Error(token, ex);
return null;
}
if (node == null)
{
return $"{PipelineTemplateConstants.Success}()";
}
var hasStatusFunction = node.Traverse().Any(x =>
{
if (x is Function function)
{
return String.Equals(function.Name, PipelineTemplateConstants.Always, StringComparison.OrdinalIgnoreCase) ||
String.Equals(function.Name, PipelineTemplateConstants.Cancelled, StringComparison.OrdinalIgnoreCase) ||
String.Equals(function.Name, PipelineTemplateConstants.Failure, StringComparison.OrdinalIgnoreCase) ||
String.Equals(function.Name, PipelineTemplateConstants.Success, StringComparison.OrdinalIgnoreCase);
}
return false;
});
return hasStatusFunction ? condition : $"{PipelineTemplateConstants.Success}() && ({condition})";
}
private static readonly INamedValueInfo[] s_jobIfNamedValues = new INamedValueInfo[]
{
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.GitHub),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Needs),
};
private static readonly INamedValueInfo[] s_stepNamedValues = new INamedValueInfo[]
{
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Strategy),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Matrix),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Steps),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.GitHub),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Job),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Runner),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Env),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Needs),
};
private static readonly INamedValueInfo[] s_stepInTemplateNamedValues = new INamedValueInfo[]
{
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Strategy),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Matrix),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Steps),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Inputs),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.GitHub),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Job),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Runner),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Env),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Needs),
};
private static readonly IFunctionInfo[] s_stepConditionFunctions = new IFunctionInfo[]
{
new FunctionInfo<NoOperation>(PipelineTemplateConstants.Always, 0, 0),
new FunctionInfo<NoOperation>(PipelineTemplateConstants.Cancelled, 0, 0),
new FunctionInfo<NoOperation>(PipelineTemplateConstants.Failure, 0, 0),
new FunctionInfo<NoOperation>(PipelineTemplateConstants.Success, 0, 0),
new FunctionInfo<NoOperation>(PipelineTemplateConstants.HashFiles, 1, Byte.MaxValue),
};
} }
} }
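
The if-condition fixup follows a few simple rules: empty becomes success(), a condition that already references a status function is left alone, and anything else is wrapped as success() && (condition). This stand-alone sketch mirrors those outcomes with a simplified status-function check instead of the real expression parser:

using System;
using System.Linq;

static class IfConditionFixupSketch
{
    static readonly string[] StatusFunctions = { "always", "cancelled", "failure", "success" };

    // Simplified: the real converter walks the parsed expression tree; a substring
    // check is close enough to show the wrapping behavior.
    static string Fixup(string condition)
    {
        if (string.IsNullOrWhiteSpace(condition))
        {
            return "success()";
        }
        var hasStatusFunction = StatusFunctions.Any(f => condition.Contains(f + "(", StringComparison.OrdinalIgnoreCase));
        return hasStatusFunction ? condition : $"success() && ({condition})";
    }

    static void Main()
    {
        Console.WriteLine(Fixup(null));                           // success()
        Console.WriteLine(Fixup("matrix.os == 'ubuntu-latest'")); // success() && (matrix.os == 'ubuntu-latest')
        Console.WriteLine(Fixup("always()"));                     // always()
    }
}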

View File

@@ -159,6 +159,31 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
return result; return result;
} }
public List<ActionStep> LoadCompositeSteps(
TemplateToken token)
{
var result = default(List<ActionStep>);
if (token != null && token.Type != TokenType.Null)
{
var context = CreateContext(null, null, setMissingContext: false);
// TODO: we might want to have a bool to prevent it from filling in missing context with dummy variables
try
{
token = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.StepsInTemplate, token, 0, null, omitHeader: true);
context.Errors.Check();
result = PipelineTemplateConverter.ConvertToSteps(context, token);
}
catch (Exception ex) when (!(ex is TemplateValidationException))
{
context.Errors.Add(ex);
}
context.Errors.Check();
}
return result;
}
public Dictionary<String, String> EvaluateStepEnvironment( public Dictionary<String, String> EvaluateStepEnvironment(
TemplateToken token, TemplateToken token,
DictionaryContextData contextData, DictionaryContextData contextData,
@@ -400,7 +425,8 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
private TemplateContext CreateContext( private TemplateContext CreateContext(
DictionaryContextData contextData, DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions, IList<IFunctionInfo> expressionFunctions,
IEnumerable<KeyValuePair<String, Object>> expressionState = null) IEnumerable<KeyValuePair<String, Object>> expressionState = null,
bool setMissingContext = true)
{ {
var result = new TemplateContext var result = new TemplateContext
{ {
@@ -449,18 +475,21 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
// - Evaluating early when all referenced contexts are available, even though all allowed // - Evaluating early when all referenced contexts are available, even though all allowed
// contexts may not yet be available. For example, evaluating step display name can often // contexts may not yet be available. For example, evaluating step display name can often
// be performed early. // be performed early.
foreach (var name in s_expressionValueNames) if (setMissingContext)
{ {
if (!result.ExpressionValues.ContainsKey(name)) foreach (var name in s_expressionValueNames)
{ {
result.ExpressionValues[name] = null; if (!result.ExpressionValues.ContainsKey(name))
{
result.ExpressionValues[name] = null;
}
} }
} foreach (var name in s_expressionFunctionNames)
foreach (var name in s_expressionFunctionNames)
{
if (!functionNames.Contains(name))
{ {
result.ExpressionFunctions.Add(new FunctionInfo<NoOperation>(name, 0, Int32.MaxValue)); if (!functionNames.Contains(name))
{
result.ExpressionFunctions.Add(new FunctionInfo<NoOperation>(name, 0, Int32.MaxValue));
}
} }
} }
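
The setMissingContext flag controls whether unreferenced context names get placeholder entries: the normal path fills them in so early evaluation doesn't fail, while LoadCompositeSteps leaves them out. A toy sketch of the difference with a plain dictionary standing in for ExpressionValues (the list of context names is illustrative):

using System;
using System.Collections.Generic;

static class MissingContextSketch
{
    // Illustrative subset of the named contexts the evaluator knows about.
    static readonly string[] ExpressionValueNames = { "github", "strategy", "matrix", "steps", "inputs", "job", "runner", "env" };

    static Dictionary<string, object> CreateContext(Dictionary<string, object> contextData, bool setMissingContext)
    {
        var values = new Dictionary<string, object>(contextData, StringComparer.OrdinalIgnoreCase);
        if (setMissingContext)
        {
            // Fill every known context name so expressions referencing them don't fail early.
            foreach (var name in ExpressionValueNames)
            {
                if (!values.ContainsKey(name))
                {
                    values[name] = null;
                }
            }
        }
        return values;
    }

    static void Main()
    {
        var data = new Dictionary<string, object> { ["github"] = new object() };
        Console.WriteLine(CreateContext(data, setMissingContext: true).Count);  // 8
        Console.WriteLine(CreateContext(data, setMissingContext: false).Count); // 1
    }
}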

View File

@@ -94,5 +94,12 @@ namespace GitHub.DistributedTask.Pipelines
public static readonly String Resources = "resources"; public static readonly String Resources = "resources";
public static readonly String All = "all"; public static readonly String All = "all";
} }
public static class ScriptStepInputs
{
public static readonly String Script = "script";
public static readonly String WorkingDirectory = "workingDirectory";
public static readonly String Shell = "shell";
}
} }
} }

View File

@@ -0,0 +1,40 @@
using System;
using System.Runtime.Serialization;
namespace GitHub.DistributedTask.WebApi
{
[DataContract]
public class ActionDownloadInfo
{
[DataMember(EmitDefaultValue = false)]
public ActionDownloadAuthentication Authentication { get; set; }
[DataMember(EmitDefaultValue = false)]
public string NameWithOwner { get; set; }
[DataMember(EmitDefaultValue = false)]
public string ResolvedNameWithOwner { get; set; }
[DataMember(EmitDefaultValue = false)]
public string ResolvedSha { get; set; }
[DataMember(EmitDefaultValue = false)]
public string TarballUrl { get; set; }
[DataMember(EmitDefaultValue = false)]
public string Ref { get; set; }
[DataMember(EmitDefaultValue = false)]
public string ZipballUrl { get; set; }
}
[DataContract]
public class ActionDownloadAuthentication
{
[DataMember(EmitDefaultValue = false)]
public DateTime ExpiresAt { get; set; }
[DataMember(EmitDefaultValue = false)]
public string Token { get; set; }
}
}

View File

@@ -0,0 +1,16 @@
using System.Collections.Generic;
using System.Runtime.Serialization;
namespace GitHub.DistributedTask.WebApi
{
[DataContract]
public class ActionDownloadInfoCollection
{
[DataMember]
public IDictionary<string, ActionDownloadInfo> Actions
{
get;
set;
}
}
}

View File

@@ -0,0 +1,22 @@
using System.Runtime.Serialization;
namespace GitHub.DistributedTask.WebApi
{
[DataContract]
public class ActionReference
{
[DataMember]
public string NameWithOwner
{
get;
set;
}
[DataMember]
public string Ref
{
get;
set;
}
}
}

View File

@@ -0,0 +1,16 @@
using System.Collections.Generic;
using System.Runtime.Serialization;
namespace GitHub.DistributedTask.WebApi
{
[DataContract]
public class ActionReferenceList
{
[DataMember]
public IList<ActionReference> Actions
{
get;
set;
}
}
}

View File

@@ -119,6 +119,15 @@ namespace GitHub.Services.OAuth
} }
} }
public async Task<string> ValidateCredentialAsync(CancellationToken cancellationToken)
{
var tokenHttpClient = new VssOAuthTokenHttpClient(this.SignInUrl);
var tokenResponse = await tokenHttpClient.GetTokenAsync(this.Grant, this.ClientCredential, this.TokenParameters, cancellationToken);
// return the underlying authentication error
return tokenResponse.Error;
}
/// <summary> /// <summary>
/// Issues a token request to the configured secure token service. On success, the access token issued by the /// Issues a token request to the configured secure token service. On success, the access token issued by the
/// token service is returned to the caller /// token service is returned to the caller
@@ -131,7 +140,7 @@ namespace GitHub.Services.OAuth
CancellationToken cancellationToken) CancellationToken cancellationToken)
{ {
if (this.SignInUrl == null || if (this.SignInUrl == null ||
this.Grant == null || this.Grant == null ||
this.ClientCredential == null) this.ClientCredential == null)
{ {
return null; return null;

View File

@@ -39,7 +39,7 @@ namespace GitHub.Services.OAuth
/// <summary> /// <summary>
/// Gets or sets the error description for the response. /// Gets or sets the error description for the response.
/// </summary> /// </summary>
[DataMember(Name = "errordescription", EmitDefaultValue = false)] [DataMember(Name = "error_description", EmitDefaultValue = false)]
public String ErrorDescription public String ErrorDescription
{ {
get; get;

View File

@@ -85,6 +85,8 @@ namespace GitHub.Runner.Common.Tests
_hc.SecretMasker.AddValue("Pass word 123!"); _hc.SecretMasker.AddValue("Pass word 123!");
_hc.SecretMasker.AddValue("Pass<word>123!"); _hc.SecretMasker.AddValue("Pass<word>123!");
_hc.SecretMasker.AddValue("Pass'word'123!"); _hc.SecretMasker.AddValue("Pass'word'123!");
_hc.SecretMasker.AddValue("\"Password123!!\"");
_hc.SecretMasker.AddValue("\"short\"");
// Assert. // Assert.
Assert.Equal("123***123", _hc.SecretMasker.MaskSecrets("123Password123!123")); Assert.Equal("123***123", _hc.SecretMasker.MaskSecrets("123Password123!123"));
@@ -99,6 +101,9 @@ namespace GitHub.Runner.Common.Tests
Assert.Equal("YWJjOlBh***", _hc.SecretMasker.MaskSecrets(Convert.ToBase64String(Encoding.UTF8.GetBytes($"abc:Password123!")))); Assert.Equal("YWJjOlBh***", _hc.SecretMasker.MaskSecrets(Convert.ToBase64String(Encoding.UTF8.GetBytes($"abc:Password123!"))));
Assert.Equal("YWJjZDpQ***", _hc.SecretMasker.MaskSecrets(Convert.ToBase64String(Encoding.UTF8.GetBytes($"abcd:Password123!")))); Assert.Equal("YWJjZDpQ***", _hc.SecretMasker.MaskSecrets(Convert.ToBase64String(Encoding.UTF8.GetBytes($"abcd:Password123!"))));
Assert.Equal("YWJjZGU6***", _hc.SecretMasker.MaskSecrets(Convert.ToBase64String(Encoding.UTF8.GetBytes($"abcde:Password123!")))); Assert.Equal("YWJjZGU6***", _hc.SecretMasker.MaskSecrets(Convert.ToBase64String(Encoding.UTF8.GetBytes($"abcde:Password123!"))));
Assert.Equal("123***123", _hc.SecretMasker.MaskSecrets("123Password123!!123"));
Assert.Equal("123short123", _hc.SecretMasker.MaskSecrets("123short123"));
Assert.Equal("123***123", _hc.SecretMasker.MaskSecrets("123\"short\"123"));
} }
finally finally
{ {

File diff suppressed because it is too large

View File

@@ -16,7 +16,9 @@ namespace GitHub.Runner.Common.Tests
private static readonly List<string> SkippedFiles = new List<string>() private static readonly List<string> SkippedFiles = new List<string>()
{ {
"Runner.Common\\HostContext.cs", "Runner.Common\\HostContext.cs",
"Runner.Common/HostContext.cs" "Runner.Common/HostContext.cs",
"Runner.Common\\HttpClientHandlerFactory.cs",
"Runner.Common/HttpClientHandlerFactory.cs"
}; };
[Fact] [Fact]

View File

@@ -70,5 +70,24 @@ namespace GitHub.Runner.Common.Tests.Util
} }
} }
} }
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public void WhichHandleFullyQualifiedPath()
{
using (TestHostContext hc = new TestHostContext(this))
{
//Arrange
Tracing trace = hc.GetTrace();
// Act.
var gitPath = WhichUtil.Which("git", require: true, trace: trace);
var gitPath2 = WhichUtil.Which(gitPath, require: true, trace: trace);
// Assert.
Assert.Equal(gitPath, gitPath2);
}
}
} }
} }

File diff suppressed because it is too large

View File

@@ -759,6 +759,7 @@ namespace GitHub.Runner.Common.Tests.Worker
_ec.Setup(x => x.Variables).Returns(new Variables(_hc, new Dictionary<string, VariableValue>())); _ec.Setup(x => x.Variables).Returns(new Variables(_hc, new Dictionary<string, VariableValue>()));
_ec.Setup(x => x.ExpressionValues).Returns(new DictionaryContextData()); _ec.Setup(x => x.ExpressionValues).Returns(new DictionaryContextData());
_ec.Setup(x => x.ExpressionFunctions).Returns(new List<IFunctionInfo>()); _ec.Setup(x => x.ExpressionFunctions).Returns(new List<IFunctionInfo>());
_ec.Setup(x => x.FileTable).Returns(new List<String>());
_ec.Setup(x => x.Write(It.IsAny<string>(), It.IsAny<string>())).Callback((string tag, string message) => { _hc.GetTrace().Info($"{tag}{message}"); }); _ec.Setup(x => x.Write(It.IsAny<string>(), It.IsAny<string>())).Callback((string tag, string message) => { _hc.GetTrace().Info($"{tag}{message}"); });
_ec.Setup(x => x.AddIssue(It.IsAny<Issue>(), It.IsAny<string>())).Callback((Issue issue, string message) => { _hc.GetTrace().Info($"[{issue.Type}]{issue.Message ?? message}"); }); _ec.Setup(x => x.AddIssue(It.IsAny<Issue>(), It.IsAny<string>())).Callback((Issue issue, string message) => { _hc.GetTrace().Info($"[{issue.Type}]{issue.Message ?? message}"); });
} }

View File

@@ -278,6 +278,59 @@ namespace GitHub.Runner.Common.Tests.Worker
Assert.Equal("${{ matrix.node }}", _actionRunner.DisplayName); Assert.Equal("${{ matrix.node }}", _actionRunner.DisplayName);
} }
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public async void WarnInvalidInputs()
{
//Arrange
Setup();
var actionId = Guid.NewGuid();
var actionInputs = new MappingToken(null, null, null);
actionInputs.Add(new StringToken(null, null, null, "input1"), new StringToken(null, null, null, "test1"));
actionInputs.Add(new StringToken(null, null, null, "input2"), new StringToken(null, null, null, "test2"));
actionInputs.Add(new StringToken(null, null, null, "invalid1"), new StringToken(null, null, null, "invalid1"));
actionInputs.Add(new StringToken(null, null, null, "invalid2"), new StringToken(null, null, null, "invalid2"));
var action = new Pipelines.ActionStep()
{
Name = "action",
Id = actionId,
Reference = new Pipelines.RepositoryPathReference()
{
Name = "actions/runner",
Ref = "v1"
},
Inputs = actionInputs
};
_actionRunner.Action = action;
Dictionary<string, string> finalInputs = new Dictionary<string, string>();
_handlerFactory.Setup(x => x.Create(It.IsAny<IExecutionContext>(), It.IsAny<ActionStepDefinitionReference>(), It.IsAny<IStepHost>(), It.IsAny<ActionExecutionData>(), It.IsAny<Dictionary<string, string>>(), It.IsAny<Dictionary<string, string>>(), It.IsAny<Variables>(), It.IsAny<string>()))
.Callback((IExecutionContext executionContext, Pipelines.ActionStepDefinitionReference actionReference, IStepHost stepHost, ActionExecutionData data, Dictionary<string, string> inputs, Dictionary<string, string> environment, Variables runtimeVariables, string taskDirectory) =>
{
finalInputs = inputs;
})
.Returns(new Mock<IHandler>().Object);
//Act
await _actionRunner.RunAsync();
foreach (var input in finalInputs)
{
_hc.GetTrace().Info($"Input: {input.Key}={input.Value}");
}
//Assert
Assert.Equal("test1", finialInputs["input1"]);
Assert.Equal("test2", finialInputs["input2"]);
Assert.Equal("github", finialInputs["input3"]);
Assert.Equal("invalid1", finialInputs["invalid1"]);
Assert.Equal("invalid2", finialInputs["invalid2"]);
_ec.Verify(x => x.AddIssue(It.Is<Issue>(s => s.Message.Contains("Unexpected input(s) 'invalid1', 'invalid2'")), It.IsAny<string>()), Times.Once);
}
private void Setup([CallerMemberName] string name = "") private void Setup([CallerMemberName] string name = "")
{ {
_ecTokenSource?.Dispose(); _ecTokenSource?.Dispose();
@@ -326,6 +379,7 @@ namespace GitHub.Runner.Common.Tests.Worker
_ec.Setup(x => x.ExpressionFunctions).Returns(new List<IFunctionInfo>()); _ec.Setup(x => x.ExpressionFunctions).Returns(new List<IFunctionInfo>());
_ec.Setup(x => x.IntraActionState).Returns(new Dictionary<string, string>()); _ec.Setup(x => x.IntraActionState).Returns(new Dictionary<string, string>());
_ec.Setup(x => x.EnvironmentVariables).Returns(new Dictionary<string, string>()); _ec.Setup(x => x.EnvironmentVariables).Returns(new Dictionary<string, string>());
_ec.Setup(x => x.FileTable).Returns(new List<String>());
_ec.Setup(x => x.SetGitHubContext(It.IsAny<string>(), It.IsAny<string>())); _ec.Setup(x => x.SetGitHubContext(It.IsAny<string>(), It.IsAny<string>()));
_ec.Setup(x => x.GetGitHubContext(It.IsAny<string>())).Returns("{\"foo\":\"bar\"}"); _ec.Setup(x => x.GetGitHubContext(It.IsAny<string>())).Returns("{\"foo\":\"bar\"}");
_ec.Setup(x => x.CancellationToken).Returns(_ecTokenSource.Token); _ec.Setup(x => x.CancellationToken).Returns(_ecTokenSource.Token);

View File

@@ -1,4 +1,5 @@
using GitHub.DistributedTask.WebApi; using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Worker; using GitHub.Runner.Worker;
using Moq; using Moq;
using System; using System;
@@ -323,6 +324,60 @@ namespace GitHub.Runner.Common.Tests.Worker
} }
} }
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void ActionResult_Lowercase()
{
using (TestHostContext hc = CreateTestContext())
{
TaskOrchestrationPlanReference plan = new TaskOrchestrationPlanReference();
TimelineReference timeline = new TimelineReference();
Guid jobId = Guid.NewGuid();
string jobName = "some job name";
var jobRequest = new Pipelines.AgentJobRequestMessage(plan, timeline, jobId, jobName, jobName, null, null, null, new Dictionary<string, VariableValue>(), new List<MaskHint>(), new Pipelines.JobResources(), new Pipelines.ContextData.DictionaryContextData(), new Pipelines.WorkspaceOptions(), new List<Pipelines.ActionStep>(), null, null, null, null);
jobRequest.Resources.Repositories.Add(new Pipelines.RepositoryResource()
{
Alias = Pipelines.PipelineConstants.SelfAlias,
Id = "github",
Version = "sha1"
});
jobRequest.ContextData["github"] = new Pipelines.ContextData.DictionaryContextData();
jobRequest.Variables["ACTIONS_STEP_DEBUG"] = "true";
// Arrange: Setup the paging logger.
var pagingLogger1 = new Mock<IPagingLogger>();
var jobServerQueue = new Mock<IJobServerQueue>();
hc.EnqueueInstance(pagingLogger1.Object);
hc.SetSingleton(jobServerQueue.Object);
var jobContext = new Runner.Worker.ExecutionContext();
jobContext.Initialize(hc);
// Act.
jobContext.InitializeJob(jobRequest, CancellationToken.None);
jobContext.StepsContext.SetConclusion(null, "step1", ActionResult.Success);
var conclusion1 = (jobContext.StepsContext.GetScope(null)["step1"] as DictionaryContextData)["conclusion"].ToString();
Assert.Equal(conclusion1, conclusion1.ToLowerInvariant());
jobContext.StepsContext.SetOutcome(null, "step2", ActionResult.Cancelled);
var outcome1 = (jobContext.StepsContext.GetScope(null)["step2"] as DictionaryContextData)["outcome"].ToString();
Assert.Equal(outcome1, outcome1.ToLowerInvariant());
jobContext.StepsContext.SetConclusion(null, "step3", ActionResult.Failure);
var conclusion2 = (jobContext.StepsContext.GetScope(null)["step3"] as DictionaryContextData)["conclusion"].ToString();
Assert.Equal(conclusion2, conclusion2.ToLowerInvariant());
jobContext.StepsContext.SetOutcome(null, "step4", ActionResult.Skipped);
var outcome2 = (jobContext.StepsContext.GetScope(null)["step4"] as DictionaryContextData)["outcome"].ToString();
Assert.Equal(outcome2, outcome2.ToLowerInvariant());
jobContext.JobContext.Status = ActionResult.Success;
Assert.Equal(jobContext.JobContext["status"].ToString(), jobContext.JobContext["status"].ToString().ToLowerInvariant());
}
}
private TestHostContext CreateTestContext([CallerMemberName] String testName = "") private TestHostContext CreateTestContext([CallerMemberName] String testName = "")
{ {
var hc = new TestHostContext(this, testName); var hc = new TestHostContext(this, testName);

View File

@@ -686,14 +686,17 @@ namespace GitHub.Runner.Common.Tests.Worker
// <WORKSPACE>/workflow-repo/nested-other-repo
// <WORKSPACE>/other-repo
// <WORKSPACE>/other-repo/nested-workflow-repo
+ // <WORKSPACE>/workflow-repo-using-ssh
var workflowRepository = Path.Combine(workspaceDirectory, "workflow-repo");
var nestedOtherRepository = Path.Combine(workspaceDirectory, "workflow-repo", "nested-other-repo");
var otherRepository = Path.Combine(workspaceDirectory, workflowRepository, "nested-other-repo");
var nestedWorkflowRepository = Path.Combine(workspaceDirectory, "other-repo", "nested-workflow-repo");
+ var workflowRepositoryUsingSsh = Path.Combine(workspaceDirectory, "workflow-repo-using-ssh");
await CreateRepository(hostContext, workflowRepository, "https://github.com/my-org/workflow-repo");
await CreateRepository(hostContext, nestedOtherRepository, "https://github.com/my-org/other-repo");
await CreateRepository(hostContext, otherRepository, "https://github.com/my-org/other-repo");
await CreateRepository(hostContext, nestedWorkflowRepository, "https://github.com/my-org/workflow-repo");
+ await CreateRepository(hostContext, workflowRepositoryUsingSsh, "git@github.com:my-org/workflow-repo.git");
// Create test files
var file_noRepository = Path.Combine(workspaceDirectory, "no-repo.txt");
@@ -703,7 +706,8 @@ namespace GitHub.Runner.Common.Tests.Worker
var file_nestedOtherRepository = Path.Combine(nestedOtherRepository, "nested-other-repo");
var file_otherRepository = Path.Combine(otherRepository, "other-repo.txt");
var file_nestedWorkflowRepository = Path.Combine(nestedWorkflowRepository, "nested-workflow-repo.txt");
- foreach (var file in new[] { file_noRepository, file_workflowRepository, file_workflowRepository_nestedDirectory, file_workflowRepository_failsafe, file_nestedOtherRepository, file_otherRepository, file_nestedWorkflowRepository })
+ var file_workflowRepositoryUsingSsh = Path.Combine(workflowRepositoryUsingSsh, "workflow-repo-using-ssh.txt");
+ foreach (var file in new[] { file_noRepository, file_workflowRepository, file_workflowRepository_nestedDirectory, file_workflowRepository_failsafe, file_nestedOtherRepository, file_otherRepository, file_nestedWorkflowRepository, file_workflowRepositoryUsingSsh })
{
Directory.CreateDirectory(Path.GetDirectoryName(file));
File.WriteAllText(file, "");
@@ -718,8 +722,9 @@ namespace GitHub.Runner.Common.Tests.Worker
Process($"{file_nestedOtherRepository}: some error 6");
Process($"{file_otherRepository}: some error 7");
Process($"{file_nestedWorkflowRepository}: some error 8");
+ Process($"{file_workflowRepositoryUsingSsh}: some error 9");
- Assert.Equal(8, _issues.Count);
+ Assert.Equal(9, _issues.Count);
Assert.Equal("some error 1", _issues[0].Item1.Message);
Assert.False(_issues[0].Item1.Data.ContainsKey("file"));
@@ -744,6 +749,9 @@ namespace GitHub.Runner.Common.Tests.Worker
Assert.Equal("some error 8", _issues[7].Item1.Message);
Assert.Equal(file_nestedWorkflowRepository.Substring(nestedWorkflowRepository.Length + 1).Replace(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar), _issues[7].Item1.Data["file"]);
+ Assert.Equal("some error 9", _issues[8].Item1.Message);
+ Assert.Equal(file_workflowRepositoryUsingSsh.Substring(workflowRepositoryUsingSsh.Length + 1).Replace(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar), _issues[8].Item1.Data["file"]);
}
Environment.SetEnvironmentVariable("RUNNER_TEST_GET_REPOSITORY_PATH_FAILSAFE", "");
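The workflow-repo-using-ssh fixture added above checks that a repository cloned via an SSH remote is recognized as the same workflow repository as its HTTPS counterpart, so issues in its files still get a relative "file" annotation. A rough sketch of one way to reduce both remote formats to a common owner/repo key; NormalizeRemoteUrl is a hypothetical helper for illustration, not the runner's actual API:

    using System;

    public static class RemoteUrlHelper
    {
        // Hypothetical helper: reduce both SSH and HTTPS remotes to "owner/repo".
        // "git@github.com:my-org/workflow-repo.git" => "my-org/workflow-repo"
        // "https://github.com/my-org/workflow-repo" => "my-org/workflow-repo"
        public static string NormalizeRemoteUrl(string url)
        {
            if (string.IsNullOrEmpty(url))
            {
                return string.Empty;
            }

            var path = url;
            if (url.StartsWith("git@", StringComparison.OrdinalIgnoreCase))
            {
                // SSH form: git@host:owner/repo(.git)
                var colon = url.IndexOf(':');
                path = colon >= 0 ? url.Substring(colon + 1) : url;
            }
            else if (Uri.TryCreate(url, UriKind.Absolute, out var uri))
            {
                // HTTPS form: https://host/owner/repo(.git)
                path = uri.AbsolutePath.TrimStart('/');
            }

            if (path.EndsWith(".git", StringComparison.OrdinalIgnoreCase))
            {
                path = path.Substring(0, path.Length - 4);
            }

            return path.Trim('/');
        }
    }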


@@ -80,7 +80,7 @@ namespace GitHub.Runner.Common.Tests.Worker
{
_ec.Object.Result = null;
- _ec.Setup(x => x.JobSteps).Returns(new Queue<IStep>(variableSet.Select(x => x.Object).ToList()));
+ _ec.Setup(x => x.JobSteps).Returns(new List<IStep>(variableSet.Select(x => x.Object).ToList()));
// Act.
await _stepsRunner.RunAsync(jobContext: _ec.Object);
@@ -115,7 +115,7 @@ namespace GitHub.Runner.Common.Tests.Worker
{
_ec.Object.Result = null;
- _ec.Setup(x => x.JobSteps).Returns(new Queue<IStep>(variableSet.Select(x => x.Object).ToList()));
+ _ec.Setup(x => x.JobSteps).Returns(new List<IStep>(variableSet.Select(x => x.Object).ToList()));
// Act.
await _stepsRunner.RunAsync(jobContext: _ec.Object);
@@ -154,7 +154,7 @@ namespace GitHub.Runner.Common.Tests.Worker
{
_ec.Object.Result = null;
- _ec.Setup(x => x.JobSteps).Returns(new Queue<IStep>(variableSet.Steps.Select(x => x.Object).ToList()));
+ _ec.Setup(x => x.JobSteps).Returns(new List<IStep>(variableSet.Steps.Select(x => x.Object).ToList()));
// Act.
await _stepsRunner.RunAsync(jobContext: _ec.Object);
@@ -208,7 +208,7 @@ namespace GitHub.Runner.Common.Tests.Worker
{
_ec.Object.Result = null;
- _ec.Setup(x => x.JobSteps).Returns(new Queue<IStep>(variableSet.Steps.Select(x => x.Object).ToList()));
+ _ec.Setup(x => x.JobSteps).Returns(new List<IStep>(variableSet.Steps.Select(x => x.Object).ToList()));
// Act.
await _stepsRunner.RunAsync(jobContext: _ec.Object);
@@ -287,7 +287,7 @@ namespace GitHub.Runner.Common.Tests.Worker
{
_ec.Object.Result = null;
- _ec.Setup(x => x.JobSteps).Returns(new Queue<IStep>(variableSet.Steps.Select(x => x.Object).ToList()));
+ _ec.Setup(x => x.JobSteps).Returns(new List<IStep>(variableSet.Steps.Select(x => x.Object).ToList()));
// Act.
await _stepsRunner.RunAsync(jobContext: _ec.Object);
@@ -330,7 +330,7 @@ namespace GitHub.Runner.Common.Tests.Worker
{
_ec.Object.Result = null;
- _ec.Setup(x => x.JobSteps).Returns(new Queue<IStep>(variableSet.Step.Select(x => x.Object).ToList()));
+ _ec.Setup(x => x.JobSteps).Returns(new List<IStep>(variableSet.Step.Select(x => x.Object).ToList()));
// Act.
await _stepsRunner.RunAsync(jobContext: _ec.Object);
@@ -361,7 +361,7 @@ namespace GitHub.Runner.Common.Tests.Worker
{
_ec.Object.Result = null;
- _ec.Setup(x => x.JobSteps).Returns(new Queue<IStep>(variableSet.Select(x => x.Object).ToList()));
+ _ec.Setup(x => x.JobSteps).Returns(new List<IStep>(variableSet.Select(x => x.Object).ToList()));
// Act.
await _stepsRunner.RunAsync(jobContext: _ec.Object);
@@ -391,7 +391,7 @@ namespace GitHub.Runner.Common.Tests.Worker
{
_ec.Object.Result = null;
- _ec.Setup(x => x.JobSteps).Returns(new Queue<IStep>(variableSet.Select(x => x.Object).ToList()));
+ _ec.Setup(x => x.JobSteps).Returns(new List<IStep>(variableSet.Select(x => x.Object).ToList()));
// Act.
await _stepsRunner.RunAsync(jobContext: _ec.Object);
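These hunks switch the mocked JobSteps collection from Queue<IStep> to List<IStep>. The runner-side change is not shown in this diff; a plausible reason is that a list lets newly expanded steps (for example, the steps of a composite action) be spliced in at a specific position instead of only being appended behind a queue's tail. A small sketch under that assumption; IStep here is a stand-in type and InsertAfter is illustrative only:

    using System.Collections.Generic;

    public interface IStep
    {
        string DisplayName { get; }
    }

    public static class StepListExample
    {
        // Illustrative only: with a List<IStep>, expanded steps can be inserted
        // immediately after the step that produced them, preserving run order.
        public static void InsertAfter(List<IStep> jobSteps, int currentIndex, IReadOnlyList<IStep> expandedSteps)
        {
            jobSteps.InsertRange(currentIndex + 1, expandedSteps);
        }
    }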
@@ -417,7 +417,7 @@ namespace GitHub.Runner.Common.Tests.Worker
_ec.Object.Result = null;
- _ec.Setup(x => x.JobSteps).Returns(new Queue<IStep>(new[] { step1.Object }));
+ _ec.Setup(x => x.JobSteps).Returns(new List<IStep>(new[] { step1.Object }));
// Act.
await _stepsRunner.RunAsync(jobContext: _ec.Object);
@@ -426,11 +426,11 @@ namespace GitHub.Runner.Common.Tests.Worker
Assert.Equal(TaskResult.Succeeded, _ec.Object.Result ?? TaskResult.Succeeded);
#if OS_WINDOWS
- Assert.Equal("100", _ec.Object.ExpressionValues["env"].AssertDictionary("env")["env1"].AssertString("100"));
+ Assert.Equal("100", step1.Object.ExecutionContext.ExpressionValues["env"].AssertDictionary("env")["env1"].AssertString("100"));
- Assert.Equal("github_actions", _ec.Object.ExpressionValues["env"].AssertDictionary("env")["env2"].AssertString("github_actions"));
+ Assert.Equal("github_actions", step1.Object.ExecutionContext.ExpressionValues["env"].AssertDictionary("env")["env2"].AssertString("github_actions"));
#else
- Assert.Equal("100", _ec.Object.ExpressionValues["env"].AssertCaseSensitiveDictionary("env")["env1"].AssertString("100"));
+ Assert.Equal("100", step1.Object.ExecutionContext.ExpressionValues["env"].AssertCaseSensitiveDictionary("env")["env1"].AssertString("100"));
- Assert.Equal("github_actions", _ec.Object.ExpressionValues["env"].AssertCaseSensitiveDictionary("env")["env2"].AssertString("github_actions"));
+ Assert.Equal("github_actions", step1.Object.ExecutionContext.ExpressionValues["env"].AssertCaseSensitiveDictionary("env")["env2"].AssertString("github_actions"));
#endif
}
}
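From this hunk on, the env assertions read step1.Object.ExecutionContext.ExpressionValues rather than the job-level _ec mock, i.e. each step is expected to carry its own expression-context copy of env. A minimal sketch of building such a per-step map, using hypothetical names (BuildStepEnv is not runner code); the OS split mirrors the AssertDictionary vs. AssertCaseSensitiveDictionary branches above:

    using System;
    using System.Collections.Generic;
    using System.Runtime.InteropServices;

    public static class StepEnvExample
    {
        // Illustrative only: copy job-level env values, then layer the step's
        // own overrides on top. Variable names are compared case-insensitively
        // on Windows and case-sensitively elsewhere.
        public static Dictionary<string, string> BuildStepEnv(
            IReadOnlyDictionary<string, string> jobEnv,
            IReadOnlyDictionary<string, string> stepOverrides)
        {
            var comparer = RuntimeInformation.IsOSPlatform(OSPlatform.Windows)
                ? StringComparer.OrdinalIgnoreCase
                : StringComparer.Ordinal;
            var result = new Dictionary<string, string>(comparer);
            foreach (var pair in jobEnv) { result[pair.Key] = pair.Value; }
            foreach (var pair in stepOverrides) { result[pair.Key] = pair.Value; }
            return result;
        }
    }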
@@ -455,7 +455,7 @@ namespace GitHub.Runner.Common.Tests.Worker
_ec.Object.Result = null;
- _ec.Setup(x => x.JobSteps).Returns(new Queue<IStep>(new[] { step1.Object, step2.Object }));
+ _ec.Setup(x => x.JobSteps).Returns(new List<IStep>(new[] { step1.Object, step2.Object }));
// Act.
await _stepsRunner.RunAsync(jobContext: _ec.Object);
@@ -463,13 +463,13 @@ namespace GitHub.Runner.Common.Tests.Worker
// Assert.
Assert.Equal(TaskResult.Succeeded, _ec.Object.Result ?? TaskResult.Succeeded);
#if OS_WINDOWS
- Assert.Equal("1000", _ec.Object.ExpressionValues["env"].AssertDictionary("env")["env1"].AssertString("1000"));
+ Assert.Equal("1000", step2.Object.ExecutionContext.ExpressionValues["env"].AssertDictionary("env")["env1"].AssertString("1000"));
- Assert.Equal("github_actions", _ec.Object.ExpressionValues["env"].AssertDictionary("env")["env3"].AssertString("github_actions"));
+ Assert.Equal("github_actions", step2.Object.ExecutionContext.ExpressionValues["env"].AssertDictionary("env")["env3"].AssertString("github_actions"));
- Assert.False(_ec.Object.ExpressionValues["env"].AssertDictionary("env").ContainsKey("env2"));
+ Assert.False(step2.Object.ExecutionContext.ExpressionValues["env"].AssertDictionary("env").ContainsKey("env2"));
#else
- Assert.Equal("1000", _ec.Object.ExpressionValues["env"].AssertCaseSensitiveDictionary("env")["env1"].AssertString("1000"));
+ Assert.Equal("1000", step2.Object.ExecutionContext.ExpressionValues["env"].AssertCaseSensitiveDictionary("env")["env1"].AssertString("1000"));
- Assert.Equal("github_actions", _ec.Object.ExpressionValues["env"].AssertCaseSensitiveDictionary("env")["env3"].AssertString("github_actions"));
+ Assert.Equal("github_actions", step2.Object.ExecutionContext.ExpressionValues["env"].AssertCaseSensitiveDictionary("env")["env3"].AssertString("github_actions"));
- Assert.False(_ec.Object.ExpressionValues["env"].AssertCaseSensitiveDictionary("env").ContainsKey("env2"));
+ Assert.False(step2.Object.ExecutionContext.ExpressionValues["env"].AssertCaseSensitiveDictionary("env").ContainsKey("env2"));
#endif
}
}
@@ -493,7 +493,7 @@ namespace GitHub.Runner.Common.Tests.Worker
_ec.Object.Result = null;
- _ec.Setup(x => x.JobSteps).Returns(new Queue<IStep>(new[] { step1.Object, step2.Object }));
+ _ec.Setup(x => x.JobSteps).Returns(new List<IStep>(new[] { step1.Object, step2.Object }));
// Act.
await _stepsRunner.RunAsync(jobContext: _ec.Object);
@@ -501,11 +501,11 @@ namespace GitHub.Runner.Common.Tests.Worker
// Assert.
Assert.Equal(TaskResult.Succeeded, _ec.Object.Result ?? TaskResult.Succeeded);
#if OS_WINDOWS
- Assert.Equal("1000", _ec.Object.ExpressionValues["env"].AssertDictionary("env")["env1"].AssertString("1000"));
+ Assert.Equal("1000", step2.Object.ExecutionContext.ExpressionValues["env"].AssertDictionary("env")["env1"].AssertString("1000"));
- Assert.Equal("something", _ec.Object.ExpressionValues["env"].AssertDictionary("env")["env2"].AssertString("something"));
+ Assert.Equal("something", step2.Object.ExecutionContext.ExpressionValues["env"].AssertDictionary("env")["env2"].AssertString("something"));
#else
- Assert.Equal("1000", _ec.Object.ExpressionValues["env"].AssertCaseSensitiveDictionary("env")["env1"].AssertString("1000"));
+ Assert.Equal("1000", step2.Object.ExecutionContext.ExpressionValues["env"].AssertCaseSensitiveDictionary("env")["env1"].AssertString("1000"));
- Assert.Equal("something", _ec.Object.ExpressionValues["env"].AssertCaseSensitiveDictionary("env")["env2"].AssertString("something"));
+ Assert.Equal("something", step2.Object.ExecutionContext.ExpressionValues["env"].AssertCaseSensitiveDictionary("env")["env2"].AssertString("something"));
#endif
}
}
@@ -524,7 +524,7 @@ namespace GitHub.Runner.Common.Tests.Worker
_ec.Object.Result = null;
- _ec.Setup(x => x.JobSteps).Returns(new Queue<IStep>(new[] { step1.Object, step2.Object, step3.Object }));
+ _ec.Setup(x => x.JobSteps).Returns(new List<IStep>(new[] { step1.Object, step2.Object, step3.Object }));
// Act.
await _stepsRunner.RunAsync(jobContext: _ec.Object);
@@ -536,12 +536,12 @@ namespace GitHub.Runner.Common.Tests.Worker
step2.Verify(x => x.RunAsync(), Times.Once);
step3.Verify(x => x.RunAsync(), Times.Once);
- Assert.Equal(TaskResult.Succeeded.ToActionResult().ToString(), _stepContext.GetScope(null)["step1"].AssertDictionary("")["outcome"].AssertString(""));
+ Assert.Equal(TaskResult.Succeeded.ToActionResult().ToString().ToLowerInvariant(), _stepContext.GetScope(null)["step1"].AssertDictionary("")["outcome"].AssertString(""));
- Assert.Equal(TaskResult.Succeeded.ToActionResult().ToString(), _stepContext.GetScope(null)["step1"].AssertDictionary("")["conclusion"].AssertString(""));
+ Assert.Equal(TaskResult.Succeeded.ToActionResult().ToString().ToLowerInvariant(), _stepContext.GetScope(null)["step1"].AssertDictionary("")["conclusion"].AssertString(""));
- Assert.Equal(TaskResult.Failed.ToActionResult().ToString(), _stepContext.GetScope(null)["step2"].AssertDictionary("")["outcome"].AssertString(""));
+ Assert.Equal(TaskResult.Failed.ToActionResult().ToString().ToLowerInvariant(), _stepContext.GetScope(null)["step2"].AssertDictionary("")["outcome"].AssertString(""));
- Assert.Equal(TaskResult.Succeeded.ToActionResult().ToString(), _stepContext.GetScope(null)["step2"].AssertDictionary("")["conclusion"].AssertString(""));
+ Assert.Equal(TaskResult.Succeeded.ToActionResult().ToString().ToLowerInvariant(), _stepContext.GetScope(null)["step2"].AssertDictionary("")["conclusion"].AssertString(""));
- Assert.Equal(TaskResult.Succeeded.ToActionResult().ToString(), _stepContext.GetScope(null)["step3"].AssertDictionary("")["outcome"].AssertString(""));
+ Assert.Equal(TaskResult.Succeeded.ToActionResult().ToString().ToLowerInvariant(), _stepContext.GetScope(null)["step3"].AssertDictionary("")["outcome"].AssertString(""));
- Assert.Equal(TaskResult.Succeeded.ToActionResult().ToString(), _stepContext.GetScope(null)["step3"].AssertDictionary("")["conclusion"].AssertString(""));
+ Assert.Equal(TaskResult.Succeeded.ToActionResult().ToString().ToLowerInvariant(), _stepContext.GetScope(null)["step3"].AssertDictionary("")["conclusion"].AssertString(""));
}
}
@@ -560,7 +560,7 @@ namespace GitHub.Runner.Common.Tests.Worker
_ec.Object.Result = null;
- _ec.Setup(x => x.JobSteps).Returns(new Queue<IStep>(new[] { step1.Object, step2.Object, step3.Object }));
+ _ec.Setup(x => x.JobSteps).Returns(new List<IStep>(new[] { step1.Object, step2.Object, step3.Object }));
// Act.
await _stepsRunner.RunAsync(jobContext: _ec.Object);
@@ -572,12 +572,12 @@ namespace GitHub.Runner.Common.Tests.Worker
step2.Verify(x => x.RunAsync(), Times.Once);
step3.Verify(x => x.RunAsync(), Times.Once);
- Assert.Equal(TaskResult.Skipped.ToActionResult().ToString(), _stepContext.GetScope(null)["step1"].AssertDictionary("")["outcome"].AssertString(""));
+ Assert.Equal(TaskResult.Skipped.ToActionResult().ToString().ToLowerInvariant(), _stepContext.GetScope(null)["step1"].AssertDictionary("")["outcome"].AssertString(""));
- Assert.Equal(TaskResult.Skipped.ToActionResult().ToString(), _stepContext.GetScope(null)["step1"].AssertDictionary("")["conclusion"].AssertString(""));
+ Assert.Equal(TaskResult.Skipped.ToActionResult().ToString().ToLowerInvariant(), _stepContext.GetScope(null)["step1"].AssertDictionary("")["conclusion"].AssertString(""));
- Assert.Equal(TaskResult.Failed.ToActionResult().ToString(), _stepContext.GetScope(null)["step2"].AssertDictionary("")["outcome"].AssertString(""));
+ Assert.Equal(TaskResult.Failed.ToActionResult().ToString().ToLowerInvariant(), _stepContext.GetScope(null)["step2"].AssertDictionary("")["outcome"].AssertString(""));
- Assert.Equal(TaskResult.Succeeded.ToActionResult().ToString(), _stepContext.GetScope(null)["step2"].AssertDictionary("")["conclusion"].AssertString(""));
+ Assert.Equal(TaskResult.Succeeded.ToActionResult().ToString().ToLowerInvariant(), _stepContext.GetScope(null)["step2"].AssertDictionary("")["conclusion"].AssertString(""));
- Assert.Equal(TaskResult.Succeeded.ToActionResult().ToString(), _stepContext.GetScope(null)["step3"].AssertDictionary("")["outcome"].AssertString(""));
+ Assert.Equal(TaskResult.Succeeded.ToActionResult().ToString().ToLowerInvariant(), _stepContext.GetScope(null)["step3"].AssertDictionary("")["outcome"].AssertString(""));
- Assert.Equal(TaskResult.Succeeded.ToActionResult().ToString(), _stepContext.GetScope(null)["step3"].AssertDictionary("")["conclusion"].AssertString(""));
+ Assert.Equal(TaskResult.Succeeded.ToActionResult().ToString().ToLowerInvariant(), _stepContext.GetScope(null)["step3"].AssertDictionary("")["conclusion"].AssertString(""));
}
}
@@ -602,7 +602,7 @@ namespace GitHub.Runner.Common.Tests.Worker
stepContext.Setup(x => x.WriteDebug).Returns(true);
stepContext.Setup(x => x.Variables).Returns(_variables);
stepContext.Setup(x => x.EnvironmentVariables).Returns(_env);
- stepContext.Setup(x => x.ExpressionValues).Returns(_contexts);
+ stepContext.Setup(x => x.ExpressionValues).Returns(new DictionaryContextData());
stepContext.Setup(x => x.ExpressionFunctions).Returns(new List<IFunctionInfo>());
stepContext.Setup(x => x.JobContext).Returns(_jobContext);
stepContext.Setup(x => x.StepsContext).Returns(_stepContext);
@@ -615,8 +615,8 @@ namespace GitHub.Runner.Common.Tests.Worker
stepContext.Object.Result = r;
}
- _stepContext.SetOutcome("", stepContext.Object.ContextName, (stepContext.Object.Outcome ?? stepContext.Object.Result ?? TaskResult.Succeeded).ToActionResult().ToString());
+ _stepContext.SetOutcome("", stepContext.Object.ContextName, (stepContext.Object.Outcome ?? stepContext.Object.Result ?? TaskResult.Succeeded).ToActionResult());
- _stepContext.SetConclusion("", stepContext.Object.ContextName, (stepContext.Object.Result ?? TaskResult.Succeeded).ToActionResult().ToString());
+ _stepContext.SetConclusion("", stepContext.Object.ContextName, (stepContext.Object.Result ?? TaskResult.Succeeded).ToActionResult());
});
var trace = hc.GetTrace();
stepContext.Setup(x => x.Write(It.IsAny<string>(), It.IsAny<string>())).Callback((string tag, string message) => { trace.Info($"[{tag}]{message}"); });
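In the mock callback above, SetOutcome and SetConclusion now receive the ActionResult enum directly instead of a pre-stringified value, which lines up with the lowercase strings asserted in the earlier hunks. A hedged sketch of the shapes involved; both the ToActionResult mapping and IStepsContextSketch below are illustrative stand-ins, not the runner's actual types:

    using System;

    public enum TaskResult { Succeeded, Failed, Canceled, Skipped }
    public enum ActionResult { Success, Failure, Cancelled, Skipped }

    public static class TaskResultExtensionsSketch
    {
        // Hypothetical mapping; the real ToActionResult lives in the runner
        // sources, which are not part of this diff.
        public static ActionResult ToActionResult(this TaskResult result)
        {
            switch (result)
            {
                case TaskResult.Succeeded: return ActionResult.Success;
                case TaskResult.Failed: return ActionResult.Failure;
                case TaskResult.Canceled: return ActionResult.Cancelled;
                case TaskResult.Skipped: return ActionResult.Skipped;
                default: throw new ArgumentOutOfRangeException(nameof(result));
            }
        }
    }

    // Hypothetical shape of the setters the mock callback now calls with the enum.
    public interface IStepsContextSketch
    {
        void SetOutcome(string scopeName, string stepName, ActionResult outcome);
        void SetConclusion(string scopeName, string stepName, ActionResult conclusion);
    }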


@@ -1,4 +1,4 @@
- #!/bin/bash
+ #!/usr/bin/env bash
###############################################################################
#


@@ -1 +1 @@
- 2.169.0
+ 2.267.0