Compare commits

..

29 Commits

Author SHA1 Message Date
Marek Mahut
dcda342ecc use /usr/bin/env to find bash in scripts (#314) 2021-04-22 17:45:33 -04:00
Tingluo Huang
a711bd9494 add workflow_dispatch 2020-07-28 14:52:38 -04:00
Ethan Chiu
5e0cde8649 Composite Actions UI (#578)
* Composite Action Run Steps

* Env Flow => Able to get env variables and overwrite current env variables => but it doesn't 'stick'

* clean up

* Clean up trace messages + add Trace debug in ActionManager

* Add debugging message

* Optimize runtime of code

* Change String to string

* Add comma to Composite

* Change JobSteps to a List, Change Register Step function name

* Add TODO, remove unn. content

* Remove unnecessary code

* Fix unit tests

* Fix env format

* Remove comment

* Remove TODO message for context

* Add verbose trace logs which are only viewable by devs

* Initial Start for FileTable stuff

* Progress towards passing FileTable or FileID or FileName

* Sort usings in Composite Action Handler

* Change 0 to location

* Update context variables in composite action yaml

* Add helpful error message for null steps

* Pass fileID to all children token of root action token

* Change confusing term context => templateContext, Eliminate _fileTable and only use ExecutionContext.FileTable + update this table when need be

* Remove unnessary FileID attribute from CompositeActionExecutionData

* Clean up file path for error message

* Remove todo

* Initial start/framework for output handling

* Outline different class vs Handler approach

* Remove InitializeScope

* Remove InitializeScope

* Fix Workflow Step Env overiding Parent Env

* First Approach for Attaching ID + Group ID to each Composite Action Step

* Add GroupID to the ActionDefinitionData

* starting foundation for handling clean up outputs step

* Pass outputs data to each composite action step to enable set-output functionality

* Create ScopeName for whole composite action.

This will enable us to add to the StepsContext[ScopeName] for the composite action which will allow us to use all these outputs in the cleanup step

* Hook up composite output step to handler => tmmrw implement composite output handler

* Add post composite action step to cleanup outputs => triggers composite output cleanup handler

* Fix Outputs Token handling start. Add individual step scope names.

* Set up Scope Name and Context Name naming system{

* Figured out how to pass Parent Execution Context to clean up step

* Figured out how to pass Parent Execution Context and scope names to
clean up step

* Add GetOutput function for StepsContext

* Generate child scope name correctly if parent scope name is null

* Simplify InitializeScope()

* Outputs are set correctly and able to get all final outputs in handler

* Parse through Action Outputs

* Fix null ScopeName + ContextName in CompositeOutputHandler

* Shift over handling of Action Outputs to output handler

* First attempt to fix null retrievals for output variables

* Basic Support for Outputs Done.

* Clean up pt.1

* Refactor outputs to avoid using Action Reference

* Clean up code

* Clean up part 2

* Add clarifying comments for the output handler

* Remove TODO

* Remove env in composite action scope

* Clean up

* Revert back

* revert back

* add back envToken

* Remove unnecessary code

* Add file length check

* Clean up

* Fix logging issue

* Figure out how to handle set-env edge cases

* formatting

* fix unit tests

* Fix windows unit test syntax error

* Fix period

* Sanity check for fileTable add + remove unn. code

* revert back

* Add back line break

* Fix null errors

* Address situation if FileTable is null + add sanity check for adding file to fileTable

* add line

* Revert

* Fix unit tests to instantiate a FileTable

* Fix logic for trimming manifestfile path

* Add null check

* Revert

* Revert

* revert

* spacing

* Add filetable to testing file, remove ? since we know filetable should never be non null

* Fix Throw logic

* Clarify template outputs token

* Add another type support for outputs to avoid container unit tests errors

* Add mapping for parity

* Build support for new outputs format

* Build support for new outputs format

* Refactor to avoid duplication of action yaml for workflow yaml

* revert

* revert

* revert

* spacing
2020-07-13 17:55:15 -04:00
Ethan Chiu
cb2b323781 Composite Run Steps Outputs (#568)
* Composite Action Run Steps

* Env Flow => Able to get env variables and overwrite current env variables => but it doesn't 'stick'

* clean up

* Clean up trace messages + add Trace debug in ActionManager

* Add debugging message

* Optimize runtime of code

* Change String to string

* Add comma to Composite

* Change JobSteps to a List, Change Register Step function name

* Add TODO, remove unn. content

* Remove unnecessary code

* Fix unit tests

* Fix env format

* Remove comment

* Remove TODO message for context

* Add verbose trace logs which are only viewable by devs

* Initial Start for FileTable stuff

* Progress towards passing FileTable or FileID or FileName

* Sort usings in Composite Action Handler

* Change 0 to location

* Update context variables in composite action yaml

* Add helpful error message for null steps

* Pass fileID to all children token of root action token

* Change confusing term context => templateContext, Eliminate _fileTable and only use ExecutionContext.FileTable + update this table when need be

* Remove unnessary FileID attribute from CompositeActionExecutionData

* Clean up file path for error message

* Remove todo

* Initial start/framework for output handling

* Outline different class vs Handler approach

* Remove InitializeScope

* Remove InitializeScope

* Fix Workflow Step Env overiding Parent Env

* First Approach for Attaching ID + Group ID to each Composite Action Step

* Add GroupID to the ActionDefinitionData

* starting foundation for handling clean up outputs step

* Pass outputs data to each composite action step to enable set-output functionality

* Create ScopeName for whole composite action.

This will enable us to add to the StepsContext[ScopeName] for the composite action which will allow us to use all these outputs in the cleanup step

* Hook up composite output step to handler => tmmrw implement composite output handler

* Add post composite action step to cleanup outputs => triggers composite output cleanup handler

* Fix Outputs Token handling start. Add individual step scope names.

* Set up Scope Name and Context Name naming system{

* Figured out how to pass Parent Execution Context to clean up step

* Figured out how to pass Parent Execution Context and scope names to
clean up step

* Add GetOutput function for StepsContext

* Generate child scope name correctly if parent scope name is null

* Simplify InitializeScope()

* Outputs are set correctly and able to get all final outputs in handler

* Parse through Action Outputs

* Fix null ScopeName + ContextName in CompositeOutputHandler

* Shift over handling of Action Outputs to output handler

* First attempt to fix null retrievals for output variables

* Basic Support for Outputs Done.

* Clean up pt.1

* Refactor outputs to avoid using Action Reference

* Clean up code

* Clean up part 2

* Add clarifying comments for the output handler

* Remove TODO

* Remove env in composite action scope

* Clean up

* Revert back

* revert back

* add back envToken

* Remove unnecessary code

* Add file length check

* Clean up

* Figure out how to handle set-env edge cases

* formatting

* fix unit tests

* Fix windows unit test syntax error

* Fix period

* Sanity check for fileTable add + remove unn. code

* revert back

* Add back line break

* Fix null errors

* Address situation if FileTable is null + add sanity check for adding file to fileTable

* add line

* Revert

* Fix unit tests to instantiate a FileTable

* Fix logic for trimming manifestfile path

* Add null check

* Revert

* Revert

* revert

* spacing

* Add filetable to testing file, remove ? since we know filetable should never be non null

* Fix Throw logic

* Clarify template outputs token

* Add another type support for outputs to avoid container unit tests errors

* Add mapping for parity

* Build support for new outputs format

* Refactor to avoid duplication of action yaml for workflow yaml

* Move SDK work in ActionManifestManager, Condense Code

* Defer runs evaluation till after for loop to ensure order doesn't matter

* Fix logic error in setting scope and context names

* Add Regex + Add Child Context name null resolution

* move private function to bottom of class
2020-07-13 17:23:19 -04:00
Ethan Chiu
6c3958f365 Composite Run Steps ADR (#554)
* start

* Inputs + Outputs

* Clarify docs

* Finish Environment

* Add if condition

* Clarify language

* Update 0549-composite-run-steps.md

* timeout-minutes

* Finish

* add relevant example

* Fix syntax

* fix env example

* fix yaml syntax

* Update 0549-composite-run-steps.md

* Update file names, add more relevant example if condition

* Add note to continue-on-error

* Apply changes to If Condition

* bolding

* Update 0549-composite-run-steps.md

* Update 0549-composite-run-steps.md

* Update 0549-composite-run-steps.md

* Update 0549-composite-run-steps.md

* Syntax support + spacing

* Add guiding principles.

* Update 0549-composite-run-steps.md

* Reverse order.

* Update 0549-composite-run-steps.md

* change from job to step

* Update 0549-composite-run-steps.md

* Update 0549-composite-run-steps.md

* Update 0549-composite-run-steps.md

* Add Secrets

* Update 0549-composite-run-steps.md

* Update 0549-composite-run-steps.md

* Fix output example

* Fix output example

* Fix action examples to use using.

* fix output variable name

* update workingDir + env

* Defaults + continue-on-error

* Update Outputs Section

* Eliminate Env

* Secrets

* Update timeout-minutes

* Update 0549-composite-run-steps.md

* Update 0549-composite-run-steps.md

* Fix example.

* Remove TODOs

* Update 0549-composite-run-steps.md

* Update 0549-composite-run-steps.md
2020-07-13 12:30:31 -04:00
Ethan Chiu
9d7bd4706b Improve Error Messaging for Actions by Using ExecutionContext's FileTable as Single Source of Truth and by Passing FileID to All Children Tokens. (#564)
* Composite Action Run Steps

* Env Flow => Able to get env variables and overwrite current env variables => but it doesn't 'stick'

* clean up

* Clean up trace messages + add Trace debug in ActionManager

* Add debugging message

* Optimize runtime of code

* Change String to string

* Add comma to Composite

* Change JobSteps to a List, Change Register Step function name

* Add TODO, remove unn. content

* Remove unnecessary code

* Fix unit tests

* Fix env format

* Remove comment

* Remove TODO message for context

* Add verbose trace logs which are only viewable by devs

* Initial Start for FileTable stuff

* Progress towards passing FileTable or FileID or FileName

* Sort usings in Composite Action Handler

* Change 0 to location

* Update context variables in composite action yaml

* Add helpful error message for null steps

* Pass fileID to all children token of root action token

* Change confusing term context => templateContext, Eliminate _fileTable and only use ExecutionContext.FileTable + update this table when need be

* Remove unnessary FileID attribute from CompositeActionExecutionData

* Clean up file path for error message

* Remove todo

* Fix Workflow Step Env overiding Parent Env

* Remove env in composite action scope

* Clean up

* Revert back

* revert back

* add back envToken

* Remove unnecessary code

* Add file length check

* Clean up

* Figure out how to handle set-env edge cases

* formatting

* fix unit tests

* Fix windows unit test syntax error

* Fix period

* Sanity check for fileTable add + remove unn. code

* revert back

* Add back line break

* Fix null errors

* Address situation if FileTable is null + add sanity check for adding file to fileTable

* add line

* Revert

* Fix unit tests to instantiate a FileTable

* Fix logic for trimming manifestfile path

* Add null check

* Add filetable to testing file, remove ? since we know filetable should never be non null
2020-07-08 17:15:16 -04:00
Ethan Chiu
5822a38c39 Add bash command for running custom runner (#569) 2020-07-08 11:20:38 -04:00
Ethan Chiu
d42c9da2d7 Composite Actions: Support Env Flow (#557)
* Composite Action Run Steps

* Env Flow => Able to get env variables and overwrite current env variables => but it doesn't 'stick'

* clean up

* Clean up trace messages + add Trace debug in ActionManager

* Add debugging message

* Optimize runtime of code

* Change String to string

* Add comma to Composite

* Change JobSteps to a List, Change Register Step function name

* Add TODO, remove unn. content

* Remove unnecessary code

* Fix unit tests

* Fix env format

* Remove comment

* Remove TODO message for context

* Add verbose trace logs which are only viewable by devs

* Sort usings in Composite Action Handler

* Change 0 to location

* Update context variables in composite action yaml

* Add helpful error message for null steps

* Fix Workflow Step Env overiding Parent Env

* Remove env in composite action scope

* Clean up

* Revert back

* revert back

* add back envToken

* Remove unnecessary code

* Figure out how to handle set-env edge cases

* formatting

* fix unit tests

* Fix windows unit test syntax error
2020-07-08 10:16:51 -04:00
eric sciple
121deedeb5 Fix trailing '.0' for Int64 values (#572) 2020-06-30 17:25:47 -04:00
Ethan Chiu
a0942ed345 Composite Actions Support for Multiple Run Steps (#549)
* Composite Action Run Steps

* Clean up trace messages + add Trace debug in ActionManager

* Change String to string

* Add comma to Composite

* Change JobSteps to a List, Change Register Step function name

* Add TODO, remove unn. content

* Remove unnecessary code

* Fix unit tests

* Add verbose trace logs which are only viewable by devs

* Sort usings in Composite Action Handler

* Change 0 to location

* Update context variables in composite action yaml

* Add helpful error message for null steps
2020-06-23 15:35:32 -04:00
TingluoHuang
7cef9a27ca release 2.267.0 runner. 2020-06-23 14:05:28 -04:00
Tingluo Huang
df7e16954e print runner and machine name to log. (#539) 2020-06-23 13:57:37 -04:00
eric sciple
4e7d27a53c remove temporary logic when resolving action download info (#550) 2020-06-15 13:13:47 -04:00
Lokesh Gopu
89d1418e48 Update exception message (#540) 2020-06-11 17:25:50 -04:00
Tingluo Huang
e728b8594d fix race condition. (#538) 2020-06-11 16:17:24 -04:00
Tingluo Huang
de4490d06d Restore SELinux context on service file when SELinux is enabled (#525) 2020-06-11 15:40:09 -04:00
Tingluo Huang
2e800f857e Skip search $PATH on command with fully qualified path (#526) 2020-06-11 13:52:42 -04:00
Tingluo Huang
312c7668a8 Fix DataContract with Token service (#532) 2020-06-11 12:11:35 -04:00
Tingluo Huang
eaf39bb058 add libicu66 for Ubuntu 20.04 (#535) 2020-06-11 12:11:13 -04:00
eric sciple
5815819f24 Resolve action download info (#515) 2020-06-09 08:53:28 -04:00
Ethan Chiu
1aea046932 Add substep for developer flow for clarity (#523) 2020-06-08 10:47:58 -04:00
Ethan Chiu
eda463601c Update Links and Language to Git + VSCode (#522) 2020-06-08 10:19:17 -04:00
Nick Fields
f994ae0542 Reduce input validation warnings (#506)
* Only raise a single warning for unexpected inputs

* Update invalid input test to raise single warning
2020-06-05 23:09:14 -04:00
Tingluo Huang
3c5aef791c Fix null ref exception in SecretMasker caused by hashfiles timeout. (#516) 2020-06-05 23:02:10 -04:00
Tingluo Huang
c4626d0c3a Remove SPS/Token migration code. Remove GHES url manipulate code. (#513)
* Remove SPS/Token migration code. Remove GHES url manipulate code.

* feedback.
2020-06-03 23:24:53 -04:00
eric sciple
416a7ac4b8 prepare to switch to service resolves archive download info (#508) 2020-06-02 17:21:50 -04:00
TingluoHuang
11435857e4 prepare 2.263.0 runner release. 2020-05-21 15:49:10 -04:00
Tingluo Huang
6f260012a3 Fix inputs validation warning, fix post step display name, fix worker crash due to error in step.env (#490) 2020-05-21 11:09:50 -04:00
eric sciple
4fc87ddfc6 fix problem matcher for GHES (#488) 2020-05-19 16:15:03 -04:00
62 changed files with 3908 additions and 2079 deletions

View File

@@ -1,6 +1,7 @@
name: Runner CI
on:
  workflow_dispatch:
  push:
    branches:
      - master

View File

@@ -0,0 +1,275 @@
# ADR 054x: Composite Run Steps
**Date**: 2020-06-17
**Status**: Proposed
**Relevant PR**: https://github.com/actions/runner/pull/549
## Context
Customers want to be able to compose actions from other actions (ex: https://github.com/actions/runner/issues/438).
An important step towards meeting this goal is building in functionality that lets users simply execute any number of steps within an action.
## Guiding Principles
The workflow author shouldn't need to know the internal workings of the composite action (for example, `default.shell` and `default.workingDir` should not be inherited from the workflow file into the action file). When deciding how to design composite run steps, we want to think one logical step away from the consumer.
A composite action is treated as **one** individual job step (aka encapsulation).
## Decision
**In this ADR, we only support running multiple run steps in an action.** In doing so, we build in support for mapping and flowing the inputs, outputs, and env variables (for example, all nested steps have access to their parent's input variables and can overwrite them).
## Steps
Example `workflow.yml`
```yaml
jobs:
  build:
    runs-on: self-hosted
    steps:
      - id: step1
        uses: actions/setup-python@v1
      - id: step2
        uses: actions/setup-node@v2
      - uses: actions/checkout@v2
      - uses: user/composite@v1
      - name: workflow step 1
        run: echo hello world 3
      - name: workflow step 2
        run: echo hello world 4
```
Example `user/composite/action.yml`
```yaml
runs:
  using: "composite"
  steps:
    - run: pip install -r requirements.txt
    - run: npm install
```
Example Output
```yaml
[npm installation output]
[pip requirements output]
echo hello world 3
echo hello world 4
```
We add a token called `composite` that allows our runner code to process composite actions. When `using: composite` is specified, the runner processes the `steps` attribute, converts this template code into a list of steps, and runs each run step sequentially. If any step fails and no `if` conditions are defined, the whole composite action job fails.
## Inputs
Example `workflow.yml`:
```yaml
steps:
  - id: foo
    uses: user/composite@v1
    with:
      your_name: "Octocat"
```
Example `user/composite/action.yml`:
```yaml
inputs:
  your_name:
    description: 'Your name'
    default: 'Ethan'
runs:
  using: "composite"
  steps:
    - run: echo hello ${{ inputs.your_name }}
```
Example Output:
```
hello Octocat
```
Each input variable in the composite action is only viewable in its own scope.
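To make the scoping concrete, here is a minimal sketch that reuses the example above; the echoed message is illustrative only, not proposed syntax:
```yaml
steps:
  - uses: user/composite@v1
    with:
      your_name: "Octocat"
  # A workflow step that follows the composite action has no access to the
  # action's inputs: there is no inputs.your_name in the workflow scope.
  - run: echo "the workflow cannot read the composite action's inputs here"
```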
## Outputs
Example `workflow.yml`:
```yaml
...
steps:
  - id: foo
    uses: user/composite@v1
  - run: echo random-number ${{ steps.foo.outputs.random-number }}
```
Example `user/composite/action.yml`:
```yaml
outputs:
  random-number:
    description: "Random number"
    value: ${{ steps.random-number-generator.outputs.random-id }}
runs:
  using: "composite"
  steps:
    - id: random-number-generator
      run: echo "::set-output name=random-id::$(echo $RANDOM)"
```
Example Output:
```
::set-output name=random-id::43243
random-number 43243
```
Each output variable of the composite action is viewable from the workflow file that uses the composite action. In other words, each child action's outputs are viewable only by its parent, using dot notation (ex: `steps.foo.outputs.random-number`).
Moreover, output ids are only accessible within the scope where they were defined. In the example above, the `workflow.yml` file should not have access to the inner step's output id (i.e. `random-id`). We do this because we don't want to require the workflow author to know the internal workings of the composite action.
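As a small illustration of this scoping rule, reusing the outputs example above (the comments only describe what is and isn't in scope; no new syntax is proposed):
```yaml
steps:
  - id: foo
    uses: user/composite@v1
  # In scope: the output declared at the action level in user/composite/action.yml.
  - run: echo ${{ steps.foo.outputs.random-number }}
  # Out of scope: the composite action's inner step id and output id
  # (steps.random-number-generator / random-id) are not visible here.
```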
## Context
Similar to the workflow file, the composite action has access to the [same context objects](https://help.github.com/en/actions/reference/context-and-expression-syntax-for-github-actions#contexts) (ex: `github`, `env`, `strategy`).
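For instance, a minimal sketch of a composite run step reading those contexts (the echoed values are illustrative; the contexts are assumed to behave as they do in workflow files):
```yaml
runs:
  using: "composite"
  steps:
    # Composite run steps can read the same contexts a workflow step can.
    - run: echo "repository is ${{ github.repository }}, ref is ${{ github.ref }}"
```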
## Environment
In a composite action, you'll only be able to use the `::set-env::` workflow command to set environment variables, just as you can with other actions.
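For example, a sketch of how that could look inside a composite action, using the standard `::set-env` workflow command (the variable name `BUILD_MODE` is hypothetical):
```yaml
runs:
  using: "composite"
  steps:
    # Set an environment variable from within the composite action...
    - run: echo "::set-env name=BUILD_MODE::release"
    # ...and read it back in a later step through the env context.
    - run: echo "building in ${{ env.BUILD_MODE }} mode"
```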
## Secrets
**Note** : This feature will be focused on in a future ADR.
We'll pass secrets from the composite action's parents (ex: the workflow file) down to the composite action. Secrets can be accessed in the composite action with the secrets context. In the action's YAML, we'll automatically mask the secrets.
## If Condition
Example `workflow.yml`:
```yaml
steps:
  - run: exit 1
  - uses: user/composite@v1 # <--- this will run, as it's marked as always running
    if: always()
```
Example `user/composite/action.yml`:
```yaml
runs:
  using: "composite"
  steps:
    - run: echo "just succeeding"
    - run: echo "I will run, as my current scope is succeeding"
      if: success()
    - run: exit 1
    - run: echo "I will not run, as my current scope is now failing"
```
See the paragraph below for a rudimentary approach (thank you to @cybojenix for the idea, example, and explanation for this approach):
The `if` condition in the parent (in the example above, the `workflow.yml`) determines whether or not the composite action runs at all. Here, the composite action will run because its `if` condition is `always()`.
**Note, however, that the parent's `if` condition does not propagate to its children.**
The child action (in this example, the `action.yml`) starts with a clean slate (in other words, no imposed `if` conditions). As above, `echo "I will run, as my current scope is succeeding"` will run because its `if` condition checks that the previous steps **within this composite action** have not failed. `run: echo "I will not run, as my current scope is now failing"` will not run because the previous step resulted in an error and, when no `if` condition is set for a step, the expression defaults to `success()`.
What if a step has `cancelled()`? We do the opposite of our approach above if `cancelled()` is used for any of our composite run steps. We will cancel any step that has this condition if the workflow is cancelled at all.
## Timeout-minutes
Example `workflow.yml`:
```yaml
steps:
  - id: bar
    uses: user/test@v1
    timeout-minutes: 50
```
Example `user/composite/action.yml`:
```yaml
runs:
  using: "composite"
  steps:
    - id: foo1
      run: echo test 1
      timeout-minutes: 10
    - id: foo2
      run: echo test 2
    - id: foo3
      run: echo test 3
      timeout-minutes: 10
```
A composite action in its entirety is a job. You can set `timeout-minutes` for the whole composite action and/or for its individual steps, as long as the sum of `timeout-minutes` across the composite action steps that set the attribute is less than or equal to the `timeout-minutes` of the whole composite action. There is no default `timeout-minutes` for an individual composite action step.
If the time taken by the steps, individually or in combination, exceeds the whole composite action's `timeout-minutes`, the whole job fails (1). If an individual step exceeds its own `timeout-minutes` but the total time used, including that step, is still below the overall composite action `timeout-minutes`, that individual step fails while the remaining steps run according to their own `timeout-minutes` attributes (still subject to condition (1)).
For reference, in the example above, if the composite step `foo1` takes 11 minutes to run, that step fails, but the remaining steps, `foo2` and `foo3`, proceed as long as their total runtime, together with the failed `foo1` step, stays under the composite action's `timeout-minutes` (50 minutes). If the composite step `foo2` takes 51 minutes to run, the whole composite action job fails.
The rationale is that users can configure their steps with `if` conditions to control how steps rely on each other. Given the additional capabilities offered by combining `timeout-minutes` and/or `if`, we wanted the `timeout-minutes` behavior to be as simple as possible and not affect other steps.
[Usage limits still apply](https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions?query=if%28%29#usage-limits)
## Continue-on-error
Example `workflow.yml`:
```yaml
steps:
  - run: exit 1
  - id: bar
    uses: user/test@v1
    continue-on-error: false
  - id: foo
    run: echo "Hello World" # <------- This step will not run
```
Example `user/composite/action.yml`:
```yaml
runs:
  using: "composite"
  steps:
    - run: exit 1
      continue-on-error: true
    - run: echo "Hello World 2" # <----- This step will run
```
If any of the steps in the composite action fail and `continue-on-error` is set to `false` for the whole composite action step in the workflow file, then the workflow steps below it will not run. On the flip side, if `continue-on-error` is set to `true` for the whole composite action step in the workflow file, the next job step will still run.
The composite action's own steps follow the same logic. In this example, `"Hello World 2"` is output because the previous step has `continue-on-error` set to `true`, even though that step errored.
## Defaults
The composite action author will be required to set the `shell` and `workingDir` of the composite action, and will be able to explicitly set the `shell` for each composite run step. The workflow author will not be able to change these attributes.
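A hypothetical sketch of how an action author might set these attributes under this proposal (key names follow this ADR's wording and may differ in the final syntax; the commands are placeholders):
```yaml
runs:
  using: "composite"
  steps:
    # The action author picks the shell and working directory per run step;
    # the workflow author cannot override them.
    - run: ./build.sh
      shell: bash
      workingDir: ./src
    - run: Get-ChildItem
      shell: pwsh
```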
## Visualizing Composite Action in the GitHub Actions UI
We want all the composite action's steps to be condensed into the original composite action node.
Here is a visual representation of the [first example](#Steps):
```yaml
| composite_action_node |
| echo hello world 1 |
| echo hello world 2 |
| echo hello world 3 |
| echo hello world 4 |
```
## Conclusion
This ADR lays the framework for eventually supporting nested Composite Actions within Composite Actions. It allows users to run multiple run steps within a GitHub Composite Action, with support for inputs, outputs, environment, and context in any of those steps, as well as the `if`, `timeout-minutes`, and `continue-on-error` attributes for each Composite Action step.

View File

@@ -23,7 +23,7 @@ An ADR is an Architectural Decision Record. This allows consensus on the direct
### Required Dev Dependencies
![Win](res/win_sm.png) Git for Windows [Install Here](https://git-scm.com/downloads) (needed for dev sh script)
![Win](res/win_sm.png) ![*nix](res/linux_sm.png) Git for Windows and Linux [Install Here](https://git-scm.com/downloads) (needed for dev sh script)
### To Build, Test, Layout
@@ -43,6 +43,7 @@ Sample developer flow:
```bash
git clone https://github.com/actions/runner
cd runner
cd ./src
./dev.(sh/cmd) layout # the runner that built from source is in {root}/_layout
<make code changes>
@@ -50,10 +51,23 @@ cd ./src
./dev.(sh/cmd) test # run all unit tests before git commit/push
```
View logs:
```bash
cd runner/_layout/_diag
ls
cat (Runner/Worker)_TIMESTAMP.log # view your log file
```
Run Runner:
```bash
cd runner/_layout
./run.sh # run your custom runner
```
### Editors
[Using Visual Studio Code](https://code.visualstudio.com/)
[Using Visual Studio 2019](https://www.visualstudio.com/vs/)
[Using Visual Studio](https://code.visualstudio.com/docs)
### Styling

View File

@@ -1,20 +1,18 @@
## Features
- Sample scripts to automate scalable runners (#427)
- Raise warning when action input does not match action.yml. (#429)
- Add secret masker for trimming double quotes. (#440)
- Use the API_URL and munge action URLs for GHES (#437 #469)
- Help trace worker crash in Kusto. (#450)
- update checkout@v1 for GHES (#470)
- Resolve action download info from server (#508, #515, #550)
- Print runner and machine name to log. (#539)
## Bugs
- Print node version in debug instead of output. (#433)
- Better error when runner removed from service. (#441)
- Add help info for '--labels' config option (#472)
- Sps/token migration fix, job.status/steps.outcome/steps.conclusion case match with GitHub check suites conclusion. (#462)
- Docker build using -f instead of implied default (#471)
- Reduce input validation warnings (#506)
- Fix null ref exception in SecretMasker caused by `hashfiles` timeout. (#516)
- Add libicu66 to `./installDependencies.sh` for Ubuntu 20.04 (#535)
- Fix DataContract with Token service (#532)
- Skip search $PATH on command with fully qualified path (#526)
- Restore SELinux context on service file when SELinux is enabled (#525)
## Misc
- Make release notes code blocks copy-paste-able (#430)
- Fix spelling of RHEL and CentOS. (#436)
- Add CodeQL Analysis workflow (#459)
- Remove SPS/Token migration code. Remove GHES url manipulate code. (#513)
- Add sub-step for developer flow for clarity (#523)
- Update Links and Language to Git + VSCode (#522)
- Update runner configuration exception message (#540)
## Windows x64
We recommend configuring the runner in a root folder of the Windows drive (e.g. "C:\actions-runner"). This will help avoid issues related to service identity folder permissions and long file path restrictions on Windows.

View File

@@ -154,7 +154,16 @@ function Invoke-With-Retry([ScriptBlock]$ScriptBlock, [int]$MaxAttempts = 3, [in
function Get-Machine-Architecture() {
Say-Invocation $MyInvocation
# possible values: amd64, x64, x86, arm64, arm
# On PS x86, PROCESSOR_ARCHITECTURE reports x86 even on x64 systems.
# To get the correct architecture, we need to use PROCESSOR_ARCHITEW6432.
# PS x64 doesn't define this, so we fall back to PROCESSOR_ARCHITECTURE.
# Possible values: amd64, x64, x86, arm64, arm
if( $ENV:PROCESSOR_ARCHITEW6432 -ne $null )
{
return $ENV:PROCESSOR_ARCHITEW6432
}
return $ENV:PROCESSOR_ARCHITECTURE
}
@@ -684,3 +693,196 @@ Prepend-Sdk-InstallRoot-To-Path -InstallRoot $InstallRoot -BinFolderRelativePath
Say "Installation finished"
exit 0
# SIG # Begin signature block
# MIIjhwYJKoZIhvcNAQcCoIIjeDCCI3QCAQExDzANBglghkgBZQMEAgEFADB5Bgor
# BgEEAYI3AgEEoGswaTA0BgorBgEEAYI3AgEeMCYCAwEAAAQQH8w7YFlLCE63JNLG
# KX7zUQIBAAIBAAIBAAIBAAIBADAxMA0GCWCGSAFlAwQCAQUABCAiKYSY4KtkeThH
# d5M1aXqv1K0/pff07QwfUbYZ/qX5LqCCDYUwggYDMIID66ADAgECAhMzAAABiK9S
# 1rmSbej5AAAAAAGIMA0GCSqGSIb3DQEBCwUAMH4xCzAJBgNVBAYTAlVTMRMwEQYD
# VQQIEwpXYXNoaW5ndG9uMRAwDgYDVQQHEwdSZWRtb25kMR4wHAYDVQQKExVNaWNy
# b3NvZnQgQ29ycG9yYXRpb24xKDAmBgNVBAMTH01pY3Jvc29mdCBDb2RlIFNpZ25p
# bmcgUENBIDIwMTEwHhcNMjAwMzA0MTgzOTQ4WhcNMjEwMzAzMTgzOTQ4WjB0MQsw
# CQYDVQQGEwJVUzETMBEGA1UECBMKV2FzaGluZ3RvbjEQMA4GA1UEBxMHUmVkbW9u
# ZDEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMR4wHAYDVQQDExVNaWNy
# b3NvZnQgQ29ycG9yYXRpb24wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB
# AQCSCNryE+Cewy2m4t/a74wZ7C9YTwv1PyC4BvM/kSWPNs8n0RTe+FvYfU+E9uf0
# t7nYlAzHjK+plif2BhD+NgdhIUQ8sVwWO39tjvQRHjP2//vSvIfmmkRoML1Ihnjs
# 9kQiZQzYRDYYRp9xSQYmRwQjk5hl8/U7RgOiQDitVHaU7BT1MI92lfZRuIIDDYBd
# vXtbclYJMVOwqZtv0O9zQCret6R+fRSGaDNfEEpcILL+D7RV3M4uaJE4Ta6KAOdv
# V+MVaJp1YXFTZPKtpjHO6d9pHQPZiG7NdC6QbnRGmsa48uNQrb6AfmLKDI1Lp31W
# MogTaX5tZf+CZT9PSuvjOCLNAgMBAAGjggGCMIIBfjAfBgNVHSUEGDAWBgorBgEE
# AYI3TAgBBggrBgEFBQcDAzAdBgNVHQ4EFgQUj9RJL9zNrPcL10RZdMQIXZN7MG8w
# VAYDVR0RBE0wS6RJMEcxLTArBgNVBAsTJE1pY3Jvc29mdCBJcmVsYW5kIE9wZXJh
# dGlvbnMgTGltaXRlZDEWMBQGA1UEBRMNMjMwMDEyKzQ1ODM4NjAfBgNVHSMEGDAW
# gBRIbmTlUAXTgqoXNzcitW2oynUClTBUBgNVHR8ETTBLMEmgR6BFhkNodHRwOi8v
# d3d3Lm1pY3Jvc29mdC5jb20vcGtpb3BzL2NybC9NaWNDb2RTaWdQQ0EyMDExXzIw
# MTEtMDctMDguY3JsMGEGCCsGAQUFBwEBBFUwUzBRBggrBgEFBQcwAoZFaHR0cDov
# L3d3dy5taWNyb3NvZnQuY29tL3BraW9wcy9jZXJ0cy9NaWNDb2RTaWdQQ0EyMDEx
# XzIwMTEtMDctMDguY3J0MAwGA1UdEwEB/wQCMAAwDQYJKoZIhvcNAQELBQADggIB
# ACnXo8hjp7FeT+H6iQlV3CcGnkSbFvIpKYafgzYCFo3UHY1VHYJVb5jHEO8oG26Q
# qBELmak6MTI+ra3WKMTGhE1sEIlowTcp4IAs8a5wpCh6Vf4Z/bAtIppP3p3gXk2X
# 8UXTc+WxjQYsDkFiSzo/OBa5hkdW1g4EpO43l9mjToBdqEPtIXsZ7Hi1/6y4gK0P
# mMiwG8LMpSn0n/oSHGjrUNBgHJPxgs63Slf58QGBznuXiRaXmfTUDdrvhRocdxIM
# i8nXQwWACMiQzJSRzBP5S2wUq7nMAqjaTbeXhJqD2SFVHdUYlKruvtPSwbnqSRWT
# GI8s4FEXt+TL3w5JnwVZmZkUFoioQDMMjFyaKurdJ6pnzbr1h6QW0R97fWc8xEIz
# LIOiU2rjwWAtlQqFO8KNiykjYGyEf5LyAJKAO+rJd9fsYR+VBauIEQoYmjnUbTXM
# SY2Lf5KMluWlDOGVh8q6XjmBccpaT+8tCfxpaVYPi1ncnwTwaPQvVq8RjWDRB7Pa
# 8ruHgj2HJFi69+hcq7mWx5nTUtzzFa7RSZfE5a1a5AuBmGNRr7f8cNfa01+tiWjV
# Kk1a+gJUBSP0sIxecFbVSXTZ7bqeal45XSDIisZBkWb+83TbXdTGMDSUFKTAdtC+
# r35GfsN8QVy59Hb5ZYzAXczhgRmk7NyE6jD0Ym5TKiW5MIIHejCCBWKgAwIBAgIK
# YQ6Q0gAAAAAAAzANBgkqhkiG9w0BAQsFADCBiDELMAkGA1UEBhMCVVMxEzARBgNV
# BAgTCldhc2hpbmd0b24xEDAOBgNVBAcTB1JlZG1vbmQxHjAcBgNVBAoTFU1pY3Jv
# c29mdCBDb3Jwb3JhdGlvbjEyMDAGA1UEAxMpTWljcm9zb2Z0IFJvb3QgQ2VydGlm
# aWNhdGUgQXV0aG9yaXR5IDIwMTEwHhcNMTEwNzA4MjA1OTA5WhcNMjYwNzA4MjEw
# OTA5WjB+MQswCQYDVQQGEwJVUzETMBEGA1UECBMKV2FzaGluZ3RvbjEQMA4GA1UE
# BxMHUmVkbW9uZDEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMSgwJgYD
# VQQDEx9NaWNyb3NvZnQgQ29kZSBTaWduaW5nIFBDQSAyMDExMIICIjANBgkqhkiG
# 9w0BAQEFAAOCAg8AMIICCgKCAgEAq/D6chAcLq3YbqqCEE00uvK2WCGfQhsqa+la
# UKq4BjgaBEm6f8MMHt03a8YS2AvwOMKZBrDIOdUBFDFC04kNeWSHfpRgJGyvnkmc
# 6Whe0t+bU7IKLMOv2akrrnoJr9eWWcpgGgXpZnboMlImEi/nqwhQz7NEt13YxC4D
# dato88tt8zpcoRb0RrrgOGSsbmQ1eKagYw8t00CT+OPeBw3VXHmlSSnnDb6gE3e+
# lD3v++MrWhAfTVYoonpy4BI6t0le2O3tQ5GD2Xuye4Yb2T6xjF3oiU+EGvKhL1nk
# kDstrjNYxbc+/jLTswM9sbKvkjh+0p2ALPVOVpEhNSXDOW5kf1O6nA+tGSOEy/S6
# A4aN91/w0FK/jJSHvMAhdCVfGCi2zCcoOCWYOUo2z3yxkq4cI6epZuxhH2rhKEmd
# X4jiJV3TIUs+UsS1Vz8kA/DRelsv1SPjcF0PUUZ3s/gA4bysAoJf28AVs70b1FVL
# 5zmhD+kjSbwYuER8ReTBw3J64HLnJN+/RpnF78IcV9uDjexNSTCnq47f7Fufr/zd
# sGbiwZeBe+3W7UvnSSmnEyimp31ngOaKYnhfsi+E11ecXL93KCjx7W3DKI8sj0A3
# T8HhhUSJxAlMxdSlQy90lfdu+HggWCwTXWCVmj5PM4TasIgX3p5O9JawvEagbJjS
# 4NaIjAsCAwEAAaOCAe0wggHpMBAGCSsGAQQBgjcVAQQDAgEAMB0GA1UdDgQWBBRI
# bmTlUAXTgqoXNzcitW2oynUClTAZBgkrBgEEAYI3FAIEDB4KAFMAdQBiAEMAQTAL
# BgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBRyLToCMZBD
# uRQFTuHqp8cx0SOJNDBaBgNVHR8EUzBRME+gTaBLhklodHRwOi8vY3JsLm1pY3Jv
# c29mdC5jb20vcGtpL2NybC9wcm9kdWN0cy9NaWNSb29DZXJBdXQyMDExXzIwMTFf
# MDNfMjIuY3JsMF4GCCsGAQUFBwEBBFIwUDBOBggrBgEFBQcwAoZCaHR0cDovL3d3
# dy5taWNyb3NvZnQuY29tL3BraS9jZXJ0cy9NaWNSb29DZXJBdXQyMDExXzIwMTFf
# MDNfMjIuY3J0MIGfBgNVHSAEgZcwgZQwgZEGCSsGAQQBgjcuAzCBgzA/BggrBgEF
# BQcCARYzaHR0cDovL3d3dy5taWNyb3NvZnQuY29tL3BraW9wcy9kb2NzL3ByaW1h
# cnljcHMuaHRtMEAGCCsGAQUFBwICMDQeMiAdAEwAZQBnAGEAbABfAHAAbwBsAGkA
# YwB5AF8AcwB0AGEAdABlAG0AZQBuAHQALiAdMA0GCSqGSIb3DQEBCwUAA4ICAQBn
# 8oalmOBUeRou09h0ZyKbC5YR4WOSmUKWfdJ5DJDBZV8uLD74w3LRbYP+vj/oCso7
# v0epo/Np22O/IjWll11lhJB9i0ZQVdgMknzSGksc8zxCi1LQsP1r4z4HLimb5j0b
# pdS1HXeUOeLpZMlEPXh6I/MTfaaQdION9MsmAkYqwooQu6SpBQyb7Wj6aC6VoCo/
# KmtYSWMfCWluWpiW5IP0wI/zRive/DvQvTXvbiWu5a8n7dDd8w6vmSiXmE0OPQvy
# CInWH8MyGOLwxS3OW560STkKxgrCxq2u5bLZ2xWIUUVYODJxJxp/sfQn+N4sOiBp
# mLJZiWhub6e3dMNABQamASooPoI/E01mC8CzTfXhj38cbxV9Rad25UAqZaPDXVJi
# hsMdYzaXht/a8/jyFqGaJ+HNpZfQ7l1jQeNbB5yHPgZ3BtEGsXUfFL5hYbXw3MYb
# BL7fQccOKO7eZS/sl/ahXJbYANahRr1Z85elCUtIEJmAH9AAKcWxm6U/RXceNcbS
# oqKfenoi+kiVH6v7RyOA9Z74v2u3S5fi63V4GuzqN5l5GEv/1rMjaHXmr/r8i+sL
# gOppO6/8MO0ETI7f33VtY5E90Z1WTk+/gFcioXgRMiF670EKsT/7qMykXcGhiJtX
# cVZOSEXAQsmbdlsKgEhr/Xmfwb1tbWrJUnMTDXpQzTGCFVgwghVUAgEBMIGVMH4x
# CzAJBgNVBAYTAlVTMRMwEQYDVQQIEwpXYXNoaW5ndG9uMRAwDgYDVQQHEwdSZWRt
# b25kMR4wHAYDVQQKExVNaWNyb3NvZnQgQ29ycG9yYXRpb24xKDAmBgNVBAMTH01p
# Y3Jvc29mdCBDb2RlIFNpZ25pbmcgUENBIDIwMTECEzMAAAGIr1LWuZJt6PkAAAAA
# AYgwDQYJYIZIAWUDBAIBBQCgga4wGQYJKoZIhvcNAQkDMQwGCisGAQQBgjcCAQQw
# HAYKKwYBBAGCNwIBCzEOMAwGCisGAQQBgjcCARUwLwYJKoZIhvcNAQkEMSIEIFxZ
# Yezh3liQqiGQuXNa+zYfoSIbLqOpdEn2ZKskBkisMEIGCisGAQQBgjcCAQwxNDAy
# oBSAEgBNAGkAYwByAG8AcwBvAGYAdKEagBhodHRwOi8vd3d3Lm1pY3Jvc29mdC5j
# b20wDQYJKoZIhvcNAQEBBQAEggEAjLUrwCXJCPHZulZuKAQSX+MfnIRFAhlN7ru2
# 6H8rudvhkWgqMISkLb9gFDPR5FhR4sqdYgKW4P0ERao9ypCGi1FWDLqygC2XBbHj
# NEQHBxHJs5SMsMAXNSIcYHqVAvhF3nXoseaNBkhOTrkQ1FS/fW7AfDGRbsiiESzv
# lebf92shZylBFKOsKQLAL0mF/B7xrxHJIj5dgQoD1phATRNHOEQj3jgmkidFWowV
# 4r8MzbxRhAEORbnJexlUoDQJQH3YwxuUyXkTvrYMTKSbGJLlwRaZQbrcBU0k4gCH
# y8Sci+p9Rq+aOTzLCoNrZyh9E7OdwVDm1FJAtY30bV50T2WSFKGCEuIwghLeBgor
# BgEEAYI3AwMBMYISzjCCEsoGCSqGSIb3DQEHAqCCErswghK3AgEDMQ8wDQYJYIZI
# AWUDBAIBBQAwggFRBgsqhkiG9w0BCRABBKCCAUAEggE8MIIBOAIBAQYKKwYBBAGE
# WQoDATAxMA0GCWCGSAFlAwQCAQUABCD7JNcBBSfhlKPL1tN3CEKRKJuT/dZ8RO9K
# orYLXJeLTwIGXvN89YD7GBMyMDIwMDcwMTE0MTYyMC40MDVaMASAAgH0oIHQpIHN
# MIHKMQswCQYDVQQGEwJVUzELMAkGA1UECBMCV0ExEDAOBgNVBAcTB1JlZG1vbmQx
# HjAcBgNVBAoTFU1pY3Jvc29mdCBDb3Jwb3JhdGlvbjEtMCsGA1UECxMkTWljcm9z
# b2Z0IElyZWxhbmQgT3BlcmF0aW9ucyBMaW1pdGVkMSYwJAYDVQQLEx1UaGFsZXMg
# VFNTIEVTTjoxNzlFLTRCQjAtODI0NjElMCMGA1UEAxMcTWljcm9zb2Z0IFRpbWUt
# U3RhbXAgU2VydmljZaCCDjkwggTxMIID2aADAgECAhMzAAABDKp4btzMQkzBAAAA
# AAEMMA0GCSqGSIb3DQEBCwUAMHwxCzAJBgNVBAYTAlVTMRMwEQYDVQQIEwpXYXNo
# aW5ndG9uMRAwDgYDVQQHEwdSZWRtb25kMR4wHAYDVQQKExVNaWNyb3NvZnQgQ29y
# cG9yYXRpb24xJjAkBgNVBAMTHU1pY3Jvc29mdCBUaW1lLVN0YW1wIFBDQSAyMDEw
# MB4XDTE5MTAyMzIzMTkxNloXDTIxMDEyMTIzMTkxNlowgcoxCzAJBgNVBAYTAlVT
# MQswCQYDVQQIEwJXQTEQMA4GA1UEBxMHUmVkbW9uZDEeMBwGA1UEChMVTWljcm9z
# b2Z0IENvcnBvcmF0aW9uMS0wKwYDVQQLEyRNaWNyb3NvZnQgSXJlbGFuZCBPcGVy
# YXRpb25zIExpbWl0ZWQxJjAkBgNVBAsTHVRoYWxlcyBUU1MgRVNOOjE3OUUtNEJC
# MC04MjQ2MSUwIwYDVQQDExxNaWNyb3NvZnQgVGltZS1TdGFtcCBTZXJ2aWNlMIIB
# IjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAq5011+XqVJmQKtiw39igeEMv
# CLcZ1forbmxsDkpnCN1SrThKI+n2Pr3zqTzJVgdJFCoKm1ks1gtRJ7HaL6tDkrOw
# 8XJmfJaxyQAluCQ+e40NI+A4w+u59Gy89AVY5lJNrmCva6gozfg1kxw6abV5WWr+
# PjEpNCshO4hxv3UqgMcCKnT2YVSZzF1Gy7APub1fY0P1vNEuOFKrNCEEvWIKRrqs
# eyBB73G8KD2yw6jfz0VKxNSRAdhJV/ghOyrDt5a+L6C3m1rpr8sqiof3iohv3ANI
# gNqw6ex+4+G+B7JMbIHbGpPdebedL6ePbuBCnbgJoDn340k0aw6ij21GvvUnkQID
# AQABo4IBGzCCARcwHQYDVR0OBBYEFAlCOq9DDIa0A0oqgKtM5vjuZeK+MB8GA1Ud
# IwQYMBaAFNVjOlyKMZDzQ3t8RhvFM2hahW1VMFYGA1UdHwRPME0wS6BJoEeGRWh0
# dHA6Ly9jcmwubWljcm9zb2Z0LmNvbS9wa2kvY3JsL3Byb2R1Y3RzL01pY1RpbVN0
# YVBDQV8yMDEwLTA3LTAxLmNybDBaBggrBgEFBQcBAQROMEwwSgYIKwYBBQUHMAKG
# Pmh0dHA6Ly93d3cubWljcm9zb2Z0LmNvbS9wa2kvY2VydHMvTWljVGltU3RhUENB
# XzIwMTAtMDctMDEuY3J0MAwGA1UdEwEB/wQCMAAwEwYDVR0lBAwwCgYIKwYBBQUH
# AwgwDQYJKoZIhvcNAQELBQADggEBAET3xBg/IZ9zdOfwbDGK7cK3qKYt/qUOlbRB
# zgeNjb32K86nGeRGkBee10dVOEGWUw6KtBeWh1LQ70b64/tLtiLcsf9JzaAyDYb1
# sRmMi5fjRZ753TquaT8V7NJ7RfEuYfvZlubfQD0MVbU4tzsdZdYuxE37V2J9pN89
# j7GoFNtAnSnCn1MRxENAILgt9XzeQzTEDhFYW0N2DNphTkRPXGjpDmwi6WtkJ5fv
# 0iTyB4dwEC+/ed0lGbFLcytJoMwfTNMdH6gcnHlMzsniornGFZa5PPiV78XoZ9Fe
# upKo8ZKNGhLLLB5GTtqfHex5no3ioVSq+NthvhX0I/V+iXJsopowggZxMIIEWaAD
# AgECAgphCYEqAAAAAAACMA0GCSqGSIb3DQEBCwUAMIGIMQswCQYDVQQGEwJVUzET
# MBEGA1UECBMKV2FzaGluZ3RvbjEQMA4GA1UEBxMHUmVkbW9uZDEeMBwGA1UEChMV
# TWljcm9zb2Z0IENvcnBvcmF0aW9uMTIwMAYDVQQDEylNaWNyb3NvZnQgUm9vdCBD
# ZXJ0aWZpY2F0ZSBBdXRob3JpdHkgMjAxMDAeFw0xMDA3MDEyMTM2NTVaFw0yNTA3
# MDEyMTQ2NTVaMHwxCzAJBgNVBAYTAlVTMRMwEQYDVQQIEwpXYXNoaW5ndG9uMRAw
# DgYDVQQHEwdSZWRtb25kMR4wHAYDVQQKExVNaWNyb3NvZnQgQ29ycG9yYXRpb24x
# JjAkBgNVBAMTHU1pY3Jvc29mdCBUaW1lLVN0YW1wIFBDQSAyMDEwMIIBIjANBgkq
# hkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAqR0NvHcRijog7PwTl/X6f2mUa3RUENWl
# CgCChfvtfGhLLF/Fw+Vhwna3PmYrW/AVUycEMR9BGxqVHc4JE458YTBZsTBED/Fg
# iIRUQwzXTbg4CLNC3ZOs1nMwVyaCo0UN0Or1R4HNvyRgMlhgRvJYR4YyhB50YWeR
# X4FUsc+TTJLBxKZd0WETbijGGvmGgLvfYfxGwScdJGcSchohiq9LZIlQYrFd/Xcf
# PfBXday9ikJNQFHRD5wGPmd/9WbAA5ZEfu/QS/1u5ZrKsajyeioKMfDaTgaRtogI
# Neh4HLDpmc085y9Euqf03GS9pAHBIAmTeM38vMDJRF1eFpwBBU8iTQIDAQABo4IB
# 5jCCAeIwEAYJKwYBBAGCNxUBBAMCAQAwHQYDVR0OBBYEFNVjOlyKMZDzQ3t8RhvF
# M2hahW1VMBkGCSsGAQQBgjcUAgQMHgoAUwB1AGIAQwBBMAsGA1UdDwQEAwIBhjAP
# BgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNX2VsuP6KJcYmjRPZSQW9fOmhjE
# MFYGA1UdHwRPME0wS6BJoEeGRWh0dHA6Ly9jcmwubWljcm9zb2Z0LmNvbS9wa2kv
# Y3JsL3Byb2R1Y3RzL01pY1Jvb0NlckF1dF8yMDEwLTA2LTIzLmNybDBaBggrBgEF
# BQcBAQROMEwwSgYIKwYBBQUHMAKGPmh0dHA6Ly93d3cubWljcm9zb2Z0LmNvbS9w
# a2kvY2VydHMvTWljUm9vQ2VyQXV0XzIwMTAtMDYtMjMuY3J0MIGgBgNVHSABAf8E
# gZUwgZIwgY8GCSsGAQQBgjcuAzCBgTA9BggrBgEFBQcCARYxaHR0cDovL3d3dy5t
# aWNyb3NvZnQuY29tL1BLSS9kb2NzL0NQUy9kZWZhdWx0Lmh0bTBABggrBgEFBQcC
# AjA0HjIgHQBMAGUAZwBhAGwAXwBQAG8AbABpAGMAeQBfAFMAdABhAHQAZQBtAGUA
# bgB0AC4gHTANBgkqhkiG9w0BAQsFAAOCAgEAB+aIUQ3ixuCYP4FxAz2do6Ehb7Pr
# psz1Mb7PBeKp/vpXbRkws8LFZslq3/Xn8Hi9x6ieJeP5vO1rVFcIK1GCRBL7uVOM
# zPRgEop2zEBAQZvcXBf/XPleFzWYJFZLdO9CEMivv3/Gf/I3fVo/HPKZeUqRUgCv
# OA8X9S95gWXZqbVr5MfO9sp6AG9LMEQkIjzP7QOllo9ZKby2/QThcJ8ySif9Va8v
# /rbljjO7Yl+a21dA6fHOmWaQjP9qYn/dxUoLkSbiOewZSnFjnXshbcOco6I8+n99
# lmqQeKZt0uGc+R38ONiU9MalCpaGpL2eGq4EQoO4tYCbIjggtSXlZOz39L9+Y1kl
# D3ouOVd2onGqBooPiRa6YacRy5rYDkeagMXQzafQ732D8OE7cQnfXXSYIghh2rBQ
# Hm+98eEA3+cxB6STOvdlR3jo+KhIq/fecn5ha293qYHLpwmsObvsxsvYgrRyzR30
# uIUBHoD7G4kqVDmyW9rIDVWZeodzOwjmmC3qjeAzLhIp9cAvVCch98isTtoouLGp
# 25ayp0Kiyc8ZQU3ghvkqmqMRZjDTu3QyS99je/WZii8bxyGvWbWu3EQ8l1Bx16HS
# xVXjad5XwdHeMMD9zOZN+w2/XU/pnR4ZOC+8z1gFLu8NoFA12u8JJxzVs341Hgi6
# 2jbb01+P3nSISRKhggLLMIICNAIBATCB+KGB0KSBzTCByjELMAkGA1UEBhMCVVMx
# CzAJBgNVBAgTAldBMRAwDgYDVQQHEwdSZWRtb25kMR4wHAYDVQQKExVNaWNyb3Nv
# ZnQgQ29ycG9yYXRpb24xLTArBgNVBAsTJE1pY3Jvc29mdCBJcmVsYW5kIE9wZXJh
# dGlvbnMgTGltaXRlZDEmMCQGA1UECxMdVGhhbGVzIFRTUyBFU046MTc5RS00QkIw
# LTgyNDYxJTAjBgNVBAMTHE1pY3Jvc29mdCBUaW1lLVN0YW1wIFNlcnZpY2WiIwoB
# ATAHBgUrDgMCGgMVAMsg9FQ9pgPLXI2Ld5z7xDS0QAZ9oIGDMIGApH4wfDELMAkG
# A1UEBhMCVVMxEzARBgNVBAgTCldhc2hpbmd0b24xEDAOBgNVBAcTB1JlZG1vbmQx
# HjAcBgNVBAoTFU1pY3Jvc29mdCBDb3Jwb3JhdGlvbjEmMCQGA1UEAxMdTWljcm9z
# b2Z0IFRpbWUtU3RhbXAgUENBIDIwMTAwDQYJKoZIhvcNAQEFBQACBQDipo0MMCIY
# DzIwMjAwNzAxMTIxODIwWhgPMjAyMDA3MDIxMjE4MjBaMHQwOgYKKwYBBAGEWQoE
# ATEsMCowCgIFAOKmjQwCAQAwBwIBAAICE70wBwIBAAICEeIwCgIFAOKn3owCAQAw
# NgYKKwYBBAGEWQoEAjEoMCYwDAYKKwYBBAGEWQoDAqAKMAgCAQACAwehIKEKMAgC
# AQACAwGGoDANBgkqhkiG9w0BAQUFAAOBgQCOPjlHOH8nYtgt2XnpKXenxPUR03ED
# xPBm8XR5Z1vIq53RU9jG6yYcYNTdK+q38SGZtu0W/SgagTfKCQhjhRakuv7rGSs2
# dlhx9LGCoc/q1vqmZpRSjkqWVcc/NzmldUWIWnLlV6rmLGoDmfCH5BcsiU6Eo6wU
# iUVwnnXoqsCaBzGCAw0wggMJAgEBMIGTMHwxCzAJBgNVBAYTAlVTMRMwEQYDVQQI
# EwpXYXNoaW5ndG9uMRAwDgYDVQQHEwdSZWRtb25kMR4wHAYDVQQKExVNaWNyb3Nv
# ZnQgQ29ycG9yYXRpb24xJjAkBgNVBAMTHU1pY3Jvc29mdCBUaW1lLVN0YW1wIFBD
# QSAyMDEwAhMzAAABDKp4btzMQkzBAAAAAAEMMA0GCWCGSAFlAwQCAQUAoIIBSjAa
# BgkqhkiG9w0BCQMxDQYLKoZIhvcNAQkQAQQwLwYJKoZIhvcNAQkEMSIEIDpwhjyu
# zgu3Kmxpnpz86ZlthBqEzG5vaEMOkYRyuFCaMIH6BgsqhkiG9w0BCRACLzGB6jCB
# 5zCB5DCBvQQgg5AWKX7M1+m2//+V7qmRvt1K/ww5Muu8XzGJBqygVCkwgZgwgYCk
# fjB8MQswCQYDVQQGEwJVUzETMBEGA1UECBMKV2FzaGluZ3RvbjEQMA4GA1UEBxMH
# UmVkbW9uZDEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMSYwJAYDVQQD
# Ex1NaWNyb3NvZnQgVGltZS1TdGFtcCBQQ0EgMjAxMAITMwAAAQyqeG7czEJMwQAA
# AAABDDAiBCD11urvv5vgo4gFVQ2NMVrzgxT87Yuiq16YdswYbaYeITANBgkqhkiG
# 9w0BAQsFAASCAQAi3q8hwcT2ft4b2EleaiyZxOImV/cKusmth1dtCh5/Jb0GbOld
# f5cSalrjf42MNPodWAtgmWozkYrQF6HxnsOiYiamfRA8E3E7xyRMy7AFfAhjcwMi
# xaW4Iye6E1Ec6LtULANxfDtG/KIdCWdZxKqOezL3nzFNQWmm1mXPV+UnKpnJkA3E
# DsQOUWk8J6ojDurhrP536WI+3arg8PcnppHBLd/xNKYdlsTb+6qndgzKXkDDt1CV
# 4zCyuZ7bO8eyZAmNoSZz22k7vus9UjBz/CDhXylo20N43nr29rWPItUgH4uvOGQn
# t26Y/yjBaQImz32psrfJEMbQ7cl789s8WOx8
# SIG # End signature block

View File

@@ -1,4 +1,4 @@
#!/bin/bash
#!/usr/bin/env bash
PACKAGERUNTIME=$1
PRECACHE=$2

View File

@@ -1,4 +1,4 @@
#!/bin/bash
#!/usr/bin/env bash
SVC_NAME="{{SvcNameVar}}"
SVC_NAME=${SVC_NAME// /_}

View File

@@ -1,4 +1,4 @@
#!/bin/bash
#!/usr/bin/env bash
user_id=`id -u`
@@ -70,8 +70,8 @@ then
exit 1
fi
# libicu version prefer: libicu63 -> libicu60 -> libicu57 -> libicu55 -> libicu52
apt install -y libicu63 || apt install -y libicu60 || apt install -y libicu57 || apt install -y libicu55 || apt install -y libicu52
# libicu version prefer: libicu66 -> libicu63 -> libicu60 -> libicu57 -> libicu55 -> libicu52
apt install -y libicu66 || apt install -y libicu63 || apt install -y libicu60 || apt install -y libicu57 || apt install -y libicu55 || apt install -y libicu52
if [ $? -ne 0 ]
then
echo "'apt' failed with exit code '$?'"
@@ -99,8 +99,8 @@ then
exit 1
fi
# libicu version prefer: libicu63 -> libicu60 -> libicu57 -> libicu55 -> libicu52
apt-get install -y libicu63 || apt-get install -y libicu60 || apt install -y libicu57 || apt install -y libicu55 || apt install -y libicu52
# libicu version prefer: libicu66 -> libicu63 -> libicu60 -> libicu57 -> libicu55 -> libicu52
apt-get install -y libicu66 || apt-get install -y libicu63 || apt-get install -y libicu60 || apt install -y libicu57 || apt install -y libicu55 || apt install -y libicu52
if [ $? -ne 0 ]
then
echo "'apt-get' failed with exit code '$?'"

View File

@@ -1,4 +1,4 @@
#!/bin/bash
#!/usr/bin/env bash
# convert SIGTERM signal to SIGINT
# for more info on how to propagate SIGTERM to a child process see: http://veithen.github.io/2014/11/16/sigterm-propagation.html

View File

@@ -1,4 +1,4 @@
#!/bin/bash
#!/usr/bin/env bash
SVC_NAME="{{SvcNameVar}}"
SVC_NAME=${SVC_NAME// /_}
@@ -63,12 +63,25 @@ function install()
sed "s/{{User}}/${run_as_user}/g; s/{{Description}}/$(echo ${SVC_DESCRIPTION} | sed -e 's/[\/&]/\\&/g')/g; s/{{RunnerRoot}}/$(echo ${RUNNER_ROOT} | sed -e 's/[\/&]/\\&/g')/g;" "${TEMPLATE_PATH}" > "${TEMP_PATH}" || failed "failed to create replacement temp file"
mv "${TEMP_PATH}" "${UNIT_PATH}" || failed "failed to copy unit file"
# Recent Fedora-based Linux distributions (CentOS/Red Hat) have SELinux enabled by default
# We need to restore the security context on the unit file we added, otherwise systemd has no access to it.
command -v getenforce > /dev/null
if [ $? -eq 0 ]
then
selinuxEnabled=$(getenforce)
if [[ $selinuxEnabled == "Enforcing" ]]
then
# SELinux is enabled, we will need to Restore SELinux Context for the service file
restorecon -r -v "${UNIT_PATH}" || failed "failed to restore SELinux context on ${UNIT_PATH}"
fi
fi
# unit file should not be executable and world writable
chmod 664 ${UNIT_PATH} || failed "failed to set permissions on ${UNIT_PATH}"
chmod 664 "${UNIT_PATH}" || failed "failed to set permissions on ${UNIT_PATH}"
systemctl daemon-reload || failed "failed to reload daemons"
# Since we started with sudo, runsvc.sh will be owned by root. Change this to current login user.
# Since we started with sudo, runsvc.sh will be owned by root. Change this to current login user.
cp ./bin/runsvc.sh ./runsvc.sh || failed "failed to copy runsvc.sh"
chown ${run_as_uid}:${run_as_gid} ./runsvc.sh || failed "failed to set owner for runsvc.sh"
chmod 755 ./runsvc.sh || failed "failed to set permission for runsvc.sh"

View File

@@ -1,4 +1,4 @@
#!/bin/bash
#!/usr/bin/env bash
# runner will replace key words in the template and generate a batch script to run.
# Keywords:

View File

@@ -1,4 +1,4 @@
#!/bin/bash
#!/usr/bin/env bash
user_id=`id -u`

View File

@@ -1,4 +1,4 @@
#!/bin/bash
#!/usr/bin/env bash
varCheckList=(
'LANG'

View File

@@ -1,4 +1,4 @@
#!/bin/bash
#!/usr/bin/env bash
# Validate not sudo
user_id=`id -u`

View File

@@ -108,9 +108,9 @@ namespace GitHub.Runner.Common
CredentialData GetMigratedCredentials();
RunnerSettings GetSettings();
void SaveCredential(CredentialData credential);
void SaveMigratedCredential(CredentialData credential);
void SaveSettings(RunnerSettings settings);
void DeleteCredential();
void DeleteMigratedCredential();
void DeleteSettings();
}
@@ -232,21 +232,6 @@ namespace GitHub.Runner.Common
File.SetAttributes(_credFilePath, File.GetAttributes(_credFilePath) | FileAttributes.Hidden);
}
public void SaveMigratedCredential(CredentialData credential)
{
Trace.Info("Saving {0} migrated credential @ {1}", credential.Scheme, _migratedCredFilePath);
if (File.Exists(_migratedCredFilePath))
{
// Delete existing credential file first, since the file is hidden and not able to overwrite.
Trace.Info("Delete exist runner migrated credential file.");
IOUtil.DeleteFile(_migratedCredFilePath);
}
IOUtil.SaveObject(credential, _migratedCredFilePath);
Trace.Info("Migrated Credentials Saved.");
File.SetAttributes(_migratedCredFilePath, File.GetAttributes(_migratedCredFilePath) | FileAttributes.Hidden);
}
public void SaveSettings(RunnerSettings settings)
{
Trace.Info("Saving runner settings.");
@@ -268,6 +253,11 @@ namespace GitHub.Runner.Common
IOUtil.Delete(_migratedCredFilePath, default(CancellationToken));
}
public void DeleteMigratedCredential()
{
IOUtil.Delete(_migratedCredFilePath, default(CancellationToken));
}
public void DeleteSettings()
{
IOUtil.Delete(_configFilePath, default(CancellationToken));

View File

@@ -22,6 +22,7 @@ namespace GitHub.Runner.Common
Task<List<TimelineRecord>> UpdateTimelineRecordsAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, IEnumerable<TimelineRecord> records, CancellationToken cancellationToken);
Task RaisePlanEventAsync<T>(Guid scopeIdentifier, string hubName, Guid planId, T eventData, CancellationToken cancellationToken) where T : JobEvent;
Task<Timeline> GetTimelineAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, CancellationToken cancellationToken);
Task<ActionDownloadInfoCollection> ResolveActionDownloadInfoAsync(Guid scopeIdentifier, string hubName, Guid planId, ActionReferenceList actions, CancellationToken cancellationToken);
}
public sealed class JobServer : RunnerService, IJobServer
@@ -113,5 +114,14 @@ namespace GitHub.Runner.Common
CheckConnection();
return _taskClient.GetTimelineAsync(scopeIdentifier, hubName, planId, timelineId, includeRecords: true, cancellationToken: cancellationToken);
}
//-----------------------------------------------------------------
// Action download info
//-----------------------------------------------------------------
public Task<ActionDownloadInfoCollection> ResolveActionDownloadInfoAsync(Guid scopeIdentifier, string hubName, Guid planId, ActionReferenceList actions, CancellationToken cancellationToken)
{
CheckConnection();
return _taskClient.ResolveActionDownloadInfoAsync(scopeIdentifier, hubName, planId, actions, cancellationToken: cancellationToken);
}
}
}

View File

@@ -50,10 +50,6 @@ namespace GitHub.Runner.Common
// agent update
Task<TaskAgent> UpdateAgentUpdateStateAsync(int agentPoolId, int agentId, string currentState);
// runner authorization url
Task<string> GetRunnerAuthUrlAsync(int runnerPoolId, int runnerId);
Task ReportRunnerAuthUrlErrorAsync(int runnerPoolId, int runnerId, string error);
}
public sealed class RunnerServer : RunnerService, IRunnerServer

View File

@@ -96,13 +96,14 @@ namespace GitHub.Runner.Common
Trace.Info($"WRITE: {message}");
if (!Silent)
{
if(colorCode != null)
if (colorCode != null)
{
Console.ForegroundColor = colorCode.Value;
Console.Write(message);
Console.ResetColor();
}
else {
else
{
Console.Write(message);
}
}
@@ -120,13 +121,14 @@ namespace GitHub.Runner.Common
Trace.Info($"WRITE LINE: {line}");
if (!Silent)
{
if(colorCode != null)
if (colorCode != null)
{
Console.ForegroundColor = colorCode.Value;
Console.WriteLine(line);
Console.ResetColor();
}
else {
else
{
Console.WriteLine(line);
}
}

View File

@@ -119,6 +119,19 @@ namespace GitHub.Runner.Listener.Configuration
// Determine the service deployment type based on connection data. (Hosted/OnPremises)
runnerSettings.IsHostedServer = runnerSettings.GitHubUrl == null || IsHostedServer(new UriBuilder(runnerSettings.GitHubUrl));
// Warn if the Actions server url and GHES server url has different Host
if (!runnerSettings.IsHostedServer)
{
// Example actionsServerUrl is https://my-ghes/_services/pipelines/[...]
// Example githubServerUrl is https://my-ghes
var actionsServerUrl = new Uri(runnerSettings.ServerUrl);
var githubServerUrl = new Uri(runnerSettings.GitHubUrl);
if (!string.Equals(actionsServerUrl.Authority, githubServerUrl.Authority, StringComparison.OrdinalIgnoreCase))
{
throw new InvalidOperationException($"GitHub Actions is not properly configured in GHES. GHES url: {runnerSettings.GitHubUrl}, Actions url: {runnerSettings.ServerUrl}.");
}
}
// Validate can connect.
await _runnerServer.ConnectAsync(new Uri(runnerSettings.ServerUrl), creds);
@@ -197,7 +210,7 @@ namespace GitHub.Runner.Listener.Configuration
else if (command.Unattended)
{
// if not replace and it is unattended config.
throw new TaskAgentExistsException($"Pool {runnerSettings.PoolId} already contains a runner with name {runnerSettings.AgentName}.");
throw new TaskAgentExistsException($"A runner exists with the same name {runnerSettings.AgentName}.");
}
}
else
@@ -221,36 +234,11 @@ namespace GitHub.Runner.Listener.Configuration
// Add Agent Id to settings
runnerSettings.AgentId = agent.Id;
// respect the serverUrl resolve by server.
// in case of agent configured using collection url instead of account url.
string agentServerUrl;
if (agent.Properties.TryGetValidatedValue<string>("ServerUrl", out agentServerUrl) &&
!string.IsNullOrEmpty(agentServerUrl))
{
Trace.Info($"Agent server url resolve by server: '{agentServerUrl}'.");
// we need make sure the Schema/Host/Port component of the url remain the same.
UriBuilder inputServerUrl = new UriBuilder(runnerSettings.ServerUrl);
UriBuilder serverReturnedServerUrl = new UriBuilder(agentServerUrl);
if (Uri.Compare(inputServerUrl.Uri, serverReturnedServerUrl.Uri, UriComponents.SchemeAndServer, UriFormat.Unescaped, StringComparison.OrdinalIgnoreCase) != 0)
{
inputServerUrl.Path = serverReturnedServerUrl.Path;
Trace.Info($"Replace server returned url's scheme://host:port component with user input server url's scheme://host:port: '{inputServerUrl.Uri.AbsoluteUri}'.");
runnerSettings.ServerUrl = inputServerUrl.Uri.AbsoluteUri;
}
else
{
runnerSettings.ServerUrl = agentServerUrl;
}
}
// See if the server supports our OAuth key exchange for credentials
if (agent.Authorization != null &&
agent.Authorization.ClientId != Guid.Empty &&
agent.Authorization.AuthorizationUrl != null)
{
UriBuilder configServerUrl = new UriBuilder(runnerSettings.ServerUrl);
UriBuilder oauthEndpointUrlBuilder = new UriBuilder(agent.Authorization.AuthorizationUrl);
var credentialData = new CredentialData
{
Scheme = Constants.Configuration.OAuth,
@@ -258,7 +246,6 @@ namespace GitHub.Runner.Listener.Configuration
{
{ "clientId", agent.Authorization.ClientId.ToString("D") },
{ "authorizationUrl", agent.Authorization.AuthorizationUrl.AbsoluteUri },
{ "oauthEndpointUrl", oauthEndpointUrlBuilder.Uri.AbsoluteUri },
},
};
@@ -464,7 +451,7 @@ namespace GitHub.Runner.Listener.Configuration
// update should replace the existing labels
agent.Version = BuildConstants.RunnerPackage.Version;
agent.OSDescription = RuntimeInformation.OSDescription;
agent.Labels.Clear();
agent.Labels.Add(new AgentLabel("self-hosted", LabelType.System));
@@ -475,7 +462,7 @@ namespace GitHub.Runner.Listener.Configuration
{
agent.Labels.Add(new AgentLabel(userLabel, LabelType.User));
}
return agent;
}

View File

@@ -13,7 +13,7 @@ namespace GitHub.Runner.Listener.Configuration
public interface ICredentialManager : IRunnerService
{
ICredentialProvider GetCredentialProvider(string credType);
VssCredentials LoadCredentials(bool preferMigrated = true);
VssCredentials LoadCredentials();
}
public class CredentialManager : RunnerService, ICredentialManager
@@ -40,7 +40,7 @@ namespace GitHub.Runner.Listener.Configuration
return creds;
}
public VssCredentials LoadCredentials(bool preferMigrated = true)
public VssCredentials LoadCredentials()
{
IConfigurationStore store = HostContext.GetService<IConfigurationStore>();
@@ -50,14 +50,16 @@ namespace GitHub.Runner.Listener.Configuration
}
CredentialData credData = store.GetCredentials();
if (preferMigrated)
var migratedCred = store.GetMigratedCredentials();
if (migratedCred != null)
{
var migratedCred = store.GetMigratedCredentials();
if (migratedCred != null)
{
credData = migratedCred;
}
credData = migratedCred;
// Re-write .credentials with Token URL
store.SaveCredential(credData);
// Delete .credentials_migrated
store.DeleteMigratedCredential();
}
ICredentialProvider credProv = GetCredentialProvider(credData.Scheme);

View File

@@ -13,10 +13,7 @@ using System.Diagnostics;
using System.Runtime.InteropServices;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using GitHub.Services.WebApi;
using System.Runtime.CompilerServices;
[assembly: InternalsVisibleTo("Test")]
namespace GitHub.Runner.Listener
{
[ServiceLocator(Default = typeof(MessageListener))]
@@ -35,30 +32,18 @@ namespace GitHub.Runner.Listener
private ITerminal _term;
private IRunnerServer _runnerServer;
private TaskAgentSession _session;
private ICredentialManager _credMgr;
private IConfigurationStore _configStore;
private TimeSpan _getNextMessageRetryInterval;
private readonly TimeSpan _sessionCreationRetryInterval = TimeSpan.FromSeconds(30);
private readonly TimeSpan _sessionConflictRetryLimit = TimeSpan.FromMinutes(4);
private readonly TimeSpan _clockSkewRetryLimit = TimeSpan.FromMinutes(30);
private readonly Dictionary<string, int> _sessionCreationExceptionTracker = new Dictionary<string, int>();
// Whether to load credentials from the .credentials_migrated file
internal bool _useMigratedCredentials;
// Need to check the auth url if there is only .credentials and the auth scheme is OAuth
internal bool _needToCheckAuthorizationUrlUpdate;
internal Task<VssCredentials> _authorizationUrlMigrationBackgroundTask;
internal Task _authorizationUrlRollbackReattemptDelayBackgroundTask;
public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);
_term = HostContext.GetService<ITerminal>();
_runnerServer = HostContext.GetService<IRunnerServer>();
_credMgr = HostContext.GetService<ICredentialManager>();
_configStore = HostContext.GetService<IConfigurationStore>();
}
public async Task<Boolean> CreateSessionAsync(CancellationToken token)
@@ -73,8 +58,8 @@ namespace GitHub.Runner.Listener
// Create connection.
Trace.Info("Loading Credentials");
_useMigratedCredentials = !StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_SPSAUTHURL"));
VssCredentials creds = _credMgr.LoadCredentials(_useMigratedCredentials);
var credMgr = HostContext.GetService<ICredentialManager>();
VssCredentials creds = credMgr.LoadCredentials();
var agent = new TaskAgentReference
{
@@ -89,17 +74,6 @@ namespace GitHub.Runner.Listener
string errorMessage = string.Empty;
bool encounteringError = false;
var originalCreds = _configStore.GetCredentials();
var migratedCreds = _configStore.GetMigratedCredentials();
if (migratedCreds == null)
{
_useMigratedCredentials = false;
if (originalCreds.Scheme == Constants.Configuration.OAuth)
{
_needToCheckAuthorizationUrlUpdate = true;
}
}
while (true)
{
token.ThrowIfCancellationRequested();
@@ -127,12 +101,6 @@ namespace GitHub.Runner.Listener
encounteringError = false;
}
if (_needToCheckAuthorizationUrlUpdate)
{
// start a background task that tries to get the new authorization url
_authorizationUrlMigrationBackgroundTask = GetNewOAuthAuthorizationSetting(token);
}
return true;
}
catch (OperationCanceledException) when (token.IsCancellationRequested)
@@ -164,44 +132,10 @@ namespace GitHub.Runner.Listener
}
}
if (ex is TaskAgentSessionConflictException)
{
try
{
var newCred = await GetNewOAuthAuthorizationSetting(token, true);
if (newCred != null)
{
await _runnerServer.ConnectAsync(new Uri(_settings.ServerUrl), newCred);
Trace.Info("Updated connection to use migrated credential for next CreateSession call.");
_useMigratedCredentials = true;
_authorizationUrlMigrationBackgroundTask = null;
_needToCheckAuthorizationUrlUpdate = false;
}
}
catch (Exception e)
{
Trace.Error("Fail to refresh connection with new authorization url.");
Trace.Error(e);
}
}
if (!IsSessionCreationExceptionRetriable(ex))
{
if (_useMigratedCredentials && !(ex is TaskAgentSessionConflictException))
{
// migrated credentials might cause a loss of permission during the permission check,
// so we will force the original credentials and try again
_useMigratedCredentials = false;
var reattemptBackoff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromHours(24), TimeSpan.FromHours(36));
_authorizationUrlRollbackReattemptDelayBackgroundTask = HostContext.Delay(reattemptBackoff, token); // retry migrated creds in 24-36 hours.
creds = _credMgr.LoadCredentials(false);
Trace.Error("Fallback to original credentials and try again.");
}
else
{
_term.WriteError($"Failed to create session. {ex.Message}");
return false;
}
_term.WriteError($"Failed to create session. {ex.Message}");
return false;
}
if (!encounteringError) //print the message only on the first error
@@ -262,51 +196,6 @@ namespace GitHub.Runner.Listener
encounteringError = false;
continuousError = 0;
}
if (_needToCheckAuthorizationUrlUpdate &&
_authorizationUrlMigrationBackgroundTask?.IsCompleted == true)
{
if (HostContext.GetService<IJobDispatcher>().Busy ||
HostContext.GetService<ISelfUpdater>().Busy)
{
Trace.Info("Job or runner updates in progress, update credentials next time.");
}
else
{
try
{
var newCred = await _authorizationUrlMigrationBackgroundTask;
await _runnerServer.ConnectAsync(new Uri(_settings.ServerUrl), newCred);
Trace.Info("Updated connection to use migrated credential for next GetMessage call.");
_useMigratedCredentials = true;
_authorizationUrlMigrationBackgroundTask = null;
_needToCheckAuthorizationUrlUpdate = false;
}
catch (Exception ex)
{
Trace.Error("Fail to refresh connection with new authorization url.");
Trace.Error(ex);
}
}
}
if (_authorizationUrlRollbackReattemptDelayBackgroundTask?.IsCompleted == true)
{
try
{
// we rolled back to the original creds about 2 days ago; now it's a good time to try the migrated creds again.
Trace.Info("Re-attempt to use migrated credential");
var migratedCreds = _credMgr.LoadCredentials();
await _runnerServer.ConnectAsync(new Uri(_settings.ServerUrl), migratedCreds);
_useMigratedCredentials = true;
_authorizationUrlRollbackReattemptDelayBackgroundTask = null;
}
catch (Exception ex)
{
Trace.Error("Fail to refresh connection with new authorization url on rollback reattempt.");
Trace.Error(ex);
}
}
}
catch (OperationCanceledException) when (token.IsCancellationRequested)
{
@@ -330,21 +219,7 @@ namespace GitHub.Runner.Listener
}
else if (!IsGetNextMessageExceptionRetriable(ex))
{
if (_useMigratedCredentials)
{
// migrated credentials might cause a loss of permission during the permission check,
// so we will force the original credentials and try again
_useMigratedCredentials = false;
var reattemptBackoff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromHours(24), TimeSpan.FromHours(36));
_authorizationUrlRollbackReattemptDelayBackgroundTask = HostContext.Delay(reattemptBackoff, token); // retry migrated creds in 24-36 hours.
var originalCreds = _credMgr.LoadCredentials(false);
await _runnerServer.ConnectAsync(new Uri(_settings.ServerUrl), originalCreds);
Trace.Error("Fallback to original credentials and try again.");
}
else
{
throw;
}
throw;
}
else
{
@@ -536,94 +411,5 @@ namespace GitHub.Runner.Listener
return true;
}
}
private async Task<VssCredentials> GetNewOAuthAuthorizationSetting(CancellationToken token, bool adhoc = false)
{
Trace.Info("Start checking oauth authorization url update.");
while (true)
{
try
{
var migratedAuthorizationUrl = await _runnerServer.GetRunnerAuthUrlAsync(_settings.PoolId, _settings.AgentId);
if (!string.IsNullOrEmpty(migratedAuthorizationUrl))
{
var credData = _configStore.GetCredentials();
var clientId = credData.Data.GetValueOrDefault("clientId", null);
var currentAuthorizationUrl = credData.Data.GetValueOrDefault("authorizationUrl", null);
Trace.Info($"Current authorization url: {currentAuthorizationUrl}, new authorization url: {migratedAuthorizationUrl}");
if (string.Equals(currentAuthorizationUrl, migratedAuthorizationUrl, StringComparison.OrdinalIgnoreCase))
{
// We don't need to update credentials.
Trace.Info("No needs to update authorization url");
if (adhoc)
{
return null;
}
else
{
await Task.Delay(TimeSpan.FromMilliseconds(-1), token);
}
}
var keyManager = HostContext.GetService<IRSAKeyManager>();
var signingCredentials = VssSigningCredentials.Create(() => keyManager.GetKey());
var migratedClientCredential = new VssOAuthJwtBearerClientCredential(clientId, migratedAuthorizationUrl, signingCredentials);
var migratedRunnerCredential = new VssOAuthCredential(new Uri(migratedAuthorizationUrl, UriKind.Absolute), VssOAuthGrant.ClientCredentials, migratedClientCredential);
Trace.Info("Try connect service with Token Service OAuth endpoint.");
var runnerServer = HostContext.CreateService<IRunnerServer>();
await runnerServer.ConnectAsync(new Uri(_settings.ServerUrl), migratedRunnerCredential);
await runnerServer.GetAgentPoolsAsync();
Trace.Info($"Successfully connected service with new authorization url.");
var migratedCredData = new CredentialData
{
Scheme = Constants.Configuration.OAuth,
Data =
{
{ "clientId", clientId },
{ "authorizationUrl", migratedAuthorizationUrl },
{ "oauthEndpointUrl", migratedAuthorizationUrl },
},
};
_configStore.SaveMigratedCredential(migratedCredData);
return migratedRunnerCredential;
}
else
{
Trace.Verbose("No authorization url updates");
}
}
catch (Exception ex) when (!token.IsCancellationRequested)
{
Trace.Error("Fail to get/test new authorization url.");
Trace.Error(ex);
try
{
await _runnerServer.ReportRunnerAuthUrlErrorAsync(_settings.PoolId, _settings.AgentId, ex.ToString());
}
catch (Exception e)
{
// best effort
Trace.Error("Fail to report the migration error");
Trace.Error(e);
}
}
if (adhoc)
{
return null;
}
else
{
var backoff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromMinutes(30), TimeSpan.FromMinutes(45));
await HostContext.Delay(backoff, token);
}
}
}
}
}

View File

@@ -102,7 +102,9 @@ namespace GitHub.Runner.Listener
IRunner runner = context.GetService<IRunner>();
try
{
return await runner.ExecuteCommand(command);
var returnCode = await runner.ExecuteCommand(command);
trace.Info($"Runner execution has finished with return code {returnCode}");
return returnCode;
}
catch (OperationCanceledException) when (context.RunnerShutdownToken.IsCancellationRequested)
{

View File

@@ -318,7 +318,12 @@ namespace GitHub.Runner.Sdk
}
}
using (var registration = cancellationToken.Register(async () => await CancelAndKillProcessTree(killProcessOnCancel)))
var cancellationFinished = new TaskCompletionSource<bool>();
using (var registration = cancellationToken.Register(async () =>
{
await CancelAndKillProcessTree(killProcessOnCancel);
cancellationFinished.TrySetResult(true);
}))
{
Trace.Info($"Process started with process id {_proc.Id}, waiting for process exit.");
while (true)
@@ -341,6 +346,13 @@ namespace GitHub.Runner.Sdk
// data buffers one last time before returning
ProcessOutput();
if (cancellationToken.IsCancellationRequested)
{
// Ensure the cancellation callback has also finished on the cancellationToken.Register thread.
await cancellationFinished.Task;
Trace.Info($"Process Cancellation finished.");
}
Trace.Info($"Finished process {_proc.Id} with exit code {_proc.ExitCode}, and elapsed time {_stopWatch.Elapsed}.");
}

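The hunk above closes a race: the cancellationToken.Register callback is async, so the wait loop could observe the process exit while CancelAndKillProcessTree is still running, and the TaskCompletionSource makes the caller wait for that callback to finish. A minimal stand-alone sketch of the same handshake, using only standard .NET types (the class name, the one-second timeout, and the Task.Delay placeholders are illustrative, not runner code):

using System;
using System.Threading;
using System.Threading.Tasks;

public static class CancellationHandshakeSketch
{
    public static async Task Main()
    {
        using (var cts = new CancellationTokenSource(TimeSpan.FromSeconds(1)))
        {
            var cancellationFinished = new TaskCompletionSource<bool>();

            // Register a callback that performs async cleanup, then signals that it is done.
            using (var registration = cts.Token.Register(async () =>
            {
                await Task.Delay(200);                    // stands in for CancelAndKillProcessTree
                cancellationFinished.TrySetResult(true);
            }))
            {
                try
                {
                    // Stands in for the loop that waits for the child process to exit.
                    await Task.Delay(Timeout.Infinite, cts.Token);
                }
                catch (OperationCanceledException)
                {
                    // Wait until the callback's async body has really finished before declaring success;
                    // without this, the cleanup could still be running on another thread.
                    await cancellationFinished.Task;
                    Console.WriteLine("Cancellation (including its cleanup) fully finished.");
                }
            }
        }
    }
}
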
View File

@@ -11,6 +11,11 @@ namespace GitHub.Runner.Sdk
{
ArgUtil.NotNullOrEmpty(command, nameof(command));
trace?.Info($"Which: '{command}'");
if (Path.IsPathFullyQualified(command) && File.Exists(command))
{
trace?.Info($"Fully qualified path: '{command}'");
return command;
}
string path = Environment.GetEnvironmentVariable(PathUtil.PathVariable);
if (string.IsNullOrEmpty(path))
{

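The new early return above skips the PATH scan when the caller already passed a fully qualified path to an existing file. A rough sketch of that resolution order in isolation, assuming a simplified lookup (no PATHEXT handling; the WhichSketch name is made up):

using System;
using System.IO;
using System.Linq;

public static class WhichSketch
{
    // Resolve a command: fully qualified path first, then a scan of PATH.
    public static string Which(string command)
    {
        if (string.IsNullOrEmpty(command))
        {
            throw new ArgumentException("command is required", nameof(command));
        }

        // Short-circuit: a fully qualified path that points at an existing file is returned as-is.
        if (Path.IsPathFullyQualified(command) && File.Exists(command))
        {
            return command;
        }

        var path = Environment.GetEnvironmentVariable("PATH") ?? string.Empty;
        return path.Split(new[] { Path.PathSeparator }, StringSplitOptions.RemoveEmptyEntries)
                   .Select(dir => Path.Combine(dir, command))
                   .FirstOrDefault(File.Exists);
    }
}
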
View File

@@ -14,6 +14,7 @@ using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using GitHub.Runner.Worker.Container;
using GitHub.Services.Common;
using WebApi = GitHub.DistributedTask.WebApi;
using Pipelines = GitHub.DistributedTask.Pipelines;
using PipelineTemplateConstants = GitHub.DistributedTask.Pipelines.ObjectTemplating.PipelineTemplateConstants;
@@ -73,6 +74,11 @@ namespace GitHub.Runner.Worker
// Clear the cache (for self-hosted runners)
IOUtil.DeleteDirectory(HostContext.GetDirectory(WellKnownDirectory.Actions), executionContext.CancellationToken);
// todo: Remove when feature flag DistributedTask.NewActionMetadata is removed
var newActionMetadata = executionContext.Variables.GetBoolean("DistributedTask.NewActionMetadata") ?? false;
var repositoryActions = new List<Pipelines.ActionStep>();
foreach (var action in actions)
{
if (action.Reference.Type == Pipelines.ActionSourceType.ContainerRegistry)
@@ -90,7 +96,8 @@ namespace GitHub.Runner.Worker
Trace.Info($"Action {action.Name} ({action.Id}) needs to pull image '{containerReference.Image}'");
imagesToPull[containerReference.Image].Add(action.Id);
}
else if (action.Reference.Type == Pipelines.ActionSourceType.Repository)
// todo: Remove when feature flag DistributedTask.NewActionMetadata is removed
else if (action.Reference.Type == Pipelines.ActionSourceType.Repository && !newActionMetadata)
{
// only download the repository archive
await DownloadRepositoryActionAsync(executionContext, action);
@@ -124,6 +131,81 @@ namespace GitHub.Runner.Worker
}
}
var repoAction = action.Reference as Pipelines.RepositoryPathReference;
if (repoAction.RepositoryType != Pipelines.PipelineConstants.SelfAlias)
{
var definition = LoadAction(executionContext, action);
if (definition.Data.Execution.HasPre)
{
var actionRunner = HostContext.CreateService<IActionRunner>();
actionRunner.Action = action;
actionRunner.Stage = ActionRunStage.Pre;
actionRunner.Condition = definition.Data.Execution.InitCondition;
Trace.Info($"Add 'pre' execution for {action.Id}");
preStepTracker[action.Id] = actionRunner;
}
}
}
else if (action.Reference.Type == Pipelines.ActionSourceType.Repository && newActionMetadata)
{
repositoryActions.Add(action);
}
}
if (repositoryActions.Count > 0)
{
// Get the download info
var downloadInfos = await GetDownloadInfoAsync(executionContext, repositoryActions);
// Download each action
foreach (var action in repositoryActions)
{
var lookupKey = GetDownloadInfoLookupKey(action);
if (string.IsNullOrEmpty(lookupKey))
{
continue;
}
if (!downloadInfos.TryGetValue(lookupKey, out var downloadInfo))
{
throw new Exception($"Missing download info for {lookupKey}");
}
await DownloadRepositoryActionAsync(executionContext, downloadInfo);
}
// More preparation based on content in the repository (action.yml)
foreach (var action in repositoryActions)
{
var setupInfo = PrepareRepositoryActionAsync(executionContext, action);
if (setupInfo != null)
{
if (!string.IsNullOrEmpty(setupInfo.Image))
{
if (!imagesToPull.ContainsKey(setupInfo.Image))
{
imagesToPull[setupInfo.Image] = new List<Guid>();
}
Trace.Info($"Action {action.Name} ({action.Id}) from repository '{setupInfo.ActionRepository}' needs to pull image '{setupInfo.Image}'");
imagesToPull[setupInfo.Image].Add(action.Id);
}
else
{
ArgUtil.NotNullOrEmpty(setupInfo.ActionRepository, nameof(setupInfo.ActionRepository));
if (!imagesToBuild.ContainsKey(setupInfo.ActionRepository))
{
imagesToBuild[setupInfo.ActionRepository] = new List<Guid>();
}
Trace.Info($"Action {action.Name} ({action.Id}) from repository '{setupInfo.ActionRepository}' needs to build image '{setupInfo.Dockerfile}'");
imagesToBuild[setupInfo.ActionRepository].Add(action.Id);
imagesToBuildInfo[setupInfo.ActionRepository] = setupInfo;
}
}
var repoAction = action.Reference as Pipelines.RepositoryPathReference;
if (repoAction.RepositoryType != Pipelines.PipelineConstants.SelfAlias)
{
@@ -150,8 +232,7 @@ namespace GitHub.Runner.Worker
containerSetupSteps.Add(new JobExtensionRunner(runAsync: this.PullActionContainerAsync,
condition: $"{PipelineTemplateConstants.Success}()",
displayName: $"Pull {imageToPull.Key}",
data: new ContainerSetupInfo(imageToPull.Value, imageToPull.Key),
repositoryRef: null));
data: new ContainerSetupInfo(imageToPull.Value, imageToPull.Key)));
}
}
@@ -164,8 +245,7 @@ namespace GitHub.Runner.Worker
containerSetupSteps.Add(new JobExtensionRunner(runAsync: this.BuildActionContainerAsync,
condition: $"{PipelineTemplateConstants.Success}()",
displayName: $"Build {setupInfo.ActionRepository}",
data: new ContainerSetupInfo(imageToBuild.Value, setupInfo.Dockerfile, setupInfo.WorkingDirectory),
repositoryRef: setupInfo.RepositoryRef));
data: new ContainerSetupInfo(imageToBuild.Value, setupInfo.Dockerfile, setupInfo.WorkingDirectory)));
}
}
@@ -315,6 +395,14 @@ namespace GitHub.Runner.Worker
Trace.Info($"Action cleanup plugin: {plugin.PluginTypeName}.");
}
}
else if (definition.Data.Execution.ExecutionType == ActionExecutionType.Composite && !string.IsNullOrEmpty(Environment.GetEnvironmentVariable("TESTING_COMPOSITE_ACTIONS_ALPHA")))
{
var compositeAction = definition.Data.Execution as CompositeActionExecutionData;
Trace.Info($"Load {compositeAction.Steps?.Count ?? 0} action steps.");
Trace.Verbose($"Details: {StringUtil.ConvertToJson(compositeAction?.Steps)}");
Trace.Info($"Load: {compositeAction.Outputs?.Count ?? 0} number of outputs");
Trace.Info($"Details: {StringUtil.ConvertToJson(compositeAction?.Outputs)}");
}
else
{
throw new NotSupportedException(definition.Data.Execution.ExecutionType.ToString());
@@ -466,6 +554,80 @@ namespace GitHub.Runner.Worker
}
}
// This implementation is temporary and will be replaced with a REST API call to the service to resolve the action download info
private async Task<IDictionary<string, WebApi.ActionDownloadInfo>> GetDownloadInfoAsync(IExecutionContext executionContext, List<Pipelines.ActionStep> actions)
{
executionContext.Output("Getting action download info");
// Convert to action reference
var actionReferences = actions
.GroupBy(x => GetDownloadInfoLookupKey(x))
.Where(x => !string.IsNullOrEmpty(x.Key))
.Select(x =>
{
var action = x.First();
var repositoryReference = action.Reference as Pipelines.RepositoryPathReference;
ArgUtil.NotNull(repositoryReference, nameof(repositoryReference));
return new WebApi.ActionReference
{
NameWithOwner = repositoryReference.Name,
Ref = repositoryReference.Ref,
};
})
.ToList();
// Nothing to resolve?
if (actionReferences.Count == 0)
{
return new Dictionary<string, WebApi.ActionDownloadInfo>();
}
// Resolve download info
var jobServer = HostContext.GetService<IJobServer>();
var actionDownloadInfos = default(WebApi.ActionDownloadInfoCollection);
for (var attempt = 1; attempt <= 3; attempt++)
{
try
{
actionDownloadInfos = await jobServer.ResolveActionDownloadInfoAsync(executionContext.Plan.ScopeIdentifier, executionContext.Plan.PlanType, executionContext.Plan.PlanId, new WebApi.ActionReferenceList { Actions = actionReferences }, executionContext.CancellationToken);
break;
}
catch (Exception ex) when (attempt < 3)
{
executionContext.Output($"Failed to resolve action download info. Error: {ex.Message}");
executionContext.Debug(ex.ToString());
if (String.IsNullOrEmpty(Environment.GetEnvironmentVariable("_GITHUB_ACTION_DOWNLOAD_NO_BACKOFF")))
{
var backoff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(10), TimeSpan.FromSeconds(30));
executionContext.Output($"Retrying in {backoff.TotalSeconds} seconds");
await Task.Delay(backoff);
}
}
}
ArgUtil.NotNull(actionDownloadInfos, nameof(actionDownloadInfos));
ArgUtil.NotNull(actionDownloadInfos.Actions, nameof(actionDownloadInfos.Actions));
var apiUrl = GetApiUrl(executionContext);
var defaultAccessToken = executionContext.GetGitHubContext("token");
var configurationStore = HostContext.GetService<IConfigurationStore>();
var runnerSettings = configurationStore.GetSettings();
foreach (var actionDownloadInfo in actionDownloadInfos.Actions.Values)
{
// Add secret
HostContext.SecretMasker.AddValue(actionDownloadInfo.Authentication?.Token);
// Default auth token
if (string.IsNullOrEmpty(actionDownloadInfo.Authentication?.Token))
{
actionDownloadInfo.Authentication = new WebApi.ActionDownloadAuthentication { Token = defaultAccessToken };
}
}
return actionDownloadInfos.Actions;
}
// todo: Remove when feature flag DistributedTask.NewActionMetadata is removed
private async Task DownloadRepositoryActionAsync(IExecutionContext executionContext, Pipelines.ActionStep repositoryAction)
{
Trace.Entering();
@@ -509,9 +671,8 @@ namespace GitHub.Runner.Worker
{
string apiUrl = GetApiUrl(executionContext);
string archiveLink = BuildLinkToActionArchive(apiUrl, repositoryReference.Name, repositoryReference.Ref);
Trace.Info($"Download archive '{archiveLink}' to '{destDirectory}'.");
var downloadDetails = new ActionDownloadDetails(archiveLink, ConfigureAuthorizationFromContext);
await DownloadRepositoryActionAsync(executionContext, downloadDetails, destDirectory);
await DownloadRepositoryActionAsync(executionContext, downloadDetails, null, destDirectory);
return;
}
else
@@ -535,10 +696,9 @@ namespace GitHub.Runner.Worker
foreach (var downloadAttempt in downloadAttempts)
{
Trace.Info($"Download archive '{downloadAttempt.ArchiveLink}' to '{destDirectory}'.");
try
{
await DownloadRepositoryActionAsync(executionContext, downloadAttempt, destDirectory);
await DownloadRepositoryActionAsync(executionContext, downloadAttempt, null, destDirectory);
return;
}
catch (ActionNotFoundException)
@@ -551,6 +711,32 @@ namespace GitHub.Runner.Worker
}
}
private async Task DownloadRepositoryActionAsync(IExecutionContext executionContext, WebApi.ActionDownloadInfo downloadInfo)
{
Trace.Entering();
ArgUtil.NotNull(executionContext, nameof(executionContext));
ArgUtil.NotNull(downloadInfo, nameof(downloadInfo));
ArgUtil.NotNullOrEmpty(downloadInfo.NameWithOwner, nameof(downloadInfo.NameWithOwner));
ArgUtil.NotNullOrEmpty(downloadInfo.Ref, nameof(downloadInfo.Ref));
string destDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Actions), downloadInfo.NameWithOwner.Replace(Path.AltDirectorySeparatorChar, Path.DirectorySeparatorChar), downloadInfo.Ref);
string watermarkFile = GetWatermarkFilePath(destDirectory);
if (File.Exists(watermarkFile))
{
executionContext.Debug($"Action '{downloadInfo.NameWithOwner}@{downloadInfo.Ref}' already downloaded at '{destDirectory}'.");
return;
}
else
{
// make sure we get a clean folder ready to use.
IOUtil.DeleteDirectory(destDirectory, executionContext.CancellationToken);
Directory.CreateDirectory(destDirectory);
executionContext.Output($"Download action repository '{downloadInfo.NameWithOwner}@{downloadInfo.Ref}'");
}
await DownloadRepositoryActionAsync(executionContext, null, downloadInfo, destDirectory);
}
private string GetApiUrl(IExecutionContext executionContext)
{
string apiUrl = executionContext.GetGitHubContext("api_url");
@@ -571,7 +757,8 @@ namespace GitHub.Runner.Worker
#endif
}
private async Task DownloadRepositoryActionAsync(IExecutionContext executionContext, ActionDownloadDetails actionDownloadDetails, string destDirectory)
// todo: Remove the parameter "actionDownloadDetails" when feature flag DistributedTask.NewActionMetadata is removed
private async Task DownloadRepositoryActionAsync(IExecutionContext executionContext, ActionDownloadDetails actionDownloadDetails, WebApi.ActionDownloadInfo downloadInfo, string destDirectory)
{
// Download and extract the action in a temp folder and rename it on success
string tempDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Actions), "_temp_" + Guid.NewGuid());
@@ -579,11 +766,12 @@ namespace GitHub.Runner.Worker
#if OS_WINDOWS
string archiveFile = Path.Combine(tempDirectory, $"{Guid.NewGuid()}.zip");
string link = downloadInfo?.ZipballUrl ?? actionDownloadDetails.ArchiveLink;
#else
string archiveFile = Path.Combine(tempDirectory, $"{Guid.NewGuid()}.tar.gz");
string link = downloadInfo?.TarballUrl ?? actionDownloadDetails.ArchiveLink;
#endif
string link = actionDownloadDetails.ArchiveLink;
Trace.Info($"Save archive '{link}' into {archiveFile}.");
try
{
@@ -603,7 +791,16 @@ namespace GitHub.Runner.Worker
using (var httpClientHandler = HostContext.CreateHttpClientHandler())
using (var httpClient = new HttpClient(httpClientHandler))
{
actionDownloadDetails.ConfigureAuthorization(executionContext, httpClient);
// Legacy
if (downloadInfo == null)
{
actionDownloadDetails.ConfigureAuthorization(executionContext, httpClient);
}
// FF DistributedTask.NewActionMetadata
else
{
httpClient.DefaultRequestHeaders.Authorization = CreateAuthHeader(downloadInfo.Authentication?.Token);
}
httpClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents);
using (var response = await httpClient.GetAsync(link))
@@ -743,6 +940,7 @@ namespace GitHub.Runner.Worker
}
}
// todo: Remove when feature flag DistributedTask.NewActionMetadata is removed
private void ConfigureAuthorizationFromContext(IExecutionContext executionContext, HttpClient httpClient)
{
var authToken = Environment.GetEnvironmentVariable("_GITHUB_ACTION_TOKEN");
@@ -778,7 +976,6 @@ namespace GitHub.Runner.Worker
}
var setupInfo = new ActionContainer();
setupInfo.RepositoryRef = repositoryReference;
string destDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Actions), repositoryReference.Name.Replace(Path.AltDirectorySeparatorChar, Path.DirectorySeparatorChar), repositoryReference.Ref);
string actionEntryDirectory = destDirectory;
string dockerFileRelativePath = repositoryReference.Name;
@@ -849,6 +1046,11 @@ namespace GitHub.Runner.Worker
Trace.Info($"Action plugin: {(actionDefinitionData.Execution as PluginActionExecutionData).Plugin}, no more preparation.");
return null;
}
else if (actionDefinitionData.Execution.ExecutionType == ActionExecutionType.Composite && !string.IsNullOrEmpty(Environment.GetEnvironmentVariable("TESTING_COMPOSITE_ACTIONS_ALPHA")))
{
Trace.Info($"Action composite: {(actionDefinitionData.Execution as CompositeActionExecutionData).Steps}, no more preparation.");
return null;
}
else
{
throw new NotSupportedException(actionDefinitionData.Execution.ExecutionType.ToString());
@@ -875,6 +1077,51 @@ namespace GitHub.Runner.Worker
}
}
private static string GetDownloadInfoLookupKey(Pipelines.ActionStep action)
{
if (action.Reference.Type != Pipelines.ActionSourceType.Repository)
{
return null;
}
var repositoryReference = action.Reference as Pipelines.RepositoryPathReference;
ArgUtil.NotNull(repositoryReference, nameof(repositoryReference));
if (string.Equals(repositoryReference.RepositoryType, Pipelines.PipelineConstants.SelfAlias, StringComparison.OrdinalIgnoreCase))
{
return null;
}
if (!string.Equals(repositoryReference.RepositoryType, Pipelines.RepositoryTypes.GitHub, StringComparison.OrdinalIgnoreCase))
{
throw new NotSupportedException(repositoryReference.RepositoryType);
}
ArgUtil.NotNullOrEmpty(repositoryReference.Name, nameof(repositoryReference.Name));
ArgUtil.NotNullOrEmpty(repositoryReference.Ref, nameof(repositoryReference.Ref));
return $"{repositoryReference.Name}@{repositoryReference.Ref}";
}
private static string GetDownloadInfoLookupKey(WebApi.ActionDownloadInfo info)
{
ArgUtil.NotNullOrEmpty(info.NameWithOwner, nameof(info.NameWithOwner));
ArgUtil.NotNullOrEmpty(info.Ref, nameof(info.Ref));
return $"{info.NameWithOwner}@{info.Ref}";
}
private AuthenticationHeaderValue CreateAuthHeader(string token)
{
if (string.IsNullOrEmpty(token))
{
return null;
}
var base64EncodingToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"x-access-token:{token}"));
HostContext.SecretMasker.AddValue(base64EncodingToken);
return new AuthenticationHeaderValue("Basic", base64EncodingToken);
}
// todo: Remove when feature flag DistributedTask.NewActionMetadata is removed
private class ActionDownloadDetails
{
public string ArchiveLink { get; }
@@ -914,6 +1161,7 @@ namespace GitHub.Runner.Worker
NodeJS,
Plugin,
Script,
Composite,
}
public sealed class ContainerActionExecutionData : ActionExecutionData
@@ -970,6 +1218,15 @@ namespace GitHub.Runner.Worker
public override bool HasPost => false;
}
public sealed class CompositeActionExecutionData : ActionExecutionData
{
public override ActionExecutionType ExecutionType => ActionExecutionType.Composite;
public override bool HasPre => false;
public override bool HasPost => false;
public List<Pipelines.ActionStep> Steps { get; set; }
public MappingToken Outputs { get; set; }
}
public abstract class ActionExecutionData
{
private string _initCondition = $"{Constants.Expressions.Always}()";
@@ -1025,6 +1282,5 @@ namespace GitHub.Runner.Worker
public string Dockerfile { get; set; }
public string WorkingDirectory { get; set; }
public string ActionRepository { get; set; }
public Pipelines.RepositoryPathReference RepositoryRef { get; set; }
}
}

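GetDownloadInfoAsync above calls ResolveActionDownloadInfoAsync up to three times, sleeping a random 10-30 second backoff between attempts unless the _GITHUB_ACTION_DOWNLOAD_NO_BACKOFF escape hatch is set. The same retry shape reduced to a generic helper, as a sketch only (the helper name and the plain Random in place of the runner's BackoffTimerHelper are illustrative):

using System;
using System.Threading.Tasks;

public static class RetrySketch
{
    private static readonly Random _random = new Random();

    // Run an async operation up to maxAttempts times, sleeping a random backoff between attempts.
    public static async Task<T> WithBackoffAsync<T>(Func<Task<T>> operation, int maxAttempts = 3)
    {
        for (var attempt = 1; ; attempt++)
        {
            try
            {
                return await operation();
            }
            catch (Exception ex) when (attempt < maxAttempts)
            {
                Console.WriteLine($"Attempt {attempt} failed: {ex.Message}");
                var backoff = TimeSpan.FromSeconds(_random.Next(10, 31));   // 10-30 seconds, like the hunk above
                Console.WriteLine($"Retrying in {backoff.TotalSeconds} seconds");
                await Task.Delay(backoff);
            }
        }
    }
}

On the final attempt the exception filter no longer matches, so the last failure propagates to the caller, which is the same behavior the hunk relies on before the ArgUtil.NotNull check.
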
View File

@@ -14,6 +14,7 @@ using YamlDotNet.Core;
using YamlDotNet.Core.Events;
using System.Globalization;
using System.Linq;
using Pipelines = GitHub.DistributedTask.Pipelines;
namespace GitHub.Runner.Worker
{
@@ -22,18 +23,20 @@ namespace GitHub.Runner.Worker
{
ActionDefinitionData Load(IExecutionContext executionContext, string manifestFile);
DictionaryContextData EvaluateCompositeOutputs(IExecutionContext executionContext, TemplateToken token, IDictionary<string, PipelineContextData> extraExpressionValues);
List<string> EvaluateContainerArguments(IExecutionContext executionContext, SequenceToken token, IDictionary<string, PipelineContextData> extraExpressionValues);
Dictionary<string, string> EvaluateContainerEnvironment(IExecutionContext executionContext, MappingToken token, IDictionary<string, PipelineContextData> extraExpressionValues);
string EvaluateDefaultInput(IExecutionContext executionContext, string inputName, TemplateToken token);
void SetAllCompositeOutputs(IExecutionContext parentExecutionContext, DictionaryContextData actionOutputs);
}
public sealed class ActionManifestManager : RunnerService, IActionManifestManager
{
private TemplateSchema _actionManifestSchema;
private IReadOnlyList<String> _fileTable;
public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);
@@ -54,25 +57,45 @@ namespace GitHub.Runner.Worker
public ActionDefinitionData Load(IExecutionContext executionContext, string manifestFile)
{
var context = CreateContext(executionContext);
var templateContext = CreateContext(executionContext);
ActionDefinitionData actionDefinition = new ActionDefinitionData();
// Clean up the manifest file name.
// Instead of using a Regex, which can be computationally expensive,
// we simply strip the basePath prefix (basePath.Length + 1 characters) from the fileName.
string basePath = HostContext.GetDirectory(WellKnownDirectory.Actions);
string fileRelativePath = manifestFile;
if (manifestFile.Contains(basePath))
{
fileRelativePath = manifestFile.Remove(0, basePath.Length + 1);
}
try
{
var token = default(TemplateToken);
// Get the file ID
var fileId = context.GetFileId(manifestFile);
_fileTable = context.GetFileTable();
var fileId = templateContext.GetFileId(fileRelativePath);
// Add this file to the FileTable in executionContext if it hasn't been added already
// we use > since fileId is 1-indexed
if (fileId > executionContext.FileTable.Count)
{
executionContext.FileTable.Add(fileRelativePath);
}
// Read the file
var fileContent = File.ReadAllText(manifestFile);
using (var stringReader = new StringReader(fileContent))
{
var yamlObjectReader = new YamlObjectReader(null, stringReader);
token = TemplateReader.Read(context, "action-root", yamlObjectReader, fileId, out _);
var yamlObjectReader = new YamlObjectReader(fileId, stringReader);
token = TemplateReader.Read(templateContext, "action-root", yamlObjectReader, fileId, out _);
}
var actionMapping = token.AssertMapping("action manifest root");
var actionOutputs = default(MappingToken);
var actionRunValueToken = default(TemplateToken);
foreach (var actionPair in actionMapping)
{
var propertyName = actionPair.Key.AssertString($"action.yml property key");
@@ -83,44 +106,61 @@ namespace GitHub.Runner.Worker
actionDefinition.Name = actionPair.Value.AssertString("name").Value;
break;
case "outputs":
if (!string.IsNullOrEmpty(Environment.GetEnvironmentVariable("TESTING_COMPOSITE_ACTIONS_ALPHA")))
{
actionOutputs = actionPair.Value.AssertMapping("outputs");
break;
}
Trace.Info($"Ignore action property outputs. Outputs for a whole action is not supported yet.");
break;
case "description":
actionDefinition.Description = actionPair.Value.AssertString("description").Value;
break;
case "inputs":
ConvertInputs(context, actionPair.Value, actionDefinition);
ConvertInputs(templateContext, actionPair.Value, actionDefinition);
break;
case "runs":
actionDefinition.Execution = ConvertRuns(context, actionPair.Value);
// Defer evaluation of the runs token until after the for loop so that the ordering of the outputs key doesn't matter.
actionRunValueToken = actionPair.Value;
break;
default:
Trace.Info($"Ignore action property {propertyName}.");
break;
}
}
// Evaluate Runs Last
if (actionRunValueToken != null)
{
actionDefinition.Execution = ConvertRuns(executionContext, templateContext, actionRunValueToken, actionOutputs);
}
}
catch (Exception ex)
{
Trace.Error(ex);
context.Errors.Add(ex);
templateContext.Errors.Add(ex);
}
if (context.Errors.Count > 0)
if (templateContext.Errors.Count > 0)
{
foreach (var error in context.Errors)
foreach (var error in templateContext.Errors)
{
Trace.Error($"Action.yml load error: {error.Message}");
executionContext.Error(error.Message);
}
throw new ArgumentException($"Fail to load {manifestFile}");
throw new ArgumentException($"Fail to load {fileRelativePath}");
}
if (actionDefinition.Execution == null)
{
executionContext.Debug($"Loaded action.yml file: {StringUtil.ConvertToJson(actionDefinition)}");
throw new ArgumentException($"Top level 'runs:' section is required for {manifestFile}");
throw new ArgumentException($"Top level 'runs:' section is required for {fileRelativePath}");
}
else
{
@@ -130,6 +170,61 @@ namespace GitHub.Runner.Worker
return actionDefinition;
}
public void SetAllCompositeOutputs(
IExecutionContext parentExecutionContext,
DictionaryContextData actionOutputs)
{
// Each pair is structured like this
// We ignore "description" for now
// {
// "the-output-name": {
// "description": "",
// "value": "the value"
// },
// ...
// }
foreach (var pair in actionOutputs)
{
var outputsName = pair.Key;
var outputsAttributes = pair.Value as DictionaryContextData;
outputsAttributes.TryGetValue("value", out var val);
var outputsValue = val as StringContextData;
// Set output in the whole composite scope.
if (!String.IsNullOrEmpty(outputsName) && !String.IsNullOrEmpty(outputsValue))
{
parentExecutionContext.SetOutput(outputsName, outputsValue, out _);
}
}
}
public DictionaryContextData EvaluateCompositeOutputs(
IExecutionContext executionContext,
TemplateToken token,
IDictionary<string, PipelineContextData> extraExpressionValues)
{
var result = default(DictionaryContextData);
if (token != null)
{
var context = CreateContext(executionContext, extraExpressionValues);
try
{
token = TemplateEvaluator.Evaluate(context, "outputs", token, 0, null, omitHeader: true);
context.Errors.Check();
result = token.ToContextData().AssertDictionary("composite outputs");
}
catch (Exception ex) when (!(ex is TemplateValidationException))
{
context.Errors.Add(ex);
}
context.Errors.Check();
}
return result ?? new DictionaryContextData();
}
public List<string> EvaluateContainerArguments(
IExecutionContext executionContext,
SequenceToken token,
@@ -281,21 +376,20 @@ namespace GitHub.Runner.Worker
result.ExpressionFunctions.Add(item);
}
// Add the file table
if (_fileTable?.Count > 0)
// Add the file table from the Execution Context
for (var i = 0; i < executionContext.FileTable.Count; i++)
{
for (var i = 0 ; i < _fileTable.Count ; i++)
{
result.GetFileId(_fileTable[i]);
}
result.GetFileId(executionContext.FileTable[i]);
}
return result;
}
private ActionExecutionData ConvertRuns(
IExecutionContext executionContext,
TemplateContext context,
TemplateToken inputsToken)
TemplateToken inputsToken,
MappingToken outputs = null)
{
var runsMapping = inputsToken.AssertMapping("runs");
var usingToken = default(StringToken);
@@ -311,6 +405,8 @@ namespace GitHub.Runner.Worker
var postToken = default(StringToken);
var postEntrypointToken = default(StringToken);
var postIfToken = default(StringToken);
var stepsLoaded = default(List<Pipelines.ActionStep>);
foreach (var run in runsMapping)
{
var runsKey = run.Key.AssertString("runs key").Value;
@@ -355,6 +451,15 @@ namespace GitHub.Runner.Worker
case "pre-if":
preIfToken = run.Value.AssertString("pre-if");
break;
case "steps":
if (!string.IsNullOrEmpty(Environment.GetEnvironmentVariable("TESTING_COMPOSITE_ACTIONS_ALPHA")))
{
var steps = run.Value.AssertSequence("steps");
var evaluator = executionContext.ToPipelineTemplateEvaluator();
stepsLoaded = evaluator.LoadCompositeSteps(steps);
break;
}
throw new Exception("You aren't supposed to be using Composite Actions yet!");
default:
Trace.Info($"Ignore run property {runsKey}.");
break;
@@ -402,6 +507,22 @@ namespace GitHub.Runner.Worker
};
}
}
else if (string.Equals(usingToken.Value, "composite", StringComparison.OrdinalIgnoreCase) && !string.IsNullOrEmpty(Environment.GetEnvironmentVariable("TESTING_COMPOSITE_ACTIONS_ALPHA")))
{
if (stepsLoaded == null)
{
// TODO: Add a more helpful error message, including the file name, etc., to show the user that the problem is in their yaml file
throw new ArgumentNullException($"No steps provided.");
}
else
{
return new CompositeActionExecutionData()
{
Steps = stepsLoaded,
Outputs = outputs
};
}
}
else
{
throw new ArgumentOutOfRangeException($"'using: {usingToken.Value}' is not supported, use 'docker' or 'node12' instead.");

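The Load method above keeps the execution context's file table in step with the template context: GetFileId returns a 1-indexed id, so an id greater than the current count means the manifest has not been recorded yet. A toy illustration of that invariant (the FileTableSketch class and the file names are placeholders, not runner APIs):

using System;
using System.Collections.Generic;

public class FileTableSketch
{
    private readonly List<string> _files = new List<string>();

    // Return the 1-indexed id of a file, appending the file if it has not been seen before.
    public int GetFileId(string file)
    {
        var index = _files.IndexOf(file);
        if (index < 0)
        {
            _files.Add(file);
            index = _files.Count - 1;
        }
        return index + 1; // file ids are 1-indexed
    }

    public static void Main()
    {
        // The execution context already knows about one manifest.
        var executionFileTable = new List<string> { "first-action/action.yml" };

        // The template context is seeded from it, mirroring CreateContext in the section above.
        var templateTable = new FileTableSketch();
        foreach (var file in executionFileTable)
        {
            templateTable.GetFileId(file);
        }

        // Loading a new manifest yields an id beyond the execution table's current count...
        var fileId = templateTable.GetFileId("second-action/action.yml"); // returns 2

        // ...so the execution context appends it, keeping both tables aligned.
        if (fileId > executionFileTable.Count)
        {
            executionFileTable.Add("second-action/action.yml");
        }

        Console.WriteLine(string.Join(", ", executionFileTable));
        // first-action/action.yml, second-action/action.yml
    }
}
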
View File

@@ -94,6 +94,13 @@ namespace GitHub.Runner.Worker
if (handlerData.HasPost && (Stage == ActionRunStage.Pre || Stage == ActionRunStage.Main))
{
string postDisplayName = $"Post {this.DisplayName}";
if (Stage == ActionRunStage.Pre &&
this.DisplayName.StartsWith("Pre ", StringComparison.OrdinalIgnoreCase))
{
// Trim the leading `Pre ` from the display name.
// Otherwise, we will get `Post Pre xxx` as DisplayName for the Post step.
postDisplayName = $"Post {this.DisplayName.Substring("Pre ".Length)}";
}
var repositoryReference = Action.Reference as RepositoryPathReference;
var pathString = string.IsNullOrEmpty(repositoryReference.Path) ? string.Empty : $"/{repositoryReference.Path}";
var repoString = string.IsNullOrEmpty(repositoryReference.Ref) ? $"{repositoryReference.Name}{pathString}" :
@@ -155,6 +162,13 @@ namespace GitHub.Runner.Worker
}
var validInputs = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
if (handlerData.ExecutionType == ActionExecutionType.Container)
{
// container actions always accept 'entryPoint' and 'args' as inputs
// https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions#jobsjob_idstepswithargs
validInputs.Add("entryPoint");
validInputs.Add("args");
}
// Merge the default inputs from the definition
if (definition.Data?.Inputs != null)
{
@@ -173,13 +187,19 @@ namespace GitHub.Runner.Worker
// Validate inputs only for actions with action.yml
if (Action.Reference.Type == Pipelines.ActionSourceType.Repository)
{
var unexpectedInputs = new List<string>();
foreach (var input in userInputs)
{
if (!validInputs.Contains(input))
{
ExecutionContext.Warning($"Unexpected input '{input}', valid inputs are ['{string.Join("', '", validInputs)}']");
unexpectedInputs.Add(input);
}
}
if (unexpectedInputs.Count > 0)
{
ExecutionContext.Warning($"Unexpected input(s) '{string.Join("', '", unexpectedInputs)}', valid inputs are ['{string.Join("', '", validInputs)}']");
}
}
// Load the action environment.

View File

@@ -46,8 +46,7 @@ namespace GitHub.Runner.Worker
var postJobStep = new JobExtensionRunner(runAsync: this.StopContainersAsync,
condition: $"{PipelineTemplateConstants.Always}()",
displayName: "Stop containers",
data: data,
repositoryRef: null);
data: data);
executionContext.Debug($"Register post job cleanup for stopping/deleting containers.");
executionContext.RegisterPostJobStep(postJobStep);

View File

@@ -6,6 +6,7 @@ using System.Globalization;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading;
using System.Threading.Tasks;
using System.Web;
@@ -44,6 +45,7 @@ namespace GitHub.Runner.Worker
TaskResult? CommandResult { get; set; }
CancellationToken CancellationToken { get; }
List<ServiceEndpoint> Endpoints { get; }
TaskOrchestrationPlanReference Plan { get; }
PlanFeatures Features { get; }
Variables Variables { get; }
@@ -51,7 +53,6 @@ namespace GitHub.Runner.Worker
IDictionary<String, IDictionary<String, String>> JobDefaults { get; }
Dictionary<string, VariableValue> JobOutputs { get; }
IDictionary<String, String> EnvironmentVariables { get; }
IDictionary<String, ContextScope> Scopes { get; }
IList<String> FileTable { get; }
StepsContext StepsContext { get; }
DictionaryContextData ExpressionValues { get; }
@@ -62,17 +63,19 @@ namespace GitHub.Runner.Worker
JobContext JobContext { get; }
// Only job level ExecutionContext has JobSteps
Queue<IStep> JobSteps { get; }
List<IStep> JobSteps { get; }
// Only job level ExecutionContext has PostJobSteps
Stack<IStep> PostJobSteps { get; }
bool EchoOnActionCommand { get; set; }
IExecutionContext FinalizeContext { get; set; }
// Initialize
void InitializeJob(Pipelines.AgentJobRequestMessage message, CancellationToken token);
void CancelToken();
IExecutionContext CreateChild(Guid recordId, string displayName, string refName, string scopeName, string contextName, Dictionary<string, string> intraActionState = null, int? recordOrder = null);
IExecutionContext CreateChild(Guid recordId, string displayName, string refName, string scopeName, string contextName, Dictionary<string, string> intraActionState = null, int? recordOrder = null, IPagingLogger logger = null);
// logging
bool WriteDebug { get; }
@@ -104,6 +107,7 @@ namespace GitHub.Runner.Worker
// others
void ForceTaskComplete();
void RegisterPostJobStep(IStep step);
IStep RegisterNestedStep(IActionRunner step, DictionaryContextData inputsData, int location, Dictionary<string, string> envData, bool cleanUp = false);
}
public sealed class ExecutionContext : RunnerService, IExecutionContext
@@ -118,6 +122,9 @@ namespace GitHub.Runner.Worker
private event OnMatcherChanged _onMatcherChanged;
// Regex used to check whether a ContextName is an auto-generated "__<GUID>" placeholder (the default for composite action steps with no explicit context name).
private readonly static Regex _generatedContextNamePattern = new Regex("^__[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$", RegexOptions.Compiled | RegexOptions.CultureInvariant | RegexOptions.IgnoreCase);
private IssueMatcherConfig[] _matchers;
private IPagingLogger _logger;
@@ -141,12 +148,12 @@ namespace GitHub.Runner.Worker
public Task ForceCompleted => _forceCompleted.Task;
public CancellationToken CancellationToken => _cancellationTokenSource.Token;
public List<ServiceEndpoint> Endpoints { get; private set; }
public TaskOrchestrationPlanReference Plan { get; private set; }
public Variables Variables { get; private set; }
public Dictionary<string, string> IntraActionState { get; private set; }
public IDictionary<String, IDictionary<String, String>> JobDefaults { get; private set; }
public Dictionary<string, VariableValue> JobOutputs { get; private set; }
public IDictionary<String, String> EnvironmentVariables { get; private set; }
public IDictionary<String, ContextScope> Scopes { get; private set; }
public IList<String> FileTable { get; private set; }
public StepsContext StepsContext { get; private set; }
public DictionaryContextData ExpressionValues { get; } = new DictionaryContextData();
@@ -157,7 +164,7 @@ namespace GitHub.Runner.Worker
public List<ContainerInfo> ServiceContainers { get; private set; }
// Only job level ExecutionContext has JobSteps
public Queue<IStep> JobSteps { get; private set; }
public List<IStep> JobSteps { get; private set; }
// Only job level ExecutionContext has PostJobSteps
public Stack<IStep> PostJobSteps { get; private set; }
@@ -167,6 +174,7 @@ namespace GitHub.Runner.Worker
public bool EchoOnActionCommand { get; set; }
public IExecutionContext FinalizeContext { get; set; }
public TaskResult? Result
{
@@ -260,22 +268,62 @@ namespace GitHub.Runner.Worker
return;
}
string refName = step.GetRefName();
step.ExecutionContext = Root.CreatePostChild(step.DisplayName, refName, IntraActionState);
step.ExecutionContext = Root.CreatePostChild(step.DisplayName, IntraActionState);
Root.PostJobSteps.Push(step);
}
public IExecutionContext CreateChild(Guid recordId, string displayName, string refName, string scopeName, string contextName, Dictionary<string, string> intraActionState = null, int? recordOrder = null)
/// <summary>
/// Helper function used in CompositeActionHandler::RunAsync to
/// add a child node (a step) for the current job to Root.JobSteps at the given location.
/// </summary>
public IStep RegisterNestedStep(
IActionRunner step,
DictionaryContextData inputsData,
int location,
Dictionary<string, string> envData,
bool cleanUp = false)
{
// If the parent ContextName is empty, we generate a unique scope name for this child in the following format:
// "__<GUID>"
var safeContextName = !string.IsNullOrEmpty(ContextName) ? ContextName : $"__{Guid.NewGuid()}";
// Set the scope name. Note that, by design, every step in a composite action shares the same scope.
// This makes it much simpler to handle their outputs at the end of the composite action.
var childScopeName = !string.IsNullOrEmpty(ScopeName) ? $"{ScopeName}.{safeContextName}" : safeContextName;
var childContextName = !string.IsNullOrEmpty(step.Action.ContextName) ? step.Action.ContextName : $"__{Guid.NewGuid()}";
step.ExecutionContext = Root.CreateChild(_record.Id, step.DisplayName, _record.Id.ToString("N"), childScopeName, childContextName, logger: _logger);
step.ExecutionContext.ExpressionValues["inputs"] = inputsData;
// Set Parent Attribute for Clean Up Step
if (cleanUp)
{
step.ExecutionContext.FinalizeContext = this;
}
// Add the composite action environment variables to each step.
#if OS_WINDOWS
var envContext = new DictionaryContextData();
#else
var envContext = new CaseSensitiveDictionaryContextData();
#endif
foreach (var pair in envData)
{
envContext[pair.Key] = new StringContextData(pair.Value ?? string.Empty);
}
step.ExecutionContext.ExpressionValues["env"] = envContext;
Root.JobSteps.Insert(location, step);
return step;
}
public IExecutionContext CreateChild(Guid recordId, string displayName, string refName, string scopeName, string contextName, Dictionary<string, string> intraActionState = null, int? recordOrder = null, IPagingLogger logger = null)
{
Trace.Entering();
// TODO: Null out old, non-json refNames only if a FF is set.
if (refName != null && !refName.StartsWith("{"))
{
refName = null;
}
var child = new ExecutionContext();
child.Initialize(HostContext);
child.ScopeName = scopeName;
@@ -283,6 +331,7 @@ namespace GitHub.Runner.Worker
child.Features = Features;
child.Variables = Variables;
child.Endpoints = Endpoints;
child.Plan = Plan;
if (intraActionState == null)
{
child.IntraActionState = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
@@ -293,7 +342,6 @@ namespace GitHub.Runner.Worker
}
child.EnvironmentVariables = EnvironmentVariables;
child.JobDefaults = JobDefaults;
child.Scopes = Scopes;
child.FileTable = FileTable;
child.StepsContext = StepsContext;
foreach (var pair in ExpressionValues)
@@ -320,9 +368,15 @@ namespace GitHub.Runner.Worker
{
child.InitializeTimelineRecord(_mainTimelineId, recordId, _record.Id, ExecutionContextType.Task, displayName, refName, ++_childTimelineRecordOrder);
}
child._logger = HostContext.CreateService<IPagingLogger>();
child._logger.Setup(_mainTimelineId, recordId);
if (logger != null)
{
child._logger = logger;
}
else
{
child._logger = HostContext.CreateService<IPagingLogger>();
child._logger.Setup(_mainTimelineId, recordId);
}
return child;
}
@@ -372,7 +426,11 @@ namespace GitHub.Runner.Worker
}
}
_cancellationTokenSource?.Dispose();
if (Root != this)
{
// only dispose TokenSource for step level ExecutionContext
_cancellationTokenSource?.Dispose();
}
_logger.End();
@@ -438,7 +496,8 @@ namespace GitHub.Runner.Worker
{
ArgUtil.NotNullOrEmpty(name, nameof(name));
if (String.IsNullOrEmpty(ContextName))
// Also skip if the ContextName follows the "__<GUID>" format, which is used as the default ContextName for composite action steps.
if (String.IsNullOrEmpty(ContextName) || _generatedContextNamePattern.IsMatch(ContextName))
{
reference = null;
return;
@@ -580,7 +639,8 @@ namespace GitHub.Runner.Worker
_cancellationTokenSource = CancellationTokenSource.CreateLinkedTokenSource(token);
// Features
// Plan
Plan = message.Plan;
Features = PlanUtil.GetFeatures(message.Plan);
// Endpoints
@@ -604,16 +664,6 @@ namespace GitHub.Runner.Worker
// Steps context (StepsRunner manages adding the scoped steps context)
StepsContext = new StepsContext();
// Scopes
Scopes = new Dictionary<String, ContextScope>(StringComparer.OrdinalIgnoreCase);
if (message.Scopes?.Count > 0)
{
foreach (var scope in message.Scopes)
{
Scopes[scope.Name] = scope;
}
}
// File table
FileTable = new List<String>(message.FileTable ?? new string[0]);
@@ -660,7 +710,7 @@ namespace GitHub.Runner.Worker
PrependPath = new List<string>();
// JobSteps for job ExecutionContext
JobSteps = new Queue<IStep>();
JobSteps = new List<IStep>();
// PostJobSteps for job ExecutionContext
PostJobSteps = new Stack<IStep>();
@@ -869,7 +919,7 @@ namespace GitHub.Runner.Worker
}
}
private IExecutionContext CreatePostChild(string displayName, string refName, Dictionary<string, string> intraActionState)
private IExecutionContext CreatePostChild(string displayName, Dictionary<string, string> intraActionState)
{
if (!_expandedForPostJob)
{
@@ -879,9 +929,7 @@ namespace GitHub.Runner.Worker
}
var newGuid = Guid.NewGuid();
// TODO: Check feature flag here, conditionally set refName to newGuid.ToString("N").
return CreateChild(newGuid, displayName, refName, null, null, intraActionState, _childTimelineRecordOrder - Root.PostJobSteps.Count);
return CreateChild(newGuid, displayName, newGuid.ToString("N"), null, null, intraActionState, _childTimelineRecordOrder - Root.PostJobSteps.Count);
}
}

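RegisterNestedStep above gives every step of a composite action the same child scope, "<parent scope>.<context name>", falling back to a generated "__<GUID>" placeholder when no context name exists; that shared scope is what later lets the output-cleanup step read all of the steps' outputs from one place. A small sketch of just the name composition (pure string handling, not runner code):

using System;

public static class ScopeNameSketch
{
    // Compose a child scope name the way RegisterNestedStep above does.
    public static string ChildScope(string parentScopeName, string parentContextName)
    {
        // Fall back to a generated "__<GUID>" placeholder when the parent has no context name.
        var safeContextName = !string.IsNullOrEmpty(parentContextName)
            ? parentContextName
            : $"__{Guid.NewGuid()}";

        return !string.IsNullOrEmpty(parentScopeName)
            ? $"{parentScopeName}.{safeContextName}"
            : safeContextName;
    }

    public static void Main()
    {
        Console.WriteLine(ChildScope(null, "my_composite"));    // my_composite
        Console.WriteLine(ChildScope("outer", "my_composite")); // outer.my_composite
        Console.WriteLine(ChildScope(null, null));              // __<generated guid>
    }
}
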
View File

@@ -12,6 +12,8 @@ namespace GitHub.Runner.Worker.Expressions
{
public sealed class HashFilesFunction : Function
{
private const int _hashFileTimeoutSeconds = 120;
protected sealed override Object EvaluateCore(
EvaluationContext context,
out ResultMemory resultMemory)
@@ -89,19 +91,29 @@ namespace GitHub.Runner.Worker.Expressions
}
env["patterns"] = string.Join(Environment.NewLine, patterns);
int exitCode = p.ExecuteAsync(workingDirectory: githubWorkspace,
fileName: node,
arguments: $"\"{hashFilesScript.Replace("\"", "\\\"")}\"",
environment: env,
requireExitCodeZero: false,
cancellationToken: new CancellationTokenSource(TimeSpan.FromSeconds(120)).Token).GetAwaiter().GetResult();
if (exitCode != 0)
using (var tokenSource = new CancellationTokenSource(TimeSpan.FromSeconds(_hashFileTimeoutSeconds)))
{
throw new InvalidOperationException($"hashFiles('{ExpressionUtility.StringEscape(string.Join(", ", patterns))}') failed. Fail to hash files under directory '{githubWorkspace}'");
}
try
{
int exitCode = p.ExecuteAsync(workingDirectory: githubWorkspace,
fileName: node,
arguments: $"\"{hashFilesScript.Replace("\"", "\\\"")}\"",
environment: env,
requireExitCodeZero: false,
cancellationToken: tokenSource.Token).GetAwaiter().GetResult();
return hashResult;
if (exitCode != 0)
{
throw new InvalidOperationException($"hashFiles('{ExpressionUtility.StringEscape(string.Join(", ", patterns))}') failed. Fail to hash files under directory '{githubWorkspace}'");
}
}
catch (OperationCanceledException) when (tokenSource.IsCancellationRequested)
{
throw new TimeoutException($"hashFiles('{ExpressionUtility.StringEscape(string.Join(", ", patterns))}') couldn't finish within {_hashFileTimeoutSeconds} seconds.");
}
return hashResult;
}
}
private sealed class HashFilesTrace : ITraceWriter

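The rewrite above bounds hashFiles with a 120-second CancellationTokenSource and converts the resulting OperationCanceledException into a TimeoutException, so the failure reads as a timeout rather than an anonymous cancellation. The same wrapping pattern in isolation, as a sketch (the RunWithTimeoutAsync helper and its delegate parameter are placeholders for the real process invocation):

using System;
using System.Threading;
using System.Threading.Tasks;

public static class TimeoutSketch
{
    private const int TimeoutSeconds = 120;

    // Run an operation with a hard timeout and surface the timeout as a TimeoutException.
    public static async Task<int> RunWithTimeoutAsync(Func<CancellationToken, Task<int>> runNodeAsync)
    {
        using (var tokenSource = new CancellationTokenSource(TimeSpan.FromSeconds(TimeoutSeconds)))
        {
            try
            {
                return await runNodeAsync(tokenSource.Token);
            }
            catch (OperationCanceledException) when (tokenSource.IsCancellationRequested)
            {
                // Only our own timeout is translated; an external cancellation would propagate unchanged.
                throw new TimeoutException($"The operation couldn't finish within {TimeoutSeconds} seconds.");
            }
        }
    }
}
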
View File

@@ -0,0 +1,117 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using Pipelines = GitHub.DistributedTask.Pipelines;
namespace GitHub.Runner.Worker.Handlers
{
[ServiceLocator(Default = typeof(CompositeActionHandler))]
public interface ICompositeActionHandler : IHandler
{
CompositeActionExecutionData Data { get; set; }
}
public sealed class CompositeActionHandler : Handler, ICompositeActionHandler
{
public CompositeActionExecutionData Data { get; set; }
public Task RunAsync(ActionRunStage stage)
{
// Validate args.
Trace.Entering();
ArgUtil.NotNull(ExecutionContext, nameof(ExecutionContext));
ArgUtil.NotNull(Inputs, nameof(Inputs));
var githubContext = ExecutionContext.ExpressionValues["github"] as GitHubContext;
ArgUtil.NotNull(githubContext, nameof(githubContext));
var tempDirectory = HostContext.GetDirectory(WellKnownDirectory.Temp);
// Resolve action steps
var actionSteps = Data.Steps;
// Create Context Data to reuse for each composite action step
var inputsData = new DictionaryContextData();
foreach (var i in Inputs)
{
inputsData[i.Key] = new StringContextData(i.Value);
}
// Add each composite action step to the front of the queue
int location = 0;
foreach (Pipelines.ActionStep aStep in actionSteps)
{
// Ex:
// runs:
// using: "composite"
// steps:
// - uses: example/test-composite@v2 (a)
// - run echo hello world (b)
// - run echo hello world 2 (c)
//
// ethanchewy/test-composite/action.yaml
// runs:
// using: "composite"
// steps:
// - run echo hello world 3 (d)
// - run echo hello world 4 (e)
//
// Steps processed as follows:
// | a |
// | a | => | d |
// (Run step d)
// | a |
// | a | => | e |
// (Run step e)
// | a |
// (Run step a)
// | b |
// (Run step b)
// | c |
// (Run step c)
// Done.
var actionRunner = HostContext.CreateService<IActionRunner>();
actionRunner.Action = aStep;
actionRunner.Stage = stage;
actionRunner.Condition = aStep.Condition;
var step = ExecutionContext.RegisterNestedStep(actionRunner, inputsData, location, Environment);
InitializeScope(step);
location++;
}
// Create a step that handles all the composite action steps' outputs
Pipelines.ActionStep cleanOutputsStep = new Pipelines.ActionStep();
cleanOutputsStep.ContextName = ExecutionContext.ContextName;
// Use the same reference type as our composite steps.
cleanOutputsStep.Reference = Action;
var actionRunner2 = HostContext.CreateService<IActionRunner>();
actionRunner2.Action = cleanOutputsStep;
actionRunner2.Stage = ActionRunStage.Main;
actionRunner2.Condition = "always()";
ExecutionContext.RegisterNestedStep(actionRunner2, inputsData, location, Environment, true);
return Task.CompletedTask;
}
private void InitializeScope(IStep step)
{
var stepsContext = step.ExecutionContext.StepsContext;
var scopeName = step.ExecutionContext.ScopeName;
step.ExecutionContext.ExpressionValues["steps"] = stepsContext.GetScope(scopeName);
}
}
}

View File

@@ -0,0 +1,53 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using GitHub.DistributedTask.ObjectTemplating.Schema;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using Pipelines = GitHub.DistributedTask.Pipelines;
namespace GitHub.Runner.Worker.Handlers
{
[ServiceLocator(Default = typeof(CompositeActionOutputHandler))]
public interface ICompositeActionOutputHandler : IHandler
{
CompositeActionExecutionData Data { get; set; }
}
public sealed class CompositeActionOutputHandler : Handler, ICompositeActionOutputHandler
{
public CompositeActionExecutionData Data { get; set; }
public Task RunAsync(ActionRunStage stage)
{
// Evaluate the mapped outputs value
if (Data.Outputs != null)
{
// Evaluate the outputs in the steps context to easily retrieve the values
var actionManifestManager = HostContext.GetService<IActionManifestManager>();
// Format ExpressionValues to Dictionary<string, PipelineContextData>
var evaluateContext = new Dictionary<string, PipelineContextData>(StringComparer.OrdinalIgnoreCase);
foreach (var pair in ExecutionContext.ExpressionValues)
{
evaluateContext[pair.Key] = pair.Value;
}
// Get the evaluated composite outputs' values mapped to the output names
DictionaryContextData actionOutputs = actionManifestManager.EvaluateCompositeOutputs(ExecutionContext, Data.Outputs, evaluateContext);
// Set the outputs for the outputs object in the whole composite action
actionManifestManager.SetAllCompositeOutputs(ExecutionContext.FinalizeContext, actionOutputs);
}
return Task.CompletedTask;
}
}
}

View File

@@ -66,6 +66,19 @@ namespace GitHub.Runner.Worker.Handlers
handler = HostContext.CreateService<IRunnerPluginHandler>();
(handler as IRunnerPluginHandler).Data = data as PluginActionExecutionData;
}
else if (data.ExecutionType == ActionExecutionType.Composite)
{
if (executionContext.FinalizeContext == null)
{
handler = HostContext.CreateService<ICompositeActionHandler>();
(handler as ICompositeActionHandler).Data = data as CompositeActionExecutionData;
}
else
{
handler = HostContext.CreateService<ICompositeActionOutputHandler>();
(handler as ICompositeActionOutputHandler).Data = data as CompositeActionExecutionData;
}
}
else
{
// This should never happen.

View File

@@ -352,15 +352,24 @@ namespace GitHub.Runner.Worker.Handlers
if (File.Exists(gitConfigPath))
{
// Check if the config contains the workflow repository url
var qualifiedRepository = _executionContext.GetGitHubContext("repository");
var configMatch = $"url = https://github.com/{qualifiedRepository}";
var serverUrl = _executionContext.GetGitHubContext("server_url");
serverUrl = !string.IsNullOrEmpty(serverUrl) ? serverUrl : "https://github.com";
var host = new Uri(serverUrl, UriKind.Absolute).Host;
var nameWithOwner = _executionContext.GetGitHubContext("repository");
var patterns = new[] {
$"url = {serverUrl}/{nameWithOwner}",
$"url = git@{host}:{nameWithOwner}.git",
};
var content = File.ReadAllText(gitConfigPath);
foreach (var line in content.Split("\n").Select(x => x.Trim()))
{
if (String.Equals(line, configMatch, StringComparison.OrdinalIgnoreCase))
foreach (var pattern in patterns)
{
repositoryPath = directoryPath;
break;
if (String.Equals(line, pattern, StringComparison.OrdinalIgnoreCase))
{
repositoryPath = directoryPath;
break;
}
}
}
}
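
This hunk broadens workflow-repository detection in .git/config: the host now comes from the github.server_url context (defaulting to https://github.com), and both the HTTPS and SSH remote forms are accepted. A small sketch of the resulting patterns, using the values exercised by the tests later in this diff:

using System;

var serverUrl = "https://github.com";                  // github.server_url, or the default fallback
var nameWithOwner = "my-org/workflow-repo";            // github.repository
var host = new Uri(serverUrl, UriKind.Absolute).Host;  // "github.com"
var patterns = new[]
{
    $"url = {serverUrl}/{nameWithOwner}",              // url = https://github.com/my-org/workflow-repo
    $"url = git@{host}:{nameWithOwner}.git",           // url = git@github.com:my-org/workflow-repo.git
};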

View File

@@ -1,31 +0,0 @@
using Newtonsoft.Json;
namespace GitHub.Runner.Worker
{
public static class IStepExtensions
{
public static string GetRefName(this IStep step, string defaultRefName = null)
{
// TODO: Really check a feature flag.
if (s_featureFlagEnabled)
{
if (step is JobExtensionRunner extensionRunner && extensionRunner.RepositoryRef != null)
{
return JsonConvert.SerializeObject(extensionRunner.RepositoryRef);
}
if (step is IActionRunner actionRunner && actionRunner.Action?.Reference != null)
{
return JsonConvert.SerializeObject(actionRunner.Action.Reference);
}
// RefName should always be valid json or null.
return null;
}
return defaultRefName;
}
private static bool s_featureFlagEnabled = true;
}
}

View File

@@ -64,6 +64,20 @@ namespace GitHub.Runner.Worker
context.Debug($"Starting: Set up job");
context.Output($"Current runner version: '{BuildConstants.RunnerPackage.Version}'");
var setting = HostContext.GetService<IConfigurationStore>().GetSettings();
var credFile = HostContext.GetConfigFile(WellKnownConfigFile.Credentials);
if (File.Exists(credFile))
{
var credData = IOUtil.LoadObject<CredentialData>(credFile);
if (credData != null &&
credData.Data.TryGetValue("clientId", out var clientId))
{
// Print out the host name for self-hosted runners
context.Output($"Runner name: '{setting.AgentName}'");
context.Output($"Machine name: '{Environment.MachineName}'");
}
}
var setupInfoFile = HostContext.GetConfigFile(WellKnownConfigFile.SetupInfo);
if (File.Exists(setupInfoFile))
{
@@ -213,10 +227,9 @@ namespace GitHub.Runner.Worker
containers.AddRange(jobContext.ServiceContainers);
preJobSteps.Add(new JobExtensionRunner(runAsync: containerProvider.StartContainersAsync,
condition: $"{PipelineTemplateConstants.Success}()",
displayName: "Initialize containers",
data: (object)containers,
repositoryRef: null));
condition: $"{PipelineTemplateConstants.Success}()",
displayName: "Initialize containers",
data: (object)containers));
}
// Add action steps
@@ -261,19 +274,18 @@ namespace GitHub.Runner.Worker
// Create execution context for pre-job steps
foreach (var step in preJobSteps)
{
if (step is JobExtensionRunner extensionStep)
if (step is JobExtensionRunner)
{
ArgUtil.NotNull(extensionStep, step.DisplayName);
JobExtensionRunner extensionStep = step as JobExtensionRunner;
ArgUtil.NotNull(extensionStep, extensionStep.DisplayName);
Guid stepId = Guid.NewGuid();
var refName = step.GetRefName(defaultRefName: null);
extensionStep.ExecutionContext = jobContext.CreateChild(stepId, extensionStep.DisplayName, refName, null, stepId.ToString("N"));
extensionStep.ExecutionContext = jobContext.CreateChild(stepId, extensionStep.DisplayName, null, null, stepId.ToString("N"));
}
else if (step is IActionRunner actionStep)
{
ArgUtil.NotNull(actionStep, step.DisplayName);
Guid stepId = Guid.NewGuid();
var refName = step.GetRefName(defaultRefName: stepId.ToString("N"));
actionStep.ExecutionContext = jobContext.CreateChild(stepId, actionStep.DisplayName, refName, null, null, intraActionStates[actionStep.Action.Id]);
actionStep.ExecutionContext = jobContext.CreateChild(stepId, actionStep.DisplayName, stepId.ToString("N"), null, null, intraActionStates[actionStep.Action.Id]);
}
}
@@ -284,8 +296,7 @@ namespace GitHub.Runner.Worker
{
ArgUtil.NotNull(actionStep, step.DisplayName);
intraActionStates.TryGetValue(actionStep.Action.Id, out var intraActionState);
var refName = step.GetRefName(defaultRefName: actionStep.Action.Name);
actionStep.ExecutionContext = jobContext.CreateChild(actionStep.Action.Id, actionStep.DisplayName, refName, actionStep.Action.ScopeName, actionStep.Action.ContextName, intraActionState);
actionStep.ExecutionContext = jobContext.CreateChild(actionStep.Action.Id, actionStep.DisplayName, actionStep.Action.Name, actionStep.Action.ScopeName, actionStep.Action.ContextName, intraActionState);
}
}

View File

@@ -1,7 +1,7 @@
using System;
using System.Threading.Tasks;
using GitHub.DistributedTask.Expressions2;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.DistributedTask.Pipelines;
namespace GitHub.Runner.Worker
{
@@ -9,26 +9,22 @@ namespace GitHub.Runner.Worker
{
private readonly object _data;
private readonly Func<IExecutionContext, object, Task> _runAsync;
private readonly RepositoryPathReference _repositoryRef;
public JobExtensionRunner(
Func<IExecutionContext, object, Task> runAsync,
string condition,
string displayName,
object data,
RepositoryPathReference repositoryRef)
object data)
{
_runAsync = runAsync;
Condition = condition;
DisplayName = displayName;
_data = data;
_repositoryRef = repositoryRef;
}
public string Condition { get; set; }
public TemplateToken ContinueOnError => new BooleanToken(null, null, null, false);
public string DisplayName { get; set; }
public RepositoryPathReference RepositoryRef => _repositoryRef;
public IExecutionContext ExecutionContext { get; set; }
public TemplateToken Timeout => new NumberToken(null, null, null, 0);
public object Data => _data;

View File

@@ -5,21 +5,13 @@ using GitHub.Services.Common;
using GitHub.Services.WebApi;
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using System.Net.Http;
using System.Text;
using System.IO.Compression;
using System.Diagnostics;
using Newtonsoft.Json.Linq;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.ObjectTemplating;
namespace GitHub.Runner.Worker
{
@@ -122,13 +114,6 @@ namespace GitHub.Runner.Worker
_tempDirectoryManager = HostContext.GetService<ITempDirectoryManager>();
_tempDirectoryManager.InitializeTempDirectory(jobContext);
// // Expand container properties
// jobContext.Container?.ExpandProperties(jobContext.Variables);
// foreach (var sidecar in jobContext.SidecarContainers)
// {
// sidecar.ExpandProperties(jobContext.Variables);
// }
// Get the job extension.
Trace.Info("Getting job extension.");
IJobExtension jobExtension = HostContext.CreateService<IJobExtension>();
@@ -167,7 +152,7 @@ namespace GitHub.Runner.Worker
{
foreach (var step in jobSteps)
{
jobContext.JobSteps.Enqueue(step);
jobContext.JobSteps.Add(step);
}
await stepsRunner.RunAsync(jobContext);
@@ -254,6 +239,12 @@ namespace GitHub.Runner.Worker
Trace.Error(ex);
return TaskResult.Failed;
}
catch (TaskOrchestrationPlanTerminatedException ex)
{
Trace.Error($"TaskOrchestrationPlanTerminatedException received, while attempting to raise JobCompletedEvent for job {message.JobId}.");
Trace.Error(ex);
return TaskResult.Failed;
}
catch (Exception ex)
{
Trace.Error($"Catch exception while attempting to raise JobCompletedEvent for job {message.JobId}, job request {message.RequestId}.");

View File

@@ -59,14 +59,14 @@ namespace GitHub.Runner.Worker
checkPostJobActions = true;
while (jobContext.PostJobSteps.TryPop(out var postStep))
{
jobContext.JobSteps.Enqueue(postStep);
jobContext.JobSteps.Add(postStep);
}
continue;
}
var step = jobContext.JobSteps.Dequeue();
var nextStep = jobContext.JobSteps.Count > 0 ? jobContext.JobSteps.Peek() : null;
var step = jobContext.JobSteps[0];
jobContext.JobSteps.RemoveAt(0);
Trace.Info($"Processing step: DisplayName='{step.DisplayName}'");
ArgUtil.NotNull(step.ExecutionContext, nameof(step.ExecutionContext));
@@ -82,27 +82,46 @@ namespace GitHub.Runner.Worker
step.ExecutionContext.ExpressionFunctions.Add(new FunctionInfo<SuccessFunction>(PipelineTemplateConstants.Success, 0, 0));
step.ExecutionContext.ExpressionFunctions.Add(new FunctionInfo<HashFilesFunction>(PipelineTemplateConstants.HashFiles, 1, byte.MaxValue));
// Initialize scope
if (InitializeScope(step, scopeInputs))
{
// Populate env context for each step
Trace.Info("Initialize Env context for step");
step.ExecutionContext.ExpressionValues["steps"] = step.ExecutionContext.StepsContext.GetScope(step.ExecutionContext.ScopeName);
// Populate env context for each step
Trace.Info("Initialize Env context for step");
#if OS_WINDOWS
var envContext = new DictionaryContextData();
var envContext = new DictionaryContextData();
#else
var envContext = new CaseSensitiveDictionaryContextData();
var envContext = new CaseSensitiveDictionaryContextData();
#endif
step.ExecutionContext.ExpressionValues["env"] = envContext;
foreach (var pair in step.ExecutionContext.EnvironmentVariables)
// Global env
foreach (var pair in step.ExecutionContext.EnvironmentVariables)
{
envContext[pair.Key] = new StringContextData(pair.Value ?? string.Empty);
}
// Env from the outer step stomps over the global env
if (step.ExecutionContext.ExpressionValues.TryGetValue("env", out var envContextData))
{
#if OS_WINDOWS
var dict = envContextData as DictionaryContextData;
#else
var dict = envContextData as CaseSensitiveDictionaryContextData;
#endif
foreach (var pair in dict)
{
envContext[pair.Key] = new StringContextData(pair.Value ?? string.Empty);
envContext[pair.Key] = pair.Value;
}
}
if (step is IActionRunner actionStep)
step.ExecutionContext.ExpressionValues["env"] = envContext;
bool evaluateStepEnvFailed = false;
if (step is IActionRunner actionStep)
{
// Set GITHUB_ACTION
step.ExecutionContext.SetGitHubContext("action", actionStep.Action.Name);
try
{
// Set GITHUB_ACTION
step.ExecutionContext.SetGitHubContext("action", actionStep.Action.Name);
// Evaluate and merge action's env block to env context
var templateEvaluator = step.ExecutionContext.ToPipelineTemplateEvaluator();
var actionEnvironment = templateEvaluator.EvaluateStepEnvironment(actionStep.Action.Environment, step.ExecutionContext.ExpressionValues, step.ExecutionContext.ExpressionFunctions, VarUtil.EnvironmentVariableKeyComparer);
@@ -111,7 +130,18 @@ namespace GitHub.Runner.Worker
envContext[env.Key] = new StringContextData(env.Value ?? string.Empty);
}
}
catch (Exception ex)
{
// Fail the step since there was an evaluation error.
Trace.Info("Caught exception from expression for step.env");
evaluateStepEnvFailed = true;
step.ExecutionContext.Error(ex);
CompleteStep(step, TaskResult.Failed);
}
}
if (!evaluateStepEnvFailed)
{
try
{
// Register job cancellation call back only if job cancellation token not been fire before each step run
@@ -195,19 +225,19 @@ namespace GitHub.Runner.Worker
{
// Condition == false
Trace.Info("Skipping step due to condition evaluation.");
CompleteStep(step, nextStep, TaskResult.Skipped, resultCode: conditionTraceWriter.Trace);
CompleteStep(step, TaskResult.Skipped, resultCode: conditionTraceWriter.Trace);
}
else if (conditionEvaluateError != null)
{
// Fail the step since there was an evaluation error.
step.ExecutionContext.Error(conditionEvaluateError);
CompleteStep(step, nextStep, TaskResult.Failed);
CompleteStep(step, TaskResult.Failed);
}
else
{
// Run the step.
await RunStepAsync(step, jobContext.CancellationToken);
CompleteStep(step, nextStep);
CompleteStep(step);
}
}
finally
@@ -369,117 +399,9 @@ namespace GitHub.Runner.Worker
step.ExecutionContext.Debug($"Finishing: {step.DisplayName}");
}
private bool InitializeScope(IStep step, Dictionary<string, PipelineContextData> scopeInputs)
private void CompleteStep(IStep step, TaskResult? result = null, string resultCode = null)
{
var executionContext = step.ExecutionContext;
var stepsContext = executionContext.StepsContext;
if (!string.IsNullOrEmpty(executionContext.ScopeName))
{
// Gather uninitialized current and ancestor scopes
var scope = executionContext.Scopes[executionContext.ScopeName];
var scopesToInitialize = default(Stack<ContextScope>);
while (scope != null && !scopeInputs.ContainsKey(scope.Name))
{
if (scopesToInitialize == null)
{
scopesToInitialize = new Stack<ContextScope>();
}
scopesToInitialize.Push(scope);
scope = string.IsNullOrEmpty(scope.ParentName) ? null : executionContext.Scopes[scope.ParentName];
}
// Initialize current and ancestor scopes
while (scopesToInitialize?.Count > 0)
{
scope = scopesToInitialize.Pop();
executionContext.Debug($"Initializing scope '{scope.Name}'");
executionContext.ExpressionValues["steps"] = stepsContext.GetScope(scope.ParentName);
executionContext.ExpressionValues["inputs"] = !String.IsNullOrEmpty(scope.ParentName) ? scopeInputs[scope.ParentName] : null;
var templateEvaluator = executionContext.ToPipelineTemplateEvaluator();
var inputs = default(DictionaryContextData);
try
{
inputs = templateEvaluator.EvaluateStepScopeInputs(scope.Inputs, executionContext.ExpressionValues, executionContext.ExpressionFunctions);
}
catch (Exception ex)
{
Trace.Info($"Caught exception from initialize scope '{scope.Name}'");
Trace.Error(ex);
executionContext.Error(ex);
executionContext.Complete(TaskResult.Failed);
return false;
}
scopeInputs[scope.Name] = inputs;
}
}
// Setup expression values
var scopeName = executionContext.ScopeName;
executionContext.ExpressionValues["steps"] = stepsContext.GetScope(scopeName);
executionContext.ExpressionValues["inputs"] = string.IsNullOrEmpty(scopeName) ? null : scopeInputs[scopeName];
return true;
}
private void CompleteStep(IStep step, IStep nextStep, TaskResult? result = null, string resultCode = null)
{
var executionContext = step.ExecutionContext;
if (!string.IsNullOrEmpty(executionContext.ScopeName))
{
// Gather current and ancestor scopes to finalize
var scope = executionContext.Scopes[executionContext.ScopeName];
var scopesToFinalize = default(Queue<ContextScope>);
var nextStepScopeName = nextStep?.ExecutionContext.ScopeName;
while (scope != null &&
!string.Equals(nextStepScopeName, scope.Name, StringComparison.OrdinalIgnoreCase) &&
!(nextStepScopeName ?? string.Empty).StartsWith($"{scope.Name}.", StringComparison.OrdinalIgnoreCase))
{
if (scopesToFinalize == null)
{
scopesToFinalize = new Queue<ContextScope>();
}
scopesToFinalize.Enqueue(scope);
scope = string.IsNullOrEmpty(scope.ParentName) ? null : executionContext.Scopes[scope.ParentName];
}
// Finalize current and ancestor scopes
var stepsContext = step.ExecutionContext.StepsContext;
while (scopesToFinalize?.Count > 0)
{
scope = scopesToFinalize.Dequeue();
executionContext.Debug($"Finalizing scope '{scope.Name}'");
executionContext.ExpressionValues["steps"] = stepsContext.GetScope(scope.Name);
executionContext.ExpressionValues["inputs"] = null;
var templateEvaluator = executionContext.ToPipelineTemplateEvaluator();
var outputs = default(DictionaryContextData);
try
{
outputs = templateEvaluator.EvaluateStepScopeOutputs(scope.Outputs, executionContext.ExpressionValues, executionContext.ExpressionFunctions);
}
catch (Exception ex)
{
Trace.Info($"Caught exception from finalize scope '{scope.Name}'");
Trace.Error(ex);
executionContext.Error(ex);
executionContext.Complete(TaskResult.Failed);
return;
}
if (outputs?.Count > 0)
{
var parentScopeName = scope.ParentName;
var contextName = scope.ContextName;
foreach (var pair in outputs)
{
var outputName = pair.Key;
var outputValue = pair.Value.ToString();
stepsContext.SetOutput(parentScopeName, contextName, outputName, outputValue, out var reference);
executionContext.Debug($"{reference}='{outputValue}'");
}
}
}
}
executionContext.Complete(result, resultCode: resultCode);
}
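
Taken together, the StepsRunner changes drop the old scope initialization/finalization machinery and build each step's env context by simple layering: job-level environment variables first, then any env already present in the step's expression values (pushed down by an enclosing composite step), then the step's own evaluated env block; an evaluation failure in that block now fails only that step. A rough sketch of the layering order with plain dictionaries and a hypothetical BuildStepEnv helper (the runner uses DictionaryContextData, case-insensitive only on Windows):

using System;
using System.Collections.Generic;

static Dictionary<string, string> BuildStepEnv(
    IDictionary<string, string> jobEnv,        // job-level environment variables
    IDictionary<string, string> outerStepEnv,  // env pushed down from an enclosing composite step
    IDictionary<string, string> stepEnvBlock)  // the step's own evaluated env: block
{
    var env = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
    foreach (var pair in jobEnv) { env[pair.Key] = pair.Value ?? string.Empty; }        // 1. global env
    foreach (var pair in outerStepEnv) { env[pair.Key] = pair.Value; }                  // 2. outer step env overrides
    foreach (var pair in stepEnvBlock) { env[pair.Key] = pair.Value ?? string.Empty; }  // 3. step env block wins last
    return env;
}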

View File

@@ -7,7 +7,8 @@
"name": "string",
"description": "string",
"inputs": "inputs",
"runs": "runs"
"runs": "runs",
"outputs": "outputs"
},
"loose-key-type": "non-empty-string",
"loose-value-type": "any"
@@ -28,11 +29,26 @@
"loose-value-type": "any"
}
},
"outputs": {
"mapping": {
"loose-key-type": "non-empty-string",
"loose-value-type": "outputs-attributes"
}
},
"outputs-attributes": {
"mapping": {
"properties": {
"description": "string",
"value": "output-value"
}
}
},
"runs": {
"one-of": [
"container-runs",
"node12-runs",
"plugin-runs"
"plugin-runs",
"composite-runs"
]
},
"container-runs": {
@@ -83,12 +99,49 @@
}
}
},
"composite-runs": {
"mapping": {
"properties": {
"using": "non-empty-string",
"steps": "composite-steps"
}
}
},
"composite-steps": {
"context": [
"github",
"strategy",
"matrix",
"steps",
"inputs",
"job",
"runner",
"env",
"hashFiles(1,255)"
],
"sequence": {
"item-type": "any"
}
},
"container-runs-context": {
"context": [
"inputs"
],
"string": {}
},
"output-value": {
"context": [
"github",
"strategy",
"matrix",
"steps",
"inputs",
"job",
"runner",
"env"
],
"string": {}
},
"input-default-context": {
"context": [
"github",

View File

@@ -317,5 +317,37 @@ namespace GitHub.DistributedTask.WebApi
userState: userState,
cancellationToken: cancellationToken);
}
/// <summary>
/// [Preview API] Resolves information required to download actions (URL, token) defined in an orchestration.
/// </summary>
/// <param name="scopeIdentifier">The project GUID to scope the request</param>
/// <param name="hubName">The name of the server hub: "build" for the Build server or "rm" for the Release Management server</param>
/// <param name="planId"></param>
/// <param name="actionReferenceList"></param>
/// <param name="userState"></param>
/// <param name="cancellationToken">The cancellation token to cancel operation.</param>
public virtual Task<ActionDownloadInfoCollection> ResolveActionDownloadInfoAsync(
Guid scopeIdentifier,
string hubName,
Guid planId,
ActionReferenceList actionReferenceList,
object userState = null,
CancellationToken cancellationToken = default)
{
HttpMethod httpMethod = new HttpMethod("POST");
Guid locationId = new Guid("27d7f831-88c1-4719-8ca1-6a061dad90eb");
object routeValues = new { scopeIdentifier = scopeIdentifier, hubName = hubName, planId = planId };
HttpContent content = new ObjectContent<ActionReferenceList>(actionReferenceList, new VssJsonMediaTypeFormatter(true));
return SendAsync<ActionDownloadInfoCollection>(
httpMethod,
locationId,
routeValues: routeValues,
version: new ApiResourceVersion(6.0, 1),
userState: userState,
cancellationToken: cancellationToken,
content: content);
}
}
}
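
The new ResolveActionDownloadInfoAsync endpoint posts an ActionReferenceList (the DataContract types added further down in this diff) and returns download URLs, resolved SHAs, and an optional token per action. A hedged usage sketch, assuming an already-constructed client plus scopeIdentifier and planId values:

using System;
using System.Collections.Generic;
using GitHub.DistributedTask.WebApi;

var references = new ActionReferenceList
{
    Actions = new List<ActionReference>
    {
        new ActionReference { NameWithOwner = "actions/checkout", Ref = "v2" },
    }
};

ActionDownloadInfoCollection downloadInfo = await client.ResolveActionDownloadInfoAsync(
    scopeIdentifier, "build", planId, references);

foreach (var entry in downloadInfo.Actions)
{
    Console.WriteLine($"{entry.Key} -> {entry.Value.ResolvedSha} ({entry.Value.TarballUrl})");
}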

View File

@@ -22,7 +22,7 @@ namespace GitHub.DistributedTask.Pipelines
[DataContract]
[KnownType(typeof(ContainerRegistryReference))]
[KnownType(typeof(RepositoryPathReference))]
[KnownType(typeof(ScriptReference))]
[KnownType(typeof(ScriptReference))]
[JsonConverter(typeof(ActionStepDefinitionReferenceConverter))]
[EditorBrowsable(EditorBrowsableState.Never)]
public abstract class ActionStepDefinitionReference

View File

@@ -42,7 +42,12 @@ namespace GitHub.DistributedTask.Pipelines.ContextData
var floored = Math.Floor(m_value);
if (m_value == floored && m_value <= (Double)Int32.MaxValue && m_value >= (Double)Int32.MinValue)
{
Int32 flooredInt = (Int32)floored;
var flooredInt = (Int32)floored;
return (JToken)flooredInt;
}
else if (m_value == floored && m_value <= (Double)Int64.MaxValue && m_value >= (Double)Int64.MinValue)
{
var flooredInt = (Int64)floored;
return (JToken)flooredInt;
}
else

View File

@@ -65,6 +65,7 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
public const String StepEnv = "step-env";
public const String StepIfResult = "step-if-result";
public const String Steps = "steps";
public const String StepsInTemplate = "steps-in-template";
public const String StepsScopeInputs = "steps-scope-inputs";
public const String StepsScopeOutputs = "steps-scope-outputs";
public const String StepsTemplateRoot = "steps-template-root";

View File

@@ -29,7 +29,6 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
var evaluationResult = EvaluationResult.CreateIntermediateResult(null, ifResult);
return evaluationResult.IsTruthy;
}
internal static Boolean? ConvertToStepContinueOnError(
TemplateContext context,
TemplateToken token,
@@ -264,5 +263,351 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
return result;
}
// Note: this was originally List<Step>, but we changed it to List<ActionStep> to use the "Inputs" attribute
internal static List<ActionStep> ConvertToSteps(
TemplateContext context,
TemplateToken steps)
{
var stepsSequence = steps.AssertSequence($"job {PipelineTemplateConstants.Steps}");
var result = new List<ActionStep>();
foreach (var stepsItem in stepsSequence)
{
var step = ConvertToStep(context, stepsItem);
if (step != null) // step == null means we hit an error during step conversion; there should be an error in context.Errors
{
if (step.Enabled)
{
result.Add(step);
}
}
}
return result;
}
private static ActionStep ConvertToStep(
TemplateContext context,
TemplateToken stepsItem)
{
var step = stepsItem.AssertMapping($"{PipelineTemplateConstants.Steps} item");
var continueOnError = default(ScalarToken);
var env = default(TemplateToken);
var id = default(StringToken);
var ifCondition = default(String);
var ifToken = default(ScalarToken);
var name = default(ScalarToken);
var run = default(ScalarToken);
var scope = default(StringToken);
var timeoutMinutes = default(ScalarToken);
var uses = default(StringToken);
var with = default(TemplateToken);
var workingDir = default(ScalarToken);
var path = default(ScalarToken);
var clean = default(ScalarToken);
var fetchDepth = default(ScalarToken);
var lfs = default(ScalarToken);
var submodules = default(ScalarToken);
var shell = default(ScalarToken);
foreach (var stepProperty in step)
{
var propertyName = stepProperty.Key.AssertString($"{PipelineTemplateConstants.Steps} item key");
switch (propertyName.Value)
{
case PipelineTemplateConstants.Clean:
clean = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Clean}");
break;
case PipelineTemplateConstants.ContinueOnError:
ConvertToStepContinueOnError(context, stepProperty.Value, allowExpressions: true); // Validate early if possible
continueOnError = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} {PipelineTemplateConstants.ContinueOnError}");
break;
case PipelineTemplateConstants.Env:
ConvertToStepEnvironment(context, stepProperty.Value, StringComparer.Ordinal, allowExpressions: true); // Validate early if possible
env = stepProperty.Value;
break;
case PipelineTemplateConstants.FetchDepth:
fetchDepth = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.FetchDepth}");
break;
case PipelineTemplateConstants.Id:
id = stepProperty.Value.AssertString($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Id}");
if (!NameValidation.IsValid(id.Value, true))
{
context.Error(id, $"Step id {id.Value} is invalid. Ids must start with a letter or '_' and contain only alphanumeric characters, '-', or '_'");
}
break;
case PipelineTemplateConstants.If:
ifToken = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.If}");
break;
case PipelineTemplateConstants.Lfs:
lfs = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Lfs}");
break;
case PipelineTemplateConstants.Name:
name = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Name}");
break;
case PipelineTemplateConstants.Path:
path = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Path}");
break;
case PipelineTemplateConstants.Run:
run = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Run}");
break;
case PipelineTemplateConstants.Shell:
shell = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Shell}");
break;
case PipelineTemplateConstants.Scope:
scope = stepProperty.Value.AssertString($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Scope}");
break;
case PipelineTemplateConstants.Submodules:
submodules = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Submodules}");
break;
case PipelineTemplateConstants.TimeoutMinutes:
ConvertToStepTimeout(context, stepProperty.Value, allowExpressions: true); // Validate early if possible
timeoutMinutes = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.TimeoutMinutes}");
break;
case PipelineTemplateConstants.Uses:
uses = stepProperty.Value.AssertString($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Uses}");
break;
case PipelineTemplateConstants.With:
ConvertToStepInputs(context, stepProperty.Value, allowExpressions: true); // Validate early if possible
with = stepProperty.Value;
break;
case PipelineTemplateConstants.WorkingDirectory:
workingDir = stepProperty.Value.AssertScalar($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.WorkingDirectory}");
break;
default:
propertyName.AssertUnexpectedValue($"{PipelineTemplateConstants.Steps} item key"); // throws
break;
}
}
// Fixup the if-condition
var isDefaultScope = String.IsNullOrEmpty(scope?.Value);
ifCondition = ConvertToIfCondition(context, ifToken, false, isDefaultScope);
if (run != null)
{
var result = new ActionStep
{
ScopeName = scope?.Value,
ContextName = id?.Value,
ContinueOnError = continueOnError,
DisplayNameToken = name,
Condition = ifCondition,
TimeoutInMinutes = timeoutMinutes,
Environment = env,
Reference = new ScriptReference(),
};
var inputs = new MappingToken(null, null, null);
inputs.Add(new StringToken(null, null, null, PipelineConstants.ScriptStepInputs.Script), run);
if (workingDir != null)
{
inputs.Add(new StringToken(null, null, null, PipelineConstants.ScriptStepInputs.WorkingDirectory), workingDir);
}
if (shell != null)
{
inputs.Add(new StringToken(null, null, null, PipelineConstants.ScriptStepInputs.Shell), shell);
}
result.Inputs = inputs;
return result;
}
else
{
uses.AssertString($"{PipelineTemplateConstants.Steps} item {PipelineTemplateConstants.Uses}");
var result = new ActionStep
{
ScopeName = scope?.Value,
ContextName = id?.Value,
ContinueOnError = continueOnError,
DisplayNameToken = name,
Condition = ifCondition,
TimeoutInMinutes = timeoutMinutes,
Inputs = with,
Environment = env,
};
if (uses.Value.StartsWith("docker://", StringComparison.Ordinal))
{
var image = uses.Value.Substring("docker://".Length);
result.Reference = new ContainerRegistryReference { Image = image };
}
else if (uses.Value.StartsWith("./") || uses.Value.StartsWith(".\\"))
{
result.Reference = new RepositoryPathReference
{
RepositoryType = PipelineConstants.SelfAlias,
Path = uses.Value
};
}
else
{
var usesSegments = uses.Value.Split('@');
var pathSegments = usesSegments[0].Split(new[] { '/', '\\' }, StringSplitOptions.RemoveEmptyEntries);
var gitRef = usesSegments.Length == 2 ? usesSegments[1] : String.Empty;
if (usesSegments.Length != 2 ||
pathSegments.Length < 2 ||
String.IsNullOrEmpty(pathSegments[0]) ||
String.IsNullOrEmpty(pathSegments[1]) ||
String.IsNullOrEmpty(gitRef))
{
// todo: loc
context.Error(uses, $"Expected format {{org}}/{{repo}}[/path]@ref. Actual '{uses.Value}'");
}
else
{
var repositoryName = $"{pathSegments[0]}/{pathSegments[1]}";
var directoryPath = pathSegments.Length > 2 ? String.Join("/", pathSegments.Skip(2)) : String.Empty;
result.Reference = new RepositoryPathReference
{
RepositoryType = RepositoryTypes.GitHub,
Name = repositoryName,
Ref = gitRef,
Path = directoryPath,
};
}
}
return result;
}
}
/// <summary>
/// When empty, default to "success()".
/// When a status function is not referenced, format as "success() &amp;&amp; &lt;CONDITION&gt;".
/// </summary>
private static String ConvertToIfCondition(
TemplateContext context,
TemplateToken token,
Boolean isJob,
Boolean isDefaultScope)
{
String condition;
if (token is null)
{
condition = null;
}
else if (token is BasicExpressionToken expressionToken)
{
condition = expressionToken.Expression;
}
else
{
var stringToken = token.AssertString($"{(isJob ? "job" : "step")} {PipelineTemplateConstants.If}");
condition = stringToken.Value;
}
if (String.IsNullOrWhiteSpace(condition))
{
return $"{PipelineTemplateConstants.Success}()";
}
var expressionParser = new ExpressionParser();
var functions = default(IFunctionInfo[]);
var namedValues = default(INamedValueInfo[]);
if (isJob)
{
namedValues = s_jobIfNamedValues;
// TODO: refactor into separate functions
// functions = PhaseCondition.FunctionInfo;
}
else
{
namedValues = isDefaultScope ? s_stepNamedValues : s_stepInTemplateNamedValues;
functions = s_stepConditionFunctions;
}
var node = default(ExpressionNode);
try
{
node = expressionParser.CreateTree(condition, null, namedValues, functions) as ExpressionNode;
}
catch (Exception ex)
{
context.Error(token, ex);
return null;
}
if (node == null)
{
return $"{PipelineTemplateConstants.Success}()";
}
var hasStatusFunction = node.Traverse().Any(x =>
{
if (x is Function function)
{
return String.Equals(function.Name, PipelineTemplateConstants.Always, StringComparison.OrdinalIgnoreCase) ||
String.Equals(function.Name, PipelineTemplateConstants.Cancelled, StringComparison.OrdinalIgnoreCase) ||
String.Equals(function.Name, PipelineTemplateConstants.Failure, StringComparison.OrdinalIgnoreCase) ||
String.Equals(function.Name, PipelineTemplateConstants.Success, StringComparison.OrdinalIgnoreCase);
}
return false;
});
return hasStatusFunction ? condition : $"{PipelineTemplateConstants.Success}() && ({condition})";
}
private static readonly INamedValueInfo[] s_jobIfNamedValues = new INamedValueInfo[]
{
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.GitHub),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Needs),
};
private static readonly INamedValueInfo[] s_stepNamedValues = new INamedValueInfo[]
{
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Strategy),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Matrix),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Steps),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.GitHub),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Job),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Runner),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Env),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Needs),
};
private static readonly INamedValueInfo[] s_stepInTemplateNamedValues = new INamedValueInfo[]
{
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Strategy),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Matrix),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Steps),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Inputs),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.GitHub),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Job),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Runner),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Env),
new NamedValueInfo<NoOperationNamedValue>(PipelineTemplateConstants.Needs),
};
private static readonly IFunctionInfo[] s_stepConditionFunctions = new IFunctionInfo[]
{
new FunctionInfo<NoOperation>(PipelineTemplateConstants.Always, 0, 0),
new FunctionInfo<NoOperation>(PipelineTemplateConstants.Cancelled, 0, 0),
new FunctionInfo<NoOperation>(PipelineTemplateConstants.Failure, 0, 0),
new FunctionInfo<NoOperation>(PipelineTemplateConstants.Success, 0, 0),
new FunctionInfo<NoOperation>(PipelineTemplateConstants.HashFiles, 1, Byte.MaxValue),
};
}
}
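
ConvertToIfCondition above normalizes if expressions: an empty condition becomes success(), and a condition that references no status function (always, cancelled, failure, success) is wrapped as success() && (<condition>). A minimal sketch of that rule with a hypothetical FixupIfCondition helper; the real code parses the expression tree, while this illustration only substring-matches:

using System;
using System.Linq;

static string FixupIfCondition(string condition)
{
    if (String.IsNullOrWhiteSpace(condition))
    {
        return "success()";
    }
    var statusFunctions = new[] { "always(", "cancelled(", "failure(", "success(" };
    var hasStatusFunction = statusFunctions.Any(f =>
        condition.IndexOf(f, StringComparison.OrdinalIgnoreCase) >= 0);
    return hasStatusFunction ? condition : $"success() && ({condition})";
}

// FixupIfCondition("")                                 -> "success()"
// FixupIfCondition("always()")                         -> "always()"
// FixupIfCondition("github.ref == 'refs/heads/main'")  -> "success() && (github.ref == 'refs/heads/main')"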

View File

@@ -159,6 +159,31 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
return result;
}
public List<ActionStep> LoadCompositeSteps(
TemplateToken token)
{
var result = default(List<ActionStep>);
if (token != null && token.Type != TokenType.Null)
{
var context = CreateContext(null, null, setMissingContext: false);
// TODO: we might want a bool to prevent it from filling in missing contexts with dummy variables
try
{
token = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.StepsInTemplate, token, 0, null, omitHeader: true);
context.Errors.Check();
result = PipelineTemplateConverter.ConvertToSteps(context, token);
}
catch (Exception ex) when (!(ex is TemplateValidationException))
{
context.Errors.Add(ex);
}
context.Errors.Check();
}
return result;
}
public Dictionary<String, String> EvaluateStepEnvironment(
TemplateToken token,
DictionaryContextData contextData,
@@ -400,7 +425,8 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
private TemplateContext CreateContext(
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions,
IEnumerable<KeyValuePair<String, Object>> expressionState = null)
IEnumerable<KeyValuePair<String, Object>> expressionState = null,
bool setMissingContext = true)
{
var result = new TemplateContext
{
@@ -449,18 +475,21 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
// - Evaluating early when all referenced contexts are available, even though all allowed
// contexts may not yet be available. For example, evaluating step display name can often
// be performed early.
foreach (var name in s_expressionValueNames)
if (setMissingContext)
{
if (!result.ExpressionValues.ContainsKey(name))
foreach (var name in s_expressionValueNames)
{
result.ExpressionValues[name] = null;
if (!result.ExpressionValues.ContainsKey(name))
{
result.ExpressionValues[name] = null;
}
}
}
foreach (var name in s_expressionFunctionNames)
{
if (!functionNames.Contains(name))
foreach (var name in s_expressionFunctionNames)
{
result.ExpressionFunctions.Add(new FunctionInfo<NoOperation>(name, 0, Int32.MaxValue));
if (!functionNames.Contains(name))
{
result.ExpressionFunctions.Add(new FunctionInfo<NoOperation>(name, 0, Int32.MaxValue));
}
}
}
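
LoadCompositeSteps above appears to be the entry point for turning the raw steps token of a composite action.yml into ActionStep objects; it evaluates with setMissingContext: false so contexts that are not yet available are not stubbed out as null. A hedged usage sketch, where templateEvaluator and stepsToken are hypothetical stand-ins for what the composite action handler would supply:

List<ActionStep> compositeSteps = templateEvaluator.LoadCompositeSteps(stepsToken);
foreach (ActionStep step in compositeSteps)
{
    // Only enabled steps survive ConvertToSteps; each carries its own inputs, env, and condition.
    Console.WriteLine($"{step.ContextName ?? "(no id)"}: condition='{step.Condition}'");
}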

View File

@@ -94,5 +94,12 @@ namespace GitHub.DistributedTask.Pipelines
public static readonly String Resources = "resources";
public static readonly String All = "all";
}
public static class ScriptStepInputs
{
public static readonly String Script = "script";
public static readonly String WorkingDirectory = "workingDirectory";
public static readonly String Shell = "shell";
}
}
}

View File

@@ -0,0 +1,40 @@
using System;
using System.Runtime.Serialization;
namespace GitHub.DistributedTask.WebApi
{
[DataContract]
public class ActionDownloadInfo
{
[DataMember(EmitDefaultValue = false)]
public ActionDownloadAuthentication Authentication { get; set; }
[DataMember(EmitDefaultValue = false)]
public string NameWithOwner { get; set; }
[DataMember(EmitDefaultValue = false)]
public string ResolvedNameWithOwner { get; set; }
[DataMember(EmitDefaultValue = false)]
public string ResolvedSha { get; set; }
[DataMember(EmitDefaultValue = false)]
public string TarballUrl { get; set; }
[DataMember(EmitDefaultValue = false)]
public string Ref { get; set; }
[DataMember(EmitDefaultValue = false)]
public string ZipballUrl { get; set; }
}
[DataContract]
public class ActionDownloadAuthentication
{
[DataMember(EmitDefaultValue = false)]
public DateTime ExpiresAt { get; set; }
[DataMember(EmitDefaultValue = false)]
public string Token { get; set; }
}
}

View File

@@ -0,0 +1,16 @@
using System.Collections.Generic;
using System.Runtime.Serialization;
namespace GitHub.DistributedTask.WebApi
{
[DataContract]
public class ActionDownloadInfoCollection
{
[DataMember]
public IDictionary<string, ActionDownloadInfo> Actions
{
get;
set;
}
}
}

View File

@@ -0,0 +1,22 @@
using System.Runtime.Serialization;
namespace GitHub.DistributedTask.WebApi
{
[DataContract]
public class ActionReference
{
[DataMember]
public string NameWithOwner
{
get;
set;
}
[DataMember]
public string Ref
{
get;
set;
}
}
}

View File

@@ -0,0 +1,16 @@
using System.Collections.Generic;
using System.Runtime.Serialization;
namespace GitHub.DistributedTask.WebApi
{
[DataContract]
public class ActionReferenceList
{
[DataMember]
public IList<ActionReference> Actions
{
get;
set;
}
}
}

View File

@@ -39,7 +39,7 @@ namespace GitHub.Services.OAuth
/// <summary>
/// Gets or sets the error description for the response.
/// </summary>
[DataMember(Name = "errordescription", EmitDefaultValue = false)]
[DataMember(Name = "error_description", EmitDefaultValue = false)]
public String ErrorDescription
{
get;

File diff suppressed because it is too large

View File

@@ -70,5 +70,24 @@ namespace GitHub.Runner.Common.Tests.Util
}
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Common")]
public void WhichHandleFullyQualifiedPath()
{
using (TestHostContext hc = new TestHostContext(this))
{
// Arrange.
Tracing trace = hc.GetTrace();
// Act.
var gitPath = WhichUtil.Which("git", require: true, trace: trace);
var gitPath2 = WhichUtil.Which(gitPath, require: true, trace: trace);
// Assert.
Assert.Equal(gitPath, gitPath2);
}
}
}
}

File diff suppressed because it is too large

View File

@@ -759,6 +759,7 @@ namespace GitHub.Runner.Common.Tests.Worker
_ec.Setup(x => x.Variables).Returns(new Variables(_hc, new Dictionary<string, VariableValue>()));
_ec.Setup(x => x.ExpressionValues).Returns(new DictionaryContextData());
_ec.Setup(x => x.ExpressionFunctions).Returns(new List<IFunctionInfo>());
_ec.Setup(x => x.FileTable).Returns(new List<String>());
_ec.Setup(x => x.Write(It.IsAny<string>(), It.IsAny<string>())).Callback((string tag, string message) => { _hc.GetTrace().Info($"{tag}{message}"); });
_ec.Setup(x => x.AddIssue(It.IsAny<Issue>(), It.IsAny<string>())).Callback((Issue issue, string message) => { _hc.GetTrace().Info($"[{issue.Type}]{issue.Message ?? message}"); });
}

View File

@@ -328,8 +328,7 @@ namespace GitHub.Runner.Common.Tests.Worker
Assert.Equal("invalid1", finialInputs["invalid1"]);
Assert.Equal("invalid2", finialInputs["invalid2"]);
_ec.Verify(x => x.AddIssue(It.Is<Issue>(s => s.Message.Contains("Unexpected input 'invalid1'")), It.IsAny<string>()), Times.Once);
_ec.Verify(x => x.AddIssue(It.Is<Issue>(s => s.Message.Contains("Unexpected input 'invalid2'")), It.IsAny<string>()), Times.Once);
_ec.Verify(x => x.AddIssue(It.Is<Issue>(s => s.Message.Contains("Unexpected input(s) 'invalid1', 'invalid2'")), It.IsAny<string>()), Times.Once);
}
private void Setup([CallerMemberName] string name = "")
@@ -380,6 +379,7 @@ namespace GitHub.Runner.Common.Tests.Worker
_ec.Setup(x => x.ExpressionFunctions).Returns(new List<IFunctionInfo>());
_ec.Setup(x => x.IntraActionState).Returns(new Dictionary<string, string>());
_ec.Setup(x => x.EnvironmentVariables).Returns(new Dictionary<string, string>());
_ec.Setup(x => x.FileTable).Returns(new List<String>());
_ec.Setup(x => x.SetGitHubContext(It.IsAny<string>(), It.IsAny<string>()));
_ec.Setup(x => x.GetGitHubContext(It.IsAny<string>())).Returns("{\"foo\":\"bar\"}");
_ec.Setup(x => x.CancellationToken).Returns(_ecTokenSource.Token);

View File

@@ -176,7 +176,7 @@ namespace GitHub.Runner.Common.Tests.Worker
jobExtension.Initialize(hc);
_actionManager.Setup(x => x.PrepareActionsAsync(It.IsAny<IExecutionContext>(), It.IsAny<IEnumerable<Pipelines.JobStep>>()))
.Returns(Task.FromResult(new PrepareResult(new List<JobExtensionRunner>() { new JobExtensionRunner(null, "", "prepare1", null, null), new JobExtensionRunner(null, "", "prepare2", null, null) }, new Dictionary<Guid, IActionRunner>())));
.Returns(Task.FromResult(new PrepareResult(new List<JobExtensionRunner>() { new JobExtensionRunner(null, "", "prepare1", null), new JobExtensionRunner(null, "", "prepare2", null) }, new Dictionary<Guid, IActionRunner>())));
List<IStep> result = await jobExtension.InitializeJob(_jobEc, _message);

View File

@@ -686,14 +686,17 @@ namespace GitHub.Runner.Common.Tests.Worker
// <WORKSPACE>/workflow-repo/nested-other-repo
// <WORKSPACE>/other-repo
// <WORKSPACE>/other-repo/nested-workflow-repo
// <WORKSPACE>/workflow-repo-using-ssh
var workflowRepository = Path.Combine(workspaceDirectory, "workflow-repo");
var nestedOtherRepository = Path.Combine(workspaceDirectory, "workflow-repo", "nested-other-repo");
var otherRepository = Path.Combine(workspaceDirectory, workflowRepository, "nested-other-repo");
var nestedWorkflowRepository = Path.Combine(workspaceDirectory, "other-repo", "nested-workflow-repo");
var workflowRepositoryUsingSsh = Path.Combine(workspaceDirectory, "workflow-repo-using-ssh");
await CreateRepository(hostContext, workflowRepository, "https://github.com/my-org/workflow-repo");
await CreateRepository(hostContext, nestedOtherRepository, "https://github.com/my-org/other-repo");
await CreateRepository(hostContext, otherRepository, "https://github.com/my-org/other-repo");
await CreateRepository(hostContext, nestedWorkflowRepository, "https://github.com/my-org/workflow-repo");
await CreateRepository(hostContext, workflowRepositoryUsingSsh, "git@github.com:my-org/workflow-repo.git");
// Create test files
var file_noRepository = Path.Combine(workspaceDirectory, "no-repo.txt");
@@ -703,7 +706,8 @@ namespace GitHub.Runner.Common.Tests.Worker
var file_nestedOtherRepository = Path.Combine(nestedOtherRepository, "nested-other-repo");
var file_otherRepository = Path.Combine(otherRepository, "other-repo.txt");
var file_nestedWorkflowRepository = Path.Combine(nestedWorkflowRepository, "nested-workflow-repo.txt");
foreach (var file in new[] { file_noRepository, file_workflowRepository, file_workflowRepository_nestedDirectory, file_workflowRepository_failsafe, file_nestedOtherRepository, file_otherRepository, file_nestedWorkflowRepository })
var file_workflowRepositoryUsingSsh = Path.Combine(workflowRepositoryUsingSsh, "workflow-repo-using-ssh.txt");
foreach (var file in new[] { file_noRepository, file_workflowRepository, file_workflowRepository_nestedDirectory, file_workflowRepository_failsafe, file_nestedOtherRepository, file_otherRepository, file_nestedWorkflowRepository, file_workflowRepositoryUsingSsh })
{
Directory.CreateDirectory(Path.GetDirectoryName(file));
File.WriteAllText(file, "");
@@ -718,8 +722,9 @@ namespace GitHub.Runner.Common.Tests.Worker
Process($"{file_nestedOtherRepository}: some error 6");
Process($"{file_otherRepository}: some error 7");
Process($"{file_nestedWorkflowRepository}: some error 8");
Process($"{file_workflowRepositoryUsingSsh}: some error 9");
Assert.Equal(8, _issues.Count);
Assert.Equal(9, _issues.Count);
Assert.Equal("some error 1", _issues[0].Item1.Message);
Assert.False(_issues[0].Item1.Data.ContainsKey("file"));
@@ -744,6 +749,9 @@ namespace GitHub.Runner.Common.Tests.Worker
Assert.Equal("some error 8", _issues[7].Item1.Message);
Assert.Equal(file_nestedWorkflowRepository.Substring(nestedWorkflowRepository.Length + 1).Replace(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar), _issues[7].Item1.Data["file"]);
Assert.Equal("some error 9", _issues[8].Item1.Message);
Assert.Equal(file_workflowRepositoryUsingSsh.Substring(workflowRepositoryUsingSsh.Length + 1).Replace(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar), _issues[8].Item1.Data["file"]);
}
Environment.SetEnvironmentVariable("RUNNER_TEST_GET_REPOSITORY_PATH_FAILSAFE", "");

View File

@@ -80,7 +80,7 @@ namespace GitHub.Runner.Common.Tests.Worker
{
_ec.Object.Result = null;
_ec.Setup(x => x.JobSteps).Returns(new Queue<IStep>(variableSet.Select(x => x.Object).ToList()));
_ec.Setup(x => x.JobSteps).Returns(new List<IStep>(variableSet.Select(x => x.Object).ToList()));
// Act.
await _stepsRunner.RunAsync(jobContext: _ec.Object);
@@ -115,7 +115,7 @@ namespace GitHub.Runner.Common.Tests.Worker
{
_ec.Object.Result = null;
_ec.Setup(x => x.JobSteps).Returns(new Queue<IStep>(variableSet.Select(x => x.Object).ToList()));
_ec.Setup(x => x.JobSteps).Returns(new List<IStep>(variableSet.Select(x => x.Object).ToList()));
// Act.
await _stepsRunner.RunAsync(jobContext: _ec.Object);
@@ -154,7 +154,7 @@ namespace GitHub.Runner.Common.Tests.Worker
{
_ec.Object.Result = null;
_ec.Setup(x => x.JobSteps).Returns(new Queue<IStep>(variableSet.Steps.Select(x => x.Object).ToList()));
_ec.Setup(x => x.JobSteps).Returns(new List<IStep>(variableSet.Steps.Select(x => x.Object).ToList()));
// Act.
await _stepsRunner.RunAsync(jobContext: _ec.Object);
@@ -208,7 +208,7 @@ namespace GitHub.Runner.Common.Tests.Worker
{
_ec.Object.Result = null;
_ec.Setup(x => x.JobSteps).Returns(new Queue<IStep>(variableSet.Steps.Select(x => x.Object).ToList()));
_ec.Setup(x => x.JobSteps).Returns(new List<IStep>(variableSet.Steps.Select(x => x.Object).ToList()));
// Act.
await _stepsRunner.RunAsync(jobContext: _ec.Object);
@@ -287,7 +287,7 @@ namespace GitHub.Runner.Common.Tests.Worker
{
_ec.Object.Result = null;
_ec.Setup(x => x.JobSteps).Returns(new Queue<IStep>(variableSet.Steps.Select(x => x.Object).ToList()));
_ec.Setup(x => x.JobSteps).Returns(new List<IStep>(variableSet.Steps.Select(x => x.Object).ToList()));
// Act.
await _stepsRunner.RunAsync(jobContext: _ec.Object);
@@ -330,7 +330,7 @@ namespace GitHub.Runner.Common.Tests.Worker
{
_ec.Object.Result = null;
_ec.Setup(x => x.JobSteps).Returns(new Queue<IStep>(variableSet.Step.Select(x => x.Object).ToList()));
_ec.Setup(x => x.JobSteps).Returns(new List<IStep>(variableSet.Step.Select(x => x.Object).ToList()));
// Act.
await _stepsRunner.RunAsync(jobContext: _ec.Object);
@@ -361,7 +361,7 @@ namespace GitHub.Runner.Common.Tests.Worker
{
_ec.Object.Result = null;
_ec.Setup(x => x.JobSteps).Returns(new Queue<IStep>(variableSet.Select(x => x.Object).ToList()));
_ec.Setup(x => x.JobSteps).Returns(new List<IStep>(variableSet.Select(x => x.Object).ToList()));
// Act.
await _stepsRunner.RunAsync(jobContext: _ec.Object);
@@ -391,7 +391,7 @@ namespace GitHub.Runner.Common.Tests.Worker
{
_ec.Object.Result = null;
_ec.Setup(x => x.JobSteps).Returns(new Queue<IStep>(variableSet.Select(x => x.Object).ToList()));
_ec.Setup(x => x.JobSteps).Returns(new List<IStep>(variableSet.Select(x => x.Object).ToList()));
// Act.
await _stepsRunner.RunAsync(jobContext: _ec.Object);
@@ -417,7 +417,7 @@ namespace GitHub.Runner.Common.Tests.Worker
_ec.Object.Result = null;
_ec.Setup(x => x.JobSteps).Returns(new Queue<IStep>(new[] { step1.Object }));
_ec.Setup(x => x.JobSteps).Returns(new List<IStep>(new[] { step1.Object }));
// Act.
await _stepsRunner.RunAsync(jobContext: _ec.Object);
@@ -426,11 +426,11 @@ namespace GitHub.Runner.Common.Tests.Worker
Assert.Equal(TaskResult.Succeeded, _ec.Object.Result ?? TaskResult.Succeeded);
#if OS_WINDOWS
Assert.Equal("100", _ec.Object.ExpressionValues["env"].AssertDictionary("env")["env1"].AssertString("100"));
Assert.Equal("github_actions", _ec.Object.ExpressionValues["env"].AssertDictionary("env")["env2"].AssertString("github_actions"));
Assert.Equal("100", step1.Object.ExecutionContext.ExpressionValues["env"].AssertDictionary("env")["env1"].AssertString("100"));
Assert.Equal("github_actions", step1.Object.ExecutionContext.ExpressionValues["env"].AssertDictionary("env")["env2"].AssertString("github_actions"));
#else
Assert.Equal("100", _ec.Object.ExpressionValues["env"].AssertCaseSensitiveDictionary("env")["env1"].AssertString("100"));
Assert.Equal("github_actions", _ec.Object.ExpressionValues["env"].AssertCaseSensitiveDictionary("env")["env2"].AssertString("github_actions"));
Assert.Equal("100", step1.Object.ExecutionContext.ExpressionValues["env"].AssertCaseSensitiveDictionary("env")["env1"].AssertString("100"));
Assert.Equal("github_actions", step1.Object.ExecutionContext.ExpressionValues["env"].AssertCaseSensitiveDictionary("env")["env2"].AssertString("github_actions"));
#endif
}
}
@@ -455,7 +455,7 @@ namespace GitHub.Runner.Common.Tests.Worker
_ec.Object.Result = null;
_ec.Setup(x => x.JobSteps).Returns(new Queue<IStep>(new[] { step1.Object, step2.Object }));
_ec.Setup(x => x.JobSteps).Returns(new List<IStep>(new[] { step1.Object, step2.Object }));
// Act.
await _stepsRunner.RunAsync(jobContext: _ec.Object);
@@ -463,13 +463,13 @@ namespace GitHub.Runner.Common.Tests.Worker
// Assert.
Assert.Equal(TaskResult.Succeeded, _ec.Object.Result ?? TaskResult.Succeeded);
#if OS_WINDOWS
Assert.Equal("1000", _ec.Object.ExpressionValues["env"].AssertDictionary("env")["env1"].AssertString("1000"));
Assert.Equal("github_actions", _ec.Object.ExpressionValues["env"].AssertDictionary("env")["env3"].AssertString("github_actions"));
Assert.False(_ec.Object.ExpressionValues["env"].AssertDictionary("env").ContainsKey("env2"));
Assert.Equal("1000", step2.Object.ExecutionContext.ExpressionValues["env"].AssertDictionary("env")["env1"].AssertString("1000"));
Assert.Equal("github_actions", step2.Object.ExecutionContext.ExpressionValues["env"].AssertDictionary("env")["env3"].AssertString("github_actions"));
Assert.False(step2.Object.ExecutionContext.ExpressionValues["env"].AssertDictionary("env").ContainsKey("env2"));
#else
Assert.Equal("1000", _ec.Object.ExpressionValues["env"].AssertCaseSensitiveDictionary("env")["env1"].AssertString("1000"));
Assert.Equal("github_actions", _ec.Object.ExpressionValues["env"].AssertCaseSensitiveDictionary("env")["env3"].AssertString("github_actions"));
Assert.False(_ec.Object.ExpressionValues["env"].AssertCaseSensitiveDictionary("env").ContainsKey("env2"));
Assert.Equal("1000", step2.Object.ExecutionContext.ExpressionValues["env"].AssertCaseSensitiveDictionary("env")["env1"].AssertString("1000"));
Assert.Equal("github_actions", step2.Object.ExecutionContext.ExpressionValues["env"].AssertCaseSensitiveDictionary("env")["env3"].AssertString("github_actions"));
Assert.False(step2.Object.ExecutionContext.ExpressionValues["env"].AssertCaseSensitiveDictionary("env").ContainsKey("env2"));
#endif
}
}
@@ -493,7 +493,7 @@ namespace GitHub.Runner.Common.Tests.Worker
_ec.Object.Result = null;
_ec.Setup(x => x.JobSteps).Returns(new Queue<IStep>(new[] { step1.Object, step2.Object }));
_ec.Setup(x => x.JobSteps).Returns(new List<IStep>(new[] { step1.Object, step2.Object }));
// Act.
await _stepsRunner.RunAsync(jobContext: _ec.Object);
@@ -501,11 +501,11 @@ namespace GitHub.Runner.Common.Tests.Worker
// Assert.
Assert.Equal(TaskResult.Succeeded, _ec.Object.Result ?? TaskResult.Succeeded);
#if OS_WINDOWS
Assert.Equal("1000", _ec.Object.ExpressionValues["env"].AssertDictionary("env")["env1"].AssertString("1000"));
Assert.Equal("something", _ec.Object.ExpressionValues["env"].AssertDictionary("env")["env2"].AssertString("something"));
Assert.Equal("1000", step2.Object.ExecutionContext.ExpressionValues["env"].AssertDictionary("env")["env1"].AssertString("1000"));
Assert.Equal("something", step2.Object.ExecutionContext.ExpressionValues["env"].AssertDictionary("env")["env2"].AssertString("something"));
#else
Assert.Equal("1000", _ec.Object.ExpressionValues["env"].AssertCaseSensitiveDictionary("env")["env1"].AssertString("1000"));
Assert.Equal("something", _ec.Object.ExpressionValues["env"].AssertCaseSensitiveDictionary("env")["env2"].AssertString("something"));
Assert.Equal("1000", step2.Object.ExecutionContext.ExpressionValues["env"].AssertCaseSensitiveDictionary("env")["env1"].AssertString("1000"));
Assert.Equal("something", step2.Object.ExecutionContext.ExpressionValues["env"].AssertCaseSensitiveDictionary("env")["env2"].AssertString("something"));
#endif
}
}
@@ -524,7 +524,7 @@ namespace GitHub.Runner.Common.Tests.Worker
_ec.Object.Result = null;
_ec.Setup(x => x.JobSteps).Returns(new Queue<IStep>(new[] { step1.Object, step2.Object, step3.Object }));
_ec.Setup(x => x.JobSteps).Returns(new List<IStep>(new[] { step1.Object, step2.Object, step3.Object }));
// Act.
await _stepsRunner.RunAsync(jobContext: _ec.Object);
@@ -560,7 +560,7 @@ namespace GitHub.Runner.Common.Tests.Worker
_ec.Object.Result = null;
_ec.Setup(x => x.JobSteps).Returns(new Queue<IStep>(new[] { step1.Object, step2.Object, step3.Object }));
_ec.Setup(x => x.JobSteps).Returns(new List<IStep>(new[] { step1.Object, step2.Object, step3.Object }));
// Act.
await _stepsRunner.RunAsync(jobContext: _ec.Object);
@@ -602,7 +602,7 @@ namespace GitHub.Runner.Common.Tests.Worker
stepContext.Setup(x => x.WriteDebug).Returns(true);
stepContext.Setup(x => x.Variables).Returns(_variables);
stepContext.Setup(x => x.EnvironmentVariables).Returns(_env);
stepContext.Setup(x => x.ExpressionValues).Returns(_contexts);
stepContext.Setup(x => x.ExpressionValues).Returns(new DictionaryContextData());
stepContext.Setup(x => x.ExpressionFunctions).Returns(new List<IFunctionInfo>());
stepContext.Setup(x => x.JobContext).Returns(_jobContext);
stepContext.Setup(x => x.StepsContext).Returns(_stepContext);

View File

@@ -1,4 +1,4 @@
#!/bin/bash
#!/usr/bin/env bash
###############################################################################
#

View File

@@ -1 +1 @@
2.262.0
2.267.0