Compare commits


2 Commits

| Author | SHA1 | Message | Date |
| --- | --- | --- | --- |
| Tingluo Huang | e80ab803d6 | c | 2020-01-22 16:34:15 -05:00 |
| Tingluo Huang | 4f40817c82 | Set both http_proxy and HTTP_PROXY env for runner/worker processes. | 2020-01-22 11:44:49 -05:00 |
136 changed files with 2860 additions and 7016 deletions

View File

@@ -43,6 +43,14 @@ jobs:
steps:
- uses: actions/checkout@v1
# Set Path workaround for https://github.com/actions/virtual-environments/issues/263
- run: |
echo "::add-path::C:\Program Files\Git\mingw64\bin"
echo "::add-path::C:\Program Files\Git\usr\bin"
echo "::add-path::C:\Program Files\Git\bin"
if: matrix.os == 'windows-latest'
name: "Temp step to Set Path for Windows"
# Build runner layout
- name: Build & Layout Release
run: |

View File

@@ -1,62 +0,0 @@
# ADR 0274: Step outcome and conclusion
**Date**: 2020-01-13
**Status**: Accepted
## Context
This ADR proposes adding `steps.<id>.outcome` and `steps.<id>.conclusion` to the steps context.
This allows a downstream step to run based on whether a previous step succeeded or failed.
Reminder: the steps context currently contains `steps.<id>.outputs`.
## Decision
For steps that have completed, populate `steps.<id>.outcome` and `steps.<id>.conclusion` with one of the following values:
- `success`
- `failure`
- `cancelled`
- `skipped`
When a continue-on-error step fails, the outcome will be `failure` even though the final conclusion is `success`.
### Example
```yaml
steps:
- id: experimental
continue-on-error: true
run: ./build.sh experimental
- if: ${{ steps.experimental.outcome == 'success' }}
run: ./publish.sh experimental
```
### Terminology
The runs API uses the term `conclusion`.
Therefore we use a different term, `outcome`, for the value prior to applying continue-on-error.
The following is a snippet from the runs API response payload:
```json
"steps": [
{
"name": "Set up job",
"status": "completed",
"conclusion": "success",
"number": 1,
"started_at": "2020-01-09T11:06:16.000-05:00",
"completed_at": "2020-01-09T11:06:18.000-05:00"
},
```
## Consequences
- Update runner
- Update [docs](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/contexts-and-expression-syntax-for-github-actions#steps-context)

View File

@@ -15,7 +15,6 @@ User wants to reference workflow variables defined in workflow yaml file for act
The runner will create and populate the `env` context for every job execution using the following logic:
1. On job start, create the `env` context with any environment variables in the job message; these are the env values defined in the workflow- and job-level `env` sections of the customer's YAML file.
2. Update the `env` context whenever the customer uses `::set-env::` to set an environment variable at the runner level.
3. Update the `env` context with the step's `env` block before each step runs.
The `env` context is only available on the runner; customers can't use the `env` context in any server-side evaluation, just like the `runner` context.
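For illustration only, a minimal C# sketch of the layering logic above; the names and types are hypothetical, not the runner's actual implementation.
```csharp
using System;
using System.Collections.Generic;

// Hypothetical sketch: the runner-level `env` context as a case-insensitive map.
var envContext = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);

// 1. On job start, seed the context from the workflow/job level `env` in the job message.
envContext["env1"] = "100";
envContext["env2"] = "200";

// 2. `::set-env::` updates the context at the runner level.
void SetEnv(string name, string value) => envContext[name] = value;

// 3. Before each step runs, overlay that step's `env` block without mutating the job-level context.
IDictionary<string, string> ContextForStep(IDictionary<string, string> stepEnv)
{
    var merged = new Dictionary<string, string>(envContext, StringComparer.OrdinalIgnoreCase);
    foreach (var pair in stepEnv) merged[pair.Key] = pair.Value;
    return merged;
}
```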
@@ -23,25 +22,17 @@ Example yaml:
```yaml
env:
env1: 10
env2: 20
env3: 30
env1: 100
jobs:
build:
env:
env1: 100
env2: 200
runs-on: ubuntu-latest
steps:
- run: |
echo ${{ env.env1 }} // 1000
echo $env1 // 1000
echo $env2 // 200
echo $env3 // 30
if: env.env2 == 200 // true
name: ${{ env.env1 }}_${{ env.env2 }} //1000_200
env:
env1: 1000
echo ${{ env.env1 }}
if: env.env2 == 200
name: ${{ env.env1 }}_${{ env.env2 }}
```
### Don't populate the `env` context with environment variables from runner machine.
@@ -57,4 +48,4 @@ build:
- uses: docker://ubuntu:18.04
with:
args: echo ${{env.USER}} <- what should customer expect this output? runner/root
```
```

View File

@@ -1,48 +0,0 @@
# ADR 0297: Base64 Masking Trailing Characters
**Date** 2020-01-21
**Status** Proposed
## Context
The Runner registers a number of Value Encoders, which mask various encodings of a provided secret. Currently, we register 3 base64 encoders:
- The base64 encoded secret
- The secret with the first character removed then base64 encoded
- The secret with the first two characters removed then base64 encoded
This gives us good coverage across the board for secrets and secrets with a prefix (e.g. `base64($user:$pass)`).
However, we don't have great coverage for cases where the secret has a string appended to it before it is base64 encoded (e.g. `base64($pass\n)`).
Most notably we've seen this as a result of user error, where a user accidentally appends a newline or space character before encoding their secret in base64.
## Decision
### Trim end characters
We are going to modify all existing base64 encoders to trim information before registering as a secret.
We will trim:
- `=` from the end of all base64 strings. This is a padding character that contains no information.
- Based on the number of `=`'s at the end of a base64 string, a malicious user could predict the length of the original secret modulo 3.
- If a user saw `***==`, they would know the secret could be 1,4,7,10... characters.
- If a string contains `=` we will also trim the last non-padding character from the base64 secret.
- This character can change if a string is appended to the secret before the encoding.
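As an illustration only, a minimal C# sketch of the trimming rule above (not the runner's actual encoder code):
```csharp
// Trim a base64-encoded secret before registering it with the secret masker.
static string TrimBase64Secret(string base64Secret)
{
    // `=` is padding and carries no information, so always drop it.
    var trimmed = base64Secret.TrimEnd('=');

    // If padding was present, the last non-padding character can change when a
    // suffix is appended to the secret before encoding, so drop it as well.
    if (trimmed.Length != base64Secret.Length && trimmed.Length > 0)
    {
        trimmed = trimmed.Substring(0, trimmed.Length - 1);
    }

    return trimmed;
}
```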
### Register a fourth encoder
We will also add back in the original base64 encoded secret encoder for four total encoders:
- The base64 encoded secret
- The base64 encoded secret trimmed
- The secret with the first character removed then base64 encoded and trimmed
- The secret with the first two characters removed then base64 encoded and trimmed
This allows us to fully cover the most common scenario where a user base64 encodes their secret and expects the entire thing to be masked.
This will result in us only revealing length or bit information when a prefix or suffix is added to a secret before encoding.
## Consequences
- In the case where a secret has a prefix or suffix added before base64 encoding, we may now reveal up to 20 bits of information and the length of the original string modulo 3, rather than the original 16 bits and no length information
- Secrets with a suffix appended before encoding will now be masked across the board. Previously they were only masked if the secret length was a multiple of 3 characters
- Performance will suffer in a negligible way

View File

@@ -1,35 +0,0 @@
# ADR 354: Expose runner machine info
**Date**: 2020-03-02
**Status**: Pending
## Context
- Provide a mechanism in the runner to include extra information in the `Set up job` step's log.
Ex: Include OS/Software info from Hosted image.
## Decision
The runner will look for a file named `.setup_info` under the runner's root directory. The file is JSON with a simple schema.
```json
[
{
"group": "OS Detail",
"detail": "........"
},
{
"group": "Software Detail",
"detail": "........"
}
]
```
The runner will use `##[group]` and `##[endgroup]` to fold all detail info into an expandable group.
Both [virtual-environments](https://github.com/actions/virtual-environments) and self-hosted runners can use this mechanism to add extra logging info to the `Set up job` step's log.
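The following C# sketch illustrates the intended best-effort behavior; the type and method names are hypothetical and the JSON handling is simplified.
```csharp
using System;
using System.Collections.Generic;
using System.IO;
using System.Text.Json;

public record SetupInfoEntry(string Group, string Detail);

public static class SetupInfoPrinter
{
    public static void Print(string runnerRoot)
    {
        var file = Path.Combine(runnerRoot, ".setup_info");
        if (!File.Exists(file)) return; // best effort: a missing file is not an error

        List<SetupInfoEntry> entries;
        try
        {
            var options = new JsonSerializerOptions { PropertyNameCaseInsensitive = true };
            entries = JsonSerializer.Deserialize<List<SetupInfoEntry>>(File.ReadAllText(file), options);
        }
        catch (Exception)
        {
            return; // best effort: a malformed file is ignored
        }

        foreach (var entry in entries ?? new List<SetupInfoEntry>())
        {
            Console.WriteLine($"##[group]{entry.Group}");
            Console.WriteLine(entry.Detail);
            Console.WriteLine("##[endgroup]");
        }
    }
}
```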
## Consequences
1. Change the runner to read/parse the `.setup_info` file under the runner root directory on a best-effort basis.
2. [virtual-environments](https://github.com/actions/virtual-environments) generates the file during image generation.
3. Change the MMS provisioner to copy the file to the runner root directory at runtime.

View File

@@ -1,75 +0,0 @@
# ADR 361: Wrapper Action
**Date**: 2020-03-06
**Status**: Pending
## Context
In addition to an action's regular execution, an action author may want their action to participate in:
- Job initialization
My action collects machine resource usage (CPU/RAM/disk) during a workflow job execution, so we need to start the perf recorder at the beginning of the job.
- Job cleanup
My action dirties the local workspace or machine environment during execution, so we need to clean up these changes at the end of the job.
Ex: `actions/checkout@v2` writes `github.token` into the local `.git/config` during execution, so it defines a post-job cleanup to undo the changes.
## Decision
### Add `pre` and `post` execution to action
Node Action Example:
```yaml
name: 'My action with pre'
description: 'My action with pre'
runs:
using: 'node12'
pre: 'setup.js'
pre-if: 'success()' // Optional
main: 'index.js'
post: 'cleanup.js'
post-if: 'success()' // Optional
```
Container Action Example:
```yaml
name: 'My action with pre'
description: 'My action with pre'
runs:
using: 'docker'
image: 'mycontainer:latest'
pre-entrypoint: 'setup.sh'
pre-if: 'success()' // Optional
entrypoint: 'entrypoint.sh'
post-entrypoint: 'cleanup.sh'
post-if: 'success()' // Optional
```
Both `pre` and `post` have their default `pre-if`/`post-if` set to `always()`.
Defaulting `pre-if` to `always()` makes sure that, no matter how the `main` step's condition evaluates at runtime, the `pre` has already run.
`pre` executes in the order in which the steps are defined.
`pre` will always be added to the job's step list during job setup.
> An action referenced from a local repository (`./my-action`) won't get its `pre` set up correctly, since the repository hasn't been checked out during job initialization.
> We can't use the GitHub API to download the repository, since there is about a 3-minute delay between `git push` and the new commit becoming available for download via the GitHub API.
`post` is pushed onto a `poststeps` stack lazily, when the action's `pre` or `main` execution has passed its `if` condition check and is about to run. You can't have an action that only contains a `post`. We pop and run each `post` after all `pre` and `main` steps have finished (see the sketch below).
> Currently `post` works for both repository actions (`org/repo@v1`) and local actions (`./my-action`).
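A minimal C# sketch of the ordering described above (hypothetical names, not the runner's actual job extension code):
```csharp
using System;
using System.Collections.Generic;

var postSteps = new Stack<Action>();

// A step's `post` is registered lazily, only when its `pre` or `main` is about to run.
void RunStep(string name, Action body, Action post = null)
{
    if (post != null) postSteps.Push(post);
    Console.WriteLine($"run {name}");
    body();
}

RunStep("checkout (main)", () => { }, post: () => Console.WriteLine("run checkout (post)"));
RunStep("build script",    () => { });

// After all `pre` and `main` steps finish, pop and run each `post` in reverse order.
while (postSteps.Count > 0)
{
    postSteps.Pop()();
}
```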
Valid action:
- only has `main`
- has `pre` and `main`
- has `main` and `post`
- has `pre`, `main` and `post`
Invalid action:
- only has `pre`
- only has `post`
- has `pre` and `post`
Potential downsides of introducing `pre`:
- Extra magic with respect to step order; users should control the step order, especially once we introduce templates.
- Eliminates the possibility of lazily downloading the action tarball: since `pre` runs by default, we have to download the tarball to check whether the action defines a `pre`.
- `pre` doesn't work with local actions. We suggest customers use local actions for testing changes to their action (e.g. CI for the action) to avoid the delay between `git push` and the GitHub repository tarball download API.
- The condition on `pre` can't be controlled using dynamic step outputs; `pre` executes too early.

View File

@@ -1,56 +0,0 @@
# ADR 0397: Support adding custom labels during runner config
**Date**: 2020-03-30
**Status**: Approved
## Context
Since configuring self-hosted runners is commonly automated via scripts, it must be possible to add labels during configuration. The runner currently registers the built-in labels (os, arch) during registration, but does not accept labels via command-line args to extend that set.
See Issue: https://github.com/actions/runner/issues/262
This is another version of [ADR275](https://github.com/actions/runner/pull/275)
## Decision
This ADR proposes that we add a `--labels` option to `config`, which could be used to add custom additional labels to the configured runner.
For example, to add a single extra label the operator could run:
```bash
./config.sh --labels mylabel
```
> Note: the current runner command line parsing and envvar override algorithm only supports a single argument (key).
This would add the label `mylabel` to the runner, and enable users to select the runner in their workflow using this label:
```yaml
runs-on: [self-hosted, mylabel]
```
To add multiple labels the operator could run:
```bash
./config.sh --labels mylabel,anotherlabel
```
> Note: the current runner command line parsing and envvar override algorithm only supports a single argument (key).
This would add the labels `mylabel` and `anotherlabel` to the runner, and enable users to select the runner in their workflow using these labels:
```yaml
runs-on: [self-hosted, mylabel, anotherlabel]
```
It would not be possible to remove labels from an existing runner using `config.sh`; instead, labels would have to be removed using the GitHub UI.
The labels argument will be split on commas; each entry is trimmed and empty strings are discarded. That effectively means commas can't be used in label names during unattended config. Alternatively, we could choose to support escaping commas, but that is a nice-to-have. A sketch of the parsing follows.
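The following C# sketch is illustrative only and is not the exact runner code:
```csharp
using System;
using System.Collections.Generic;
using System.Linq;

var parsed = ParseLabels("mylabel, anotherlabel,,");
Console.WriteLine(string.Join(",", parsed)); // mylabel,anotherlabel

// Split on commas, trim each entry, and discard empty strings.
static ISet<string> ParseLabels(string labels) =>
    labels.Split(',')
          .Select(label => label.Trim())
          .Where(label => !string.IsNullOrEmpty(label))
          .ToHashSet(StringComparer.OrdinalIgnoreCase);
```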
## Replace
If a runner already exists and the option to replace it is chosen (interactively or via unattended config, as in this scenario), then the labels will be replaced/overwritten (not merged).
## Overriding built-in labels
Note that it is possible to register "built-in" hosted labels like `ubuntu-latest`, and this is not considered an error. This is an effective way for the org / runner admin to dictate, by policy through registration, that this set of runners will be used, without having to edit all the workflow files now and in the future.
We will also not impose other restrictions, such as limiting or validating explicitly added os / arch labels. We will assume that explicit labels were added for a reason; not restricting them offers the most flexibility and future-proofing / compatibility.
## Consequences
The ability to add custom labels to a self-hosted runner would enable most scenarios where job runner selection based on runner capabilities or characteristics is required.

View File

@@ -44,7 +44,7 @@ Sample developer flow:
```bash
git clone https://github.com/actions/runner
cd ./src
./dev.(sh/cmd) layout # the runner that built from source is in {root}/_layout
./dev.(sh/cmd) layout # the runner that build from source is in {root}/_layout
<make code changes>
./dev.(sh/cmd) build # {root}/_layout will get updated
./dev.(sh/cmd) test # run all unit tests before git commit/push

View File

@@ -1,61 +0,0 @@
# Runner Authentication and Authorization
## Goals
- Support runner installs in untrusted domains.
- The account that configures or runs the runner process is not relevant for accessing GitHub resources.
- Accessing GitHub resources is done with a per-job token which expires when job completes.
- The token is granted to trusted parts of the system including the runner, actions and script steps specified by the workflow author as trusted.
- All OAuth tokens that come from the Token Service that the runner uses to access Actions Service resources are the same. It's just the scope and expiration of the token that may vary.
## Configuration
Configuring a self-hosted runner is [covered here in the documentation](https://help.github.com/en/actions/hosting-your-own-runners/adding-self-hosted-runners).
Configuration is done with the user being authenticated via a time-limited, GitHub runner registration token.
*Your credentials are never used for registering the runner with the service.*
![Self-hosted runner config](../res/self-hosted-config.png)
During configuration, an RSA public/private key pair is created and the private key is stored in a file on disk. On Windows, the content is protected with DPAPI (machine-level encryption, so the runner is only valid on that machine); on Linux/OSX it is protected with `chmod` permissions. A sketch of the Windows protection follows.
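On Windows, machine-scoped DPAPI protection can be illustrated with the following C# sketch (using `System.Security.Cryptography.ProtectedData`); the surrounding key handling is simplified and not the runner's exact code.
```csharp
using System.Security.Cryptography;

public static class PrivateKeyProtection
{
    // Machine scope: the protected blob can only be decrypted on this machine,
    // which is why the runner configuration is only valid there.
    public static byte[] Protect(byte[] rsaPrivateKeyBytes) =>
        ProtectedData.Protect(rsaPrivateKeyBytes, optionalEntropy: null, DataProtectionScope.LocalMachine);

    public static byte[] Unprotect(byte[] protectedBytes) =>
        ProtectedData.Unprotect(protectedBytes, optionalEntropy: null, DataProtectionScope.LocalMachine);
}
```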
Using your credentials, the runner is registered with the service by sending the public key to the service, which adds that runner to the pool and stores the public key; the Token Service will generate a `clientId` associated with the public key.
## Start and Listen
After configuring the runner, the runner can be started interactively (`./run.cmd` or `./run.sh`) or as a service.
![Self-hosted runner start](../res/self-hosted-start.png)
On start, the runner listener process loads the RSA private key (on Windows, decrypting it with the machine-level DPAPI key) and asks the Token Service for an OAuth token by presenting a JWT signed with the RSA private key.
The server then responds with an OAuth token that grants permission to access the message queue (HTTP long poll), allowing the runner to acquire the messages it will eventually run.
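The token exchange follows the general OAuth 2.0 client-credentials flow with a JWT client assertion. The C# sketch below is illustrative only: the claim names, form fields, and endpoint handling show the generic pattern, not the Token Service's exact protocol.
```csharp
using System;
using System.Collections.Generic;
using System.Net.Http;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Threading.Tasks;

public static class TokenExchange
{
    private static string Base64Url(byte[] bytes) =>
        Convert.ToBase64String(bytes).TrimEnd('=').Replace('+', '-').Replace('/', '_');

    public static async Task<string> ExchangeAsync(RSA privateKey, string clientId, Uri tokenEndpoint)
    {
        // Build a short-lived JWT signed with the runner's RSA private key (RS256).
        var header = Base64Url(Encoding.UTF8.GetBytes(JsonSerializer.Serialize(new { alg = "RS256", typ = "JWT" })));
        var now = DateTimeOffset.UtcNow.ToUnixTimeSeconds();
        var payload = Base64Url(Encoding.UTF8.GetBytes(JsonSerializer.Serialize(new
        {
            iss = clientId,
            sub = clientId,
            aud = tokenEndpoint.AbsoluteUri,
            iat = now,
            exp = now + 300
        })));

        var signingInput = $"{header}.{payload}";
        var signature = Base64Url(privateKey.SignData(
            Encoding.UTF8.GetBytes(signingInput), HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1));
        var jwt = $"{signingInput}.{signature}";

        // Present the signed assertion to the token endpoint in exchange for a short-lived OAuth token.
        using var http = new HttpClient();
        var response = await http.PostAsync(tokenEndpoint, new FormUrlEncodedContent(new Dictionary<string, string>
        {
            ["grant_type"] = "client_credentials",
            ["client_assertion_type"] = "urn:ietf:params:oauth:client-assertion-type:jwt-bearer",
            ["client_assertion"] = jwt,
        }));
        response.EnsureSuccessStatusCode();
        return await response.Content.ReadAsStringAsync(); // JSON body containing the access token
    }
}
```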
## Run a workflow
When a workflow is run, its labels are evaluated, it is matched to a runner and a message is placed in a queue of messages for that runner.
The runner then starts listening for jobs via the message queue HTTP long poll.
The message is encrypted with the runner's public key, stored during runner configuration.
![Runner workflow run](../res/workflow-run.png)
A workflow is queued as a result of a triggered [event](https://help.github.com/en/actions/reference/events-that-trigger-workflows). Workflows can be scheduled to [run at specific UTC times](https://help.github.com/en/actions/reference/events-that-trigger-workflows#scheduled-events-schedule) using POSIX `cron` syntax.
An [OAuth token](http://self-issued.info/docs/draft-ietf-oauth-json-web-token.html) is generated, granting limited access to the host in Actions Service associated with the github.com repository/organization.
The lifetime of the OAuth token is the lifetime of the run or at most the [job timeout (default: 6 hours)](https://help.github.com/en/actions/reference/workflow-syntax-for-github-actions#jobsjob_idtimeout-minutes), plus 10 additional minutes.
## Accessing GitHub resources
The job message sent to the runner contains the OAuth token to talk back to the Actions Service.
The runner listener parent process will spawn a runner worker process for that job and send it the job message over IPC.
The token is never persisted.
Each action is run as a unique subprocess.
The encrypted access token will be provided as an environment variable in each action subprocess.
The token is registered with the runner as a secret and scrubbed from the logs as they are written.
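Conceptually, the scrubbing works like the simplified C# sketch below (a hypothetical class; the runner's real `SecretMasker` also handles encoded variants, see the base64 ADR above):
```csharp
using System.Collections.Generic;

public class SimpleSecretMasker
{
    private readonly List<string> _values = new List<string>();

    public void AddValue(string secret)
    {
        if (!string.IsNullOrEmpty(secret))
        {
            _values.Add(secret);
        }
    }

    // Replace every registered secret value with *** before a line is written to the log.
    public string Mask(string logLine)
    {
        foreach (var value in _values)
        {
            logLine = logLine.Replace(value, "***");
        }
        return logLine;
    }
}
```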
Authentication to github.com in a workflow run can be accomplished by using the [`GITHUB_TOKEN`](https://help.github.com/en/actions/configuring-and-managing-workflows/authenticating-with-the-github_token#about-the-github_token-secret) secret. This token expires after 60 minutes. Please note that this token is different from the OAuth token that the runner uses to talk to the Actions Service.
## Hosted runner authentication
Hosted runner authentication differs from self-hosted authentication in that runners do not undergo a registration process, but instead, the hosted runners get the OAuth token directly by reading the `.credentials` file. The scope of this particular token is limited for a given workflow job execution, and the token is revoked as soon as the job is finished.
![Hosted runner config and start](../res/hosted-config-start.png)

Binary image file removed (31 KiB); preview not shown.

View File

@@ -1,52 +0,0 @@
# Markup used to generate the runner auth diagrams: https://websequencediagrams.com
title Runner Configuration (self-hosted only)
note left of Runner: GitHub repo URL as input
Runner->github.com: Retrieve Actions Service access using runner registration token
github.com->Runner: Access token for Actions Service
note left of Runner: Generate RSA key pair
note left of Runner: Store encrypted RSA private key on disk
Runner->Actions Service: Register runner using Actions Service access token
note right of Runner: Runner name, RSA public key sent
note right of Actions Service: Public key stored
Actions Service->Token Service: Register runner as an app along with the RSA public key
note right of Token Service: Public key stored
Token Service->Actions Service: Client Id for the runner application
Actions Service->Runner: Client Id and Token Endpoint URL
note left of Runner: Store runner configuration info into .runner file
note left of Runner: Store Token registration info into .credentials file
title Runner Start and Running (self-hosted only)
Runner.Listener->Runner.Listener: Start
note left of Runner.Listener: Load config info from .runner
note left of Runner.Listener: Load token registration from .credentials
Runner.Listener->Token Service: Exchange OAuth token (happens every 50 mins)
note right of Runner.Listener: Construct JWT token, use Client Id signed by RSA private key
note left of Actions Service: Find corresponding RSA public key, use Client Id\nVerify JWT token's signature
Token Service->Runner.Listener: OAuth token with limited permission and valid for 50 mins
Runner.Listener->Actions Service: Connect to Actions Service with OAuth token
Actions Service->Runner.Listener: Workflow job
title Running workflow
Runner.Listener->Service (Message Queue): Get message
note right of Runner.Listener: Authenticate with exchanged OAuth token
Event->Actions Service: Queue workflow
Actions Service->Actions Service: Generate OAuth token per job
Actions Service->Actions Service: Build job message with the OAuth token
Actions Service->Actions Service: Encrypt job message with the target runner's public key
Actions Service->Service (Message Queue): Send encrypted job message to runner
Service (Message Queue)->Runner.Listener: Send job
note right of Runner.Listener: Decrypt message with runner's private key
Runner.Listener->Runner.Worker: Create worker process per job and run the job
title Runner Configuration, Start and Running (hosted only)
Machine Management Service->Runner.Listener: Construct .runner configuration file, store token in .credentials
Runner.Listener->Runner.Listener: Start
note left of Runner.Listener: Load config info from .runner
note left of Runner.Listener: Load OAuth token from .credentials
Runner.Listener->Actions Service: Connect to Actions Service with OAuth token in .credentials
Actions Service->Runner.Listener: Workflow job

Binary image file removed (98 KiB); preview not shown.

Binary image file removed (43 KiB); preview not shown.

Binary image file removed (46 KiB); preview not shown.

View File

@@ -1,14 +1,17 @@
## Features
- Runner support for GHES Alpha (#381 #386 #390 #393 #401)
- Allow secrets context in Container.env (#388)
- Remove runner flow: Change from PAT to "deletion token" in prompt (#225)
- Expose github.run_id and github.run_number to action runtime env. (#224)
## Bugs
- Raise warning when volume mount root. (#413)
- Fix typo (#394)
- Clean up error messages for container scenarios (#221)
- Pick shell from prependpath (#231)
## Misc
- N/A
- Runner code cleanup (#218 #227, #228, #229, #230)
- Consume dotnet core 3.1 in runner. (#213)
## Windows x64
We recommend configuring the runner in a root folder of the Windows drive (e.g. "C:\actions-runner"). This will help avoid issues related to service identity folder permissions and long file path restrictions on Windows
We recommend configuring the runner under "<DRIVE>:\actions-runner". This will help avoid issues related to service identity folder permissions and long file path restrictions on Windows
```
// Create a folder under the drive root
mkdir \actions-runner ; cd \actions-runner
@@ -16,7 +19,7 @@ mkdir \actions-runner ; cd \actions-runner
Invoke-WebRequest -Uri https://github.com/actions/runner/releases/download/v<RUNNER_VERSION>/actions-runner-win-x64-<RUNNER_VERSION>.zip -OutFile actions-runner-win-x64-<RUNNER_VERSION>.zip
// Extract the installer
Add-Type -AssemblyName System.IO.Compression.FileSystem ;
[System.IO.Compression.ZipFile]::ExtractToDirectory("$PWD\actions-runner-win-x64-<RUNNER_VERSION>.zip", "$PWD")
[System.IO.Compression.ZipFile]::ExtractToDirectory("$HOME\Downloads\actions-runner-win-x64-<RUNNER_VERSION>.zip", "$PWD")
```
## OSX

View File

@@ -1 +1 @@
2.168.0
2.164.0

View File

@@ -172,7 +172,7 @@ get_current_os_name() {
return 0
elif [ "$uname" = "FreeBSD" ]; then
echo "freebsd"
return 0
return 0
elif [ "$uname" = "Linux" ]; then
local linux_platform_name
linux_platform_name="$(get_linux_platform_name)" || { echo "linux" && return 0 ; }
@@ -728,12 +728,11 @@ downloadcurl() {
# Append feed_credential as late as possible before calling curl to avoid logging feed_credential
remote_path="${remote_path}${feed_credential}"
local curl_options="--retry 20 --retry-delay 2 --connect-timeout 15 -sSL -f --create-dirs "
local failed=false
if [ -z "$out_path" ]; then
curl $curl_options "$remote_path" || failed=true
curl --retry 10 -sSL -f --create-dirs "$remote_path" || failed=true
else
curl $curl_options -o "$out_path" "$remote_path" || failed=true
curl --retry 10 -sSL -f --create-dirs -o "$out_path" "$remote_path" || failed=true
fi
if [ "$failed" = true ]; then
say_verbose "Curl download failed"
@@ -749,12 +748,12 @@ downloadwget() {
# Append feed_credential as late as possible before calling wget to avoid logging feed_credential
remote_path="${remote_path}${feed_credential}"
local wget_options="--tries 20 --waitretry 2 --connect-timeout 15 "
local failed=false
if [ -z "$out_path" ]; then
wget -q $wget_options -O - "$remote_path" || failed=true
wget -q --tries 10 -O - "$remote_path" || failed=true
else
wget $wget_options -O "$out_path" "$remote_path" || failed=true
wget --tries 10 -O "$out_path" "$remote_path" || failed=true
fi
if [ "$failed" = true ]; then
say_verbose "Wget download failed"

File diff suppressed because it is too large.

View File

@@ -27,7 +27,7 @@
"@types/node": "^12.7.12",
"@typescript-eslint/parser": "^2.8.0",
"@zeit/ncc": "^0.20.5",
"eslint": "^6.8.0",
"eslint": "^5.16.0",
"eslint-plugin-github": "^2.0.0",
"prettier": "^1.19.1",
"typescript": "^3.6.4"

View File

@@ -1,13 +0,0 @@
const { spawn } = require('child_process');
// argv[0] = node
// argv[1] = macos-run-invoker.js
var shell = process.argv[2];
var args = process.argv.slice(3);
console.log(`::debug::macos-run-invoker: ${shell}`);
console.log(`::debug::macos-run-invoker: ${JSON.stringify(args)}`);
var launch = spawn(shell, args, { stdio: 'inherit' });
launch.on('exit', function (code) {
if (code !== 0) {
process.exit(code);
}
});

View File

@@ -3,6 +3,8 @@
<packageSources>
<!--To inherit the global NuGet package sources remove the <clear/> line below -->
<clear />
<add key="dotnet-core" value="https://www.myget.org/F/dotnet-core/api/v3/index.json" />
<add key="dotnet-buildtools" value="https://www.myget.org/F/dotnet-buildtools/api/v3/index.json" />
<add key="api.nuget.org" value="https://api.nuget.org/v3/index.json" />
</packageSources>
</configuration>

View File

@@ -15,9 +15,6 @@ namespace GitHub.Runner.Common
[DataContract]
public sealed class RunnerSettings
{
[DataMember(Name = "IsHostedServer", EmitDefaultValue = false)]
private bool? _isHostedServer;
[DataMember(EmitDefaultValue = false)]
public int AgentId { get; set; }
@@ -45,21 +42,6 @@ namespace GitHub.Runner.Common
[DataMember(EmitDefaultValue = false)]
public string MonitorSocketAddress { get; set; }
[IgnoreDataMember]
public bool IsHostedServer
{
get
{
// Old runners do not have this property. Hosted runners likely don't have this property either.
return _isHostedServer ?? true;
}
set
{
_isHostedServer = value;
}
}
/// <summary>
// Computed property for convenience. Can either return:
// 1. If runner was configured at the repo level, returns something like: "myorg/myrepo"
@@ -87,15 +69,6 @@ namespace GitHub.Runner.Common
return repoOrOrgName;
}
}
[OnSerializing]
private void OnSerializing(StreamingContext context)
{
if (_isHostedServer.HasValue && _isHostedServer.Value)
{
_isHostedServer = null;
}
}
}
[ServiceLocator(Default = typeof(ConfigurationStore))]
@@ -105,10 +78,8 @@ namespace GitHub.Runner.Common
bool IsServiceConfigured();
bool HasCredentials();
CredentialData GetCredentials();
CredentialData GetMigratedCredentials();
RunnerSettings GetSettings();
void SaveCredential(CredentialData credential);
void SaveMigratedCredential(CredentialData credential);
void SaveSettings(RunnerSettings settings);
void DeleteCredential();
void DeleteSettings();
@@ -119,11 +90,9 @@ namespace GitHub.Runner.Common
private string _binPath;
private string _configFilePath;
private string _credFilePath;
private string _migratedCredFilePath;
private string _serviceConfigFilePath;
private CredentialData _creds;
private CredentialData _migratedCreds;
private RunnerSettings _settings;
public override void Initialize(IHostContext hostContext)
@@ -145,9 +114,6 @@ namespace GitHub.Runner.Common
_credFilePath = hostContext.GetConfigFile(WellKnownConfigFile.Credentials);
Trace.Info("CredFilePath: {0}", _credFilePath);
_migratedCredFilePath = hostContext.GetConfigFile(WellKnownConfigFile.MigratedCredentials);
Trace.Info("MigratedCredFilePath: {0}", _migratedCredFilePath);
_serviceConfigFilePath = hostContext.GetConfigFile(WellKnownConfigFile.Service);
Trace.Info("ServiceConfigFilePath: {0}", _serviceConfigFilePath);
}
@@ -157,7 +123,7 @@ namespace GitHub.Runner.Common
public bool HasCredentials()
{
Trace.Info("HasCredentials()");
bool credsStored = (new FileInfo(_credFilePath)).Exists || (new FileInfo(_migratedCredFilePath)).Exists;
bool credsStored = (new FileInfo(_credFilePath)).Exists;
Trace.Info("stored {0}", credsStored);
return credsStored;
}
@@ -188,16 +154,6 @@ namespace GitHub.Runner.Common
return _creds;
}
public CredentialData GetMigratedCredentials()
{
if (_migratedCreds == null && File.Exists(_migratedCredFilePath))
{
_migratedCreds = IOUtil.LoadObject<CredentialData>(_migratedCredFilePath);
}
return _migratedCreds;
}
public RunnerSettings GetSettings()
{
if (_settings == null)
@@ -232,21 +188,6 @@ namespace GitHub.Runner.Common
File.SetAttributes(_credFilePath, File.GetAttributes(_credFilePath) | FileAttributes.Hidden);
}
public void SaveMigratedCredential(CredentialData credential)
{
Trace.Info("Saving {0} migrated credential @ {1}", credential.Scheme, _migratedCredFilePath);
if (File.Exists(_migratedCredFilePath))
{
// Delete existing credential file first, since the file is hidden and not able to overwrite.
Trace.Info("Delete exist runner migrated credential file.");
IOUtil.DeleteFile(_migratedCredFilePath);
}
IOUtil.SaveObject(credential, _migratedCredFilePath);
Trace.Info("Migrated Credentials Saved.");
File.SetAttributes(_migratedCredFilePath, File.GetAttributes(_migratedCredFilePath) | FileAttributes.Hidden);
}
public void SaveSettings(RunnerSettings settings)
{
Trace.Info("Saving runner settings.");
@@ -265,7 +206,6 @@ namespace GitHub.Runner.Common
public void DeleteCredential()
{
IOUtil.Delete(_credFilePath, default(CancellationToken));
IOUtil.Delete(_migratedCredFilePath, default(CancellationToken));
}
public void DeleteSettings()

View File

@@ -19,13 +19,11 @@ namespace GitHub.Runner.Common
{
Runner,
Credentials,
MigratedCredentials,
RSACredentials,
Service,
CredentialStore,
Certificates,
Options,
SetupInfo,
}
public static class Constants
@@ -87,7 +85,6 @@ namespace GitHub.Runner.Common
public static class Args
{
public static readonly string Auth = "auth";
public static readonly string Labels = "labels";
public static readonly string MonitorSocketAddress = "monitorsocketaddress";
public static readonly string Name = "name";
public static readonly string Pool = "pool";
@@ -139,12 +136,6 @@ namespace GitHub.Runner.Common
}
}
public static class RunnerEvent
{
public static readonly string Register = "register";
public static readonly string Remove = "remove";
}
public static class Pipeline
{
public static class Path

View File

@@ -24,7 +24,7 @@ namespace GitHub.Runner.Common
CancellationToken RunnerShutdownToken { get; }
ShutdownReason RunnerShutdownReason { get; }
ISecretMasker SecretMasker { get; }
List<ProductInfoHeaderValue> UserAgents { get; }
ProductInfoHeaderValue UserAgent { get; }
RunnerWebProxy WebProxy { get; }
string GetDirectory(WellKnownDirectory directory);
string GetConfigFile(WellKnownConfigFile configFile);
@@ -54,7 +54,7 @@ namespace GitHub.Runner.Common
private readonly ConcurrentDictionary<Type, object> _serviceInstances = new ConcurrentDictionary<Type, object>();
private readonly ConcurrentDictionary<Type, Type> _serviceTypes = new ConcurrentDictionary<Type, Type>();
private readonly ISecretMasker _secretMasker = new SecretMasker();
private readonly List<ProductInfoHeaderValue> _userAgents = new List<ProductInfoHeaderValue>() { new ProductInfoHeaderValue($"GitHubActionsRunner-{BuildConstants.RunnerPackage.PackageName}", BuildConstants.RunnerPackage.Version) };
private readonly ProductInfoHeaderValue _userAgent = new ProductInfoHeaderValue($"GitHubActionsRunner-{BuildConstants.RunnerPackage.PackageName}", BuildConstants.RunnerPackage.Version);
private CancellationTokenSource _runnerShutdownTokenSource = new CancellationTokenSource();
private object _perfLock = new object();
private Tracing _trace;
@@ -72,7 +72,7 @@ namespace GitHub.Runner.Common
public CancellationToken RunnerShutdownToken => _runnerShutdownTokenSource.Token;
public ShutdownReason RunnerShutdownReason { get; private set; }
public ISecretMasker SecretMasker => _secretMasker;
public List<ProductInfoHeaderValue> UserAgents => _userAgents;
public ProductInfoHeaderValue UserAgent => _userAgent;
public RunnerWebProxy WebProxy => _webProxy;
public HostContext(string hostType, string logFile = null)
{
@@ -189,17 +189,6 @@ namespace GitHub.Runner.Common
{
_trace.Info($"No proxy settings were found based on environmental variables (http_proxy/https_proxy/HTTP_PROXY/HTTPS_PROXY)");
}
var credFile = GetConfigFile(WellKnownConfigFile.Credentials);
if (File.Exists(credFile))
{
var credData = IOUtil.LoadObject<CredentialData>(credFile);
if (credData != null &&
credData.Data.TryGetValue("clientId", out var clientId))
{
_userAgents.Add(new ProductInfoHeaderValue($"RunnerId", clientId));
}
}
}
public string GetDirectory(WellKnownDirectory directory)
@@ -292,12 +281,6 @@ namespace GitHub.Runner.Common
".credentials");
break;
case WellKnownConfigFile.MigratedCredentials:
path = Path.Combine(
GetDirectory(WellKnownDirectory.Root),
".credentials_migrated");
break;
case WellKnownConfigFile.RSACredentials:
path = Path.Combine(
GetDirectory(WellKnownDirectory.Root),
@@ -333,13 +316,6 @@ namespace GitHub.Runner.Common
GetDirectory(WellKnownDirectory.Root),
".options");
break;
case WellKnownConfigFile.SetupInfo:
path = Path.Combine(
GetDirectory(WellKnownDirectory.Root),
".setup_info");
break;
default:
throw new NotSupportedException($"Unexpected well known config file: '{configFile}'");
}

View File

@@ -41,7 +41,7 @@ namespace GitHub.Runner.Common
// job request
Task<TaskAgentJobRequest> GetAgentRequestAsync(int poolId, long requestId, CancellationToken cancellationToken);
Task<TaskAgentJobRequest> RenewAgentRequestAsync(int poolId, long requestId, Guid lockToken, string orchestrationId, CancellationToken cancellationToken);
Task<TaskAgentJobRequest> RenewAgentRequestAsync(int poolId, long requestId, Guid lockToken, CancellationToken cancellationToken);
Task<TaskAgentJobRequest> FinishAgentRequestAsync(int poolId, long requestId, Guid lockToken, DateTime finishTime, TaskResult result, CancellationToken cancellationToken);
// agent package
@@ -50,10 +50,6 @@ namespace GitHub.Runner.Common
// agent update
Task<TaskAgent> UpdateAgentUpdateStateAsync(int agentPoolId, int agentId, string currentState);
// runner authorization url
Task<string> GetRunnerAuthUrlAsync(int runnerPoolId, int runnerId);
Task ReportRunnerAuthUrlErrorAsync(int runnerPoolId, int runnerId, string error);
}
public sealed class RunnerServer : RunnerService, IRunnerServer
@@ -300,10 +296,10 @@ namespace GitHub.Runner.Common
// JobRequest
//-----------------------------------------------------------------
public Task<TaskAgentJobRequest> RenewAgentRequestAsync(int poolId, long requestId, Guid lockToken, string orchestrationId = null, CancellationToken cancellationToken = default(CancellationToken))
public Task<TaskAgentJobRequest> RenewAgentRequestAsync(int poolId, long requestId, Guid lockToken, CancellationToken cancellationToken = default(CancellationToken))
{
CheckConnection(RunnerConnectionType.JobRequest);
return _requestTaskAgentClient.RenewAgentRequestAsync(poolId, requestId, lockToken, orchestrationId: orchestrationId, cancellationToken: cancellationToken);
return _requestTaskAgentClient.RenewAgentRequestAsync(poolId, requestId, lockToken, cancellationToken: cancellationToken);
}
public Task<TaskAgentJobRequest> FinishAgentRequestAsync(int poolId, long requestId, Guid lockToken, DateTime finishTime, TaskResult result, CancellationToken cancellationToken = default(CancellationToken))
@@ -338,20 +334,5 @@ namespace GitHub.Runner.Common
CheckConnection(RunnerConnectionType.Generic);
return _genericTaskAgentClient.UpdateAgentUpdateStateAsync(agentPoolId, agentId, currentState);
}
//-----------------------------------------------------------------
// Runner Auth Url
//-----------------------------------------------------------------
public Task<string> GetRunnerAuthUrlAsync(int runnerPoolId, int runnerId)
{
CheckConnection(RunnerConnectionType.MessageQueue);
return _messageTaskAgentClient.GetAgentAuthUrlAsync(runnerPoolId, runnerId);
}
public Task ReportRunnerAuthUrlErrorAsync(int runnerPoolId, int runnerId, string error)
{
CheckConnection(RunnerConnectionType.MessageQueue);
return _messageTaskAgentClient.ReportAgentAuthUrlMigrationErrorAsync(runnerPoolId, runnerId, error);
}
}
}

View File

@@ -39,7 +39,6 @@ namespace GitHub.Runner.Listener
private readonly string[] validArgs =
{
Constants.Runner.CommandLine.Args.Auth,
Constants.Runner.CommandLine.Args.Labels,
Constants.Runner.CommandLine.Args.MonitorSocketAddress,
Constants.Runner.CommandLine.Args.Name,
Constants.Runner.CommandLine.Args.Pool,
@@ -191,7 +190,7 @@ namespace GitHub.Runner.Listener
{
return GetArgOrPrompt(
name: Constants.Runner.CommandLine.Args.Token,
description: "Enter runner remove token:",
description: "Enter runner deletion token:",
defaultValue: string.Empty,
validator: Validators.NonEmptyValidator);
}
@@ -250,24 +249,6 @@ namespace GitHub.Runner.Listener
return GetArg(Constants.Runner.CommandLine.Args.StartupType);
}
public ISet<string> GetLabels()
{
var labelSet = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
string labels = GetArgOrPrompt(
name: Constants.Runner.CommandLine.Args.Labels,
description: $"This runner will have the following labels: 'self-hosted', '{VarUtil.OS}', '{VarUtil.OSArchitecture}' \nEnter any additional labels (ex. label-1,label-2):",
defaultValue: string.Empty,
validator: Validators.LabelsValidator,
isOptional: true);
if (!string.IsNullOrEmpty(labels))
{
labelSet = labels.Split(',').Where(x => !string.IsNullOrEmpty(x)).ToHashSet<string>(StringComparer.OrdinalIgnoreCase);
}
return labelSet;
}
//
// Private helpers.
//
@@ -299,8 +280,7 @@ namespace GitHub.Runner.Listener
string name,
string description,
string defaultValue,
Func<string, bool> validator,
bool isOptional = false)
Func<string, bool> validator)
{
// Check for the arg in the command line parser.
ArgUtil.NotNull(validator, nameof(validator));
@@ -311,7 +291,7 @@ namespace GitHub.Runner.Listener
if (!string.IsNullOrEmpty(result))
{
// After read the arg from input commandline args, remove it from Arg dictionary,
// This will help if bad arg value passed through CommandLine arg, when ConfigurationManager ask CommandSetting the second time,
// This will help if bad arg value passed through CommandLine arg, when ConfigurationManager ask CommandSetting the second time,
// It will prompt for input instead of continue use the bad input.
_trace.Info($"Remove {name} from Arg dictionary.");
RemoveArg(name);
@@ -331,8 +311,7 @@ namespace GitHub.Runner.Listener
secret: Constants.Runner.CommandLine.Args.Secrets.Any(x => string.Equals(x, name, StringComparison.OrdinalIgnoreCase)),
defaultValue: defaultValue,
validator: validator,
unattended: Unattended,
isOptional: isOptional);
unattended: Unattended);
}
private string GetEnvArg(string name)

View File

@@ -1,18 +1,19 @@
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using GitHub.Services.Common;
using GitHub.Services.OAuth;
using GitHub.Services.WebApi;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Runtime.InteropServices;
using System.Security.Cryptography;
using System.Threading.Tasks;
using System.Runtime.InteropServices;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using System.Net.Http;
using System.Net.Http.Headers;
namespace GitHub.Runner.Listener.Configuration
{
@@ -86,6 +87,7 @@ namespace GitHub.Runner.Listener.Configuration
RunnerSettings runnerSettings = new RunnerSettings();
bool isHostedServer = false;
// Loop getting url and creds until you can connect
ICredentialProvider credProvider = null;
VssCredentials creds = null;
@@ -94,7 +96,8 @@ namespace GitHub.Runner.Listener.Configuration
{
// Get the URL
var inputUrl = command.GetUrl();
if (inputUrl.Contains("codedev.ms", StringComparison.OrdinalIgnoreCase))
if (!inputUrl.Contains("github.com", StringComparison.OrdinalIgnoreCase) &&
!inputUrl.Contains("github.localhost", StringComparison.OrdinalIgnoreCase))
{
runnerSettings.ServerUrl = inputUrl;
// Get the credentials
@@ -106,7 +109,7 @@ namespace GitHub.Runner.Listener.Configuration
{
runnerSettings.GitHubUrl = inputUrl;
var githubToken = command.GetRunnerRegisterToken();
GitHubAuthResult authResult = await GetTenantCredential(inputUrl, githubToken, Constants.RunnerEvent.Register);
GitHubAuthResult authResult = await GetTenantCredential(inputUrl, githubToken);
runnerSettings.ServerUrl = authResult.TenantUrl;
creds = authResult.ToVssCredentials();
Trace.Info("cred retrieved via GitHub auth");
@@ -115,7 +118,7 @@ namespace GitHub.Runner.Listener.Configuration
try
{
// Determine the service deployment type based on connection data. (Hosted/OnPremises)
runnerSettings.IsHostedServer = runnerSettings.GitHubUrl == null || IsHostedServer(new UriBuilder(runnerSettings.GitHubUrl));
isHostedServer = await IsHostedServer(runnerSettings.ServerUrl, creds);
// Validate can connect.
await _runnerServer.ConnectAsync(new Uri(runnerSettings.ServerUrl), creds);
@@ -166,9 +169,6 @@ namespace GitHub.Runner.Listener.Configuration
_term.WriteLine();
var userLabels = command.GetLabels();
_term.WriteLine();
var agents = await _runnerServer.GetAgentsAsync(runnerSettings.PoolId, runnerSettings.AgentName);
Trace.Verbose("Returns {0} agents", agents.Count);
agent = agents.FirstOrDefault();
@@ -178,7 +178,7 @@ namespace GitHub.Runner.Listener.Configuration
if (command.GetReplace())
{
// Update existing agent with new PublicKey, agent version.
agent = UpdateExistingAgent(agent, publicKey, userLabels);
agent = UpdateExistingAgent(agent, publicKey);
try
{
@@ -200,8 +200,8 @@ namespace GitHub.Runner.Listener.Configuration
}
else
{
// Create a new agent.
agent = CreateNewAgent(runnerSettings.AgentName, publicKey, userLabels);
// Create a new agent.
agent = CreateNewAgent(runnerSettings.AgentName, publicKey);
try
{
@@ -249,6 +249,14 @@ namespace GitHub.Runner.Listener.Configuration
{
UriBuilder configServerUrl = new UriBuilder(runnerSettings.ServerUrl);
UriBuilder oauthEndpointUrlBuilder = new UriBuilder(agent.Authorization.AuthorizationUrl);
if (!isHostedServer && Uri.Compare(configServerUrl.Uri, oauthEndpointUrlBuilder.Uri, UriComponents.SchemeAndServer, UriFormat.Unescaped, StringComparison.OrdinalIgnoreCase) != 0)
{
oauthEndpointUrlBuilder.Scheme = configServerUrl.Scheme;
oauthEndpointUrlBuilder.Host = configServerUrl.Host;
oauthEndpointUrlBuilder.Port = configServerUrl.Port;
Trace.Info($"Set oauth endpoint url's scheme://host:port component to match runner configure url's scheme://host:port: '{oauthEndpointUrlBuilder.Uri.AbsoluteUri}'.");
}
var credentialData = new CredentialData
{
Scheme = Constants.Configuration.OAuth,
@@ -269,22 +277,19 @@ namespace GitHub.Runner.Listener.Configuration
throw new NotSupportedException("Message queue listen OAuth token.");
}
// Testing agent connection, detect any potential connection issue, like local clock skew that cause OAuth token expired.
// Testing agent connection, detect any protential connection issue, like local clock skew that cause OAuth token expired.
var credMgr = HostContext.GetService<ICredentialManager>();
VssCredentials credential = credMgr.LoadCredentials();
try
{
await _runnerServer.ConnectAsync(new Uri(runnerSettings.ServerUrl), credential);
// ConnectAsync() hits _apis/connectionData which is an anonymous endpoint
// Need to hit an authenticate endpoint to trigger OAuth token exchange.
await _runnerServer.GetAgentPoolsAsync();
_term.WriteSuccessMessage("Runner connection is good");
}
catch (VssOAuthTokenRequestException ex) when (ex.Message.Contains("Current server time is"))
{
// there are two exception messages server send that indicate clock skew.
// 1. The bearer token expired on {jwt.ValidTo}. Current server time is {DateTime.UtcNow}.
// 2. The bearer token is not valid until {jwt.ValidFrom}. Current server time is {DateTime.UtcNow}.
// 2. The bearer token is not valid until {jwt.ValidFrom}. Current server time is {DateTime.UtcNow}.
Trace.Error("Catch exception during test agent connection.");
Trace.Error(ex);
throw new Exception("The local machine's clock may be out of sync with the server time by more than five minutes. Please sync your clock with your domain or internet time and try again.");
@@ -368,12 +373,13 @@ namespace GitHub.Runner.Listener.Configuration
else
{
var githubToken = command.GetRunnerDeletionToken();
GitHubAuthResult authResult = await GetTenantCredential(settings.GitHubUrl, githubToken, Constants.RunnerEvent.Remove);
GitHubAuthResult authResult = await GetTenantCredential(settings.GitHubUrl, githubToken);
creds = authResult.ToVssCredentials();
Trace.Info("cred retrieved via GitHub auth");
}
// Determine the service deployment type based on connection data. (Hosted/OnPremises)
bool isHostedServer = await IsHostedServer(settings.ServerUrl, creds);
await _runnerServer.ConnectAsync(new Uri(settings.ServerUrl), creds);
var agents = await _runnerServer.GetAgentsAsync(settings.PoolId, settings.AgentName);
@@ -396,7 +402,7 @@ namespace GitHub.Runner.Listener.Configuration
_term.WriteLine("Cannot connect to server, because config files are missing. Skipping removing runner from the server.");
}
//delete credential config files
//delete credential config files
currentAction = "Removing .credentials";
if (hasCredentials)
{
@@ -410,7 +416,7 @@ namespace GitHub.Runner.Listener.Configuration
_term.WriteLine("Does not exist. Skipping " + currentAction);
}
//delete settings config file
//delete settings config file
currentAction = "Removing .runner";
if (isConfigured)
{
@@ -451,7 +457,7 @@ namespace GitHub.Runner.Listener.Configuration
}
private TaskAgent UpdateExistingAgent(TaskAgent agent, RSAParameters publicKey, ISet<string> userLabels)
private TaskAgent UpdateExistingAgent(TaskAgent agent, RSAParameters publicKey)
{
ArgUtil.NotNull(agent, nameof(agent));
agent.Authorization = new TaskAgentAuthorization
@@ -459,25 +465,18 @@ namespace GitHub.Runner.Listener.Configuration
PublicKey = new TaskAgentPublicKey(publicKey.Exponent, publicKey.Modulus),
};
// update should replace the existing labels
// update - update instead of delete so we don't lose labels etc...
agent.Version = BuildConstants.RunnerPackage.Version;
agent.OSDescription = RuntimeInformation.OSDescription;
agent.Labels.Clear();
agent.Labels.Add(new AgentLabel("self-hosted", LabelType.System));
agent.Labels.Add(new AgentLabel(VarUtil.OS, LabelType.System));
agent.Labels.Add(new AgentLabel(VarUtil.OSArchitecture, LabelType.System));
agent.Labels.Add("self-hosted");
agent.Labels.Add(VarUtil.OS);
agent.Labels.Add(VarUtil.OSArchitecture);
foreach (var userLabel in userLabels)
{
agent.Labels.Add(new AgentLabel(userLabel, LabelType.User));
}
return agent;
}
private TaskAgent CreateNewAgent(string agentName, RSAParameters publicKey, ISet<string> userLabels)
private TaskAgent CreateNewAgent(string agentName, RSAParameters publicKey)
{
TaskAgent agent = new TaskAgent(agentName)
{
@@ -490,51 +489,45 @@ namespace GitHub.Runner.Listener.Configuration
OSDescription = RuntimeInformation.OSDescription,
};
agent.Labels.Add(new AgentLabel("self-hosted", LabelType.System));
agent.Labels.Add(new AgentLabel(VarUtil.OS, LabelType.System));
agent.Labels.Add(new AgentLabel(VarUtil.OSArchitecture, LabelType.System));
foreach (var userLabel in userLabels)
{
agent.Labels.Add(new AgentLabel(userLabel, LabelType.User));
}
agent.Labels.Add("self-hosted");
agent.Labels.Add(VarUtil.OS);
agent.Labels.Add(VarUtil.OSArchitecture);
return agent;
}
private bool IsHostedServer(UriBuilder gitHubUrl)
private async Task<bool> IsHostedServer(string serverUrl, VssCredentials credentials)
{
return string.Equals(gitHubUrl.Host, "github.com", StringComparison.OrdinalIgnoreCase) ||
string.Equals(gitHubUrl.Host, "www.github.com", StringComparison.OrdinalIgnoreCase) ||
string.Equals(gitHubUrl.Host, "github.localhost", StringComparison.OrdinalIgnoreCase);
// Determine the service deployment type based on connection data. (Hosted/OnPremises)
var locationServer = HostContext.GetService<ILocationServer>();
VssConnection connection = VssUtil.CreateConnection(new Uri(serverUrl), credentials);
await locationServer.ConnectAsync(connection);
try
{
var connectionData = await locationServer.GetConnectionDataAsync();
Trace.Info($"Server deployment type: {connectionData.DeploymentType}");
return connectionData.DeploymentType.HasFlag(DeploymentFlags.Hosted);
}
catch (Exception ex)
{
// Since the DeploymentType is Enum, deserialization exception means there is a new Enum member been added.
// It's more likely to be Hosted since OnPremises is always behind and customer can update their agent if are on-prem
Trace.Error(ex);
return true;
}
}
private async Task<GitHubAuthResult> GetTenantCredential(string githubUrl, string githubToken, string runnerEvent)
private async Task<GitHubAuthResult> GetTenantCredential(string githubUrl, string githubToken)
{
var githubApiUrl = "";
var gitHubUrlBuilder = new UriBuilder(githubUrl);
if (IsHostedServer(gitHubUrlBuilder))
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/actions/runner-registration";
}
else
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/actions/runner-registration";
}
var gitHubUrl = new UriBuilder(githubUrl);
var githubApiUrl = $"https://api.{gitHubUrl.Host}/repos/{gitHubUrl.Path.Trim('/')}/actions-runners/registration";
using (var httpClientHandler = HostContext.CreateHttpClientHandler())
using (var httpClient = new HttpClient(httpClientHandler))
{
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("RemoteAuth", githubToken);
httpClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents);
var bodyObject = new Dictionary<string, string>()
{
{"url", githubUrl},
{"runner_event", runnerEvent}
};
var response = await httpClient.PostAsync(githubApiUrl, new StringContent(StringUtil.ConvertToJson(bodyObject), null, "application/json"));
httpClient.DefaultRequestHeaders.UserAgent.Add(HostContext.UserAgent);
httpClient.DefaultRequestHeaders.Accept.Add(new MediaTypeWithQualityHeaderValue("application/vnd.github.shuri-preview+json"));
var response = await httpClient.PostAsync(githubApiUrl, new StringContent("", null, "application/json"));
if (response.IsSuccessStatusCode)
{

View File

@@ -13,7 +13,7 @@ namespace GitHub.Runner.Listener.Configuration
public interface ICredentialManager : IRunnerService
{
ICredentialProvider GetCredentialProvider(string credType);
VssCredentials LoadCredentials(bool preferMigrated = true);
VssCredentials LoadCredentials();
}
public class CredentialManager : RunnerService, ICredentialManager
@@ -40,7 +40,7 @@ namespace GitHub.Runner.Listener.Configuration
return creds;
}
public VssCredentials LoadCredentials(bool preferMigrated = true)
public VssCredentials LoadCredentials()
{
IConfigurationStore store = HostContext.GetService<IConfigurationStore>();
@@ -50,16 +50,6 @@ namespace GitHub.Runner.Listener.Configuration
}
CredentialData credData = store.GetCredentials();
if (preferMigrated)
{
var migratedCred = store.GetMigratedCredentials();
if (migratedCred != null)
{
credData = migratedCred;
}
}
ICredentialProvider credProv = GetCredentialProvider(credData.Scheme);
credProv.CredentialData = credData;

View File

@@ -1,5 +1,6 @@
using System;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using GitHub.Services.Common;
using GitHub.Services.OAuth;
@@ -28,7 +29,7 @@ namespace GitHub.Runner.Listener.Configuration
var authorizationUrl = this.CredentialData.Data.GetValueOrDefault("authorizationUrl", null);
// For back compat with .credential file that doesn't has 'oauthEndpointUrl' section
var oauthEndpointUrl = this.CredentialData.Data.GetValueOrDefault("oauthEndpointUrl", authorizationUrl);
var oathEndpointUrl = this.CredentialData.Data.GetValueOrDefault("oauthEndpointUrl", authorizationUrl);
ArgUtil.NotNullOrEmpty(clientId, nameof(clientId));
ArgUtil.NotNullOrEmpty(authorizationUrl, nameof(authorizationUrl));
@@ -38,7 +39,7 @@ namespace GitHub.Runner.Listener.Configuration
var keyManager = context.GetService<IRSAKeyManager>();
var signingCredentials = VssSigningCredentials.Create(() => keyManager.GetKey());
var clientCredential = new VssOAuthJwtBearerClientCredential(clientId, authorizationUrl, signingCredentials);
var agentCredential = new VssOAuthCredential(new Uri(oauthEndpointUrl, UriKind.Absolute), VssOAuthGrant.ClientCredentials, clientCredential);
var agentCredential = new VssOAuthCredential(new Uri(oathEndpointUrl, UriKind.Absolute), VssOAuthGrant.ClientCredentials, clientCredential);
// Construct a credentials cache with a single OAuth credential for communication. The windows credential
// is explicitly set to null to ensure we never do that negotiation.

View File

@@ -20,8 +20,7 @@ namespace GitHub.Runner.Listener.Configuration
bool secret,
string defaultValue,
Func<String, bool> validator,
bool unattended,
bool isOptional = false);
bool unattended);
}
public sealed class PromptManager : RunnerService, IPromptManager
@@ -57,8 +56,7 @@ namespace GitHub.Runner.Listener.Configuration
bool secret,
string defaultValue,
Func<string, bool> validator,
bool unattended,
bool isOptional = false)
bool unattended)
{
Trace.Info(nameof(ReadValue));
ArgUtil.NotNull(validator, nameof(validator));
@@ -72,10 +70,6 @@ namespace GitHub.Runner.Listener.Configuration
{
return defaultValue;
}
else if (isOptional)
{
return string.Empty;
}
// Otherwise throw.
throw new Exception($"Invalid configuration provided for {argName}. Terminating unattended configuration.");
@@ -91,28 +85,18 @@ namespace GitHub.Runner.Listener.Configuration
{
_terminal.Write($"[press Enter for {defaultValue}] ");
}
else if (isOptional){
_terminal.Write($"[press Enter to skip] ");
}
// Read and trim the value.
value = secret ? _terminal.ReadSecret() : _terminal.ReadLine();
value = value?.Trim() ?? string.Empty;
// Return the default if not specified.
if (string.IsNullOrEmpty(value))
if (string.IsNullOrEmpty(value) && !string.IsNullOrEmpty(defaultValue))
{
if (!string.IsNullOrEmpty(defaultValue))
{
Trace.Info($"Falling back to the default: '{defaultValue}'");
return defaultValue;
}
else if (isOptional)
{
return string.Empty;
}
Trace.Info($"Falling back to the default: '{defaultValue}'");
return defaultValue;
}
// Return the value if it is not empty and it is valid.
// Otherwise try the loop again.
if (!string.IsNullOrEmpty(value))

View File

@@ -1,7 +1,6 @@
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using System;
using System.Linq;
using System.IO;
using System.Security.Principal;
@@ -47,21 +46,6 @@ namespace GitHub.Runner.Listener.Configuration
string.Equals(value, "N", StringComparison.CurrentCultureIgnoreCase);
}
public static bool LabelsValidator(string labels)
{
if (!string.IsNullOrEmpty(labels))
{
var labelSet = labels.Split(',').Where(x => !string.IsNullOrEmpty(x)).ToHashSet<string>(StringComparer.OrdinalIgnoreCase);
if (labelSet.Any(x => x.Length > 256))
{
return false;
}
}
return true;
}
public static bool NonEmptyValidator(string value)
{
return !string.IsNullOrEmpty(value);

View File

@@ -12,14 +12,12 @@ using System.Linq;
using GitHub.Services.Common;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using GitHub.Services.WebApi.Jwt;
namespace GitHub.Runner.Listener
{
[ServiceLocator(Default = typeof(JobDispatcher))]
public interface IJobDispatcher : IRunnerService
{
bool Busy { get; }
TaskCompletionSource<bool> RunOnceJobCompleted { get; }
void Run(Pipelines.AgentJobRequestMessage message, bool runOnce = false);
bool Cancel(JobCancelMessage message);
@@ -71,8 +69,6 @@ namespace GitHub.Runner.Listener
public TaskCompletionSource<bool> RunOnceJobCompleted => _runOnceJobCompleted;
public bool Busy { get; private set; }
public void Run(Pipelines.AgentJobRequestMessage jobRequestMessage, bool runOnce = false)
{
Trace.Info($"Job request {jobRequestMessage.RequestId} for plan {jobRequestMessage.Plan.PlanId} job {jobRequestMessage.JobId} received.");
@@ -87,30 +83,15 @@ namespace GitHub.Runner.Listener
}
}
var orchestrationId = string.Empty;
var systemConnection = jobRequestMessage.Resources.Endpoints.SingleOrDefault(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
if (systemConnection?.Authorization != null &&
systemConnection.Authorization.Parameters.TryGetValue("AccessToken", out var accessToken) &&
!string.IsNullOrEmpty(accessToken))
{
var jwt = JsonWebToken.Create(accessToken);
var claims = jwt.ExtractClaims();
orchestrationId = claims.FirstOrDefault(x => string.Equals(x.Type, "orchid", StringComparison.OrdinalIgnoreCase))?.Value;
if (!string.IsNullOrEmpty(orchestrationId))
{
Trace.Info($"Pull OrchestrationId {orchestrationId} from JWT claims");
}
}
WorkerDispatcher newDispatch = new WorkerDispatcher(jobRequestMessage.JobId, jobRequestMessage.RequestId);
if (runOnce)
{
Trace.Info("Start dispatcher for one time used runner.");
newDispatch.WorkerDispatch = RunOnceAsync(jobRequestMessage, orchestrationId, currentDispatch, newDispatch.WorkerCancellationTokenSource.Token, newDispatch.WorkerCancelTimeoutKillTokenSource.Token);
newDispatch.WorkerDispatch = RunOnceAsync(jobRequestMessage, currentDispatch, newDispatch.WorkerCancellationTokenSource.Token, newDispatch.WorkerCancelTimeoutKillTokenSource.Token);
}
else
{
newDispatch.WorkerDispatch = RunAsync(jobRequestMessage, orchestrationId, currentDispatch, newDispatch.WorkerCancellationTokenSource.Token, newDispatch.WorkerCancelTimeoutKillTokenSource.Token);
newDispatch.WorkerDispatch = RunAsync(jobRequestMessage, currentDispatch, newDispatch.WorkerCancellationTokenSource.Token, newDispatch.WorkerCancelTimeoutKillTokenSource.Token);
}
_jobInfos.TryAdd(newDispatch.JobId, newDispatch);
@@ -266,7 +247,7 @@ namespace GitHub.Runner.Listener
Task completedTask = await Task.WhenAny(jobDispatch.WorkerDispatch, Task.Delay(TimeSpan.FromSeconds(45)));
if (completedTask != jobDispatch.WorkerDispatch)
{
// at this point, the job execution might encounter some dead lock and even not able to be cancelled.
// at this point, the job exectuion might encounter some dead lock and even not able to be canclled.
// no need to localize the exception string should never happen.
throw new InvalidOperationException($"Job dispatch process for {jobDispatch.JobId} has encountered unexpected error, the dispatch task is not able to be canceled within 45 seconds.");
}
@@ -300,11 +281,11 @@ namespace GitHub.Runner.Listener
}
}
private async Task RunOnceAsync(Pipelines.AgentJobRequestMessage message, string orchestrationId, WorkerDispatcher previousJobDispatch, CancellationToken jobRequestCancellationToken, CancellationToken workerCancelTimeoutKillToken)
private async Task RunOnceAsync(Pipelines.AgentJobRequestMessage message, WorkerDispatcher previousJobDispatch, CancellationToken jobRequestCancellationToken, CancellationToken workerCancelTimeoutKillToken)
{
try
{
await RunAsync(message, orchestrationId, previousJobDispatch, jobRequestCancellationToken, workerCancelTimeoutKillToken);
await RunAsync(message, previousJobDispatch, jobRequestCancellationToken, workerCancelTimeoutKillToken);
}
finally
{
@@ -313,145 +294,271 @@ namespace GitHub.Runner.Listener
}
}
private async Task RunAsync(Pipelines.AgentJobRequestMessage message, string orchestrationId, WorkerDispatcher previousJobDispatch, CancellationToken jobRequestCancellationToken, CancellationToken workerCancelTimeoutKillToken)
private async Task RunAsync(Pipelines.AgentJobRequestMessage message, WorkerDispatcher previousJobDispatch, CancellationToken jobRequestCancellationToken, CancellationToken workerCancelTimeoutKillToken)
{
Busy = true;
try
if (previousJobDispatch != null)
{
if (previousJobDispatch != null)
Trace.Verbose($"Make sure the previous job request {previousJobDispatch.JobId} has successfully finished on worker.");
await EnsureDispatchFinished(previousJobDispatch);
}
else
{
Trace.Verbose($"This is the first job request.");
}
var term = HostContext.GetService<ITerminal>();
term.WriteLine($"{DateTime.UtcNow:u}: Running job: {message.JobDisplayName}");
// first job request renew succeed.
TaskCompletionSource<int> firstJobRequestRenewed = new TaskCompletionSource<int>();
var notification = HostContext.GetService<IJobNotification>();
// lock renew cancellation token.
using (var lockRenewalTokenSource = new CancellationTokenSource())
using (var workerProcessCancelTokenSource = new CancellationTokenSource())
{
long requestId = message.RequestId;
Guid lockToken = Guid.Empty; // lockToken has never been used, keep this here of compat
// start renew job request
Trace.Info($"Start renew job request {requestId} for job {message.JobId}.");
Task renewJobRequest = RenewJobRequestAsync(_poolId, requestId, lockToken, firstJobRequestRenewed, lockRenewalTokenSource.Token);
// wait till first renew succeed or job request is canceled
// not even start worker if the first renew fail
await Task.WhenAny(firstJobRequestRenewed.Task, renewJobRequest, Task.Delay(-1, jobRequestCancellationToken));
if (renewJobRequest.IsCompleted)
{
Trace.Verbose($"Make sure the previous job request {previousJobDispatch.JobId} has successfully finished on worker.");
await EnsureDispatchFinished(previousJobDispatch);
}
else
{
Trace.Verbose($"This is the first job request.");
// renew job request task complete means we run out of retry for the first job request renew.
Trace.Info($"Unable to renew job request for job {message.JobId} for the first time, stop dispatching job to worker.");
return;
}
var term = HostContext.GetService<ITerminal>();
term.WriteLine($"{DateTime.UtcNow:u}: Running job: {message.JobDisplayName}");
// first job request renew succeed.
TaskCompletionSource<int> firstJobRequestRenewed = new TaskCompletionSource<int>();
var notification = HostContext.GetService<IJobNotification>();
// lock renew cancellation token.
using (var lockRenewalTokenSource = new CancellationTokenSource())
using (var workerProcessCancelTokenSource = new CancellationTokenSource())
if (jobRequestCancellationToken.IsCancellationRequested)
{
long requestId = message.RequestId;
Guid lockToken = Guid.Empty; // lockToken has never been used, keep this here of compat
Trace.Info($"Stop renew job request for job {message.JobId}.");
// stop renew lock
lockRenewalTokenSource.Cancel();
// renew job request should never blows up.
await renewJobRequest;
// start renew job request
Trace.Info($"Start renew job request {requestId} for job {message.JobId}.");
Task renewJobRequest = RenewJobRequestAsync(_poolId, requestId, lockToken, orchestrationId, firstJobRequestRenewed, lockRenewalTokenSource.Token);
// complete job request with result Cancelled
await CompleteJobRequestAsync(_poolId, message, lockToken, TaskResult.Canceled);
return;
}
// wait till first renew succeed or job request is canceled
// not even start worker if the first renew fail
await Task.WhenAny(firstJobRequestRenewed.Task, renewJobRequest, Task.Delay(-1, jobRequestCancellationToken));
HostContext.WritePerfCounter($"JobRequestRenewed_{requestId.ToString()}");
if (renewJobRequest.IsCompleted)
Task<int> workerProcessTask = null;
object _outputLock = new object();
List<string> workerOutput = new List<string>();
using (var processChannel = HostContext.CreateService<IProcessChannel>())
using (var processInvoker = HostContext.CreateService<IProcessInvoker>())
{
// Start the process channel.
// It's OK if StartServer bubbles an execption after the worker process has already started.
// The worker will shutdown after 30 seconds if it hasn't received the job message.
processChannel.StartServer(
// Delegate to start the child process.
startProcess: (string pipeHandleOut, string pipeHandleIn) =>
{
// Validate args.
ArgUtil.NotNullOrEmpty(pipeHandleOut, nameof(pipeHandleOut));
ArgUtil.NotNullOrEmpty(pipeHandleIn, nameof(pipeHandleIn));
// Save STDOUT from worker, worker will use STDOUT report unhandle exception.
processInvoker.OutputDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stdout)
{
if (!string.IsNullOrEmpty(stdout.Data))
{
lock (_outputLock)
{
workerOutput.Add(stdout.Data);
}
}
};
// Save STDERR from worker, worker will use STDERR on crash.
processInvoker.ErrorDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stderr)
{
if (!string.IsNullOrEmpty(stderr.Data))
{
lock (_outputLock)
{
workerOutput.Add(stderr.Data);
}
}
};
// Start the child process.
HostContext.WritePerfCounter("StartingWorkerProcess");
var assemblyDirectory = HostContext.GetDirectory(WellKnownDirectory.Bin);
string workerFileName = Path.Combine(assemblyDirectory, _workerProcessName);
workerProcessTask = processInvoker.ExecuteAsync(
workingDirectory: assemblyDirectory,
fileName: workerFileName,
arguments: "spawnclient " + pipeHandleOut + " " + pipeHandleIn,
environment: null,
requireExitCodeZero: false,
outputEncoding: null,
killProcessOnCancel: true,
redirectStandardIn: null,
inheritConsoleHandler: false,
keepStandardInOpen: false,
highPriorityProcess: true,
cancellationToken: workerProcessCancelTokenSource.Token);
});
// Send the job request message.
// Kill the worker process if sending the job message times out. The worker
// process may have successfully received the job message.
try
{
// renew job request task complete means we run out of retry for the first job request renew.
Trace.Info($"Unable to renew job request for job {message.JobId} for the first time, stop dispatching job to worker.");
return;
Trace.Info($"Send job request message to worker for job {message.JobId}.");
HostContext.WritePerfCounter($"RunnerSendingJobToWorker_{message.JobId}");
using (var csSendJobRequest = new CancellationTokenSource(_channelTimeout))
{
await processChannel.SendAsync(
messageType: MessageType.NewJobRequest,
body: JsonUtility.ToString(message),
cancellationToken: csSendJobRequest.Token);
}
}
if (jobRequestCancellationToken.IsCancellationRequested)
catch (OperationCanceledException)
{
// message send been cancelled.
// timeout 30 sec. kill worker.
Trace.Info($"Job request message sending for job {message.JobId} been cancelled, kill running worker.");
workerProcessCancelTokenSource.Cancel();
try
{
await workerProcessTask;
}
catch (OperationCanceledException)
{
Trace.Info("worker process has been killed.");
}
Trace.Info($"Stop renew job request for job {message.JobId}.");
// stop renew lock
lockRenewalTokenSource.Cancel();
// renew job request should never blows up.
await renewJobRequest;
// complete job request with result Cancelled
await CompleteJobRequestAsync(_poolId, message, lockToken, TaskResult.Canceled);
// not finish the job request since the job haven't run on worker at all, we will not going to set a result to server.
return;
}
HostContext.WritePerfCounter($"JobRequestRenewed_{requestId.ToString()}");
// we get first jobrequest renew succeed and start the worker process with the job message.
// send notification to machine provisioner.
var systemConnection = message.Resources.Endpoints.SingleOrDefault(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
var accessToken = systemConnection?.Authorization?.Parameters["AccessToken"];
notification.JobStarted(message.JobId, accessToken, systemConnection.Url);
Task<int> workerProcessTask = null;
object _outputLock = new object();
List<string> workerOutput = new List<string>();
using (var processChannel = HostContext.CreateService<IProcessChannel>())
using (var processInvoker = HostContext.CreateService<IProcessInvoker>())
HostContext.WritePerfCounter($"SentJobToWorker_{requestId.ToString()}");
try
{
// Start the process channel.
// It's OK if StartServer bubbles an execption after the worker process has already started.
// The worker will shutdown after 30 seconds if it hasn't received the job message.
processChannel.StartServer(
// Delegate to start the child process.
startProcess: (string pipeHandleOut, string pipeHandleIn) =>
TaskResult resultOnAbandonOrCancel = TaskResult.Succeeded;
// wait for renewlock, worker process or cancellation token been fired.
var completedTask = await Task.WhenAny(renewJobRequest, workerProcessTask, Task.Delay(-1, jobRequestCancellationToken));
if (completedTask == workerProcessTask)
{
// worker finished successfully, complete job request with result, attach unhandled exception reported by worker, stop renew lock, job has finished.
int returnCode = await workerProcessTask;
Trace.Info($"Worker finished for job {message.JobId}. Code: " + returnCode);
string detailInfo = null;
if (!TaskResultUtil.IsValidReturnCode(returnCode))
{
// Validate args.
ArgUtil.NotNullOrEmpty(pipeHandleOut, nameof(pipeHandleOut));
ArgUtil.NotNullOrEmpty(pipeHandleIn, nameof(pipeHandleIn));
detailInfo = string.Join(Environment.NewLine, workerOutput);
Trace.Info($"Return code {returnCode} indicate worker encounter an unhandled exception or app crash, attach worker stdout/stderr to JobRequest result.");
await LogWorkerProcessUnhandledException(message, detailInfo);
}
// Save STDOUT from worker, worker will use STDOUT report unhandle exception.
processInvoker.OutputDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stdout)
{
if (!string.IsNullOrEmpty(stdout.Data))
{
lock (_outputLock)
{
workerOutput.Add(stdout.Data);
}
}
};
TaskResult result = TaskResultUtil.TranslateFromReturnCode(returnCode);
Trace.Info($"finish job request for job {message.JobId} with result: {result}");
term.WriteLine($"{DateTime.UtcNow:u}: Job {message.JobDisplayName} completed with result: {result}");
// Save STDERR from worker, worker will use STDERR on crash.
processInvoker.ErrorDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stderr)
{
if (!string.IsNullOrEmpty(stderr.Data))
{
lock (_outputLock)
{
workerOutput.Add(stderr.Data);
}
}
};
Trace.Info($"Stop renew job request for job {message.JobId}.");
// stop renew lock
lockRenewalTokenSource.Cancel();
// renew job request should never blows up.
await renewJobRequest;
// Start the child process.
HostContext.WritePerfCounter("StartingWorkerProcess");
var assemblyDirectory = HostContext.GetDirectory(WellKnownDirectory.Bin);
string workerFileName = Path.Combine(assemblyDirectory, _workerProcessName);
workerProcessTask = processInvoker.ExecuteAsync(
workingDirectory: assemblyDirectory,
fileName: workerFileName,
arguments: "spawnclient " + pipeHandleOut + " " + pipeHandleIn,
environment: null,
requireExitCodeZero: false,
outputEncoding: null,
killProcessOnCancel: true,
redirectStandardIn: null,
inheritConsoleHandler: false,
keepStandardInOpen: false,
highPriorityProcess: true,
cancellationToken: workerProcessCancelTokenSource.Token);
});
// complete job request
await CompleteJobRequestAsync(_poolId, message, lockToken, result, detailInfo);
// Send the job request message.
// Kill the worker process if sending the job message times out. The worker
// process may have successfully received the job message.
// print out unhandled exception happened in worker after we complete job request.
// when we run out of disk space, report back to server has higher priority.
if (!string.IsNullOrEmpty(detailInfo))
{
Trace.Error("Unhandled exception happened in worker:");
Trace.Error(detailInfo);
}
return;
}
else if (completedTask == renewJobRequest)
{
resultOnAbandonOrCancel = TaskResult.Abandoned;
}
else
{
resultOnAbandonOrCancel = TaskResult.Canceled;
}
// renew job request completed or job request cancellation token been fired for RunAsync(jobrequestmessage)
// cancel worker gracefully first, then kill it after worker cancel timeout
try
{
Trace.Info($"Send job request message to worker for job {message.JobId}.");
HostContext.WritePerfCounter($"RunnerSendingJobToWorker_{message.JobId}");
using (var csSendJobRequest = new CancellationTokenSource(_channelTimeout))
Trace.Info($"Send job cancellation message to worker for job {message.JobId}.");
using (var csSendCancel = new CancellationTokenSource(_channelTimeout))
{
var messageType = MessageType.CancelRequest;
if (HostContext.RunnerShutdownToken.IsCancellationRequested)
{
switch (HostContext.RunnerShutdownReason)
{
case ShutdownReason.UserCancelled:
messageType = MessageType.RunnerShutdown;
break;
case ShutdownReason.OperatingSystemShutdown:
messageType = MessageType.OperatingSystemShutdown;
break;
}
}
await processChannel.SendAsync(
messageType: MessageType.NewJobRequest,
body: JsonUtility.ToString(message),
cancellationToken: csSendJobRequest.Token);
messageType: messageType,
body: string.Empty,
cancellationToken: csSendCancel.Token);
}
}
catch (OperationCanceledException)
{
// message send been cancelled.
// timeout 30 sec. kill worker.
Trace.Info($"Job request message sending for job {message.JobId} been cancelled, kill running worker.");
Trace.Info($"Job cancel message sending for job {message.JobId} been cancelled, kill running worker.");
workerProcessCancelTokenSource.Cancel();
try
{
await workerProcessTask;
}
catch (OperationCanceledException)
{
Trace.Info("worker process has been killed.");
}
}
// wait worker to exit
// if worker doesn't exit within timeout, then kill worker.
completedTask = await Task.WhenAny(workerProcessTask, Task.Delay(-1, workerCancelTimeoutKillToken));
// worker haven't exit within cancellation timeout.
if (completedTask != workerProcessTask)
{
Trace.Info($"worker process for job {message.JobId} haven't exit within cancellation timout, kill running worker.");
workerProcessCancelTokenSource.Cancel();
try
{
@@ -462,168 +569,34 @@ namespace GitHub.Runner.Listener
Trace.Info("worker process has been killed.");
}
Trace.Info($"Stop renew job request for job {message.JobId}.");
// stop renew lock
lockRenewalTokenSource.Cancel();
// renew job request should never blows up.
await renewJobRequest;
// not finish the job request since the job haven't run on worker at all, we will not going to set a result to server.
return;
// When worker doesn't exit within cancel timeout, the runner will kill the worker process and worker won't finish upload job logs.
// The runner will try to upload these logs at this time.
await TryUploadUnfinishedLogs(message);
}
// we get first jobrequest renew succeed and start the worker process with the job message.
// send notification to machine provisioner.
var systemConnection = message.Resources.Endpoints.SingleOrDefault(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
var accessToken = systemConnection?.Authorization?.Parameters["AccessToken"];
notification.JobStarted(message.JobId, accessToken, systemConnection.Url);
Trace.Info($"finish job request for job {message.JobId} with result: {resultOnAbandonOrCancel}");
term.WriteLine($"{DateTime.UtcNow:u}: Job {message.JobDisplayName} completed with result: {resultOnAbandonOrCancel}");
// complete job request with cancel result, stop renew lock, job has finished.
HostContext.WritePerfCounter($"SentJobToWorker_{requestId.ToString()}");
Trace.Info($"Stop renew job request for job {message.JobId}.");
// stop renew lock
lockRenewalTokenSource.Cancel();
// renew job request should never blows up.
await renewJobRequest;
try
{
TaskResult resultOnAbandonOrCancel = TaskResult.Succeeded;
// wait for renewlock, worker process or cancellation token been fired.
var completedTask = await Task.WhenAny(renewJobRequest, workerProcessTask, Task.Delay(-1, jobRequestCancellationToken));
if (completedTask == workerProcessTask)
{
// worker finished successfully, complete job request with result, attach unhandled exception reported by worker, stop renew lock, job has finished.
int returnCode = await workerProcessTask;
Trace.Info($"Worker finished for job {message.JobId}. Code: " + returnCode);
string detailInfo = null;
if (!TaskResultUtil.IsValidReturnCode(returnCode))
{
detailInfo = string.Join(Environment.NewLine, workerOutput);
Trace.Info($"Return code {returnCode} indicate worker encounter an unhandled exception or app crash, attach worker stdout/stderr to JobRequest result.");
await LogWorkerProcessUnhandledException(message, detailInfo);
}
TaskResult result = TaskResultUtil.TranslateFromReturnCode(returnCode);
Trace.Info($"finish job request for job {message.JobId} with result: {result}");
term.WriteLine($"{DateTime.UtcNow:u}: Job {message.JobDisplayName} completed with result: {result}");
Trace.Info($"Stop renew job request for job {message.JobId}.");
// stop renew lock
lockRenewalTokenSource.Cancel();
// renew job request should never blows up.
await renewJobRequest;
// complete job request
await CompleteJobRequestAsync(_poolId, message, lockToken, result, detailInfo);
// print out unhandled exception happened in worker after we complete job request.
// when we run out of disk space, report back to server has higher priority.
if (!string.IsNullOrEmpty(detailInfo))
{
Trace.Error("Unhandled exception happened in worker:");
Trace.Error(detailInfo);
}
return;
}
else if (completedTask == renewJobRequest)
{
resultOnAbandonOrCancel = TaskResult.Abandoned;
}
else
{
resultOnAbandonOrCancel = TaskResult.Canceled;
}
// renew job request completed or job request cancellation token been fired for RunAsync(jobrequestmessage)
// cancel worker gracefully first, then kill it after worker cancel timeout
try
{
Trace.Info($"Send job cancellation message to worker for job {message.JobId}.");
using (var csSendCancel = new CancellationTokenSource(_channelTimeout))
{
var messageType = MessageType.CancelRequest;
if (HostContext.RunnerShutdownToken.IsCancellationRequested)
{
switch (HostContext.RunnerShutdownReason)
{
case ShutdownReason.UserCancelled:
messageType = MessageType.RunnerShutdown;
break;
case ShutdownReason.OperatingSystemShutdown:
messageType = MessageType.OperatingSystemShutdown;
break;
}
}
await processChannel.SendAsync(
messageType: messageType,
body: string.Empty,
cancellationToken: csSendCancel.Token);
}
}
catch (OperationCanceledException)
{
// message send been cancelled.
Trace.Info($"Job cancel message sending for job {message.JobId} been cancelled, kill running worker.");
workerProcessCancelTokenSource.Cancel();
try
{
await workerProcessTask;
}
catch (OperationCanceledException)
{
Trace.Info("worker process has been killed.");
}
}
// wait worker to exit
// if worker doesn't exit within timeout, then kill worker.
completedTask = await Task.WhenAny(workerProcessTask, Task.Delay(-1, workerCancelTimeoutKillToken));
// worker haven't exit within cancellation timeout.
if (completedTask != workerProcessTask)
{
Trace.Info($"worker process for job {message.JobId} haven't exit within cancellation timout, kill running worker.");
workerProcessCancelTokenSource.Cancel();
try
{
await workerProcessTask;
}
catch (OperationCanceledException)
{
Trace.Info("worker process has been killed.");
}
// When worker doesn't exit within cancel timeout, the runner will kill the worker process and worker won't finish upload job logs.
// The runner will try to upload these logs at this time.
await TryUploadUnfinishedLogs(message);
}
Trace.Info($"finish job request for job {message.JobId} with result: {resultOnAbandonOrCancel}");
term.WriteLine($"{DateTime.UtcNow:u}: Job {message.JobDisplayName} completed with result: {resultOnAbandonOrCancel}");
// complete job request with cancel result, stop renew lock, job has finished.
Trace.Info($"Stop renew job request for job {message.JobId}.");
// stop renew lock
lockRenewalTokenSource.Cancel();
// renew job request should never blows up.
await renewJobRequest;
// complete job request
await CompleteJobRequestAsync(_poolId, message, lockToken, resultOnAbandonOrCancel);
}
finally
{
// This should be the last thing to run so we don't notify external parties until actually finished
await notification.JobCompleted(message.JobId);
}
// complete job request
await CompleteJobRequestAsync(_poolId, message, lockToken, resultOnAbandonOrCancel);
}
finally
{
// This should be the last thing to run so we don't notify external parties until actually finished
await notification.JobCompleted(message.JobId);
}
}
}
finally
{
Busy = false;
}
}
public async Task RenewJobRequestAsync(int poolId, long requestId, Guid lockToken, string orchestrationId, TaskCompletionSource<int> firstJobRequestRenewed, CancellationToken token)
public async Task RenewJobRequestAsync(int poolId, long requestId, Guid lockToken, TaskCompletionSource<int> firstJobRequestRenewed, CancellationToken token)
{
var runnerServer = HostContext.GetService<IRunnerServer>();
TaskAgentJobRequest request = null;
@@ -636,7 +609,7 @@ namespace GitHub.Runner.Listener
{
try
{
request = await runnerServer.RenewAgentRequestAsync(poolId, requestId, lockToken, orchestrationId, token);
request = await runnerServer.RenewAgentRequestAsync(poolId, requestId, lockToken, token);
Trace.Info($"Successfully renew job request {requestId}, job is valid till {request.LockedUntil.Value}");

View File

@@ -13,10 +13,7 @@ using System.Diagnostics;
using System.Runtime.InteropServices;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using GitHub.Services.WebApi;
using System.Runtime.CompilerServices;
[assembly: InternalsVisibleTo("Test")]
namespace GitHub.Runner.Listener
{
[ServiceLocator(Default = typeof(MessageListener))]
@@ -35,30 +32,18 @@ namespace GitHub.Runner.Listener
private ITerminal _term;
private IRunnerServer _runnerServer;
private TaskAgentSession _session;
private ICredentialManager _credMgr;
private IConfigurationStore _configStore;
private TimeSpan _getNextMessageRetryInterval;
private readonly TimeSpan _sessionCreationRetryInterval = TimeSpan.FromSeconds(30);
private readonly TimeSpan _sessionConflictRetryLimit = TimeSpan.FromMinutes(4);
private readonly TimeSpan _clockSkewRetryLimit = TimeSpan.FromMinutes(30);
private readonly Dictionary<string, int> _sessionCreationExceptionTracker = new Dictionary<string, int>();
// Whether load credentials from .credentials_migrated file
internal bool _useMigratedCredentials;
// need to check auth url if there is only .credentials and auth schema is OAuth
internal bool _needToCheckAuthorizationUrlUpdate;
internal Task<VssCredentials> _authorizationUrlMigrationBackgroundTask;
internal Task _authorizationUrlRollbackReattemptDelayBackgroundTask;
public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);
_term = HostContext.GetService<ITerminal>();
_runnerServer = HostContext.GetService<IRunnerServer>();
_credMgr = HostContext.GetService<ICredentialManager>();
_configStore = HostContext.GetService<IConfigurationStore>();
}
public async Task<Boolean> CreateSessionAsync(CancellationToken token)
@@ -73,8 +58,8 @@ namespace GitHub.Runner.Listener
// Create connection.
Trace.Info("Loading Credentials");
_useMigratedCredentials = !StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_SPSAUTHURL"));
VssCredentials creds = _credMgr.LoadCredentials(_useMigratedCredentials);
var credMgr = HostContext.GetService<ICredentialManager>();
VssCredentials creds = credMgr.LoadCredentials();
var agent = new TaskAgentReference
{
@@ -89,17 +74,6 @@ namespace GitHub.Runner.Listener
string errorMessage = string.Empty;
bool encounteringError = false;
var originalCreds = _configStore.GetCredentials();
var migratedCreds = _configStore.GetMigratedCredentials();
if (migratedCreds == null)
{
_useMigratedCredentials = false;
if (originalCreds.Scheme == Constants.Configuration.OAuth)
{
_needToCheckAuthorizationUrlUpdate = true;
}
}
while (true)
{
token.ThrowIfCancellationRequested();
@@ -109,7 +83,7 @@ namespace GitHub.Runner.Listener
Trace.Info("Connecting to the Runner Server...");
await _runnerServer.ConnectAsync(new Uri(serverUrl), creds);
Trace.Info("VssConnection created");
_term.WriteLine();
_term.WriteSuccessMessage("Connected to GitHub");
_term.WriteLine();
@@ -127,12 +101,6 @@ namespace GitHub.Runner.Listener
encounteringError = false;
}
if (_needToCheckAuthorizationUrlUpdate)
{
// start background task try to get new authorization url
_authorizationUrlMigrationBackgroundTask = GetNewOAuthAuthorizationSetting(token);
}
return true;
}
catch (OperationCanceledException) when (token.IsCancellationRequested)
@@ -152,21 +120,8 @@ namespace GitHub.Runner.Listener
if (!IsSessionCreationExceptionRetriable(ex))
{
if (_useMigratedCredentials)
{
// migrated credentials might cause lose permission during permission check,
// we will force to use original credential and try again
_useMigratedCredentials = false;
var reattemptBackoff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromHours(24), TimeSpan.FromHours(36));
_authorizationUrlRollbackReattemptDelayBackgroundTask = HostContext.Delay(reattemptBackoff, token); // retry migrated creds in 24-36 hours.
creds = _credMgr.LoadCredentials(false);
Trace.Error("Fallback to original credentials and try again.");
}
else
{
_term.WriteError($"Failed to create session. {ex.Message}");
return false;
}
_term.WriteError($"Failed to create session. {ex.Message}");
return false;
}
if (!encounteringError) //print the message only on the first error
@@ -227,51 +182,6 @@ namespace GitHub.Runner.Listener
encounteringError = false;
continuousError = 0;
}
if (_needToCheckAuthorizationUrlUpdate &&
_authorizationUrlMigrationBackgroundTask?.IsCompleted == true)
{
if (HostContext.GetService<IJobDispatcher>().Busy ||
HostContext.GetService<ISelfUpdater>().Busy)
{
Trace.Info("Job or runner updates in progress, update credentials next time.");
}
else
{
try
{
var newCred = await _authorizationUrlMigrationBackgroundTask;
await _runnerServer.ConnectAsync(new Uri(_settings.ServerUrl), newCred);
Trace.Info("Updated connection to use migrated credential for next GetMessage call.");
_useMigratedCredentials = true;
_authorizationUrlMigrationBackgroundTask = null;
_needToCheckAuthorizationUrlUpdate = false;
}
catch (Exception ex)
{
Trace.Error("Fail to refresh connection with new authorization url.");
Trace.Error(ex);
}
}
}
if (_authorizationUrlRollbackReattemptDelayBackgroundTask?.IsCompleted == true)
{
try
{
// we rolled back to use original creds about 2 days before, now it's a good time to try migrated creds again.
Trace.Info("Re-attempt to use migrated credential");
var migratedCreds = _credMgr.LoadCredentials();
await _runnerServer.ConnectAsync(new Uri(_settings.ServerUrl), migratedCreds);
_useMigratedCredentials = true;
_authorizationUrlRollbackReattemptDelayBackgroundTask = null;
}
catch (Exception ex)
{
Trace.Error("Fail to refresh connection with new authorization url on rollback reattempt.");
Trace.Error(ex);
}
}
}
catch (OperationCanceledException) when (token.IsCancellationRequested)
{
@@ -295,21 +205,7 @@ namespace GitHub.Runner.Listener
}
else if (!IsGetNextMessageExceptionRetriable(ex))
{
if (_useMigratedCredentials)
{
// migrated credentials might cause lose permission during permission check,
// we will force to use original credential and try again
_useMigratedCredentials = false;
var reattemptBackoff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromHours(24), TimeSpan.FromHours(36));
_authorizationUrlRollbackReattemptDelayBackgroundTask = HostContext.Delay(reattemptBackoff, token); // retry migrated creds in 24-36 hours.
var originalCreds = _credMgr.LoadCredentials(false);
await _runnerServer.ConnectAsync(new Uri(_settings.ServerUrl), originalCreds);
Trace.Error("Fallback to original credentials and try again.");
}
else
{
throw;
}
throw;
}
else
{
@@ -501,80 +397,5 @@ namespace GitHub.Runner.Listener
return true;
}
}
private async Task<VssCredentials> GetNewOAuthAuthorizationSetting(CancellationToken token)
{
Trace.Info("Start checking oauth authorization url update.");
while (true)
{
var backoff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromMinutes(30), TimeSpan.FromMinutes(45));
await HostContext.Delay(backoff, token);
try
{
var migratedAuthorizationUrl = await _runnerServer.GetRunnerAuthUrlAsync(_settings.PoolId, _settings.AgentId);
if (!string.IsNullOrEmpty(migratedAuthorizationUrl))
{
var credData = _configStore.GetCredentials();
var clientId = credData.Data.GetValueOrDefault("clientId", null);
var currentAuthorizationUrl = credData.Data.GetValueOrDefault("authorizationUrl", null);
Trace.Info($"Current authorization url: {currentAuthorizationUrl}, new authorization url: {migratedAuthorizationUrl}");
if (string.Equals(currentAuthorizationUrl, migratedAuthorizationUrl, StringComparison.OrdinalIgnoreCase))
{
// We don't need to update credentials.
Trace.Info("No needs to update authorization url");
await Task.Delay(TimeSpan.FromMilliseconds(-1), token);
}
var keyManager = HostContext.GetService<IRSAKeyManager>();
var signingCredentials = VssSigningCredentials.Create(() => keyManager.GetKey());
var migratedClientCredential = new VssOAuthJwtBearerClientCredential(clientId, migratedAuthorizationUrl, signingCredentials);
var migratedRunnerCredential = new VssOAuthCredential(new Uri(migratedAuthorizationUrl, UriKind.Absolute), VssOAuthGrant.ClientCredentials, migratedClientCredential);
Trace.Info("Try connect service with Token Service OAuth endpoint.");
var runnerServer = HostContext.CreateService<IRunnerServer>();
await runnerServer.ConnectAsync(new Uri(_settings.ServerUrl), migratedRunnerCredential);
await runnerServer.GetAgentPoolsAsync();
Trace.Info($"Successfully connected service with new authorization url.");
var migratedCredData = new CredentialData
{
Scheme = Constants.Configuration.OAuth,
Data =
{
{ "clientId", clientId },
{ "authorizationUrl", migratedAuthorizationUrl },
{ "oauthEndpointUrl", migratedAuthorizationUrl },
},
};
_configStore.SaveMigratedCredential(migratedCredData);
return migratedRunnerCredential;
}
else
{
Trace.Verbose("No authorization url updates");
}
}
catch (Exception ex)
{
Trace.Error("Fail to get/test new authorization url.");
Trace.Error(ex);
try
{
await _runnerServer.ReportRunnerAuthUrlErrorAsync(_settings.PoolId, _settings.AgentId, ex.ToString());
}
catch (Exception e)
{
// best effort
Trace.Error("Fail to report the migration error");
Trace.Error(e);
}
}
}
}
}
}
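
The `GetNewOAuthAuthorizationSetting` loop above polls for a migrated authorization URL on a randomized 30-45 minute backoff, swallows and reports failures on a best-effort basis, and only returns once a new credential has been validated. Below is a generic sketch of that polling shape, assuming nothing about the runner's server interfaces; the `probe` delegate and method names are illustrative.

```csharp
using System;
using System.Threading;
using System.Threading.Tasks;

public static class Polling
{
    private static readonly Random _random = new Random();

    // Sketch of the background polling pattern above: wait a randomized interval,
    // try once, log and swallow errors, repeat until a result arrives or the
    // cancellation token fires (Task.Delay then throws and ends the loop).
    public static async Task<T> PollWithRandomBackoffAsync<T>(
        Func<Task<T>> probe,
        TimeSpan minDelay,
        TimeSpan maxDelay,
        CancellationToken token)
        where T : class
    {
        while (true)
        {
            var backoffMs = _random.Next(
                (int)minDelay.TotalMilliseconds,
                (int)maxDelay.TotalMilliseconds);
            await Task.Delay(backoffMs, token);

            try
            {
                var result = await probe();
                if (result != null)
                {
                    return result;
                }
            }
            catch (OperationCanceledException) when (token.IsCancellationRequested)
            {
                throw;
            }
            catch (Exception ex)
            {
                // Best effort, as in the hunk above: report and keep polling.
                Console.Error.WriteLine($"Probe failed, will retry: {ex.Message}");
            }
        }
    }
}
```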

View File

@@ -37,7 +37,7 @@ namespace GitHub.Runner.Listener
{
try
{
VssUtil.InitializeVssClientSettings(HostContext.UserAgents, HostContext.WebProxy);
VssUtil.InitializeVssClientSettings(HostContext.UserAgent, HostContext.WebProxy);
_inConfigStage = true;
_completedCommand.Reset();

View File

@@ -17,7 +17,6 @@ namespace GitHub.Runner.Listener
[ServiceLocator(Default = typeof(SelfUpdater))]
public interface ISelfUpdater : IRunnerService
{
bool Busy { get; }
Task<bool> SelfUpdate(AgentRefreshMessage updateMessage, IJobDispatcher jobDispatcher, bool restartInteractiveRunner, CancellationToken token);
}
@@ -32,8 +31,6 @@ namespace GitHub.Runner.Listener
private int _poolId;
private int _agentId;
public bool Busy { get; private set; }
public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);
@@ -48,60 +45,52 @@ namespace GitHub.Runner.Listener
public async Task<bool> SelfUpdate(AgentRefreshMessage updateMessage, IJobDispatcher jobDispatcher, bool restartInteractiveRunner, CancellationToken token)
{
Busy = true;
try
if (!await UpdateNeeded(updateMessage.TargetVersion, token))
{
if (!await UpdateNeeded(updateMessage.TargetVersion, token))
{
Trace.Info($"Can't find available update package.");
return false;
}
Trace.Info($"Can't find available update package.");
return false;
}
Trace.Info($"An update is available.");
Trace.Info($"An update is available.");
// Print console line that warn user not shutdown runner.
await UpdateRunnerUpdateStateAsync("Runner update in progress, do not shutdown runner.");
await UpdateRunnerUpdateStateAsync($"Downloading {_targetPackage.Version} runner");
// Print console line that warn user not shutdown runner.
await UpdateRunnerUpdateStateAsync("Runner update in progress, do not shutdown runner.");
await UpdateRunnerUpdateStateAsync($"Downloading {_targetPackage.Version} runner");
await DownloadLatestRunner(token);
Trace.Info($"Download latest runner and unzip into runner root.");
await DownloadLatestRunner(token);
Trace.Info($"Download latest runner and unzip into runner root.");
// wait till all running job finish
await UpdateRunnerUpdateStateAsync("Waiting for current job finish running.");
// wait till all running job finish
await UpdateRunnerUpdateStateAsync("Waiting for current job finish running.");
await jobDispatcher.WaitAsync(token);
Trace.Info($"All running job has exited.");
await jobDispatcher.WaitAsync(token);
Trace.Info($"All running job has exited.");
// delete runner backup
DeletePreviousVersionRunnerBackup(token);
Trace.Info($"Delete old version runner backup.");
// delete runner backup
DeletePreviousVersionRunnerBackup(token);
Trace.Info($"Delete old version runner backup.");
// generate update script from template
await UpdateRunnerUpdateStateAsync("Generate and execute update script.");
// generate update script from template
await UpdateRunnerUpdateStateAsync("Generate and execute update script.");
string updateScript = GenerateUpdateScript(restartInteractiveRunner);
Trace.Info($"Generate update script into: {updateScript}");
string updateScript = GenerateUpdateScript(restartInteractiveRunner);
Trace.Info($"Generate update script into: {updateScript}");
// kick off update script
Process invokeScript = new Process();
// kick off update script
Process invokeScript = new Process();
#if OS_WINDOWS
invokeScript.StartInfo.FileName = WhichUtil.Which("cmd.exe", trace: Trace);
invokeScript.StartInfo.Arguments = $"/c \"{updateScript}\"";
#elif (OS_OSX || OS_LINUX)
invokeScript.StartInfo.FileName = WhichUtil.Which("bash", trace: Trace);
invokeScript.StartInfo.Arguments = $"\"{updateScript}\"";
invokeScript.StartInfo.FileName = WhichUtil.Which("bash", trace: Trace);
invokeScript.StartInfo.Arguments = $"\"{updateScript}\"";
#endif
invokeScript.Start();
Trace.Info($"Update script start running");
invokeScript.Start();
Trace.Info($"Update script start running");
await UpdateRunnerUpdateStateAsync("Runner will exit shortly for update, should back online within 10 seconds.");
await UpdateRunnerUpdateStateAsync("Runner will exit shortly for update, should back online within 10 seconds.");
return true;
}
finally
{
Busy = false;
}
return true;
}
private async Task<bool> UpdateNeeded(string targetVersion, CancellationToken token)

View File

@@ -271,14 +271,6 @@ namespace GitHub.Runner.Sdk
// Indicate GitHub Actions process.
_proc.StartInfo.Environment["GITHUB_ACTIONS"] = "true";
// Set CI=true when no one else already set it.
// CI=true is common set in most CI provider in GitHub
if (!_proc.StartInfo.Environment.ContainsKey("CI") &&
Environment.GetEnvironmentVariable("CI") == null)
{
_proc.StartInfo.Environment["CI"] = "true";
}
// Hook up the events.
_proc.EnableRaisingEvents = true;
_proc.Exited += ProcessExitedHandler;
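
The hunk above concerns a default `CI=true` for worker child processes, applied only when neither the caller-supplied environment nor the machine environment already defines `CI`. A small sketch of that only-if-unset guard on a `ProcessStartInfo`; the class name and `Main` demo are illustrative.

```csharp
using System;
using System.Diagnostics;

public static class CiEnvExample
{
    // Sketch of the guard above: default CI=true for a child process unless the
    // caller or the current environment already set it.
    public static void ApplyDefaultCi(ProcessStartInfo startInfo)
    {
        if (!startInfo.Environment.ContainsKey("CI") &&
            Environment.GetEnvironmentVariable("CI") == null)
        {
            startInfo.Environment["CI"] = "true";
        }
    }

    public static void Main()
    {
        var psi = new ProcessStartInfo { UseShellExecute = false };
        ApplyDefaultCi(psi);
        Console.WriteLine(psi.Environment.TryGetValue("CI", out var ci)
            ? $"CI={ci}"
            : "CI already defined by the environment");
    }
}
```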

View File

@@ -21,7 +21,6 @@ namespace GitHub.Runner.Sdk
private string _httpsProxyAddress;
private string _httpsProxyUsername;
private string _httpsProxyPassword;
private string _noProxyString;
private readonly List<ByPassInfo> _noProxyList = new List<ByPassInfo>();
private readonly HashSet<string> _noProxyUnique = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
@@ -34,7 +33,6 @@ namespace GitHub.Runner.Sdk
public string HttpsProxyAddress => _httpsProxyAddress;
public string HttpsProxyUsername => _httpsProxyUsername;
public string HttpsProxyPassword => _httpsProxyPassword;
public string NoProxyString => _noProxyString;
public List<ByPassInfo> NoProxyList => _noProxyList;
@@ -74,8 +72,8 @@ namespace GitHub.Runner.Sdk
_httpProxyAddress = proxyHttpUri.AbsoluteUri;
// Set both environment variables since there are tools support both casing (curl, wget) and tools support only one casing (docker)
Environment.SetEnvironmentVariable("HTTP_PROXY", _httpProxyAddress);
Environment.SetEnvironmentVariable("http_proxy", _httpProxyAddress);
Environment.SetEnvironmentVariable("HTTP_PROXY", _httpProxyAddress);
// the proxy url looks like http://[user:pass@]127.0.0.1:8888
var userInfo = Uri.UnescapeDataString(proxyHttpUri.UserInfo).Split(':', 2, StringSplitOptions.RemoveEmptyEntries);
@@ -104,8 +102,8 @@ namespace GitHub.Runner.Sdk
_httpsProxyAddress = proxyHttpsUri.AbsoluteUri;
// Set both environment variables since there are tools support both casing (curl, wget) and tools support only one casing (docker)
Environment.SetEnvironmentVariable("HTTPS_PROXY", _httpsProxyAddress);
Environment.SetEnvironmentVariable("https_proxy", _httpsProxyAddress);
Environment.SetEnvironmentVariable("HTTPS_PROXY", _httpsProxyAddress);
// the proxy url looks like http://[user:pass@]127.0.0.1:8888
var userInfo = Uri.UnescapeDataString(proxyHttpsUri.UserInfo).Split(':', 2, StringSplitOptions.RemoveEmptyEntries);
@@ -131,11 +129,9 @@ namespace GitHub.Runner.Sdk
if (!string.IsNullOrEmpty(noProxyList))
{
_noProxyString = noProxyList;
// Set both environment variables since there are tools support both casing (curl, wget) and tools support only one casing (docker)
Environment.SetEnvironmentVariable("NO_PROXY", noProxyList);
Environment.SetEnvironmentVariable("no_proxy", noProxyList);
Environment.SetEnvironmentVariable("NO_PROXY", noProxyList);
var noProxyListSplit = noProxyList.Split(',', StringSplitOptions.RemoveEmptyEntries);
foreach (string noProxy in noProxyListSplit)
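
This file carries the change named in the commit message: export the proxy settings under both the upper-case and lower-case variable names, because (per the comment above) some tools accept either casing (curl, wget) while others read only one (docker). A standalone sketch of that dual export, assuming already-resolved proxy values; names here are illustrative, not the runner's RunnerWebProxy.

```csharp
using System;

public static class ProxyEnvExample
{
    // Sketch of the dual-casing export above: publish the same value under both
    // spellings so every downstream tool picks it up.
    public static void ExportProxy(string httpProxy, string httpsProxy, string noProxy)
    {
        SetBoth("HTTP_PROXY", "http_proxy", httpProxy);
        SetBoth("HTTPS_PROXY", "https_proxy", httpsProxy);
        SetBoth("NO_PROXY", "no_proxy", noProxy);
    }

    private static void SetBoth(string upper, string lower, string value)
    {
        if (string.IsNullOrEmpty(value))
        {
            return;
        }
        Environment.SetEnvironmentVariable(upper, value);
        Environment.SetEnvironmentVariable(lower, value);
    }

    public static void Main()
    {
        ExportProxy("http://127.0.0.1:8888", "http://127.0.0.1:8888", "localhost,.example.com");
        Console.WriteLine(Environment.GetEnvironmentVariable("HTTP_PROXY"));
        Console.WriteLine(Environment.GetEnvironmentVariable("http_proxy"));
    }
}
```

On Linux the two variables are distinct and both get set; on Windows environment variable names are case-insensitive, so the second write simply overwrites the first with the same value.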

View File

@@ -14,10 +14,10 @@ namespace GitHub.Runner.Sdk
{
public static class VssUtil
{
public static void InitializeVssClientSettings(List<ProductInfoHeaderValue> additionalUserAgents, IWebProxy proxy)
public static void InitializeVssClientSettings(ProductInfoHeaderValue additionalUserAgent, IWebProxy proxy)
{
var headerValues = new List<ProductInfoHeaderValue>();
headerValues.AddRange(additionalUserAgents);
headerValues.Add(additionalUserAgent);
headerValues.Add(new ProductInfoHeaderValue($"({RuntimeInformation.OSDescription.Trim()})"));
if (VssClientHttpRequestSettings.Default.UserAgent != null && VssClientHttpRequestSettings.Default.UserAgent.Count > 0)
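
The signature difference here is between a single `ProductInfoHeaderValue` and a list of them when composing the client User-Agent. Below is a sketch of combining several product tokens plus an OS comment token on a plain `HttpClient`; it is illustrative only and not the `VssClientHttpRequestSettings` plumbing, and the product names and versions in `Main` are made up.

```csharp
using System;
using System.Collections.Generic;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Runtime.InteropServices;

public static class UserAgentExample
{
    // Sketch of composing a User-Agent from several product tokens, as the
    // List<ProductInfoHeaderValue> overload above allows.
    public static HttpClient CreateClient(IEnumerable<ProductInfoHeaderValue> additionalUserAgents)
    {
        var client = new HttpClient();
        foreach (var ua in additionalUserAgents)
        {
            client.DefaultRequestHeaders.UserAgent.Add(ua);
        }
        // Append an OS comment token, mirroring the RuntimeInformation usage above.
        client.DefaultRequestHeaders.UserAgent.Add(
            new ProductInfoHeaderValue($"({RuntimeInformation.OSDescription.Trim()})"));
        return client;
    }

    public static void Main()
    {
        var client = CreateClient(new[]
        {
            new ProductInfoHeaderValue("ExampleRunner", "2.0.0"), // illustrative
            new ProductInfoHeaderValue("ExampleHost", "1.0")      // illustrative
        });
        Console.WriteLine(client.DefaultRequestHeaders.UserAgent.ToString());
    }
}
```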

View File

@@ -1,7 +1,6 @@
using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Worker.Container;
using System;
using System.Collections.Generic;
using System.IO;
@@ -16,14 +15,14 @@ namespace GitHub.Runner.Worker
{
void EnablePluginInternalCommand();
void DisablePluginInternalCommand();
bool TryProcessCommand(IExecutionContext context, string input, ContainerInfo container);
bool TryProcessCommand(IExecutionContext context, string input);
}
public sealed class ActionCommandManager : RunnerService, IActionCommandManager
{
private const string _stopCommand = "stop-commands";
private readonly Dictionary<string, IActionCommandExtension> _commandExtensions = new Dictionary<string, IActionCommandExtension>(StringComparer.OrdinalIgnoreCase);
private readonly HashSet<string> _registeredCommands = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
private HashSet<string> _registeredCommands = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
private readonly object _commandSerializeLock = new object();
private bool _stopProcessCommand = false;
private string _stopToken = null;
@@ -59,7 +58,7 @@ namespace GitHub.Runner.Worker
_registeredCommands.Remove("internal-set-repo-path");
}
public bool TryProcessCommand(IExecutionContext context, string input, ContainerInfo container)
public bool TryProcessCommand(IExecutionContext context, string input)
{
if (string.IsNullOrEmpty(input))
{
@@ -115,7 +114,7 @@ namespace GitHub.Runner.Worker
try
{
extension.ProcessCommand(context, input, actionCommand, container);
extension.ProcessCommand(context, input, actionCommand);
}
catch (Exception ex)
{
@@ -141,7 +140,7 @@ namespace GitHub.Runner.Worker
string Command { get; }
bool OmitEcho { get; }
void ProcessCommand(IExecutionContext context, string line, ActionCommand command, ContainerInfo container);
void ProcessCommand(IExecutionContext context, string line, ActionCommand command);
}
public sealed class InternalPluginSetRepoPathCommandExtension : RunnerService, IActionCommandExtension
@@ -151,7 +150,7 @@ namespace GitHub.Runner.Worker
public Type ExtensionType => typeof(IActionCommandExtension);
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command, ContainerInfo container)
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command)
{
if (!command.Properties.TryGetValue(SetRepoPathCommandProperties.repoFullName, out string repoFullName) || string.IsNullOrEmpty(repoFullName))
{
@@ -181,7 +180,7 @@ namespace GitHub.Runner.Worker
public Type ExtensionType => typeof(IActionCommandExtension);
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command, ContainerInfo container)
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command)
{
if (!command.Properties.TryGetValue(SetEnvCommandProperties.Name, out string envName) || string.IsNullOrEmpty(envName))
{
@@ -206,7 +205,7 @@ namespace GitHub.Runner.Worker
public Type ExtensionType => typeof(IActionCommandExtension);
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command, ContainerInfo container)
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command)
{
if (!command.Properties.TryGetValue(SetOutputCommandProperties.Name, out string outputName) || string.IsNullOrEmpty(outputName))
{
@@ -230,7 +229,7 @@ namespace GitHub.Runner.Worker
public Type ExtensionType => typeof(IActionCommandExtension);
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command, ContainerInfo container)
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command)
{
if (!command.Properties.TryGetValue(SaveStateCommandProperties.Name, out string stateName) || string.IsNullOrEmpty(stateName))
{
@@ -254,7 +253,7 @@ namespace GitHub.Runner.Worker
public Type ExtensionType => typeof(IActionCommandExtension);
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command, ContainerInfo container)
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command)
{
if (string.IsNullOrWhiteSpace(command.Data))
{
@@ -280,7 +279,7 @@ namespace GitHub.Runner.Worker
public Type ExtensionType => typeof(IActionCommandExtension);
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command, ContainerInfo container)
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command)
{
ArgUtil.NotNullOrEmpty(command.Data, "path");
context.PrependPath.RemoveAll(x => string.Equals(x, command.Data, StringComparison.CurrentCulture));
@@ -295,7 +294,7 @@ namespace GitHub.Runner.Worker
public Type ExtensionType => typeof(IActionCommandExtension);
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command, ContainerInfo container)
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command)
{
var file = command.Data;
@@ -307,9 +306,9 @@ namespace GitHub.Runner.Worker
}
// Translate file path back from container path
if (container != null)
if (context.Container != null)
{
file = container.TranslateToHostPath(file);
file = context.Container.TranslateToHostPath(file);
}
// Root the path
@@ -342,7 +341,7 @@ namespace GitHub.Runner.Worker
public Type ExtensionType => typeof(IActionCommandExtension);
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command, ContainerInfo container)
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command)
{
command.Properties.TryGetValue(RemoveMatcherCommandProperties.Owner, out string owner);
var file = command.Data;
@@ -370,9 +369,9 @@ namespace GitHub.Runner.Worker
else
{
// Translate file path back from container path
if (container != null)
if (context.Container != null)
{
file = container.TranslateToHostPath(file);
file = context.Container.TranslateToHostPath(file);
}
// Root the path
@@ -410,7 +409,7 @@ namespace GitHub.Runner.Worker
public Type ExtensionType => typeof(IActionCommandExtension);
public void ProcessCommand(IExecutionContext context, string inputLine, ActionCommand command, ContainerInfo container)
public void ProcessCommand(IExecutionContext context, string inputLine, ActionCommand command)
{
context.Debug(command.Data);
}
@@ -438,7 +437,7 @@ namespace GitHub.Runner.Worker
public Type ExtensionType => typeof(IActionCommandExtension);
public void ProcessCommand(IExecutionContext context, string inputLine, ActionCommand command, ContainerInfo container)
public void ProcessCommand(IExecutionContext context, string inputLine, ActionCommand command)
{
command.Properties.TryGetValue(IssueCommandProperties.File, out string file);
command.Properties.TryGetValue(IssueCommandProperties.Line, out string line);
@@ -455,10 +454,10 @@ namespace GitHub.Runner.Worker
{
issue.Category = "Code";
if (container != null)
if (context.Container != null)
{
// Translate file path back from container path
file = container.TranslateToHostPath(file);
file = context.Container.TranslateToHostPath(file);
command.Properties[IssueCommandProperties.File] = file;
}
@@ -518,7 +517,7 @@ namespace GitHub.Runner.Worker
public Type ExtensionType => typeof(IActionCommandExtension);
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command, ContainerInfo container)
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command)
{
var data = this is GroupCommandExtension ? command.Data : string.Empty;
context.Output($"##[{Command}]{data}");
@@ -532,7 +531,7 @@ namespace GitHub.Runner.Worker
public Type ExtensionType => typeof(IActionCommandExtension);
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command, ContainerInfo container)
public void ProcessCommand(IExecutionContext context, string line, ActionCommand command)
{
ArgUtil.NotNullOrEmpty(command.Data, "value");
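
Several command extensions in this file differ only in where the `ContainerInfo` comes from (a method parameter versus `context.Container`) before calling `TranslateToHostPath`, so a file path reported from inside a job container can be resolved on the host. The diff does not show that method's implementation; the sketch below is an assumed prefix-swap over a mount table, with illustrative names and an illustrative `/__w` container path.

```csharp
using System;
using System.Collections.Generic;

public sealed class MountMap
{
    private readonly List<(string HostPath, string ContainerPath)> _mounts =
        new List<(string, string)>();

    public void AddMount(string hostPath, string containerPath)
        => _mounts.Add((hostPath, containerPath));

    // Sketch of a container-to-host translation: if the path starts with a known
    // container mount, swap in the corresponding host prefix; otherwise pass through.
    public string TranslateToHostPath(string path)
    {
        foreach (var (hostPath, containerPath) in _mounts)
        {
            if (path.StartsWith(containerPath, StringComparison.OrdinalIgnoreCase))
            {
                return hostPath + path.Substring(containerPath.Length);
            }
        }
        return path;
    }

    public static void Main()
    {
        var map = new MountMap();
        map.AddMount("/home/runner/work", "/__w");
        Console.WriteLine(map.TranslateToHostPath("/__w/repo/problem-matcher.json"));
        // -> /home/runner/work/repo/problem-matcher.json
    }
}
```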

View File

@@ -21,24 +21,11 @@ using PipelineTemplateConstants = GitHub.DistributedTask.Pipelines.ObjectTemplat
namespace GitHub.Runner.Worker
{
public class PrepareResult
{
public PrepareResult(List<JobExtensionRunner> containerSetupSteps, Dictionary<Guid, IActionRunner> preStepTracker)
{
this.ContainerSetupSteps = containerSetupSteps;
this.PreStepTracker = preStepTracker;
}
public List<JobExtensionRunner> ContainerSetupSteps { get; set; }
public Dictionary<Guid, IActionRunner> PreStepTracker { get; set; }
}
[ServiceLocator(Default = typeof(ActionManager))]
public interface IActionManager : IRunnerService
{
Dictionary<Guid, ContainerInfo> CachedActionContainers { get; }
Task<PrepareResult> PrepareActionsAsync(IExecutionContext executionContext, IEnumerable<Pipelines.JobStep> steps);
Task<List<JobExtensionRunner>> PrepareActionsAsync(IExecutionContext executionContext, IEnumerable<Pipelines.JobStep> steps);
Definition LoadAction(IExecutionContext executionContext, Pipelines.ActionStep action);
}
@@ -52,7 +39,7 @@ namespace GitHub.Runner.Worker
private readonly Dictionary<Guid, ContainerInfo> _cachedActionContainers = new Dictionary<Guid, ContainerInfo>();
public Dictionary<Guid, ContainerInfo> CachedActionContainers => _cachedActionContainers;
public async Task<PrepareResult> PrepareActionsAsync(IExecutionContext executionContext, IEnumerable<Pipelines.JobStep> steps)
public async Task<List<JobExtensionRunner>> PrepareActionsAsync(IExecutionContext executionContext, IEnumerable<Pipelines.JobStep> steps)
{
ArgUtil.NotNull(executionContext, nameof(executionContext));
ArgUtil.NotNull(steps, nameof(steps));
@@ -62,24 +49,17 @@ namespace GitHub.Runner.Worker
Dictionary<string, List<Guid>> imagesToBuild = new Dictionary<string, List<Guid>>(StringComparer.OrdinalIgnoreCase);
Dictionary<string, ActionContainer> imagesToBuildInfo = new Dictionary<string, ActionContainer>(StringComparer.OrdinalIgnoreCase);
List<JobExtensionRunner> containerSetupSteps = new List<JobExtensionRunner>();
Dictionary<Guid, IActionRunner> preStepTracker = new Dictionary<Guid, IActionRunner>();
IEnumerable<Pipelines.ActionStep> actions = steps.OfType<Pipelines.ActionStep>();
// TODO: Deprecate the PREVIEW_ACTION_TOKEN
// TODO: Depreciate the PREVIEW_ACTION_TOKEN
// Log even if we aren't using it to ensure users know.
if (!string.IsNullOrEmpty(executionContext.Variables.Get("PREVIEW_ACTION_TOKEN")))
{
executionContext.Warning("The 'PREVIEW_ACTION_TOKEN' secret is deprecated. Please remove it from the repository's secrets");
executionContext.Warning("The 'PREVIEW_ACTION_TOKEN' secret is depreciated. Please remove it from the repository's secrets");
}
// Clear the cache (for self-hosted runners)
// Note, temporarily avoid this step for the on-premises product, to avoid rate limiting.
var configurationStore = HostContext.GetService<IConfigurationStore>();
var isHostedServer = configurationStore.GetSettings().IsHostedServer;
if (isHostedServer)
{
IOUtil.DeleteDirectory(HostContext.GetDirectory(WellKnownDirectory.Actions), executionContext.CancellationToken);
}
// Clear the cache (local runner)
IOUtil.DeleteDirectory(HostContext.GetDirectory(WellKnownDirectory.Actions), executionContext.CancellationToken);
foreach (var action in actions)
{
@@ -131,22 +111,6 @@ namespace GitHub.Runner.Worker
imagesToBuildInfo[setupInfo.ActionRepository] = setupInfo;
}
}
var repoAction = action.Reference as Pipelines.RepositoryPathReference;
if (repoAction.RepositoryType != Pipelines.PipelineConstants.SelfAlias)
{
var definition = LoadAction(executionContext, action);
if (definition.Data.Execution.HasPre)
{
var actionRunner = HostContext.CreateService<IActionRunner>();
actionRunner.Action = action;
actionRunner.Stage = ActionRunStage.Pre;
actionRunner.Condition = definition.Data.Execution.InitCondition;
Trace.Info($"Add 'pre' execution for {action.Id}");
preStepTracker[action.Id] = actionRunner;
}
}
}
}
@@ -183,7 +147,7 @@ namespace GitHub.Runner.Worker
}
#endif
return new PrepareResult(containerSetupSteps, preStepTracker);
return containerSetupSteps;
}
public Definition LoadAction(IExecutionContext executionContext, Pipelines.ActionStep action)
@@ -275,19 +239,14 @@ namespace GitHub.Runner.Worker
Trace.Info($"Action container env: {StringUtil.ConvertToJson(containerAction.Environment)}.");
}
if (!string.IsNullOrEmpty(containerAction.Pre))
{
Trace.Info($"Action container pre entrypoint: {containerAction.Pre}.");
}
if (!string.IsNullOrEmpty(containerAction.EntryPoint))
{
Trace.Info($"Action container entrypoint: {containerAction.EntryPoint}.");
}
if (!string.IsNullOrEmpty(containerAction.Post))
if (!string.IsNullOrEmpty(containerAction.Cleanup))
{
Trace.Info($"Action container post entrypoint: {containerAction.Post}.");
Trace.Info($"Action container cleanup entrypoint: {containerAction.Cleanup}.");
}
if (CachedActionContainers.TryGetValue(action.Id, out var container))
@@ -299,9 +258,8 @@ namespace GitHub.Runner.Worker
else if (definition.Data.Execution.ExecutionType == ActionExecutionType.NodeJS)
{
var nodeAction = definition.Data.Execution as NodeJSActionExecutionData;
Trace.Info($"Action pre node.js file: {nodeAction.Pre ?? "N/A"}.");
Trace.Info($"Action node.js file: {nodeAction.Script}.");
Trace.Info($"Action post node.js file: {nodeAction.Post ?? "N/A"}.");
Trace.Info($"Action cleanup node.js file: {nodeAction.Cleanup ?? "N/A"}.");
}
else if (definition.Data.Execution.ExecutionType == ActionExecutionType.Plugin)
{
@@ -317,7 +275,7 @@ namespace GitHub.Runner.Worker
if (!string.IsNullOrEmpty(plugin.PostPluginTypeName))
{
pluginAction.Post = plugin.PostPluginTypeName;
pluginAction.Cleanup = plugin.PostPluginTypeName;
Trace.Info($"Action cleanup plugin: {plugin.PluginTypeName}.");
}
}
@@ -490,8 +448,7 @@ namespace GitHub.Runner.Worker
ArgUtil.NotNullOrEmpty(repositoryReference.Ref, nameof(repositoryReference.Ref));
string destDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Actions), repositoryReference.Name.Replace(Path.AltDirectorySeparatorChar, Path.DirectorySeparatorChar), repositoryReference.Ref);
string watermarkFile = destDirectory + ".completed";
if (File.Exists(watermarkFile))
if (File.Exists(destDirectory + ".completed"))
{
executionContext.Debug($"Action '{repositoryReference.Name}@{repositoryReference.Ref}' already downloaded at '{destDirectory}'.");
return;
@@ -541,36 +498,27 @@ namespace GitHub.Runner.Worker
using (var httpClientHandler = HostContext.CreateHttpClientHandler())
using (var httpClient = new HttpClient(httpClientHandler))
{
var configurationStore = HostContext.GetService<IConfigurationStore>();
var isHostedServer = configurationStore.GetSettings().IsHostedServer;
if (isHostedServer)
var authToken = Environment.GetEnvironmentVariable("_GITHUB_ACTION_TOKEN");
if (string.IsNullOrEmpty(authToken))
{
var authToken = Environment.GetEnvironmentVariable("_GITHUB_ACTION_TOKEN");
if (string.IsNullOrEmpty(authToken))
{
// TODO: Deprecate the PREVIEW_ACTION_TOKEN
authToken = executionContext.Variables.Get("PREVIEW_ACTION_TOKEN");
}
// TODO: Depreciate the PREVIEW_ACTION_TOKEN
authToken = executionContext.Variables.Get("PREVIEW_ACTION_TOKEN");
}
if (!string.IsNullOrEmpty(authToken))
{
HostContext.SecretMasker.AddValue(authToken);
var base64EncodingToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"PAT:{authToken}"));
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Basic", base64EncodingToken);
}
else
{
var accessToken = executionContext.GetGitHubContext("token");
var base64EncodingToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"x-access-token:{accessToken}"));
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Basic", base64EncodingToken);
}
if (!string.IsNullOrEmpty(authToken))
{
HostContext.SecretMasker.AddValue(authToken);
var base64EncodingToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"PAT:{authToken}"));
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Basic", base64EncodingToken);
}
else
{
// Intentionally empty. Temporary for GHES alpha release, download from dotcom unauthenticated.
var accessToken = executionContext.GetGitHubContext("token");
var base64EncodingToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"x-access-token:{accessToken}"));
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Basic", base64EncodingToken);
}
httpClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents);
httpClient.DefaultRequestHeaders.UserAgent.Add(HostContext.UserAgent);
using (var result = await httpClient.GetStreamAsync(archiveLink))
{
await result.CopyToAsync(fs, _defaultCopyBufferSize, actionDownloadCancellation.Token);
@@ -662,7 +610,7 @@ namespace GitHub.Runner.Worker
}
Trace.Verbose("Create watermark file indicate action download succeed.");
File.WriteAllText(watermarkFile, DateTime.UtcNow.ToString());
File.WriteAllText(destDirectory + ".completed", DateTime.UtcNow.ToString());
executionContext.Debug($"Archive '{archiveFile}' has been unzipped into '{destDirectory}'.");
Trace.Info("Finished getting action repository.");
@@ -824,8 +772,7 @@ namespace GitHub.Runner.Worker
{
public override ActionExecutionType ExecutionType => ActionExecutionType.Container;
public override bool HasPre => !string.IsNullOrEmpty(Pre);
public override bool HasPost => !string.IsNullOrEmpty(Post);
public override bool HasCleanup => !string.IsNullOrEmpty(Cleanup);
public string Image { get; set; }
@@ -835,66 +782,51 @@ namespace GitHub.Runner.Worker
public MappingToken Environment { get; set; }
public string Pre { get; set; }
public string Post { get; set; }
public string Cleanup { get; set; }
}
public sealed class NodeJSActionExecutionData : ActionExecutionData
{
public override ActionExecutionType ExecutionType => ActionExecutionType.NodeJS;
public override bool HasPre => !string.IsNullOrEmpty(Pre);
public override bool HasPost => !string.IsNullOrEmpty(Post);
public override bool HasCleanup => !string.IsNullOrEmpty(Cleanup);
public string Script { get; set; }
public string Pre { get; set; }
public string Post { get; set; }
public string Cleanup { get; set; }
}
public sealed class PluginActionExecutionData : ActionExecutionData
{
public override ActionExecutionType ExecutionType => ActionExecutionType.Plugin;
public override bool HasPre => false;
public override bool HasPost => !string.IsNullOrEmpty(Post);
public override bool HasCleanup => !string.IsNullOrEmpty(Cleanup);
public string Plugin { get; set; }
public string Post { get; set; }
public string Cleanup { get; set; }
}
public sealed class ScriptActionExecutionData : ActionExecutionData
{
public override ActionExecutionType ExecutionType => ActionExecutionType.Script;
public override bool HasPre => false;
public override bool HasPost => false;
public override bool HasCleanup => false;
}
public abstract class ActionExecutionData
{
private string _initCondition = $"{Constants.Expressions.Always}()";
private string _cleanupCondition = $"{Constants.Expressions.Always}()";
public abstract ActionExecutionType ExecutionType { get; }
public abstract bool HasPre { get; }
public abstract bool HasPost { get; }
public abstract bool HasCleanup { get; }
public string CleanupCondition
{
get { return _cleanupCondition; }
set { _cleanupCondition = value; }
}
public string InitCondition
{
get { return _initCondition; }
set { _initCondition = value; }
}
}
public class ContainerSetupInfo

View File

@@ -22,17 +22,16 @@ namespace GitHub.Runner.Worker
{
ActionDefinitionData Load(IExecutionContext executionContext, string manifestFile);
List<string> EvaluateContainerArguments(IExecutionContext executionContext, SequenceToken token, IDictionary<string, PipelineContextData> extraExpressionValues);
List<string> EvaluateContainerArguments(IExecutionContext executionContext, SequenceToken token, IDictionary<string, PipelineContextData> contextData);
Dictionary<string, string> EvaluateContainerEnvironment(IExecutionContext executionContext, MappingToken token, IDictionary<string, PipelineContextData> extraExpressionValues);
Dictionary<string, string> EvaluateContainerEnvironment(IExecutionContext executionContext, MappingToken token, IDictionary<string, PipelineContextData> contextData);
string EvaluateDefaultInput(IExecutionContext executionContext, string inputName, TemplateToken token);
string EvaluateDefaultInput(IExecutionContext executionContext, string inputName, TemplateToken token, IDictionary<string, PipelineContextData> contextData);
}
public sealed class ActionManifestManager : RunnerService, IActionManifestManager
{
private TemplateSchema _actionManifestSchema;
private IReadOnlyList<String> _fileTable;
public override void Initialize(IHostContext hostContext)
{
@@ -54,7 +53,7 @@ namespace GitHub.Runner.Worker
public ActionDefinitionData Load(IExecutionContext executionContext, string manifestFile)
{
var context = CreateContext(executionContext);
var context = CreateContext(executionContext, null);
ActionDefinitionData actionDefinition = new ActionDefinitionData();
try
{
@@ -62,9 +61,6 @@ namespace GitHub.Runner.Worker
// Get the file ID
var fileId = context.GetFileId(manifestFile);
_fileTable = context.GetFileTable();
// Read the file
var fileContent = File.ReadAllText(manifestFile);
using (var stringReader = new StringReader(fileContent))
{
@@ -133,13 +129,13 @@ namespace GitHub.Runner.Worker
public List<string> EvaluateContainerArguments(
IExecutionContext executionContext,
SequenceToken token,
IDictionary<string, PipelineContextData> extraExpressionValues)
IDictionary<string, PipelineContextData> contextData)
{
var result = new List<string>();
if (token != null)
{
var context = CreateContext(executionContext, extraExpressionValues);
var context = CreateContext(executionContext, contextData);
try
{
var evaluateResult = TemplateEvaluator.Evaluate(context, "container-runs-args", token, 0, null, omitHeader: true);
@@ -172,13 +168,13 @@ namespace GitHub.Runner.Worker
public Dictionary<string, string> EvaluateContainerEnvironment(
IExecutionContext executionContext,
MappingToken token,
IDictionary<string, PipelineContextData> extraExpressionValues)
IDictionary<string, PipelineContextData> contextData)
{
var result = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
if (token != null)
{
var context = CreateContext(executionContext, extraExpressionValues);
var context = CreateContext(executionContext, contextData);
try
{
var evaluateResult = TemplateEvaluator.Evaluate(context, "container-runs-env", token, 0, null, omitHeader: true);
@@ -216,12 +212,13 @@ namespace GitHub.Runner.Worker
public string EvaluateDefaultInput(
IExecutionContext executionContext,
string inputName,
TemplateToken token)
TemplateToken token,
IDictionary<string, PipelineContextData> contextData)
{
string result = "";
if (token != null)
{
var context = CreateContext(executionContext);
var context = CreateContext(executionContext, contextData);
try
{
var evaluateResult = TemplateEvaluator.Evaluate(context, "input-default-context", token, 0, null, omitHeader: true);
@@ -246,7 +243,7 @@ namespace GitHub.Runner.Worker
private TemplateContext CreateContext(
IExecutionContext executionContext,
IDictionary<string, PipelineContextData> extraExpressionValues = null)
IDictionary<string, PipelineContextData> contextData)
{
var result = new TemplateContext
{
@@ -260,36 +257,14 @@ namespace GitHub.Runner.Worker
TraceWriter = executionContext.ToTemplateTraceWriter(),
};
// Expression values from execution context
foreach (var pair in executionContext.ExpressionValues)
if (contextData?.Count > 0)
{
result.ExpressionValues[pair.Key] = pair.Value;
}
// Extra expression values
if (extraExpressionValues?.Count > 0)
{
foreach (var pair in extraExpressionValues)
foreach (var pair in contextData)
{
result.ExpressionValues[pair.Key] = pair.Value;
}
}
// Expression functions from execution context
foreach (var item in executionContext.ExpressionFunctions)
{
result.ExpressionFunctions.Add(item);
}
// Add the file table
if (_fileTable?.Count > 0)
{
for (var i = 0 ; i < _fileTable.Count ; i++)
{
result.GetFileId(_fileTable[i]);
}
}
return result;
}
@@ -305,9 +280,6 @@ namespace GitHub.Runner.Worker
var envToken = default(MappingToken);
var mainToken = default(StringToken);
var pluginToken = default(StringToken);
var preToken = default(StringToken);
var preEntrypointToken = default(StringToken);
var preIfToken = default(StringToken);
var postToken = default(StringToken);
var postEntrypointToken = default(StringToken);
var postIfToken = default(StringToken);
@@ -346,15 +318,6 @@ namespace GitHub.Runner.Worker
case "post-if":
postIfToken = run.Value.AssertString("post-if");
break;
case "pre":
preToken = run.Value.AssertString("pre");
break;
case "pre-entrypoint":
preEntrypointToken = run.Value.AssertString("pre-entrypoint");
break;
case "pre-if":
preIfToken = run.Value.AssertString("pre-if");
break;
default:
Trace.Info($"Ignore run property {runsKey}.");
break;
@@ -377,9 +340,7 @@ namespace GitHub.Runner.Worker
Arguments = argsToken,
EntryPoint = entrypointToken?.Value,
Environment = envToken,
Pre = preEntrypointToken?.Value,
InitCondition = preIfToken?.Value ?? "always()",
Post = postEntrypointToken?.Value,
Cleanup = postEntrypointToken?.Value,
CleanupCondition = postIfToken?.Value ?? "always()"
};
}
@@ -388,16 +349,14 @@ namespace GitHub.Runner.Worker
{
if (string.IsNullOrEmpty(mainToken?.Value))
{
throw new ArgumentNullException($"Entry javascript file is not provided.");
throw new ArgumentNullException($"Entry javascript fils is not provided.");
}
else
{
return new NodeJSActionExecutionData()
{
Script = mainToken.Value,
Pre = preToken?.Value,
InitCondition = preIfToken?.Value ?? "always()",
Post = postToken?.Value,
Cleanup = postToken?.Value,
CleanupCondition = postIfToken?.Value ?? "always()"
};
}
@@ -456,5 +415,566 @@ namespace GitHub.Runner.Worker
}
}
}
/// <summary>
/// Converts a YAML file into a TemplateToken
/// </summary>
internal sealed class YamlObjectReader : IObjectReader
{
internal YamlObjectReader(
Int32? fileId,
TextReader input)
{
m_fileId = fileId;
m_parser = new Parser(input);
}
public Boolean AllowLiteral(out LiteralToken value)
{
if (EvaluateCurrent() is Scalar scalar)
{
// Tag specified
if (!string.IsNullOrEmpty(scalar.Tag))
{
// String tag
if (string.Equals(scalar.Tag, c_stringTag, StringComparison.Ordinal))
{
value = new StringToken(m_fileId, scalar.Start.Line, scalar.Start.Column, scalar.Value);
MoveNext();
return true;
}
// Not plain style
if (scalar.Style != ScalarStyle.Plain)
{
throw new NotSupportedException($"The scalar style '{scalar.Style}' on line {scalar.Start.Line} and column {scalar.Start.Column} is not valid with the tag '{scalar.Tag}'");
}
// Boolean, Float, Integer, or Null
switch (scalar.Tag)
{
case c_booleanTag:
value = ParseBoolean(scalar);
break;
case c_floatTag:
value = ParseFloat(scalar);
break;
case c_integerTag:
value = ParseInteger(scalar);
break;
case c_nullTag:
value = ParseNull(scalar);
break;
default:
throw new NotSupportedException($"Unexpected tag '{scalar.Tag}'");
}
MoveNext();
return true;
}
// Plain style, determine type using YAML 1.2 "core" schema https://yaml.org/spec/1.2/spec.html#id2804923
if (scalar.Style == ScalarStyle.Plain)
{
if (MatchNull(scalar, out var nullToken))
{
value = nullToken;
}
else if (MatchBoolean(scalar, out var booleanToken))
{
value = booleanToken;
}
else if (MatchInteger(scalar, out var numberToken) ||
MatchFloat(scalar, out numberToken))
{
value = numberToken;
}
else
{
value = new StringToken(m_fileId, scalar.Start.Line, scalar.Start.Column, scalar.Value);
}
MoveNext();
return true;
}
// Otherwise assume string
value = new StringToken(m_fileId, scalar.Start.Line, scalar.Start.Column, scalar.Value);
MoveNext();
return true;
}
value = default;
return false;
}
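
For orientation, the sketch below (illustrative only, not runner code) mirrors the plain-scalar resolution order implemented above for the YAML 1.2 "core" schema: null, then boolean, then integer, then float, otherwise string. The `Classify` helper and its regexes are assumptions written to match the comments above, not part of the runner's API.

```csharp
using System;
using System.Text.RegularExpressions;

// Illustrative sketch (not runner code) of the plain-scalar typing rules above.
static class PlainScalarDemo
{
    static string Classify(string s)
    {
        if (s == "" || s == "null" || s == "Null" || s == "NULL" || s == "~") return "null";
        if (s == "true" || s == "True" || s == "TRUE" ||
            s == "false" || s == "False" || s == "FALSE") return "boolean";
        if (Regex.IsMatch(s, @"^[-+]?[0-9]+$") ||
            Regex.IsMatch(s, @"^0x[0-9a-fA-F]+$") ||
            Regex.IsMatch(s, @"^0o[0-7]+$")) return "integer";
        if (Regex.IsMatch(s, @"^[-+]?(\.[0-9]+|[0-9]+(\.[0-9]*)?)([eE][-+]?[0-9]+)?$") ||
            Regex.IsMatch(s, @"^[-+]?\.(inf|Inf|INF)$") ||
            s == ".nan" || s == ".NaN" || s == ".NAN") return "float";
        return "string"; // e.g. "yes"/"no" stay strings in the core schema
    }

    static void Main()
    {
        foreach (var s in new[] { "~", "True", "0x1A", "1.5e3", "yes" })
            Console.WriteLine($"{s} -> {Classify(s)}"); // null, boolean, integer, float, string
    }
}
```
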
public Boolean AllowSequenceStart(out SequenceToken value)
{
if (EvaluateCurrent() is SequenceStart sequenceStart)
{
value = new SequenceToken(m_fileId, sequenceStart.Start.Line, sequenceStart.Start.Column);
MoveNext();
return true;
}
value = default;
return false;
}
public Boolean AllowSequenceEnd()
{
if (EvaluateCurrent() is SequenceEnd)
{
MoveNext();
return true;
}
return false;
}
public Boolean AllowMappingStart(out MappingToken value)
{
if (EvaluateCurrent() is MappingStart mappingStart)
{
value = new MappingToken(m_fileId, mappingStart.Start.Line, mappingStart.Start.Column);
MoveNext();
return true;
}
value = default;
return false;
}
public Boolean AllowMappingEnd()
{
if (EvaluateCurrent() is MappingEnd)
{
MoveNext();
return true;
}
return false;
}
/// <summary>
/// Consumes the last parsing events, which are expected to be DocumentEnd and StreamEnd.
/// </summary>
public void ValidateEnd()
{
if (EvaluateCurrent() is DocumentEnd)
{
MoveNext();
}
else
{
throw new InvalidOperationException("Expected document end parse event");
}
if (EvaluateCurrent() is StreamEnd)
{
MoveNext();
}
else
{
throw new InvalidOperationException("Expected stream end parse event");
}
if (MoveNext())
{
throw new InvalidOperationException("Expected end of parse events");
}
}
/// <summary>
/// Consumes the first parsing events, which are expected to be StreamStart and DocumentStart.
/// </summary>
public void ValidateStart()
{
if (EvaluateCurrent() != null)
{
throw new InvalidOperationException("Unexpected parser state");
}
if (!MoveNext())
{
throw new InvalidOperationException("Expected a parse event");
}
if (EvaluateCurrent() is StreamStart)
{
MoveNext();
}
else
{
throw new InvalidOperationException("Expected stream start parse event");
}
if (EvaluateCurrent() is DocumentStart)
{
MoveNext();
}
else
{
throw new InvalidOperationException("Expected document start parse event");
}
}
private ParsingEvent EvaluateCurrent()
{
if (m_current == null)
{
m_current = m_parser.Current;
if (m_current != null)
{
if (m_current is Scalar scalar)
{
// Verify not using anchors
if (scalar.Anchor != null)
{
throw new InvalidOperationException($"Anchors are not currently supported. Remove the anchor '{scalar.Anchor}'");
}
}
else if (m_current is MappingStart mappingStart)
{
// Verify not using anchors
if (mappingStart.Anchor != null)
{
throw new InvalidOperationException($"Anchors are not currently supported. Remove the anchor '{mappingStart.Anchor}'");
}
}
else if (m_current is SequenceStart sequenceStart)
{
// Verify not using anchors
if (sequenceStart.Anchor != null)
{
throw new InvalidOperationException($"Anchors are not currently supported. Remove the anchor '{sequenceStart.Anchor}'");
}
}
else if (!(m_current is MappingEnd) &&
!(m_current is SequenceEnd) &&
!(m_current is DocumentStart) &&
!(m_current is DocumentEnd) &&
!(m_current is StreamStart) &&
!(m_current is StreamEnd))
{
throw new InvalidOperationException($"Unexpected parsing event type: {m_current.GetType().Name}");
}
}
}
return m_current;
}
private Boolean MoveNext()
{
m_current = null;
return m_parser.MoveNext();
}
private BooleanToken ParseBoolean(Scalar scalar)
{
if (MatchBoolean(scalar, out var token))
{
return token;
}
ThrowInvalidValue(scalar, c_booleanTag); // throws
return default;
}
private NumberToken ParseFloat(Scalar scalar)
{
if (MatchFloat(scalar, out var token))
{
return token;
}
ThrowInvalidValue(scalar, c_floatTag); // throws
return default;
}
private NumberToken ParseInteger(Scalar scalar)
{
if (MatchInteger(scalar, out var token))
{
return token;
}
ThrowInvalidValue(scalar, c_integerTag); // throws
return default;
}
private NullToken ParseNull(Scalar scalar)
{
if (MatchNull(scalar, out var token))
{
return token;
}
ThrowInvalidValue(scalar, c_nullTag); // throws
return default;
}
private Boolean MatchBoolean(
Scalar scalar,
out BooleanToken value)
{
// YAML 1.2 "core" schema https://yaml.org/spec/1.2/spec.html#id2804923
switch (scalar.Value ?? string.Empty)
{
case "true":
case "True":
case "TRUE":
value = new BooleanToken(m_fileId, scalar.Start.Line, scalar.Start.Column, true);
return true;
case "false":
case "False":
case "FALSE":
value = new BooleanToken(m_fileId, scalar.Start.Line, scalar.Start.Column, false);
return true;
}
value = default;
return false;
}
private Boolean MatchFloat(
Scalar scalar,
out NumberToken value)
{
// YAML 1.2 "core" schema https://yaml.org/spec/1.2/spec.html#id2804923
var str = scalar.Value;
if (!string.IsNullOrEmpty(str))
{
// Check for [-+]?(\.inf|\.Inf|\.INF)|\.nan|\.NaN|\.NAN
switch (str)
{
case ".inf":
case ".Inf":
case ".INF":
case "+.inf":
case "+.Inf":
case "+.INF":
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, Double.PositiveInfinity);
return true;
case "-.inf":
case "-.Inf":
case "-.INF":
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, Double.NegativeInfinity);
return true;
case ".nan":
case ".NaN":
case ".NAN":
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, Double.NaN);
return true;
}
// Otherwise check [-+]?(\.[0-9]+|[0-9]+(\.[0-9]*)?)([eE][-+]?[0-9]+)?
// Skip leading sign
var index = str[0] == '-' || str[0] == '+' ? 1 : 0;
// Check for integer portion
var length = str.Length;
var hasInteger = false;
while (index < length && str[index] >= '0' && str[index] <= '9')
{
hasInteger = true;
index++;
}
// Check for decimal point
var hasDot = false;
if (index < length && str[index] == '.')
{
hasDot = true;
index++;
}
// Check for decimal portion
var hasDecimal = false;
while (index < length && str[index] >= '0' && str[index] <= '9')
{
hasDecimal = true;
index++;
}
// Check [-+]?(\.[0-9]+|[0-9]+(\.[0-9]*)?)
if ((hasDot && hasDecimal) || hasInteger)
{
// Check for end
if (index == length)
{
// Try parse
if (Double.TryParse(str, NumberStyles.AllowLeadingSign | NumberStyles.AllowDecimalPoint, CultureInfo.InvariantCulture, out var doubleValue))
{
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, doubleValue);
return true;
}
// Otherwise exceeds range
else
{
ThrowInvalidValue(scalar, c_floatTag); // throws
}
}
// Check [eE][-+]?[0-9]
else if (index < length && (str[index] == 'e' || str[index] == 'E'))
{
index++;
// Skip sign
if (index < length && (str[index] == '-' || str[index] == '+'))
{
index++;
}
// Check for exponent
var hasExponent = false;
while (index < length && str[index] >= '0' && str[index] <= '9')
{
hasExponent = true;
index++;
}
// Check for end
if (hasExponent && index == length)
{
// Try parse
if (Double.TryParse(str, NumberStyles.AllowLeadingSign | NumberStyles.AllowDecimalPoint | NumberStyles.AllowExponent, CultureInfo.InvariantCulture, out var doubleValue))
{
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, (Double)doubleValue);
return true;
}
// Otherwise exceeds range
else
{
ThrowInvalidValue(scalar, c_floatTag); // throws
}
}
}
}
}
value = default;
return false;
}
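
A quick standalone check (assumed values, not runner code) of the `Double.TryParse` styles used above for the plain float forms:

```csharp
using System;
using System.Globalization;

// Standalone check of the float parsing styles used in MatchFloat above.
class FloatFormsDemo
{
    static void Main()
    {
        var styles = NumberStyles.AllowLeadingSign | NumberStyles.AllowDecimalPoint | NumberStyles.AllowExponent;
        Console.WriteLine(double.Parse("1.5e3", styles, CultureInfo.InvariantCulture)); // 1500
        Console.WriteLine(double.Parse("-.5", styles, CultureInfo.InvariantCulture));   // -0.5
        Console.WriteLine(double.PositiveInfinity);                                     // ".inf"/"+.INF" map here
    }
}
```
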
private Boolean MatchInteger(
Scalar scalar,
out NumberToken value)
{
// YAML 1.2 "core" schema https://yaml.org/spec/1.2/spec.html#id2804923
var str = scalar.Value;
if (!string.IsNullOrEmpty(str))
{
// Check for [0-9]+
var firstChar = str[0];
if (firstChar >= '0' && firstChar <= '9' &&
str.Skip(1).All(x => x >= '0' && x <= '9'))
{
// Try parse
if (Double.TryParse(str, NumberStyles.None, CultureInfo.InvariantCulture, out var doubleValue))
{
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, doubleValue);
return true;
}
// Otherwise exceeds range
ThrowInvalidValue(scalar, c_integerTag); // throws
}
// Check for (-|+)[0-9]+
else if ((firstChar == '-' || firstChar == '+') &&
str.Length > 1 &&
str.Skip(1).All(x => x >= '0' && x <= '9'))
{
// Try parse
if (Double.TryParse(str, NumberStyles.AllowLeadingSign, CultureInfo.InvariantCulture, out var doubleValue))
{
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, doubleValue);
return true;
}
// Otherwise exceeds range
ThrowInvalidValue(scalar, c_integerTag); // throws
}
// Check for 0x[0-9a-fA-F]+
else if (firstChar == '0' &&
str.Length > 2 &&
str[1] == 'x' &&
str.Skip(2).All(x => (x >= '0' && x <= '9') || (x >= 'a' && x <= 'f') || (x >= 'A' && x <= 'F')))
{
// Try parse
if (Int32.TryParse(str.Substring(2), NumberStyles.AllowHexSpecifier, CultureInfo.InvariantCulture, out var integerValue))
{
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, integerValue);
return true;
}
// Otherwise exceeds range
ThrowInvalidValue(scalar, c_integerTag); // throws
}
// Check for 0o[0-7]+
else if (firstChar == '0' &&
str.Length > 2 &&
str[1] == 'o' &&
str.Skip(2).All(x => x >= '0' && x <= '7'))
{
// Try parse
var integerValue = default(Int32);
try
{
integerValue = Convert.ToInt32(str.Substring(2), 8);
}
// Otherwise exceeds range
catch (Exception)
{
ThrowInvalidValue(scalar, c_integerTag); // throws
}
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, integerValue);
return true;
}
}
value = default;
return false;
}
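
Similarly, a minimal sketch (assumed inputs, not runner code) of the non-decimal integer handling above: the `0x` prefix is stripped and parsed with `AllowHexSpecifier`, while the `0o` prefix goes through `Convert.ToInt32(value, 8)`.

```csharp
using System;
using System.Globalization;

// Standalone check of the hex/octal integer forms accepted above.
class IntegerFormsDemo
{
    static void Main()
    {
        Console.WriteLine(int.Parse("1A", NumberStyles.AllowHexSpecifier, CultureInfo.InvariantCulture)); // 26  (from "0x1A")
        Console.WriteLine(Convert.ToInt32("17", 8));                                                      // 15  (from "0o17")
    }
}
```
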
private Boolean MatchNull(
Scalar scalar,
out NullToken value)
{
// YAML 1.2 "core" schema https://yaml.org/spec/1.2/spec.html#id2804923
switch (scalar.Value ?? string.Empty)
{
case "":
case "null":
case "Null":
case "NULL":
case "~":
value = new NullToken(m_fileId, scalar.Start.Line, scalar.Start.Column);
return true;
}
value = default;
return false;
}
private void ThrowInvalidValue(
Scalar scalar,
String tag)
{
throw new NotSupportedException($"The value '{scalar.Value}' on line {scalar.Start.Line} and column {scalar.Start.Column} is invalid for the type '{scalar.Tag}'");
}
private const String c_booleanTag = "tag:yaml.org,2002:bool";
private const String c_floatTag = "tag:yaml.org,2002:float";
private const String c_integerTag = "tag:yaml.org,2002:int";
private const String c_nullTag = "tag:yaml.org,2002:null";
private const String c_stringTag = "tag:yaml.org,2002:string";
private readonly Int32? m_fileId;
private readonly Parser m_parser;
private ParsingEvent m_current;
}
}

View File

@@ -18,7 +18,6 @@ namespace GitHub.Runner.Worker
{
public enum ActionRunStage
{
Pre,
Main,
Post,
}
@@ -27,7 +26,7 @@ namespace GitHub.Runner.Worker
public interface IActionRunner : IStep, IRunnerService
{
ActionRunStage Stage { get; set; }
bool TryEvaluateDisplayName(DictionaryContextData contextData, IExecutionContext context);
Boolean TryEvaluateDisplayName(DictionaryContextData contextData, IExecutionContext context);
Pipelines.ActionStep Action { get; set; }
}
@@ -82,18 +81,20 @@ namespace GitHub.Runner.Worker
ActionExecutionData handlerData = definition.Data?.Execution;
ArgUtil.NotNull(handlerData, nameof(handlerData));
if (handlerData.HasPre &&
Action.Reference is Pipelines.RepositoryPathReference repoAction &&
string.Equals(repoAction.RepositoryType, Pipelines.PipelineConstants.SelfAlias, StringComparison.OrdinalIgnoreCase))
{
ExecutionContext.Warning($"`pre` execution is not supported for local action from '{repoAction.Path}'");
}
// The action has post cleanup defined.
// We need to create a timeline record for it and add it to the step list that StepRunner is using
if (handlerData.HasPost && (Stage == ActionRunStage.Pre || Stage == ActionRunStage.Main))
if (handlerData.HasCleanup && Stage == ActionRunStage.Main)
{
string postDisplayName = $"Post {this.DisplayName}";
string postDisplayName = null;
if (this.DisplayName.StartsWith(PipelineTemplateConstants.RunDisplayPrefix))
{
postDisplayName = $"Post {this.DisplayName.Substring(PipelineTemplateConstants.RunDisplayPrefix.Length)}";
}
else
{
postDisplayName = $"Post {this.DisplayName}";
}
var repositoryReference = Action.Reference as RepositoryPathReference;
var pathString = string.IsNullOrEmpty(repositoryReference.Path) ? string.Empty : $"/{repositoryReference.Path}";
var repoString = string.IsNullOrEmpty(repositoryReference.Ref) ? $"{repositoryReference.Name}{pathString}" :
@@ -107,7 +108,7 @@ namespace GitHub.Runner.Worker
actionRunner.Condition = handlerData.CleanupCondition;
actionRunner.DisplayName = postDisplayName;
ExecutionContext.RegisterPostJobStep(actionRunner);
ExecutionContext.RegisterPostJobStep($"{actionRunner.Action.Name}_post", actionRunner);
}
IStepHost stepHost = HostContext.CreateService<IDefaultStepHost>();
@@ -140,8 +141,10 @@ namespace GitHub.Runner.Worker
// Load the inputs.
ExecutionContext.Debug("Loading inputs");
var templateEvaluator = ExecutionContext.ToPipelineTemplateEvaluator();
var inputs = templateEvaluator.EvaluateStepInputs(Action.Inputs, ExecutionContext.ExpressionValues, ExecutionContext.ExpressionFunctions);
var templateTrace = ExecutionContext.ToTemplateTraceWriter();
var schema = new PipelineTemplateSchemaFactory().CreateSchema();
var templateEvaluator = new PipelineTemplateEvaluator(templateTrace, schema);
var inputs = templateEvaluator.EvaluateStepInputs(Action.Inputs, ExecutionContext.ExpressionValues);
foreach (KeyValuePair<string, string> input in inputs)
{
@@ -161,7 +164,13 @@ namespace GitHub.Runner.Worker
string key = input.Key.AssertString("action input name").Value;
if (!inputs.ContainsKey(key))
{
inputs[key] = manifestManager.EvaluateDefaultInput(ExecutionContext, key, input.Value);
var evaluateContext = new Dictionary<string, PipelineContextData>(StringComparer.OrdinalIgnoreCase);
foreach (var data in ExecutionContext.ExpressionValues)
{
evaluateContext[data.Key] = data.Value;
}
inputs[key] = manifestManager.EvaluateDefaultInput(ExecutionContext, key, input.Value, evaluateContext);
}
}
}
@@ -170,15 +179,17 @@ namespace GitHub.Runner.Worker
ExecutionContext.Debug("Loading env");
var environment = new Dictionary<String, String>(VarUtil.EnvironmentVariableKeyComparer);
#if OS_WINDOWS
var envContext = ExecutionContext.ExpressionValues["env"] as DictionaryContextData;
#else
var envContext = ExecutionContext.ExpressionValues["env"] as CaseSensitiveDictionaryContextData;
#endif
// Apply environment from the env context; the env context contains job-level env and the action's env block
foreach (var env in envContext)
// Apply environment set using ##[set-env] first, since these are job-level env vars
foreach (var env in ExecutionContext.EnvironmentVariables)
{
environment[env.Key] = env.Value.ToString();
environment[env.Key] = env.Value ?? string.Empty;
}
// Apply action's env block later.
var actionEnvironment = templateEvaluator.EvaluateStepEnvironment(Action.Environment, ExecutionContext.ExpressionValues, VarUtil.EnvironmentVariableKeyComparer);
foreach (var env in actionEnvironment)
{
environment[env.Key] = env.Value ?? string.Empty;
}
// Apply the action's intra-action state last
@@ -286,14 +297,11 @@ namespace GitHub.Runner.Worker
return displayName;
}
// Try evaluating fully
var schema = new PipelineTemplateSchemaFactory().CreateSchema();
var templateEvaluator = new PipelineTemplateEvaluator(context.ToTemplateTraceWriter(), schema);
try
{
if (tokenToParse.CheckHasRequiredContext(contextData, context.ExpressionFunctions))
{
var templateEvaluator = context.ToPipelineTemplateEvaluator();
displayName = templateEvaluator.EvaluateStepDisplayName(tokenToParse, contextData, context.ExpressionFunctions);
didFullyEvaluate = true;
}
didFullyEvaluate = templateEvaluator.TryEvaluateStepDisplayName(tokenToParse, contextData, out displayName);
}
catch (TemplateValidationException e)
{

View File

@@ -61,11 +61,8 @@ namespace GitHub.Runner.Worker.Container
foreach (var volume in container.Volumes)
{
UserMountVolumes[volume] = volume;
MountVolumes.Add(new MountVolume(volume));
}
}
UpdateWebProxyEnv(hostContext.WebProxy);
}
public string ContainerId { get; set; }
@@ -225,26 +222,6 @@ namespace GitHub.Runner.Worker.Container
{
_pathMappings.Insert(0, new PathMapping(hostCommonPath, containerCommonPath));
}
private void UpdateWebProxyEnv(RunnerWebProxy webProxy)
{
// Set common forms of proxy variables if configured in Runner and not set directly by container.env
if (!String.IsNullOrEmpty(webProxy.HttpProxyAddress))
{
ContainerEnvironmentVariables.TryAdd("HTTP_PROXY", webProxy.HttpProxyAddress);
ContainerEnvironmentVariables.TryAdd("http_proxy", webProxy.HttpProxyAddress);
}
if (!String.IsNullOrEmpty(webProxy.HttpsProxyAddress))
{
ContainerEnvironmentVariables.TryAdd("HTTPS_PROXY", webProxy.HttpsProxyAddress);
ContainerEnvironmentVariables.TryAdd("https_proxy", webProxy.HttpsProxyAddress);
}
if (!String.IsNullOrEmpty(webProxy.NoProxyString))
{
ContainerEnvironmentVariables.TryAdd("NO_PROXY", webProxy.NoProxyString);
ContainerEnvironmentVariables.TryAdd("no_proxy", webProxy.NoProxyString);
}
}
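
For context, `Dictionary.TryAdd` only sets a variable when the key is absent, so proxy values supplied explicitly through `container.env` are never overwritten by the runner's proxy configuration. A minimal sketch of that behavior (the URLs are made-up examples):

```csharp
using System;
using System.Collections.Generic;

// Minimal sketch of the TryAdd semantics relied on above (not runner code).
class ProxyEnvDemo
{
    static void Main()
    {
        var env = new Dictionary<string, string> { ["HTTP_PROXY"] = "http://user-set:8080" };
        env.TryAdd("HTTP_PROXY", "http://runner-config:3128"); // no-op, key already present
        env.TryAdd("http_proxy", "http://runner-config:3128"); // added, lowercase form was missing
        Console.WriteLine(env["HTTP_PROXY"]); // http://user-set:8080
        Console.WriteLine(env["http_proxy"]); // http://runner-config:3128
    }
}
```
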
}
public class MountVolume

View File

@@ -130,13 +130,6 @@ namespace GitHub.Runner.Worker.Container
// Watermark for GitHub Action environment
dockerOptions.Add("-e GITHUB_ACTIONS=true");
// Set CI=true when nothing else has already set it.
// CI=true is commonly set by most CI providers.
if (!container.ContainerEnvironmentVariables.ContainsKey("CI"))
{
dockerOptions.Add("-e CI=true");
}
foreach (var volume in container.MountVolumes)
{
// replace `"` with `\"` and add `"{0}"` to all path.
@@ -196,13 +189,6 @@ namespace GitHub.Runner.Worker.Container
// Watermark for GitHub Action environment
dockerOptions.Add("-e GITHUB_ACTIONS=true");
// Set CI=true when nothing else has already set it.
// CI=true is commonly set by most CI providers.
if (!container.ContainerEnvironmentVariables.ContainsKey("CI"))
{
dockerOptions.Add("-e CI=true");
}
if (!string.IsNullOrEmpty(container.ContainerEntryPoint))
{
dockerOptions.Add($"--entrypoint \"{container.ContainerEntryPoint}\"");

View File

@@ -47,9 +47,9 @@ namespace GitHub.Runner.Worker
condition: $"{PipelineTemplateConstants.Always}()",
displayName: "Stop containers",
data: data);
executionContext.Debug($"Register post job cleanup for stopping/deleting containers.");
executionContext.RegisterPostJobStep(postJobStep);
executionContext.RegisterPostJobStep(nameof(StopContainersAsync), postJobStep);
// Check whether we are inside a container.
// Our container feature requires mapping the working directory from the host into the container.
@@ -180,11 +180,6 @@ namespace GitHub.Runner.Worker
foreach (var volume in container.UserMountVolumes)
{
Trace.Info($"User provided volume: {volume.Value}");
var mount = new MountVolume(volume.Value);
if (string.Equals(mount.SourceVolumePath, "/", StringComparison.OrdinalIgnoreCase))
{
executionContext.Warning($"Volume mount {volume.Value} is going to mount '/' into the container which may cause file ownership change in the entire file system and cause Actions Runner to lose permission to access the disk.");
}
}
// Pull down docker image with retry up to 3 times

View File

@@ -1,27 +1,26 @@
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using System.Web;
using GitHub.DistributedTask.Expressions2;
using GitHub.Runner.Worker.Container;
using GitHub.Services.WebApi;
using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.Pipelines.ObjectTemplating;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using GitHub.Runner.Worker.Container;
using GitHub.Services.WebApi;
using Newtonsoft.Json;
using System.Text;
using System.Collections;
using ObjectTemplating = GitHub.DistributedTask.ObjectTemplating;
using Pipelines = GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.Expressions2;
namespace GitHub.Runner.Worker
{
@@ -39,7 +38,6 @@ namespace GitHub.Runner.Worker
string ContextName { get; }
Task ForceCompleted { get; }
TaskResult? Result { get; set; }
TaskResult? Outcome { get; set; }
string ResultCode { get; set; }
TaskResult? CommandResult { get; set; }
CancellationToken CancellationToken { get; }
@@ -48,14 +46,11 @@ namespace GitHub.Runner.Worker
PlanFeatures Features { get; }
Variables Variables { get; }
Dictionary<string, string> IntraActionState { get; }
IDictionary<String, IDictionary<String, String>> JobDefaults { get; }
Dictionary<string, VariableValue> JobOutputs { get; }
HashSet<string> OutputVariables { get; }
IDictionary<String, String> EnvironmentVariables { get; }
IDictionary<String, ContextScope> Scopes { get; }
IList<String> FileTable { get; }
StepsContext StepsContext { get; }
DictionaryContextData ExpressionValues { get; }
IList<IFunctionInfo> ExpressionFunctions { get; }
List<string> PrependPath { get; }
ContainerInfo Container { get; set; }
List<ContainerInfo> ServiceContainers { get; }
@@ -103,7 +98,7 @@ namespace GitHub.Runner.Worker
// others
void ForceTaskComplete();
void RegisterPostJobStep(IStep step);
void RegisterPostJobStep(string refName, IStep step);
}
public sealed class ExecutionContext : RunnerService, IExecutionContext
@@ -113,6 +108,7 @@ namespace GitHub.Runner.Worker
private readonly TimelineRecord _record = new TimelineRecord();
private readonly Dictionary<Guid, TimelineRecord> _detailRecords = new Dictionary<Guid, TimelineRecord>();
private readonly object _loggerLock = new object();
private readonly HashSet<string> _outputvariables = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
private readonly object _matchersLock = new object();
private event OnMatcherChanged _onMatcherChanged;
@@ -142,14 +138,11 @@ namespace GitHub.Runner.Worker
public List<ServiceEndpoint> Endpoints { get; private set; }
public Variables Variables { get; private set; }
public Dictionary<string, string> IntraActionState { get; private set; }
public IDictionary<String, IDictionary<String, String>> JobDefaults { get; private set; }
public Dictionary<string, VariableValue> JobOutputs { get; private set; }
public HashSet<string> OutputVariables => _outputvariables;
public IDictionary<String, String> EnvironmentVariables { get; private set; }
public IDictionary<String, ContextScope> Scopes { get; private set; }
public IList<String> FileTable { get; private set; }
public StepsContext StepsContext { get; private set; }
public DictionaryContextData ExpressionValues { get; } = new DictionaryContextData();
public IList<IFunctionInfo> ExpressionFunctions { get; } = new List<IFunctionInfo>();
public bool WriteDebug { get; private set; }
public List<string> PrependPath { get; private set; }
public ContainerInfo Container { get; set; }
@@ -161,9 +154,6 @@ namespace GitHub.Runner.Worker
// Only job level ExecutionContext has PostJobSteps
public Stack<IStep> PostJobSteps { get; private set; }
// Only job level ExecutionContext has StepsWithPostRegistered
public HashSet<Guid> StepsWithPostRegistered { get; private set; }
public bool EchoOnActionCommand { get; set; }
@@ -179,8 +169,6 @@ namespace GitHub.Runner.Worker
}
}
public TaskResult? Outcome { get; set; }
public TaskResult? CommandResult { get; set; }
private string ContextType => _record.RecordType;
@@ -251,15 +239,9 @@ namespace GitHub.Runner.Worker
});
}
public void RegisterPostJobStep(IStep step)
public void RegisterPostJobStep(string refName, IStep step)
{
if (step is IActionRunner actionRunner && !Root.StepsWithPostRegistered.Add(actionRunner.Action.Id))
{
Trace.Info($"'post' of '{actionRunner.DisplayName}' already push to post step stack.");
return;
}
step.ExecutionContext = Root.CreatePostChild(step.DisplayName, IntraActionState);
step.ExecutionContext = Root.CreatePostChild(step.DisplayName, refName, IntraActionState);
Root.PostJobSteps.Push(step);
}
@@ -283,18 +265,12 @@ namespace GitHub.Runner.Worker
child.IntraActionState = intraActionState;
}
child.EnvironmentVariables = EnvironmentVariables;
child.JobDefaults = JobDefaults;
child.Scopes = Scopes;
child.FileTable = FileTable;
child.StepsContext = StepsContext;
foreach (var pair in ExpressionValues)
{
child.ExpressionValues[pair.Key] = pair.Value;
}
foreach (var item in ExpressionFunctions)
{
child.ExpressionFunctions.Add(item);
}
child._cancellationTokenSource = new CancellationTokenSource();
child.WriteDebug = WriteDebug;
child._parentExecutionContext = this;
@@ -367,12 +343,6 @@ namespace GitHub.Runner.Worker
_logger.End();
if (!string.IsNullOrEmpty(ContextName))
{
StepsContext.SetOutcome(ScopeName, ContextName, (Outcome ?? Result ?? TaskResult.Succeeded).ToActionResult().ToString());
StepsContext.SetConclusion(ScopeName, ContextName, (Result ?? TaskResult.Succeeded).ToActionResult().ToString());
}
return Result.Value;
}
@@ -583,12 +553,6 @@ namespace GitHub.Runner.Worker
// Environment variables shared across all actions
EnvironmentVariables = new Dictionary<string, string>(VarUtil.EnvironmentVariableKeyComparer);
// Job defaults shared across all actions
JobDefaults = new Dictionary<string, IDictionary<string, string>>(StringComparer.OrdinalIgnoreCase);
// Job Outputs
JobOutputs = new Dictionary<string, VariableValue>(StringComparer.OrdinalIgnoreCase);
// Service container info
ServiceContainers = new List<ContainerInfo>();
@@ -605,8 +569,11 @@ namespace GitHub.Runner.Worker
}
}
// File table
FileTable = new List<String>(message.FileTable ?? new string[0]);
// Expression functions
if (Variables.GetBoolean("System.HashFilesV2") == true)
{
ExpressionConstants.UpdateFunction<Handlers.HashFiles>("hashFiles", 1, byte.MaxValue);
}
// Expression values
if (message.ContextData?.Count > 0)
@@ -625,13 +592,8 @@ namespace GitHub.Runner.Worker
var githubAccessToken = new StringContextData(Variables.Get("system.github.token"));
var base64EncodedToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"x-access-token:{githubAccessToken}"));
HostContext.SecretMasker.AddValue(base64EncodedToken);
var githubJob = Variables.Get("system.github.job");
var githubContext = new GitHubContext();
githubContext["token"] = githubAccessToken;
if (!string.IsNullOrEmpty(githubJob))
{
githubContext["job"] = new StringContextData(githubJob);
}
var githubDictionary = ExpressionValues["github"].AssertDictionary("github");
foreach (var pair in githubDictionary)
{
@@ -656,9 +618,6 @@ namespace GitHub.Runner.Worker
// PostJobSteps for job ExecutionContext
PostJobSteps = new Stack<IStep>();
// StepsWithPostRegistered for job ExecutionContext
StepsWithPostRegistered = new HashSet<Guid>();
// Job timeline record.
InitializeTimelineRecord(
timelineId: message.Timeline.Id,
@@ -770,7 +729,7 @@ namespace GitHub.Runner.Worker
var owners = config.Matchers.Select(x => $"'{x.Owner}'");
var joinedOwners = string.Join(", ", owners);
// todo: loc
this.Debug($"Added matchers: {joinedOwners}. Problem matchers scan action output for known warning or error strings and report these inline.");
this.Output($"Added matchers: {joinedOwners}. Problem matchers scan action output for known warning or error strings and report these inline.");
}
}
@@ -812,7 +771,7 @@ namespace GitHub.Runner.Worker
owners = removedMatchers.Select(x => $"'{x.Owner}'");
var joinedOwners = string.Join(", ", owners);
// todo: loc
this.Debug($"Removed matchers: {joinedOwners}");
this.Output($"Removed matchers: {joinedOwners}");
}
}
@@ -859,7 +818,7 @@ namespace GitHub.Runner.Worker
}
}
private IExecutionContext CreatePostChild(string displayName, Dictionary<string, string> intraActionState)
private IExecutionContext CreatePostChild(string displayName, string refName, Dictionary<string, string> intraActionState)
{
if (!_expandedForPostJob)
{
@@ -868,8 +827,7 @@ namespace GitHub.Runner.Worker
_childTimelineRecordOrder = _childTimelineRecordOrder * 2;
}
var newGuid = Guid.NewGuid();
return CreateChild(newGuid, displayName, newGuid.ToString("N"), null, null, intraActionState, _childTimelineRecordOrder - Root.PostJobSteps.Count);
return CreateChild(Guid.NewGuid(), displayName, refName, null, null, intraActionState, _childTimelineRecordOrder - Root.PostJobSteps.Count);
}
}
@@ -928,21 +886,6 @@ namespace GitHub.Runner.Worker
}
}
public static IEnumerable<KeyValuePair<string, object>> ToExpressionState(this IExecutionContext context)
{
return new[] { new KeyValuePair<string, object>(nameof(IExecutionContext), context) };
}
public static PipelineTemplateEvaluator ToPipelineTemplateEvaluator(this IExecutionContext context, ObjectTemplating.ITraceWriter traceWriter = null)
{
if (traceWriter == null)
{
traceWriter = context.ToTemplateTraceWriter();
}
var schema = PipelineTemplateSchemaFactory.GetSchema();
return new PipelineTemplateEvaluator(traceWriter, schema, context.FileTable);
}
public static ObjectTemplating.ITraceWriter ToTemplateTraceWriter(this IExecutionContext context)
{
return new TemplateTraceWriter(context);
@@ -955,7 +898,6 @@ namespace GitHub.Runner.Worker
internal TemplateTraceWriter(IExecutionContext executionContext)
{
ArgUtil.NotNull(executionContext, nameof(executionContext));
_executionContext = executionContext;
}

View File

@@ -8,9 +8,28 @@ using System.Reflection;
using System.Threading;
using System.Collections.Generic;
namespace GitHub.Runner.Worker.Expressions
namespace GitHub.Runner.Worker.Handlers
{
public sealed class HashFilesFunction : Function
public class FunctionTrace : ITraceWriter
{
private GitHub.DistributedTask.Expressions2.ITraceWriter _trace;
public FunctionTrace(GitHub.DistributedTask.Expressions2.ITraceWriter trace)
{
_trace = trace;
}
public void Info(string message)
{
_trace.Info(message);
}
public void Verbose(string message)
{
_trace.Info(message);
}
}
public sealed class HashFiles : Function
{
protected sealed override Object EvaluateCore(
EvaluationContext context,
@@ -63,7 +82,7 @@ namespace GitHub.Runner.Worker.Expressions
string node = Path.Combine(runnerRoot, "externals", "node12", "bin", $"node{IOUtil.ExeExtension}");
string hashFilesScript = Path.Combine(binDir, "hashFiles");
var hashResult = string.Empty;
var p = new ProcessInvoker(new HashFilesTrace(context.Trace));
var p = new ProcessInvoker(new FunctionTrace(context.Trace));
p.ErrorDataReceived += ((_, data) =>
{
if (!string.IsNullOrEmpty(data.Data) && data.Data.StartsWith("__OUTPUT__") && data.Data.EndsWith("__OUTPUT__"))
@@ -103,24 +122,5 @@ namespace GitHub.Runner.Worker.Expressions
return hashResult;
}
private sealed class HashFilesTrace : ITraceWriter
{
private GitHub.DistributedTask.Expressions2.ITraceWriter _trace;
public HashFilesTrace(GitHub.DistributedTask.Expressions2.ITraceWriter trace)
{
_trace = trace;
}
public void Info(string message)
{
_trace.Info(message);
}
public void Verbose(string message)
{
_trace.Info(message);
}
}
}
}

View File

@@ -0,0 +1,162 @@
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using GitHub.DistributedTask.Expressions2;
using GitHub.DistributedTask.Expressions2.Sdk;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using ObjectTemplating = GitHub.DistributedTask.ObjectTemplating;
using PipelineTemplateConstants = GitHub.DistributedTask.Pipelines.ObjectTemplating.PipelineTemplateConstants;
namespace GitHub.Runner.Worker
{
[ServiceLocator(Default = typeof(ExpressionManager))]
public interface IExpressionManager : IRunnerService
{
ConditionResult Evaluate(IExecutionContext context, string condition, bool hostTracingOnly = false);
}
public sealed class ExpressionManager : RunnerService, IExpressionManager
{
public ConditionResult Evaluate(IExecutionContext executionContext, string condition, bool hostTracingOnly = false)
{
ArgUtil.NotNull(executionContext, nameof(executionContext));
ConditionResult result = new ConditionResult();
var expressionTrace = new TraceWriter(Trace, hostTracingOnly ? null : executionContext);
var tree = Parse(executionContext, expressionTrace, condition);
var expressionResult = tree.Evaluate(expressionTrace, HostContext.SecretMasker, state: executionContext, options: null);
result.Value = expressionResult.IsTruthy;
result.Trace = expressionTrace.Trace;
return result;
}
private static IExpressionNode Parse(IExecutionContext executionContext, TraceWriter expressionTrace, string condition)
{
ArgUtil.NotNull(executionContext, nameof(executionContext));
if (string.IsNullOrWhiteSpace(condition))
{
condition = $"{PipelineTemplateConstants.Success}()";
}
var parser = new ExpressionParser();
var namedValues = executionContext.ExpressionValues.Keys.Select(x => new NamedValueInfo<ContextValueNode>(x)).ToArray();
var functions = new IFunctionInfo[]
{
new FunctionInfo<AlwaysNode>(name: Constants.Expressions.Always, minParameters: 0, maxParameters: 0),
new FunctionInfo<CancelledNode>(name: Constants.Expressions.Cancelled, minParameters: 0, maxParameters: 0),
new FunctionInfo<FailureNode>(name: Constants.Expressions.Failure, minParameters: 0, maxParameters: 0),
new FunctionInfo<SuccessNode>(name: Constants.Expressions.Success, minParameters: 0, maxParameters: 0),
};
return parser.CreateTree(condition, expressionTrace, namedValues, functions) ?? new SuccessNode();
}
private sealed class TraceWriter : DistributedTask.Expressions2.ITraceWriter
{
private readonly IExecutionContext _executionContext;
private readonly Tracing _trace;
private readonly StringBuilder _traceBuilder = new StringBuilder();
public string Trace => _traceBuilder.ToString();
public TraceWriter(Tracing trace, IExecutionContext executionContext)
{
ArgUtil.NotNull(trace, nameof(trace));
_trace = trace;
_executionContext = executionContext;
}
public void Info(string message)
{
_trace.Info(message);
_executionContext?.Debug(message);
_traceBuilder.AppendLine(message);
}
public void Verbose(string message)
{
_trace.Verbose(message);
_executionContext?.Debug(message);
}
}
private sealed class AlwaysNode : Function
{
protected override Object EvaluateCore(EvaluationContext context, out ResultMemory resultMemory)
{
resultMemory = null;
return true;
}
}
private sealed class CancelledNode : Function
{
protected sealed override object EvaluateCore(EvaluationContext evaluationContext, out ResultMemory resultMemory)
{
resultMemory = null;
var executionContext = evaluationContext.State as IExecutionContext;
ArgUtil.NotNull(executionContext, nameof(executionContext));
ActionResult jobStatus = executionContext.JobContext.Status ?? ActionResult.Success;
return jobStatus == ActionResult.Cancelled;
}
}
private sealed class FailureNode : Function
{
protected sealed override object EvaluateCore(EvaluationContext evaluationContext, out ResultMemory resultMemory)
{
resultMemory = null;
var executionContext = evaluationContext.State as IExecutionContext;
ArgUtil.NotNull(executionContext, nameof(executionContext));
ActionResult jobStatus = executionContext.JobContext.Status ?? ActionResult.Success;
return jobStatus == ActionResult.Failure;
}
}
private sealed class SuccessNode : Function
{
protected sealed override object EvaluateCore(EvaluationContext evaluationContext, out ResultMemory resultMemory)
{
resultMemory = null;
var executionContext = evaluationContext.State as IExecutionContext;
ArgUtil.NotNull(executionContext, nameof(executionContext));
ActionResult jobStatus = executionContext.JobContext.Status ?? ActionResult.Success;
return jobStatus == ActionResult.Success;
}
}
private sealed class ContextValueNode : NamedValue
{
protected override Object EvaluateCore(EvaluationContext evaluationContext, out ResultMemory resultMemory)
{
resultMemory = null;
var jobContext = evaluationContext.State as IExecutionContext;
ArgUtil.NotNull(jobContext, nameof(jobContext));
return jobContext.ExpressionValues[Name];
}
}
}
public class ConditionResult
{
public ConditionResult(bool value = false, string trace = null)
{
this.Value = value;
this.Trace = trace;
}
public bool Value { get; set; }
public string Trace { get; set; }
public static implicit operator ConditionResult(bool value)
{
return new ConditionResult(value);
}
}
}
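
The implicit conversion above lets condition helpers return a bare bool and still hand back a ConditionResult; a small hypothetical usage sketch (assumes the GitHub.Runner.Worker types are referenced):

```csharp
using System;
using GitHub.Runner.Worker; // assumes the worker assembly defining ConditionResult is referenced

class ConditionResultDemo
{
    static void Main()
    {
        ConditionResult result = true;            // implicit operator wraps the bool
        Console.WriteLine(result.Value);          // True
        Console.WriteLine(result.Trace ?? "(no trace)");
    }
}
```
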

View File

@@ -1,25 +0,0 @@
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using GitHub.DistributedTask.Expressions2;
using GitHub.DistributedTask.Expressions2.Sdk;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using ObjectTemplating = GitHub.DistributedTask.ObjectTemplating;
using PipelineTemplateConstants = GitHub.DistributedTask.Pipelines.ObjectTemplating.PipelineTemplateConstants;
namespace GitHub.Runner.Worker.Expressions
{
public sealed class AlwaysFunction : Function
{
protected override Object EvaluateCore(EvaluationContext context, out ResultMemory resultMemory)
{
resultMemory = null;
return true;
}
}
}

View File

@@ -1,31 +0,0 @@
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using GitHub.DistributedTask.Expressions2;
using GitHub.DistributedTask.Expressions2.Sdk;
using GitHub.DistributedTask.ObjectTemplating;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using ObjectTemplating = GitHub.DistributedTask.ObjectTemplating;
using PipelineTemplateConstants = GitHub.DistributedTask.Pipelines.ObjectTemplating.PipelineTemplateConstants;
namespace GitHub.Runner.Worker.Expressions
{
public sealed class CancelledFunction : Function
{
protected sealed override object EvaluateCore(EvaluationContext evaluationContext, out ResultMemory resultMemory)
{
resultMemory = null;
var templateContext = evaluationContext.State as TemplateContext;
ArgUtil.NotNull(templateContext, nameof(templateContext));
var executionContext = templateContext.State[nameof(IExecutionContext)] as IExecutionContext;
ArgUtil.NotNull(executionContext, nameof(executionContext));
ActionResult jobStatus = executionContext.JobContext.Status ?? ActionResult.Success;
return jobStatus == ActionResult.Cancelled;
}
}
}

View File

@@ -1,31 +0,0 @@
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using GitHub.DistributedTask.Expressions2;
using GitHub.DistributedTask.Expressions2.Sdk;
using GitHub.DistributedTask.ObjectTemplating;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using ObjectTemplating = GitHub.DistributedTask.ObjectTemplating;
using PipelineTemplateConstants = GitHub.DistributedTask.Pipelines.ObjectTemplating.PipelineTemplateConstants;
namespace GitHub.Runner.Worker.Expressions
{
public sealed class FailureFunction : Function
{
protected sealed override object EvaluateCore(EvaluationContext evaluationContext, out ResultMemory resultMemory)
{
resultMemory = null;
var templateContext = evaluationContext.State as TemplateContext;
ArgUtil.NotNull(templateContext, nameof(templateContext));
var executionContext = templateContext.State[nameof(IExecutionContext)] as IExecutionContext;
ArgUtil.NotNull(executionContext, nameof(executionContext));
ActionResult jobStatus = executionContext.JobContext.Status ?? ActionResult.Success;
return jobStatus == ActionResult.Failure;
}
}
}

View File

@@ -1,31 +0,0 @@
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using GitHub.DistributedTask.Expressions2;
using GitHub.DistributedTask.Expressions2.Sdk;
using GitHub.DistributedTask.ObjectTemplating;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using ObjectTemplating = GitHub.DistributedTask.ObjectTemplating;
using PipelineTemplateConstants = GitHub.DistributedTask.Pipelines.ObjectTemplating.PipelineTemplateConstants;
namespace GitHub.Runner.Worker.Expressions
{
public sealed class SuccessFunction : Function
{
protected sealed override object EvaluateCore(EvaluationContext evaluationContext, out ResultMemory resultMemory)
{
resultMemory = null;
var templateContext = evaluationContext.State as TemplateContext;
ArgUtil.NotNull(templateContext, nameof(templateContext));
var executionContext = templateContext.State[nameof(IExecutionContext)] as IExecutionContext;
ArgUtil.NotNull(executionContext, nameof(executionContext));
ActionResult jobStatus = executionContext.JobContext.Status ?? ActionResult.Success;
return jobStatus == ActionResult.Success;
}
}
}

View File

@@ -10,19 +10,15 @@ namespace GitHub.Runner.Worker
{
"action",
"actor",
"api_url", // temp for GHES alpha release
"base_ref",
"event_name",
"event_path",
"head_ref",
"job",
"ref",
"repository",
"repository_owner",
"run_id",
"run_number",
"sha",
"url", // temp for GHES alpha release
"workflow",
"workspace",
};

View File

@@ -82,13 +82,9 @@ namespace GitHub.Runner.Worker.Handlers
container.ContainerEntryPoint = Inputs.GetValueOrDefault("entryPoint");
}
}
else if (stage == ActionRunStage.Pre)
{
container.ContainerEntryPoint = Data.Pre;
}
else if (stage == ActionRunStage.Post)
{
container.ContainerEntryPoint = Data.Post;
container.ContainerEntryPoint = Data.Cleanup;
}
// create inputs context for template evaluation
@@ -101,14 +97,14 @@ namespace GitHub.Runner.Worker.Handlers
}
}
var extraExpressionValues = new Dictionary<string, PipelineContextData>(StringComparer.OrdinalIgnoreCase);
extraExpressionValues["inputs"] = inputsContext;
var evaluateContext = new Dictionary<string, PipelineContextData>(StringComparer.OrdinalIgnoreCase);
evaluateContext["inputs"] = inputsContext;
var manifestManager = HostContext.GetService<IActionManifestManager>();
if (Data.Arguments != null)
{
container.ContainerEntryPointArgs = "";
var evaluatedArgs = manifestManager.EvaluateContainerArguments(ExecutionContext, Data.Arguments, extraExpressionValues);
var evaluatedArgs = manifestManager.EvaluateContainerArguments(ExecutionContext, Data.Arguments, evaluateContext);
foreach (var arg in evaluatedArgs)
{
if (!string.IsNullOrEmpty(arg))
@@ -128,7 +124,7 @@ namespace GitHub.Runner.Worker.Handlers
if (Data.Environment != null)
{
var evaluatedEnv = manifestManager.EvaluateContainerEnvironment(ExecutionContext, Data.Environment, extraExpressionValues);
var evaluatedEnv = manifestManager.EvaluateContainerEnvironment(ExecutionContext, Data.Environment, evaluateContext);
foreach (var env in evaluatedEnv)
{
if (!this.Environment.ContainsKey(env.Key))

View File

@@ -60,13 +60,9 @@ namespace GitHub.Runner.Worker.Handlers
{
target = Data.Script;
}
else if (stage == ActionRunStage.Pre)
{
target = Data.Pre;
}
else if (stage == ActionRunStage.Post)
{
target = Data.Post;
target = Data.Cleanup;
}
ArgUtil.NotNullOrEmpty(target, nameof(target));

View File

@@ -84,7 +84,7 @@ namespace GitHub.Runner.Worker.Handlers
{
// This does not need to be inside of a critical section.
// The logging queues and command handlers are thread-safe.
if (_commandManager.TryProcessCommand(_executionContext, line, _container))
if (_commandManager.TryProcessCommand(_executionContext, line))
{
return;
}

View File

@@ -31,7 +31,7 @@ namespace GitHub.Runner.Worker.Handlers
}
else if (stage == ActionRunStage.Post)
{
plugin = Data.Post;
plugin = Data.Cleanup;
}
ArgUtil.NotNullOrEmpty(plugin, nameof(plugin));

View File

@@ -58,21 +58,12 @@ namespace GitHub.Runner.Worker.Handlers
string shellCommandPath = null;
bool validateShellOnHost = !(StepHost is ContainerStepHost);
string prependPath = string.Join(Path.PathSeparator.ToString(), ExecutionContext.PrependPath.Reverse<string>());
string shell = null;
if (!Inputs.TryGetValue("shell", out shell) || string.IsNullOrEmpty(shell))
{
// TODO: figure out how defaults interact with template later
// for now, we won't check job.defaults if we are inside a template.
if (string.IsNullOrEmpty(ExecutionContext.ScopeName) && ExecutionContext.JobDefaults.TryGetValue("run", out var runDefaults))
{
runDefaults.TryGetValue("shell", out shell);
}
}
Inputs.TryGetValue("shell", out var shell);
if (string.IsNullOrEmpty(shell))
{
#if OS_WINDOWS
shellCommand = "pwsh";
if (validateShellOnHost)
if (validateShellOnHost)
{
shellCommandPath = WhichUtil.Which(shellCommand, require: false, Trace, prependPath);
if (string.IsNullOrEmpty(shellCommandPath))
@@ -148,36 +139,11 @@ namespace GitHub.Runner.Worker.Handlers
Inputs.TryGetValue("script", out var contents);
contents = contents ?? string.Empty;
string workingDirectory = null;
if (!Inputs.TryGetValue("workingDirectory", out workingDirectory))
{
// TODO: figure out how defaults interact with template later
// for now, we won't check job.defaults if we are inside a template.
if (string.IsNullOrEmpty(ExecutionContext.ScopeName) && ExecutionContext.JobDefaults.TryGetValue("run", out var runDefaults))
{
if (runDefaults.TryGetValue("working-directory", out workingDirectory))
{
ExecutionContext.Debug("Overwrite 'working-directory' base on job defaults.");
}
}
}
Inputs.TryGetValue("workingDirectory", out var workingDirectory);
var workspaceDir = githubContext["workspace"] as StringContextData;
workingDirectory = Path.Combine(workspaceDir, workingDirectory ?? string.Empty);
string shell = null;
if (!Inputs.TryGetValue("shell", out shell) || string.IsNullOrEmpty(shell))
{
// TODO: figure out how defaults interact with template later
// for now, we won't check job.defaults if we are inside a template.
if (string.IsNullOrEmpty(ExecutionContext.ScopeName) && ExecutionContext.JobDefaults.TryGetValue("run", out var runDefaults))
{
if (runDefaults.TryGetValue("shell", out shell))
{
ExecutionContext.Debug("Overwrite 'shell' base on job defaults.");
}
}
}
Inputs.TryGetValue("shell", out var shell);
var isContainerStepHost = StepHost is ContainerStepHost;
string prependPath = string.Join(Path.PathSeparator.ToString(), ExecutionContext.PrependPath.Reverse<string>());
@@ -259,16 +225,6 @@ namespace GitHub.Runner.Worker.Handlers
// dump out the command
var fileName = isContainerStepHost ? shellCommand : commandPath;
#if OS_OSX
if (Environment.ContainsKey("DYLD_INSERT_LIBRARIES")) // We don't check `isContainerStepHost` because we don't support container on macOS
{
// launch `node macOSRunInvoker.js shell args` instead of `shell args` to avoid macOS SIP remove `DYLD_INSERT_LIBRARIES` when launch process
string node12 = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Externals), "node12", "bin", $"node{IOUtil.ExeExtension}");
string macOSRunInvoker = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Bin), "macos-run-invoker.js");
arguments = $"\"{macOSRunInvoker.Replace("\"", "\\\"")}\" \"{fileName.Replace("\"", "\\\"")}\" {arguments}";
fileName = node12;
}
#endif
ExecutionContext.Debug($"{fileName} {arguments}");
using (var stdoutManager = new OutputManager(ExecutionContext, ActionCommandManager))


@@ -110,9 +110,9 @@ namespace GitHub.Runner.Worker.Handlers
// try to resolve path inside container if the request path is part of the mount volume
#if OS_WINDOWS
if (Container.MountVolumes.Exists(x => !string.IsNullOrEmpty(x.SourceVolumePath) && path.StartsWith(x.SourceVolumePath, StringComparison.OrdinalIgnoreCase)))
if (Container.MountVolumes.Exists(x => path.StartsWith(x.SourceVolumePath, StringComparison.OrdinalIgnoreCase)))
#else
if (Container.MountVolumes.Exists(x => !string.IsNullOrEmpty(x.SourceVolumePath) && path.StartsWith(x.SourceVolumePath)))
if (Container.MountVolumes.Exists(x => path.StartsWith(x.SourceVolumePath)))
#endif
{
return Container.TranslateToContainerPath(path);


@@ -1,14 +1,10 @@
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Runtime.Serialization;
using System.Threading.Tasks;
using GitHub.DistributedTask.Expressions2;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.Pipelines.ObjectTemplating;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
@@ -18,16 +14,6 @@ using Pipelines = GitHub.DistributedTask.Pipelines;
namespace GitHub.Runner.Worker
{
[DataContract]
public class SetupInfo
{
[DataMember]
public string Group { get; set; }
[DataMember]
public string Detail { get; set; }
}
[ServiceLocator(Default = typeof(JobExtension))]
public interface IJobExtension : IRunnerService
@@ -63,44 +49,6 @@ namespace GitHub.Runner.Worker
context.Start();
context.Debug($"Starting: Set up job");
context.Output($"Current runner version: '{BuildConstants.RunnerPackage.Version}'");
var setupInfoFile = HostContext.GetConfigFile(WellKnownConfigFile.SetupInfo);
if (File.Exists(setupInfoFile))
{
Trace.Info($"Load machine setup info from {setupInfoFile}");
try
{
var setupInfo = IOUtil.LoadObject<List<SetupInfo>>(setupInfoFile);
if (setupInfo?.Count > 0)
{
foreach (var info in setupInfo)
{
if (!string.IsNullOrEmpty(info?.Detail))
{
var groupName = info.Group;
if (string.IsNullOrEmpty(groupName))
{
groupName = "Machine Setup Info";
}
context.Output($"##[group]{groupName}");
var multiLines = info.Detail.Replace("\r\n", "\n").TrimEnd('\n').Split('\n');
foreach (var line in multiLines)
{
context.Output(line);
}
context.Output("##[endgroup]");
}
}
}
}
catch (Exception ex)
{
context.Output($"Fail to load and print machine setup info: {ex.Message}");
Trace.Error(ex);
}
}
var repoFullName = context.GetGitHubContext("repository");
ArgUtil.NotNull(repoFullName, nameof(repoFullName));
context.Debug($"Primary repository: {repoFullName}");
@@ -128,23 +76,14 @@ namespace GitHub.Runner.Worker
context.SetRunnerContext("workspace", Path.Combine(_workDirectory, trackingConfig.PipelineDirectory));
context.SetGitHubContext("workspace", Path.Combine(_workDirectory, trackingConfig.WorkspaceDirectory));
// Temporary hack for GHES alpha
var configurationStore = HostContext.GetService<IConfigurationStore>();
var runnerSettings = configurationStore.GetSettings();
if (!runnerSettings.IsHostedServer && !string.IsNullOrEmpty(runnerSettings.GitHubUrl))
{
var url = new Uri(runnerSettings.GitHubUrl);
var portInfo = url.IsDefaultPort ? string.Empty : $":{url.Port.ToString(CultureInfo.InvariantCulture)}";
context.SetGitHubContext("url", $"{url.Scheme}://{url.Host}{portInfo}");
context.SetGitHubContext("api_url", $"{url.Scheme}://{url.Host}{portInfo}/api/v3");
}
// Evaluate the job-level environment variables
context.Debug("Evaluating job-level environment variables");
var templateEvaluator = context.ToPipelineTemplateEvaluator();
var templateTrace = context.ToTemplateTraceWriter();
var schema = new PipelineTemplateSchemaFactory().CreateSchema();
var templateEvaluator = new PipelineTemplateEvaluator(templateTrace, schema);
foreach (var token in message.EnvironmentVariables)
{
var environmentVariables = templateEvaluator.EvaluateStepEnvironment(token, jobContext.ExpressionValues, jobContext.ExpressionFunctions, VarUtil.EnvironmentVariableKeyComparer);
var environmentVariables = templateEvaluator.EvaluateStepEnvironment(token, jobContext.ExpressionValues, VarUtil.EnvironmentVariableKeyComparer);
foreach (var pair in environmentVariables)
{
context.EnvironmentVariables[pair.Key] = pair.Value ?? string.Empty;
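For illustration, a minimal standalone sketch of the `url`/`api_url` derivation in the GHES block above, assuming a placeholder host and port (not values taken from this change): the port is appended only when it is not the scheme's default, and `/api/v3` is appended for the API endpoint.

```csharp
using System;
using System.Globalization;

class GitHubUrlSketch
{
    static void Main()
    {
        // Placeholder GHES address used only for illustration.
        var url = new Uri("https://github.example.com:8443");

        // Append the port only when it is not the default for the scheme.
        var portInfo = url.IsDefaultPort ? string.Empty : $":{url.Port.ToString(CultureInfo.InvariantCulture)}";

        Console.WriteLine($"{url.Scheme}://{url.Host}{portInfo}");          // github "url"
        Console.WriteLine($"{url.Scheme}://{url.Host}{portInfo}/api/v3");   // github "api_url"
    }
}
```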
@@ -154,7 +93,7 @@ namespace GitHub.Runner.Worker
// Evaluate the job container
context.Debug("Evaluating job container");
var container = templateEvaluator.EvaluateJobContainer(message.JobContainer, jobContext.ExpressionValues, jobContext.ExpressionFunctions);
var container = templateEvaluator.EvaluateJobContainer(message.JobContainer, jobContext.ExpressionValues);
if (container != null)
{
jobContext.Container = new Container.ContainerInfo(HostContext, container);
@@ -162,7 +101,7 @@ namespace GitHub.Runner.Worker
// Evaluate the job service containers
context.Debug("Evaluating job service containers");
var serviceContainers = templateEvaluator.EvaluateJobServiceContainers(message.JobServiceContainers, jobContext.ExpressionValues, jobContext.ExpressionFunctions);
var serviceContainers = templateEvaluator.EvaluateJobServiceContainers(message.JobServiceContainers, jobContext.ExpressionValues);
if (serviceContainers?.Count > 0)
{
foreach (var pair in serviceContainers)
@@ -173,32 +112,12 @@ namespace GitHub.Runner.Worker
}
}
// Evaluate the job defaults
context.Debug("Evaluating job defaults");
foreach (var token in message.Defaults)
{
var defaults = token.AssertMapping("defaults");
if (defaults.Any(x => string.Equals(x.Key.AssertString("defaults key").Value, "run", StringComparison.OrdinalIgnoreCase)))
{
context.JobDefaults["run"] = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
var defaultsRun = defaults.First(x => string.Equals(x.Key.AssertString("defaults key").Value, "run", StringComparison.OrdinalIgnoreCase));
var jobDefaults = templateEvaluator.EvaluateJobDefaultsRun(defaultsRun.Value, jobContext.ExpressionValues, jobContext.ExpressionFunctions);
foreach (var pair in jobDefaults)
{
if (!string.IsNullOrEmpty(pair.Value))
{
context.JobDefaults["run"][pair.Key] = pair.Value;
}
}
}
}
// Build up 2 lists of steps, pre-job, job
// Download actions not already in the cache
Trace.Info("Downloading actions");
var actionManager = HostContext.GetService<IActionManager>();
var prepareResult = await actionManager.PrepareActionsAsync(context, message.Steps);
preJobSteps.AddRange(prepareResult.ContainerSetupSteps);
var prepareSteps = await actionManager.PrepareActionsAsync(context, message.Steps);
preJobSteps.AddRange(prepareSteps);
// Add start-container steps, record and stop-container steps
if (jobContext.Container != null || jobContext.ServiceContainers.Count > 0)
@@ -239,23 +158,9 @@ namespace GitHub.Runner.Worker
actionRunner.TryEvaluateDisplayName(contextData, context);
jobSteps.Add(actionRunner);
if (prepareResult.PreStepTracker.TryGetValue(step.Id, out var preStep))
{
Trace.Info($"Adding pre-{action.DisplayName}.");
preStep.TryEvaluateDisplayName(contextData, context);
preStep.DisplayName = $"Pre {preStep.DisplayName}";
preJobSteps.Add(preStep);
}
}
}
var intraActionStates = new Dictionary<Guid, Dictionary<string, string>>();
foreach (var preStep in prepareResult.PreStepTracker)
{
intraActionStates[preStep.Key] = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
}
// Create execution context for pre-job steps
foreach (var step in preJobSteps)
{
@@ -266,12 +171,6 @@ namespace GitHub.Runner.Worker
Guid stepId = Guid.NewGuid();
extensionStep.ExecutionContext = jobContext.CreateChild(stepId, extensionStep.DisplayName, null, null, stepId.ToString("N"));
}
else if (step is IActionRunner actionStep)
{
ArgUtil.NotNull(actionStep, step.DisplayName);
Guid stepId = Guid.NewGuid();
actionStep.ExecutionContext = jobContext.CreateChild(stepId, actionStep.DisplayName, stepId.ToString("N"), null, null, intraActionStates[actionStep.Action.Id]);
}
}
// Create execution context for job steps
@@ -280,8 +179,7 @@ namespace GitHub.Runner.Worker
if (step is IActionRunner actionStep)
{
ArgUtil.NotNull(actionStep, step.DisplayName);
intraActionStates.TryGetValue(actionStep.Action.Id, out var intraActionState);
actionStep.ExecutionContext = jobContext.CreateChild(actionStep.Action.Id, actionStep.DisplayName, actionStep.Action.Name, actionStep.Action.ScopeName, actionStep.Action.ContextName, intraActionState);
actionStep.ExecutionContext = jobContext.CreateChild(actionStep.Action.Id, actionStep.DisplayName, actionStep.Action.Name, actionStep.Action.ScopeName, actionStep.Action.ContextName);
}
}
@@ -346,58 +244,6 @@ namespace GitHub.Runner.Worker
context.Start();
context.Debug("Starting: Complete job");
// Evaluate job outputs
if (message.JobOutputs != null && message.JobOutputs.Type != TokenType.Null)
{
try
{
context.Output($"Evaluate and set job outputs");
// Populate env context for each step
Trace.Info("Initialize Env context for evaluating job outputs");
#if OS_WINDOWS
var envContext = new DictionaryContextData();
#else
var envContext = new CaseSensitiveDictionaryContextData();
#endif
context.ExpressionValues["env"] = envContext;
foreach (var pair in context.EnvironmentVariables)
{
envContext[pair.Key] = new StringContextData(pair.Value ?? string.Empty);
}
Trace.Info("Initialize steps context for evaluating job outputs");
context.ExpressionValues["steps"] = context.StepsContext.GetScope(context.ScopeName);
var templateEvaluator = context.ToPipelineTemplateEvaluator();
var outputs = templateEvaluator.EvaluateJobOutput(message.JobOutputs, context.ExpressionValues, context.ExpressionFunctions);
foreach (var output in outputs)
{
if (string.IsNullOrEmpty(output.Value))
{
context.Debug($"Skip output '{output.Key}' since it's empty");
continue;
}
if (!string.Equals(output.Value, HostContext.SecretMasker.MaskSecrets(output.Value)))
{
context.Warning($"Skip output '{output.Key}' since it may contain secret.");
continue;
}
context.Output($"Set output '{output.Key}'");
jobContext.JobOutputs[output.Key] = output.Value;
}
}
catch (Exception ex)
{
context.Result = TaskResult.Failed;
context.Error($"Fail to evaluate job outputs");
context.Error(ex);
jobContext.Result = TaskResultUtil.MergeTaskResults(jobContext.Result, TaskResult.Failed);
}
}
if (context.Variables.GetBoolean(Constants.Variables.Actions.RunnerDebug) ?? false)
{
Trace.Info("Support log upload starting.");


@@ -231,7 +231,7 @@ namespace GitHub.Runner.Worker
}
Trace.Info("Raising job completed event.");
var jobCompletedEvent = new JobCompletedEvent(message.RequestId, message.JobId, result, jobContext.JobOutputs);
var jobCompletedEvent = new JobCompletedEvent(message.RequestId, message.JobId, result);
var completeJobRetryLimit = 5;
var exceptions = new List<Exception>();


@@ -56,22 +56,13 @@ namespace GitHub.Runner.Worker
}
}
public void SetConclusion(
public void SetResult(
string scopeName,
string stepName,
string conclusion)
string result)
{
var step = GetStep(scopeName, stepName);
step["conclusion"] = new StringContextData(conclusion);
}
public void SetOutcome(
string scopeName,
string stepName,
string outcome)
{
var step = GetStep(scopeName, stepName);
step["outcome"] = new StringContextData(outcome);
step["result"] = new StringContextData(result);
}
private DictionaryContextData GetStep(string scopeName, string stepName)


@@ -1,6 +1,8 @@
using GitHub.DistributedTask.WebApi;
using Pipelines = GitHub.DistributedTask.Pipelines;
using GitHub.Runner.Common.Util;
using System;
using System.Collections.Generic;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.Expressions2;
@@ -8,13 +10,8 @@ using GitHub.DistributedTask.ObjectTemplating.Tokens;
using GitHub.DistributedTask.Pipelines;
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.Pipelines.ObjectTemplating;
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using GitHub.Runner.Worker.Expressions;
using ObjectTemplating = GitHub.DistributedTask.ObjectTemplating;
using Pipelines = GitHub.DistributedTask.Pipelines;
namespace GitHub.Runner.Worker
{
@@ -66,7 +63,11 @@ namespace GitHub.Runner.Worker
}
var step = jobContext.JobSteps.Dequeue();
var nextStep = jobContext.JobSteps.Count > 0 ? jobContext.JobSteps.Peek() : null;
IStep nextStep = null;
if (jobContext.JobSteps.Count > 0)
{
nextStep = jobContext.JobSteps.Peek();
}
Trace.Info($"Processing step: DisplayName='{step.DisplayName}'");
ArgUtil.NotNull(step.ExecutionContext, nameof(step.ExecutionContext));
@@ -75,43 +76,16 @@ namespace GitHub.Runner.Worker
// Start
step.ExecutionContext.Start();
// Expression functions
step.ExecutionContext.ExpressionFunctions.Add(new FunctionInfo<AlwaysFunction>(PipelineTemplateConstants.Always, 0, 0));
step.ExecutionContext.ExpressionFunctions.Add(new FunctionInfo<CancelledFunction>(PipelineTemplateConstants.Cancelled, 0, 0));
step.ExecutionContext.ExpressionFunctions.Add(new FunctionInfo<FailureFunction>(PipelineTemplateConstants.Failure, 0, 0));
step.ExecutionContext.ExpressionFunctions.Add(new FunctionInfo<SuccessFunction>(PipelineTemplateConstants.Success, 0, 0));
step.ExecutionContext.ExpressionFunctions.Add(new FunctionInfo<HashFilesFunction>(PipelineTemplateConstants.HashFiles, 1, byte.MaxValue));
// Set GITHUB_ACTION
if (step is IActionRunner actionStep)
{
step.ExecutionContext.SetGitHubContext("action", actionStep.Action.Name);
}
// Initialize scope
if (InitializeScope(step, scopeInputs))
{
// Populate env context for each step
Trace.Info("Initialize Env context for step");
#if OS_WINDOWS
var envContext = new DictionaryContextData();
#else
var envContext = new CaseSensitiveDictionaryContextData();
#endif
step.ExecutionContext.ExpressionValues["env"] = envContext;
foreach (var pair in step.ExecutionContext.EnvironmentVariables)
{
envContext[pair.Key] = new StringContextData(pair.Value ?? string.Empty);
}
if (step is IActionRunner actionStep)
{
// Set GITHUB_ACTION
step.ExecutionContext.SetGitHubContext("action", actionStep.Action.Name);
// Evaluate and merge action's env block to env context
var templateEvaluator = step.ExecutionContext.ToPipelineTemplateEvaluator();
var actionEnvironment = templateEvaluator.EvaluateStepEnvironment(actionStep.Action.Environment, step.ExecutionContext.ExpressionValues, step.ExecutionContext.ExpressionFunctions, VarUtil.EnvironmentVariableKeyComparer);
foreach (var env in actionEnvironment)
{
envContext[env.Key] = new StringContextData(env.Value ?? string.Empty);
}
}
var expressionManager = HostContext.GetService<IExpressionManager>();
try
{
// Register job cancellation call back only if job cancellation token not been fire before each step run
@@ -125,29 +99,28 @@ namespace GitHub.Runner.Worker
jobContext.JobContext.Status = jobContext.Result?.ToActionResult();
step.ExecutionContext.Debug($"Re-evaluate condition on job cancellation for step: '{step.DisplayName}'.");
var conditionReTestTraceWriter = new ConditionTraceWriter(Trace, null); // host tracing only
var conditionReTestResult = false;
ConditionResult conditionReTestResult;
if (HostContext.RunnerShutdownToken.IsCancellationRequested)
{
step.ExecutionContext.Debug($"Skip Re-evaluate condition on runner shutdown.");
conditionReTestResult = false;
}
else
{
try
{
var templateEvaluator = step.ExecutionContext.ToPipelineTemplateEvaluator(conditionReTestTraceWriter);
var condition = new BasicExpressionToken(null, null, null, step.Condition);
conditionReTestResult = templateEvaluator.EvaluateStepIf(condition, step.ExecutionContext.ExpressionValues, step.ExecutionContext.ExpressionFunctions, step.ExecutionContext.ToExpressionState());
conditionReTestResult = expressionManager.Evaluate(step.ExecutionContext, step.Condition, hostTracingOnly: true);
}
catch (Exception ex)
{
// Cancel the step since we get exception while re-evaluate step condition.
Trace.Info("Caught exception from expression when re-test condition on job cancellation.");
step.ExecutionContext.Error(ex);
conditionReTestResult = false;
}
}
if (!conditionReTestResult)
if (!conditionReTestResult.Value)
{
// Cancel the step.
Trace.Info("Cancel current running step.");
@@ -167,35 +140,34 @@ namespace GitHub.Runner.Worker
// Evaluate condition.
step.ExecutionContext.Debug($"Evaluating condition for step: '{step.DisplayName}'");
var conditionTraceWriter = new ConditionTraceWriter(Trace, step.ExecutionContext);
var conditionResult = false;
var conditionEvaluateError = default(Exception);
Exception conditionEvaluateError = null;
ConditionResult conditionResult;
if (HostContext.RunnerShutdownToken.IsCancellationRequested)
{
step.ExecutionContext.Debug($"Skip evaluate condition on runner shutdown.");
conditionResult = false;
}
else
{
try
{
var templateEvaluator = step.ExecutionContext.ToPipelineTemplateEvaluator(conditionTraceWriter);
var condition = new BasicExpressionToken(null, null, null, step.Condition);
conditionResult = templateEvaluator.EvaluateStepIf(condition, step.ExecutionContext.ExpressionValues, step.ExecutionContext.ExpressionFunctions, step.ExecutionContext.ToExpressionState());
conditionResult = expressionManager.Evaluate(step.ExecutionContext, step.Condition);
}
catch (Exception ex)
{
Trace.Info("Caught exception from expression.");
Trace.Error(ex);
conditionResult = false;
conditionEvaluateError = ex;
}
}
// no evaluate error but condition is false
if (!conditionResult && conditionEvaluateError == null)
if (!conditionResult.Value && conditionEvaluateError == null)
{
// Condition == false
Trace.Info("Skipping step due to condition evaluation.");
CompleteStep(step, nextStep, TaskResult.Skipped, resultCode: conditionTraceWriter.Trace);
CompleteStep(step, nextStep, TaskResult.Skipped, resultCode: conditionResult.Trace);
}
else if (conditionEvaluateError != null)
{
@@ -252,10 +224,10 @@ namespace GitHub.Runner.Worker
// Set the timeout
var timeoutMinutes = 0;
var templateEvaluator = step.ExecutionContext.ToPipelineTemplateEvaluator();
var templateEvaluator = CreateTemplateEvaluator(step.ExecutionContext);
try
{
timeoutMinutes = templateEvaluator.EvaluateStepTimeout(step.Timeout, step.ExecutionContext.ExpressionValues, step.ExecutionContext.ExpressionFunctions);
timeoutMinutes = templateEvaluator.EvaluateStepTimeout(step.Timeout, step.ExecutionContext.ExpressionValues);
}
catch (Exception ex)
{
@@ -346,7 +318,7 @@ namespace GitHub.Runner.Worker
var continueOnError = false;
try
{
continueOnError = templateEvaluator.EvaluateStepContinueOnError(step.ContinueOnError, step.ExecutionContext.ExpressionValues, step.ExecutionContext.ExpressionFunctions);
continueOnError = templateEvaluator.EvaluateStepContinueOnError(step.ContinueOnError, step.ExecutionContext.ExpressionValues);
}
catch (Exception ex)
{
@@ -358,7 +330,6 @@ namespace GitHub.Runner.Worker
if (continueOnError)
{
step.ExecutionContext.Outcome = step.ExecutionContext.Result;
step.ExecutionContext.Result = TaskResult.Succeeded;
Trace.Info($"Updated step result (continue on error)");
}
@@ -395,11 +366,11 @@ namespace GitHub.Runner.Worker
executionContext.Debug($"Initializing scope '{scope.Name}'");
executionContext.ExpressionValues["steps"] = stepsContext.GetScope(scope.ParentName);
executionContext.ExpressionValues["inputs"] = !String.IsNullOrEmpty(scope.ParentName) ? scopeInputs[scope.ParentName] : null;
var templateEvaluator = executionContext.ToPipelineTemplateEvaluator();
var templateEvaluator = CreateTemplateEvaluator(executionContext);
var inputs = default(DictionaryContextData);
try
{
inputs = templateEvaluator.EvaluateStepScopeInputs(scope.Inputs, executionContext.ExpressionValues, executionContext.ExpressionFunctions);
inputs = templateEvaluator.EvaluateStepScopeInputs(scope.Inputs, executionContext.ExpressionValues);
}
catch (Exception ex)
{
@@ -451,11 +422,11 @@ namespace GitHub.Runner.Worker
executionContext.Debug($"Finalizing scope '{scope.Name}'");
executionContext.ExpressionValues["steps"] = stepsContext.GetScope(scope.Name);
executionContext.ExpressionValues["inputs"] = null;
var templateEvaluator = executionContext.ToPipelineTemplateEvaluator();
var templateEvaluator = CreateTemplateEvaluator(executionContext);
var outputs = default(DictionaryContextData);
try
{
outputs = templateEvaluator.EvaluateStepScopeOutputs(scope.Outputs, executionContext.ExpressionValues, executionContext.ExpressionFunctions);
outputs = templateEvaluator.EvaluateStepScopeOutputs(scope.Outputs, executionContext.ExpressionValues);
}
catch (Exception ex)
{
@@ -484,42 +455,11 @@ namespace GitHub.Runner.Worker
executionContext.Complete(result, resultCode: resultCode);
}
private sealed class ConditionTraceWriter : ObjectTemplating::ITraceWriter
private PipelineTemplateEvaluator CreateTemplateEvaluator(IExecutionContext executionContext)
{
private readonly IExecutionContext _executionContext;
private readonly Tracing _trace;
private readonly StringBuilder _traceBuilder = new StringBuilder();
public string Trace => _traceBuilder.ToString();
public ConditionTraceWriter(Tracing trace, IExecutionContext executionContext)
{
ArgUtil.NotNull(trace, nameof(trace));
_trace = trace;
_executionContext = executionContext;
}
public void Error(string format, params Object[] args)
{
var message = StringUtil.Format(format, args);
_trace.Error(message);
_executionContext?.Debug(message);
}
public void Info(string format, params Object[] args)
{
var message = StringUtil.Format(format, args);
_trace.Info(message);
_executionContext?.Debug(message);
_traceBuilder.AppendLine(message);
}
public void Verbose(string format, params Object[] args)
{
var message = StringUtil.Format(format, args);
_trace.Verbose(message);
_executionContext?.Debug(message);
}
var templateTrace = executionContext.ToTemplateTraceWriter();
var schema = new PipelineTemplateSchemaFactory().CreateSchema();
return new PipelineTemplateEvaluator(templateTrace, schema);
}
}
}


@@ -40,7 +40,7 @@ namespace GitHub.Runner.Worker
// Validate args.
ArgUtil.NotNullOrEmpty(pipeIn, nameof(pipeIn));
ArgUtil.NotNullOrEmpty(pipeOut, nameof(pipeOut));
VssUtil.InitializeVssClientSettings(HostContext.UserAgents, HostContext.WebProxy);
VssUtil.InitializeVssClientSettings(HostContext.UserAgent, HostContext.WebProxy);
var jobRunner = HostContext.CreateService<IJobRunner>();
using (var channel = HostContext.CreateService<IProcessChannel>())

View File

@@ -43,8 +43,6 @@
"entrypoint": "non-empty-string",
"args": "container-runs-args",
"env": "container-runs-env",
"pre-entrypoint": "non-empty-string",
"pre-if": "non-empty-string",
"post-entrypoint": "non-empty-string",
"post-if": "non-empty-string"
}
@@ -69,8 +67,6 @@
"properties": {
"using": "non-empty-string",
"main": "non-empty-string",
"pre": "non-empty-string",
"pre-if": "non-empty-string",
"post": "non-empty-string",
"post-if": "non-empty-string"
}
@@ -94,9 +90,10 @@
"github",
"strategy",
"matrix",
"steps",
"job",
"runner",
"hashFiles(1,255)"
"env"
],
"string": {}
},

View File

@@ -162,8 +162,8 @@ namespace GitHub.Services.Common
}
IssuedToken token = null;
IssuedTokenProvider provider;
if (this.Credentials.TryGetTokenProvider(request.RequestUri, out provider))
IssuedTokenProvider provider = null;
if (this.Credentials != null && this.Credentials.TryGetTokenProvider(request.RequestUri, out provider))
{
token = provider.CurrentToken;
}
@@ -227,7 +227,7 @@ namespace GitHub.Services.Common
responseWrapper = new HttpResponseMessageWrapper(response);
if (!this.Credentials.IsAuthenticationChallenge(responseWrapper))
if (this.Credentials != null && !this.Credentials.IsAuthenticationChallenge(responseWrapper))
{
// Validate the token after it has been successfully authenticated with the server.
if (provider != null)
@@ -259,7 +259,10 @@ namespace GitHub.Services.Common
}
// Ensure we have an appropriate token provider for the current challenge
provider = this.Credentials.CreateTokenProvider(request.RequestUri, responseWrapper, token);
if (this.Credentials != null)
{
provider = this.Credentials.CreateTokenProvider(request.RequestUri, responseWrapper, token);
}
// Make sure we don't invoke the provider in an invalid state
if (provider == null)
@@ -308,7 +311,7 @@ namespace GitHub.Services.Common
// We're out of retries and the response was an auth challenge -- then the request was unauthorized
// and we will throw a strongly-typed exception with a friendly error message.
if (!succeeded && response != null && this.Credentials.IsAuthenticationChallenge(responseWrapper))
if (!succeeded && response != null && (this.Credentials != null && this.Credentials.IsAuthenticationChallenge(responseWrapper)))
{
String message = null;
IEnumerable<String> serviceError;


@@ -5,7 +5,7 @@ using GitHub.DistributedTask.Expressions2.Sdk.Functions;
namespace GitHub.DistributedTask.Expressions2
{
internal static class ExpressionConstants
public static class ExpressionConstants
{
static ExpressionConstants()
{
@@ -15,7 +15,7 @@ namespace GitHub.DistributedTask.Expressions2
AddFunction<Join>("join", 1, 2);
AddFunction<StartsWith>("startsWith", 2, 2);
AddFunction<ToJson>("toJson", 1, 1);
AddFunction<FromJson>("fromJson", 1, 1);
AddFunction<HashFiles>("hashFiles", 1, 1);
}
private static void AddFunction<T>(String name, Int32 minParameters, Int32 maxParameters)
@@ -24,6 +24,12 @@ namespace GitHub.DistributedTask.Expressions2
WellKnownFunctions.Add(name, new FunctionInfo<T>(name, minParameters, maxParameters));
}
public static void UpdateFunction<T>(String name, Int32 minParameters, Int32 maxParameters)
where T : Function, new()
{
WellKnownFunctions[name] = new FunctionInfo<T>(name, minParameters, maxParameters);
}
internal static readonly String False = "false";
internal static readonly String Infinity = "Infinity";
internal static readonly Int32 MaxDepth = 50;
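As a side note, a self-contained sketch of the name-to-arity registry pattern that `AddFunction`/`UpdateFunction` implement above. The `Function` and `FunctionInfo` types below are simplified stand-ins for illustration, not the SDK's real types; the hashFiles arities (1,1 versus 1,255) are the ones visible elsewhere in this change.

```csharp
using System;
using System.Collections.Generic;

// Simplified stand-ins for the SDK's Function/FunctionInfo types (illustration only).
abstract class Function { }
sealed class ToJson : Function { }
sealed class HashFiles : Function { }

sealed class FunctionInfo
{
    public FunctionInfo(string name, int min, int max, Func<Function> factory)
    {
        Name = name; MinParameters = min; MaxParameters = max; Factory = factory;
    }
    public string Name { get; }
    public int MinParameters { get; }
    public int MaxParameters { get; }
    public Func<Function> Factory { get; }
}

static class WellKnownFunctionsSketch
{
    private static readonly Dictionary<string, FunctionInfo> s_functions =
        new Dictionary<string, FunctionInfo>(StringComparer.OrdinalIgnoreCase);

    // Mirrors AddFunction<T>: initial registration, throws if the name already exists.
    public static void Add<T>(string name, int min, int max) where T : Function, new()
        => s_functions.Add(name, new FunctionInfo(name, min, max, () => new T()));

    // Mirrors UpdateFunction<T>: replaces (or adds) a registration.
    public static void Update<T>(string name, int min, int max) where T : Function, new()
        => s_functions[name] = new FunctionInfo(name, min, max, () => new T());

    public static FunctionInfo Get(string name) => s_functions[name];
}

class Program
{
    static void Main()
    {
        WellKnownFunctionsSketch.Add<ToJson>("toJson", 1, 1);
        WellKnownFunctionsSketch.Add<HashFiles>("hashFiles", 1, 1);
        WellKnownFunctionsSketch.Update<HashFiles>("hashFiles", 1, byte.MaxValue);
        Console.WriteLine(WellKnownFunctionsSketch.Get("hashFiles").MaxParameters); // 255
    }
}
```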


@@ -1,24 +0,0 @@
using System;
using System.IO;
using GitHub.DistributedTask.Pipelines.ContextData;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
namespace GitHub.DistributedTask.Expressions2.Sdk.Functions
{
internal sealed class FromJson : Function
{
protected sealed override Object EvaluateCore(
EvaluationContext context,
out ResultMemory resultMemory)
{
resultMemory = null;
var json = Parameters[0].Evaluate(context).ConvertToString();
using (var stringReader = new StringReader(json))
using (var jsonReader = new JsonTextReader(stringReader) { DateParseHandling = DateParseHandling.None, FloatParseHandling = FloatParseHandling.Double })
{
var token = JToken.ReadFrom(jsonReader);
return token.ToPipelineContextData();
}
}
    }
}


@@ -0,0 +1,122 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Minimatch;
using System.IO;
using System.Security.Cryptography;
using GitHub.DistributedTask.Expressions2.Sdk;
using GitHub.DistributedTask.Pipelines.ContextData;
using GitHub.DistributedTask.Pipelines.ObjectTemplating;
namespace GitHub.DistributedTask.Expressions2.Sdk.Functions
{
internal sealed class HashFiles : Function
{
protected sealed override Object EvaluateCore(
EvaluationContext context,
out ResultMemory resultMemory)
{
resultMemory = null;
// hashFiles() only works on the runner and only works with files under GITHUB_WORKSPACE
// Since GITHUB_WORKSPACE is set by runner, I am using that as the fact of this code runs on server or runner.
if (context.State is ObjectTemplating.TemplateContext templateContext &&
templateContext.ExpressionValues.TryGetValue(PipelineTemplateConstants.GitHub, out var githubContextData) &&
githubContextData is DictionaryContextData githubContext &&
githubContext.TryGetValue(PipelineTemplateConstants.Workspace, out var workspace) == true &&
workspace is StringContextData workspaceData)
{
string searchRoot = workspaceData.Value;
string pattern = Parameters[0].Evaluate(context).ConvertToString();
// Convert slashes on Windows
if (s_isWindows)
{
pattern = pattern.Replace('\\', '/');
}
// Root the pattern
if (!Path.IsPathRooted(pattern))
{
var patternRoot = s_isWindows ? searchRoot.Replace('\\', '/').TrimEnd('/') : searchRoot.TrimEnd('/');
pattern = string.Concat(patternRoot, "/", pattern);
}
// Get all files
context.Trace.Info($"Search root directory: '{searchRoot}'");
context.Trace.Info($"Search pattern: '{pattern}'");
var files = Directory.GetFiles(searchRoot, "*", SearchOption.AllDirectories)
.Select(x => s_isWindows ? x.Replace('\\', '/') : x)
.OrderBy(x => x, StringComparer.Ordinal)
.ToList();
if (files.Count == 0)
{
throw new ArgumentException($"hashFiles('{ExpressionUtility.StringEscape(pattern)}') failed. Directory '{searchRoot}' is empty");
}
else
{
context.Trace.Info($"Found {files.Count} files");
}
// Match
var matcher = new Minimatcher(pattern, s_minimatchOptions);
files = matcher.Filter(files)
.Select(x => s_isWindows ? x.Replace('/', '\\') : x)
.ToList();
if (files.Count == 0)
{
throw new ArgumentException($"hashFiles('{ExpressionUtility.StringEscape(pattern)}') failed. Search pattern '{pattern}' doesn't match any file under '{searchRoot}'");
}
else
{
context.Trace.Info($"{files.Count} matches to hash");
}
// Hash each file
List<byte> filesSha256 = new List<byte>();
foreach (var file in files)
{
context.Trace.Info($"Hash {file}");
using (SHA256 sha256hash = SHA256.Create())
{
using (var fileStream = File.OpenRead(file))
{
filesSha256.AddRange(sha256hash.ComputeHash(fileStream));
}
}
}
// Hash the hashes
using (SHA256 sha256hash = SHA256.Create())
{
var hashBytes = sha256hash.ComputeHash(filesSha256.ToArray());
StringBuilder hashString = new StringBuilder();
for (int i = 0; i < hashBytes.Length; i++)
{
hashString.Append(hashBytes[i].ToString("x2"));
}
var result = hashString.ToString();
context.Trace.Info($"Final hash result: '{result}'");
return result;
}
}
else
{
throw new InvalidOperationException("'hashfiles' expression function is only supported under runner context.");
}
}
private static readonly bool s_isWindows = Environment.OSVersion.Platform != PlatformID.Unix && Environment.OSVersion.Platform != PlatformID.MacOSX;
// Only support basic globbing (* ? and []) and globstar (**)
private static readonly Options s_minimatchOptions = new Options
{
Dot = true,
NoBrace = true,
NoCase = s_isWindows,
NoComment = true,
NoExt = true,
NoNegate = true,
};
}
}
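For reference, a minimal standalone sketch (not part of this change) of the aggregate-hash scheme the function above implements: hash each file with SHA-256 in sorted order, concatenate the raw digests, hash the concatenation, and hex-encode the result. The file names passed in `Main` are placeholders.

```csharp
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Security.Cryptography;

class HashFilesSketch
{
    // Hash each file, concatenate the per-file digests, then hash the concatenation.
    static string AggregateHash(IEnumerable<string> files)
    {
        var combined = new List<byte>();
        using (var sha256 = SHA256.Create())
        {
            foreach (var file in files.OrderBy(f => f, StringComparer.Ordinal))
            {
                using (var stream = File.OpenRead(file))
                {
                    combined.AddRange(sha256.ComputeHash(stream));
                }
            }
            var final = sha256.ComputeHash(combined.ToArray());
            return string.Concat(final.Select(b => b.ToString("x2")));
        }
    }

    static void Main()
    {
        // Placeholder paths for illustration.
        Console.WriteLine(AggregateHash(new[] { "a.txt", "b.txt" }));
    }
}
```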


@@ -82,7 +82,7 @@ namespace GitHub.DistributedTask.WebApi
httpMethod,
locationId,
routeValues: routeValues,
version: new ApiResourceVersion(6.0, 2),
version: new ApiResourceVersion(5.1, 1),
userState: userState,
cancellationToken: cancellationToken,
content: content);
@@ -109,7 +109,7 @@ namespace GitHub.DistributedTask.WebApi
httpMethod,
locationId,
routeValues: routeValues,
version: new ApiResourceVersion(6.0, 2),
version: new ApiResourceVersion(5.1, 1),
userState: userState,
cancellationToken: cancellationToken).ConfigureAwait(false))
{
@@ -164,7 +164,7 @@ namespace GitHub.DistributedTask.WebApi
httpMethod,
locationId,
routeValues: routeValues,
version: new ApiResourceVersion(6.0, 2),
version: new ApiResourceVersion(5.1, 1),
queryParameters: queryParams,
userState: userState,
cancellationToken: cancellationToken);
@@ -227,7 +227,7 @@ namespace GitHub.DistributedTask.WebApi
httpMethod,
locationId,
routeValues: routeValues,
version: new ApiResourceVersion(6.0, 2),
version: new ApiResourceVersion(5.1, 1),
queryParameters: queryParams,
userState: userState,
cancellationToken: cancellationToken);
@@ -257,7 +257,7 @@ namespace GitHub.DistributedTask.WebApi
httpMethod,
locationId,
routeValues: routeValues,
version: new ApiResourceVersion(6.0, 2),
version: new ApiResourceVersion(5.1, 1),
userState: userState,
cancellationToken: cancellationToken,
content: content);
@@ -287,7 +287,7 @@ namespace GitHub.DistributedTask.WebApi
httpMethod,
locationId,
routeValues: routeValues,
version: new ApiResourceVersion(6.0, 2),
version: new ApiResourceVersion(5.1, 1),
userState: userState,
cancellationToken: cancellationToken,
content: content);
@@ -779,65 +779,5 @@ namespace GitHub.DistributedTask.WebApi
userState: userState,
cancellationToken: cancellationToken);
}
/// <summary>
/// [Preview API]
/// </summary>
/// <param name="poolId"></param>
/// <param name="agentId"></param>
/// <param name="userState"></param>
/// <param name="cancellationToken">The cancellation token to cancel operation.</param>
public Task<String> GetAgentAuthUrlAsync(
int poolId,
int agentId,
object userState = null,
CancellationToken cancellationToken = default)
{
HttpMethod httpMethod = new HttpMethod("GET");
Guid locationId = new Guid("a82a119c-1e46-44b6-8d75-c82a79cf975b");
object routeValues = new { poolId = poolId, agentId = agentId };
return SendAsync<String>(
httpMethod,
locationId,
routeValues: routeValues,
version: new ApiResourceVersion(6.0, 1),
userState: userState,
cancellationToken: cancellationToken);
}
/// <summary>
/// [Preview API]
/// </summary>
/// <param name="poolId"></param>
/// <param name="agentId"></param>
/// <param name="error"></param>
/// <param name="userState"></param>
/// <param name="cancellationToken">The cancellation token to cancel operation.</param>
[EditorBrowsable(EditorBrowsableState.Never)]
public virtual async Task ReportAgentAuthUrlMigrationErrorAsync(
int poolId,
int agentId,
string error,
object userState = null,
CancellationToken cancellationToken = default)
{
HttpMethod httpMethod = new HttpMethod("POST");
Guid locationId = new Guid("a82a119c-1e46-44b6-8d75-c82a79cf975b");
object routeValues = new { poolId = poolId, agentId = agentId };
HttpContent content = new ObjectContent<string>(error, new VssJsonMediaTypeFormatter(true));
using (HttpResponseMessage response = await SendAsync(
httpMethod,
locationId,
routeValues: routeValues,
version: new ApiResourceVersion(6.0, 1),
userState: userState,
cancellationToken: cancellationToken,
content: content).ConfigureAwait(false))
{
return;
}
}
}
}


@@ -1,5 +1,4 @@
using System;
using System.Collections.Generic;
using System.Linq;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
@@ -23,27 +22,10 @@ namespace GitHub.DistributedTask.ObjectTemplating.Schema
{
var context = definition[i].Value.AssertSequence($"{TemplateConstants.Context}");
definition.RemoveAt(i);
var readerContext = new HashSet<String>(StringComparer.OrdinalIgnoreCase);
var evaluatorContext = new HashSet<String>(StringComparer.OrdinalIgnoreCase);
foreach (TemplateToken item in context)
{
var itemStr = item.AssertString($"{TemplateConstants.Context} item").Value;
readerContext.Add(itemStr);
// Remove min/max parameter info
var paramIndex = itemStr.IndexOf('(');
if (paramIndex > 0)
{
evaluatorContext.Add(String.Concat(itemStr.Substring(0, paramIndex + 1), ")"));
}
else
{
evaluatorContext.Add(itemStr);
}
}
ReaderContext = readerContext.ToArray();
EvaluatorContext = evaluatorContext.ToArray();
Context = context
.Select(x => x.AssertString($"{TemplateConstants.Context} item").Value)
.Distinct()
.ToArray();
}
else if (String.Equals(definitionKey.Value, TemplateConstants.Description, StringComparison.Ordinal))
{
@@ -58,17 +40,7 @@ namespace GitHub.DistributedTask.ObjectTemplating.Schema
internal abstract DefinitionType DefinitionType { get; }
/// <summary>
/// Used by the template reader to determine allowed expression values and functions.
/// Also used by the template reader to validate function min/max parameters.
/// </summary>
internal String[] ReaderContext { get; private set; } = new String[0];
/// <summary>
/// Used by the template evaluator to determine allowed expression values and functions.
/// The min/max parameter info is omitted.
/// </summary>
internal String[] EvaluatorContext { get; private set; } = new String[0];
internal String[] Context { get; private set; } = new String[0];
internal abstract void Validate(
TemplateSchema schema,


@@ -30,7 +30,8 @@ namespace GitHub.DistributedTask.ObjectTemplating.Schema
foreach (var propertiesPair in properties)
{
var propertyName = propertiesPair.Key.AssertString($"{TemplateConstants.Definition} {TemplateConstants.Mapping} {TemplateConstants.Properties} key");
Properties.Add(propertyName.Value, new PropertyValue(propertiesPair.Value));
var propertyValue = propertiesPair.Value.AssertString($"{TemplateConstants.Definition} {TemplateConstants.Mapping} {TemplateConstants.Properties} value");
Properties.Add(propertyName.Value, new PropertyValue(propertyValue.Value));
}
break;
@@ -84,7 +85,7 @@ namespace GitHub.DistributedTask.ObjectTemplating.Schema
}
else
{
throw new ArgumentException($"Property '{TemplateConstants.LooseKeyType}' is defined but '{TemplateConstants.LooseValueType}' is not defined on '{name}'");
throw new ArgumentException($"Property '{TemplateConstants.LooseKeyType}' is defined but '{TemplateConstants.LooseValueType}' is not defined");
}
}
// Otherwise validate loose value type not be defined
@@ -94,21 +95,16 @@ namespace GitHub.DistributedTask.ObjectTemplating.Schema
}
// Lookup each property
foreach (var property in Properties)
foreach (var property in Properties.Values)
{
if (String.IsNullOrEmpty(property.Value.Type))
{
throw new ArgumentException($"Type not specified for the '{property.Key}' property on the '{name}' type");
}
schema.GetDefinition(property.Value.Type);
schema.GetDefinition(property.Type);
}
if (!String.IsNullOrEmpty(Inherits))
{
var inherited = schema.GetDefinition(Inherits);
if (inherited.ReaderContext.Length > 0)
if (inherited.Context.Length > 0)
{
throw new NotSupportedException($"Property '{TemplateConstants.Context}' is not supported on inhertied definitions");
}


@@ -62,7 +62,7 @@ namespace GitHub.DistributedTask.ObjectTemplating.Schema
{
var nestedDefinition = schema.GetDefinition(nestedType);
if (nestedDefinition.ReaderContext.Length > 0)
if (nestedDefinition.Context.Length > 0)
{
throw new ArgumentException($"'{name}' is a one-of definition and references another definition that defines context. This is currently not supported.");
}


@@ -1,40 +1,18 @@
using System;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
namespace GitHub.DistributedTask.ObjectTemplating.Schema
{
internal sealed class PropertyValue
{
internal PropertyValue(TemplateToken token)
internal PropertyValue()
{
if (token is StringToken stringToken)
{
Type = stringToken.Value;
}
else
{
var mapping = token.AssertMapping($"{TemplateConstants.MappingPropertyValue}");
foreach (var mappingPair in mapping)
{
var mappingKey = mappingPair.Key.AssertString($"{TemplateConstants.MappingPropertyValue} key");
switch (mappingKey.Value)
{
case TemplateConstants.Type:
Type = mappingPair.Value.AssertString($"{TemplateConstants.MappingPropertyValue} {TemplateConstants.Type}").Value;
break;
case TemplateConstants.Required:
Required = mappingPair.Value.AssertBoolean($"{TemplateConstants.MappingPropertyValue} {TemplateConstants.Required}").Value;
break;
default:
mappingKey.AssertUnexpectedValue($"{TemplateConstants.MappingPropertyValue} key");
break;
}
}
}
}
internal PropertyValue(String type)
{
Type = type;
}
internal String Type { get; set; }
internal Boolean Required { get; set; }
}
}


@@ -312,8 +312,8 @@ namespace GitHub.DistributedTask.ObjectTemplating.Schema
// template-schema
mappingDefinition = new MappingDefinition();
mappingDefinition.Properties.Add(TemplateConstants.Version, new PropertyValue(new StringToken(null, null, null, TemplateConstants.NonEmptyString)));
mappingDefinition.Properties.Add(TemplateConstants.Definitions, new PropertyValue(new StringToken(null, null, null, TemplateConstants.Definitions)));
mappingDefinition.Properties.Add(TemplateConstants.Version, new PropertyValue(TemplateConstants.NonEmptyString));
mappingDefinition.Properties.Add(TemplateConstants.Definitions, new PropertyValue(TemplateConstants.Definitions));
schema.Definitions.Add(TemplateConstants.TemplateSchema, mappingDefinition);
// definitions
@@ -335,9 +335,9 @@ namespace GitHub.DistributedTask.ObjectTemplating.Schema
// null-definition
mappingDefinition = new MappingDefinition();
mappingDefinition.Properties.Add(TemplateConstants.Description, new PropertyValue(new StringToken(null, null, null, TemplateConstants.String)));
mappingDefinition.Properties.Add(TemplateConstants.Context, new PropertyValue(new StringToken(null, null, null, TemplateConstants.SequenceOfNonEmptyString)));
mappingDefinition.Properties.Add(TemplateConstants.Null, new PropertyValue(new StringToken(null, null, null, TemplateConstants.NullDefinitionProperties)));
mappingDefinition.Properties.Add(TemplateConstants.Description, new PropertyValue(TemplateConstants.String));
mappingDefinition.Properties.Add(TemplateConstants.Context, new PropertyValue(TemplateConstants.SequenceOfNonEmptyString));
mappingDefinition.Properties.Add(TemplateConstants.Null, new PropertyValue(TemplateConstants.NullDefinitionProperties));
schema.Definitions.Add(TemplateConstants.NullDefinition, mappingDefinition);
// null-definition-properties
@@ -346,9 +346,9 @@ namespace GitHub.DistributedTask.ObjectTemplating.Schema
// boolean-definition
mappingDefinition = new MappingDefinition();
mappingDefinition.Properties.Add(TemplateConstants.Description, new PropertyValue(new StringToken(null, null, null, TemplateConstants.String)));
mappingDefinition.Properties.Add(TemplateConstants.Context, new PropertyValue(new StringToken(null, null, null, TemplateConstants.SequenceOfNonEmptyString)));
mappingDefinition.Properties.Add(TemplateConstants.Boolean, new PropertyValue(new StringToken(null, null, null, TemplateConstants.BooleanDefinitionProperties)));
mappingDefinition.Properties.Add(TemplateConstants.Description, new PropertyValue(TemplateConstants.String));
mappingDefinition.Properties.Add(TemplateConstants.Context, new PropertyValue(TemplateConstants.SequenceOfNonEmptyString));
mappingDefinition.Properties.Add(TemplateConstants.Boolean, new PropertyValue(TemplateConstants.BooleanDefinitionProperties));
schema.Definitions.Add(TemplateConstants.BooleanDefinition, mappingDefinition);
// boolean-definition-properties
@@ -357,9 +357,9 @@ namespace GitHub.DistributedTask.ObjectTemplating.Schema
// number-definition
mappingDefinition = new MappingDefinition();
mappingDefinition.Properties.Add(TemplateConstants.Description, new PropertyValue(new StringToken(null, null, null, TemplateConstants.String)));
mappingDefinition.Properties.Add(TemplateConstants.Context, new PropertyValue(new StringToken(null, null, null, TemplateConstants.SequenceOfNonEmptyString)));
mappingDefinition.Properties.Add(TemplateConstants.Number, new PropertyValue(new StringToken(null, null, null, TemplateConstants.NumberDefinitionProperties)));
mappingDefinition.Properties.Add(TemplateConstants.Description, new PropertyValue(TemplateConstants.String));
mappingDefinition.Properties.Add(TemplateConstants.Context, new PropertyValue(TemplateConstants.SequenceOfNonEmptyString));
mappingDefinition.Properties.Add(TemplateConstants.Number, new PropertyValue(TemplateConstants.NumberDefinitionProperties));
schema.Definitions.Add(TemplateConstants.NumberDefinition, mappingDefinition);
// number-definition-properties
@@ -368,68 +368,55 @@ namespace GitHub.DistributedTask.ObjectTemplating.Schema
// string-definition
mappingDefinition = new MappingDefinition();
mappingDefinition.Properties.Add(TemplateConstants.Description, new PropertyValue(new StringToken(null, null, null, TemplateConstants.String)));
mappingDefinition.Properties.Add(TemplateConstants.Context, new PropertyValue(new StringToken(null, null, null, TemplateConstants.SequenceOfNonEmptyString)));
mappingDefinition.Properties.Add(TemplateConstants.String, new PropertyValue(new StringToken(null, null, null, TemplateConstants.StringDefinitionProperties)));
mappingDefinition.Properties.Add(TemplateConstants.Description, new PropertyValue(TemplateConstants.String));
mappingDefinition.Properties.Add(TemplateConstants.Context, new PropertyValue(TemplateConstants.SequenceOfNonEmptyString));
mappingDefinition.Properties.Add(TemplateConstants.String, new PropertyValue(TemplateConstants.StringDefinitionProperties));
schema.Definitions.Add(TemplateConstants.StringDefinition, mappingDefinition);
// string-definition-properties
mappingDefinition = new MappingDefinition();
mappingDefinition.Properties.Add(TemplateConstants.Constant, new PropertyValue(new StringToken(null, null, null, TemplateConstants.NonEmptyString)));
mappingDefinition.Properties.Add(TemplateConstants.IgnoreCase, new PropertyValue(new StringToken(null, null, null,TemplateConstants.Boolean)));
mappingDefinition.Properties.Add(TemplateConstants.RequireNonEmpty, new PropertyValue(new StringToken(null, null, null, TemplateConstants.Boolean)));
mappingDefinition.Properties.Add(TemplateConstants.Constant, new PropertyValue(TemplateConstants.NonEmptyString));
mappingDefinition.Properties.Add(TemplateConstants.IgnoreCase, new PropertyValue(TemplateConstants.Boolean));
mappingDefinition.Properties.Add(TemplateConstants.RequireNonEmpty, new PropertyValue(TemplateConstants.Boolean));
schema.Definitions.Add(TemplateConstants.StringDefinitionProperties, mappingDefinition);
// sequence-definition
mappingDefinition = new MappingDefinition();
mappingDefinition.Properties.Add(TemplateConstants.Description, new PropertyValue(new StringToken(null, null, null, TemplateConstants.String)));
mappingDefinition.Properties.Add(TemplateConstants.Context, new PropertyValue(new StringToken(null, null, null, TemplateConstants.SequenceOfNonEmptyString)));
mappingDefinition.Properties.Add(TemplateConstants.Sequence, new PropertyValue(new StringToken(null, null, null, TemplateConstants.SequenceDefinitionProperties)));
mappingDefinition.Properties.Add(TemplateConstants.Description, new PropertyValue(TemplateConstants.String));
mappingDefinition.Properties.Add(TemplateConstants.Context, new PropertyValue(TemplateConstants.SequenceOfNonEmptyString));
mappingDefinition.Properties.Add(TemplateConstants.Sequence, new PropertyValue(TemplateConstants.SequenceDefinitionProperties));
schema.Definitions.Add(TemplateConstants.SequenceDefinition, mappingDefinition);
// sequence-definition-properties
mappingDefinition = new MappingDefinition();
mappingDefinition.Properties.Add(TemplateConstants.ItemType, new PropertyValue(new StringToken(null, null, null, TemplateConstants.NonEmptyString)));
mappingDefinition.Properties.Add(TemplateConstants.ItemType, new PropertyValue(TemplateConstants.NonEmptyString));
schema.Definitions.Add(TemplateConstants.SequenceDefinitionProperties, mappingDefinition);
// mapping-definition
mappingDefinition = new MappingDefinition();
mappingDefinition.Properties.Add(TemplateConstants.Description, new PropertyValue(new StringToken(null, null, null, TemplateConstants.String)));
mappingDefinition.Properties.Add(TemplateConstants.Context, new PropertyValue(new StringToken(null, null, null, TemplateConstants.SequenceOfNonEmptyString)));
mappingDefinition.Properties.Add(TemplateConstants.Mapping, new PropertyValue(new StringToken(null, null, null, TemplateConstants.MappingDefinitionProperties)));
mappingDefinition.Properties.Add(TemplateConstants.Description, new PropertyValue(TemplateConstants.String));
mappingDefinition.Properties.Add(TemplateConstants.Context, new PropertyValue(TemplateConstants.SequenceOfNonEmptyString));
mappingDefinition.Properties.Add(TemplateConstants.Mapping, new PropertyValue(TemplateConstants.MappingDefinitionProperties));
schema.Definitions.Add(TemplateConstants.MappingDefinition, mappingDefinition);
// mapping-definition-properties
mappingDefinition = new MappingDefinition();
mappingDefinition.Properties.Add(TemplateConstants.Properties, new PropertyValue(new StringToken(null, null, null, TemplateConstants.Properties)));
mappingDefinition.Properties.Add(TemplateConstants.LooseKeyType, new PropertyValue(new StringToken(null, null, null, TemplateConstants.NonEmptyString)));
mappingDefinition.Properties.Add(TemplateConstants.LooseValueType, new PropertyValue(new StringToken(null, null, null, TemplateConstants.NonEmptyString)));
mappingDefinition.Properties.Add(TemplateConstants.Properties, new PropertyValue(TemplateConstants.Properties));
mappingDefinition.Properties.Add(TemplateConstants.LooseKeyType, new PropertyValue(TemplateConstants.NonEmptyString));
mappingDefinition.Properties.Add(TemplateConstants.LooseValueType, new PropertyValue(TemplateConstants.NonEmptyString));
schema.Definitions.Add(TemplateConstants.MappingDefinitionProperties, mappingDefinition);
// properties
mappingDefinition = new MappingDefinition();
mappingDefinition.LooseKeyType = TemplateConstants.NonEmptyString;
mappingDefinition.LooseValueType = TemplateConstants.PropertyValue;
mappingDefinition.LooseValueType = TemplateConstants.NonEmptyString;
schema.Definitions.Add(TemplateConstants.Properties, mappingDefinition);
// property-value
oneOfDefinition = new OneOfDefinition();
oneOfDefinition.OneOf.Add(TemplateConstants.NonEmptyString);
oneOfDefinition.OneOf.Add(TemplateConstants.MappingPropertyValue);
schema.Definitions.Add(TemplateConstants.PropertyValue, oneOfDefinition);
// mapping-property-value
mappingDefinition = new MappingDefinition();
mappingDefinition.Properties.Add(TemplateConstants.Type, new PropertyValue(new StringToken(null, null, null, TemplateConstants.NonEmptyString)));
mappingDefinition.Properties.Add(TemplateConstants.Required, new PropertyValue(new StringToken(null, null, null, TemplateConstants.Boolean)));
schema.Definitions.Add(TemplateConstants.MappingPropertyValue, mappingDefinition);
// one-of-definition
mappingDefinition = new MappingDefinition();
mappingDefinition.Properties.Add(TemplateConstants.Description, new PropertyValue(new StringToken(null, null, null, TemplateConstants.String)));
mappingDefinition.Properties.Add(TemplateConstants.Context, new PropertyValue(new StringToken(null, null, null, TemplateConstants.SequenceOfNonEmptyString)));
mappingDefinition.Properties.Add(TemplateConstants.OneOf, new PropertyValue(new StringToken(null, null, null, TemplateConstants.SequenceOfNonEmptyString)));
mappingDefinition.Properties.Add(TemplateConstants.Description, new PropertyValue(TemplateConstants.String));
mappingDefinition.Properties.Add(TemplateConstants.Context, new PropertyValue(TemplateConstants.SequenceOfNonEmptyString));
mappingDefinition.Properties.Add(TemplateConstants.OneOf, new PropertyValue(TemplateConstants.SequenceOfNonEmptyString));
schema.Definitions.Add(TemplateConstants.OneOfDefinition, mappingDefinition);
// non-empty-string
@@ -490,4 +477,4 @@ namespace GitHub.DistributedTask.ObjectTemplating.Schema
private static readonly Regex s_definitionNameRegex = new Regex("^[a-zA-Z_][a-zA-Z0-9_-]*$", RegexOptions.Compiled);
private static TemplateSchema s_schema;
}
}
}


@@ -22,11 +22,9 @@ namespace GitHub.DistributedTask.ObjectTemplating
internal const String ItemType = "item-type";
internal const String LooseKeyType = "loose-key-type";
internal const String LooseValueType = "loose-value-type";
internal const String MaxConstant = "MAX";
internal const String Mapping = "mapping";
internal const String MappingDefinition = "mapping-definition";
internal const String MappingDefinitionProperties = "mapping-definition-properties";
internal const String MappingPropertyValue = "mapping-property-value";
internal const String NonEmptyString = "non-empty-string";
internal const String Null = "null";
internal const String NullDefinition = "null-definition";
@@ -37,9 +35,7 @@ namespace GitHub.DistributedTask.ObjectTemplating
internal const String OneOf = "one-of";
internal const String OneOfDefinition = "one-of-definition";
internal const String OpenExpression = "${{";
internal const String PropertyValue = "property-value";
internal const String Properties = "properties";
internal const String Required = "required";
internal const String RequireNonEmpty = "require-non-empty";
internal const String Scalar = "scalar";
internal const String ScalarDefinition = "scalar-definition";
@@ -47,7 +43,6 @@ namespace GitHub.DistributedTask.ObjectTemplating
internal const String Sequence = "sequence";
internal const String SequenceDefinition = "sequence-definition";
internal const String SequenceDefinitionProperties = "sequence-definition-properties";
internal const String Type = "type";
internal const String SequenceOfNonEmptyString = "sequence-of-non-empty-string";
internal const String String = "string";
internal const String StringDefinition = "string-definition";


@@ -184,7 +184,6 @@ namespace GitHub.DistributedTask.ObjectTemplating
id = FileIds.Count + 1;
FileIds.Add(file, id);
FileNames.Add(file);
Memory.AddBytes(file);
}
return id;
@@ -192,12 +191,7 @@ namespace GitHub.DistributedTask.ObjectTemplating
internal String GetFileName(Int32 fileId)
{
return FileNames.Count >= fileId ? FileNames[fileId - 1] : null;
}
internal IReadOnlyList<String> GetFileTable()
{
return FileNames.AsReadOnly();
return FileNames[fileId - 1];
}
private String GetErrorPrefix(
@@ -205,9 +199,9 @@ namespace GitHub.DistributedTask.ObjectTemplating
Int32? line,
Int32? column)
{
var fileName = fileId.HasValue ? GetFileName(fileId.Value) : null;
if (!String.IsNullOrEmpty(fileName))
if (fileId != null)
{
var fileName = GetFileName(fileId.Value);
if (line != null && column != null)
{
return $"{fileName} {TemplateStrings.LineColumn(line, column)}:";


@@ -47,16 +47,7 @@ namespace GitHub.DistributedTask.ObjectTemplating
var evaluator = new TemplateEvaluator(context, template, removeBytes);
try
{
var availableContext = new HashSet<String>(StringComparer.OrdinalIgnoreCase);
foreach (var key in context.ExpressionValues.Keys)
{
availableContext.Add(key);
}
foreach (var function in context.ExpressionFunctions)
{
availableContext.Add($"{function.Name}()");
}
var availableContext = new HashSet<String>(context.ExpressionValues.Keys);
var definitionInfo = new DefinitionInfo(context.Schema, type, availableContext);
result = evaluator.Evaluate(definitionInfo);
@@ -191,14 +182,12 @@ namespace GitHub.DistributedTask.ObjectTemplating
}
var keys = new HashSet<String>(StringComparer.OrdinalIgnoreCase);
var hasExpressionKey = false;
while (m_unraveler.AllowScalar(definition.Expand, out ScalarToken nextKeyScalar))
{
// Expression
if (nextKeyScalar is ExpressionToken)
{
hasExpressionKey = true;
var anyDefinition = new DefinitionInfo(definition, TemplateConstants.Any);
mapping.Add(nextKeyScalar, Evaluate(anyDefinition));
continue;
@@ -279,19 +268,6 @@ namespace GitHub.DistributedTask.ObjectTemplating
String listToDeDuplicate = String.Join(", ", nonDuplicates);
m_context.Error(mapping, TemplateStrings.UnableToDetermineOneOf(listToDeDuplicate));
}
else if (mappingDefinitions.Count == 1 && !hasExpressionKey)
{
foreach (var property in mappingDefinitions[0].Properties)
{
if (property.Value.Required)
{
if (!keys.Contains(property.Key))
{
m_context.Error(mapping, $"Required property is missing: {property.Key}");
}
}
}
}
m_unraveler.ReadMappingEnd();
}
@@ -402,13 +378,14 @@ namespace GitHub.DistributedTask.ObjectTemplating
Definition = m_schema.GetDefinition(name);
// Determine whether to expand
m_allowedContext = Definition.EvaluatorContext;
if (Definition.EvaluatorContext.Length > 0)
if (Definition.Context.Length > 0)
{
m_allowedContext = Definition.Context;
Expand = m_availableContext.IsSupersetOf(m_allowedContext);
}
else
{
m_allowedContext = new String[0];
Expand = false;
}
}
@@ -424,9 +401,9 @@ namespace GitHub.DistributedTask.ObjectTemplating
Definition = m_schema.GetDefinition(name);
// Determine whether to expand
if (Definition.EvaluatorContext.Length > 0)
if (Definition.Context.Length > 0)
{
m_allowedContext = new HashSet<String>(parent.m_allowedContext.Concat(Definition.EvaluatorContext), StringComparer.OrdinalIgnoreCase).ToArray();
m_allowedContext = new HashSet<String>(parent.m_allowedContext.Concat(Definition.Context)).ToArray();
Expand = m_availableContext.IsSupersetOf(m_allowedContext);
}
else

View File

@@ -49,14 +49,6 @@ namespace GitHub.DistributedTask.ObjectTemplating
m_errors = new List<TemplateValidationError>(errors ?? Enumerable.Empty<TemplateValidationError>());
}
public TemplateValidationException(
String message,
IEnumerable<TemplateValidationError> errors)
: this(message)
{
m_errors = new List<TemplateValidationError>(errors ?? Enumerable.Empty<TemplateValidationError>());
}
public TemplateValidationException(String message)
: base(message)
{

View File

@@ -178,15 +178,14 @@ namespace GitHub.DistributedTask.ObjectTemplating
}
var keys = new HashSet<String>(StringComparer.OrdinalIgnoreCase);
var hasExpressionKey = false;
while (m_objectReader.AllowLiteral(out LiteralToken rawLiteral))
{
var nextKeyScalar = ParseScalar(rawLiteral, definition.AllowedContext);
// Expression
if (nextKeyScalar is ExpressionToken)
{
hasExpressionKey = true;
// Legal
if (definition.AllowedContext.Length > 0)
{
@@ -281,19 +280,7 @@ namespace GitHub.DistributedTask.ObjectTemplating
String listToDeDuplicate = String.Join(", ", nonDuplicates);
m_context.Error(mapping, TemplateStrings.UnableToDetermineOneOf(listToDeDuplicate));
}
else if (mappingDefinitions.Count == 1 && !hasExpressionKey)
{
foreach (var property in mappingDefinitions[0].Properties)
{
if (property.Value.Required)
{
if (!keys.Contains(property.Key))
{
m_context.Error(mapping, $"Required property is missing: {property.Key}");
}
}
}
}
ExpectMappingEnd();
}
@@ -780,8 +767,15 @@ namespace GitHub.DistributedTask.ObjectTemplating
// Lookup the definition
Definition = m_schema.GetDefinition(name);
// Record allowed context
AllowedContext = Definition.ReaderContext;
// Determine whether to expand
if (Definition.Context.Length > 0)
{
AllowedContext = Definition.Context;
}
else
{
AllowedContext = new String[0];
}
}
public DefinitionInfo(
@@ -793,10 +787,10 @@ namespace GitHub.DistributedTask.ObjectTemplating
// Lookup the definition
Definition = m_schema.GetDefinition(name);
// Record allowed context
if (Definition.ReaderContext.Length > 0)
// Determine whether to expand
if (Definition.Context.Length > 0)
{
AllowedContext = new HashSet<String>(parent.AllowedContext.Concat(Definition.ReaderContext), StringComparer.OrdinalIgnoreCase).ToArray();
AllowedContext = new HashSet<String>(parent.AllowedContext.Concat(Definition.Context)).ToArray();
}
else
{

View File

@@ -2,7 +2,6 @@
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using System.Linq;
using System.Runtime.Serialization;
namespace GitHub.DistributedTask.ObjectTemplating
@@ -42,7 +41,7 @@ namespace GitHub.DistributedTask.ObjectTemplating
{
for (int i = 0; i < 50; i++)
{
String message = !String.IsNullOrEmpty(messagePrefix) ? $"{messagePrefix} {ex.Message}" : ex.ToString();
String message = !String.IsNullOrEmpty(messagePrefix) ? $"{messagePrefix} {ex.Message}" : ex.Message;
Add(new TemplateValidationError(message));
if (ex.InnerException == null)
{
@@ -89,23 +88,6 @@ namespace GitHub.DistributedTask.ObjectTemplating
}
}
/// <summary>
/// Throws <c ref="TemplateValidationException" /> if any errors.
/// <param name="prefix">The error message prefix</param>
/// </summary>
public void Check(String prefix)
{
if (String.IsNullOrEmpty(prefix))
{
this.Check();
}
else if (m_errors.Count > 0)
{
var message = $"{prefix.Trim()} {String.Join(",", m_errors.Select(e => e.Message))}";
throw new TemplateValidationException(message, m_errors);
}
}
public void Clear()
{
m_errors.Clear();

View File

@@ -1,9 +1,7 @@
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Globalization;
using System.Linq;
using System.Runtime.Serialization;
using System.Text.RegularExpressions;
using GitHub.DistributedTask.Expressions2;
using GitHub.DistributedTask.Expressions2.Sdk;
using GitHub.Services.WebApi.Internal;
@@ -37,29 +35,11 @@ namespace GitHub.DistributedTask.ObjectTemplating.Tokens
String[] allowedContext,
out Exception ex)
{
// Create dummy named values and functions
var namedValues = new List<INamedValueInfo>();
var functions = new List<IFunctionInfo>();
// Create dummy allowed contexts
INamedValueInfo[] namedValues = null;
if (allowedContext?.Length > 0)
{
foreach (var contextItem in allowedContext)
{
var match = s_function.Match(contextItem);
if (match.Success)
{
var functionName = match.Groups[1].Value;
var minParameters = Int32.Parse(match.Groups[2].Value, NumberStyles.None, CultureInfo.InvariantCulture);
var maxParametersRaw = match.Groups[3].Value;
var maxParameters = String.Equals(maxParametersRaw, TemplateConstants.MaxConstant, StringComparison.Ordinal)
? Int32.MaxValue
: Int32.Parse(maxParametersRaw, NumberStyles.None, CultureInfo.InvariantCulture);
functions.Add(new FunctionInfo<DummyFunction>(functionName, minParameters, maxParameters));
}
else
{
namedValues.Add(new NamedValueInfo<ContextValueNode>(contextItem));
}
}
namedValues = allowedContext.Select(x => new NamedValueInfo<ContextValueNode>(x)).ToArray();
}
// Parse
@@ -67,7 +47,7 @@ namespace GitHub.DistributedTask.ObjectTemplating.Tokens
ExpressionNode root = null;
try
{
root = new ExpressionParser().CreateTree(expression, null, namedValues, functions) as ExpressionNode;
root = new ExpressionParser().CreateTree(expression, null, namedValues, null) as ExpressionNode;
result = true;
ex = null;
@@ -80,18 +60,5 @@ namespace GitHub.DistributedTask.ObjectTemplating.Tokens
return result;
}
private sealed class DummyFunction : Function
{
protected override Object EvaluateCore(
EvaluationContext context,
out ResultMemory resultMemory)
{
resultMemory = null;
return null;
}
}
private static readonly Regex s_function = new Regex(@"^([a-zA-Z0-9_]+)\(([0-9]+),([0-9]+|MAX)\)$", RegexOptions.Compiled);
}
}
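
For context on the branch above that distinguishes named values from functions: allowed-context entries are either plain names (for example `github`) or function signatures of the form `name(min,max)` such as `hashFiles(1,255)`, where `MAX` means unbounded. Below is a small, self-contained sketch of that parsing using a copy of the `s_function` regex; the `AllowedContextEntry` helper and its method are hypothetical, not part of the runner.

```csharp
// Hypothetical helper illustrating how a function-style allowed-context entry
// (e.g. "hashFiles(1,255)") splits into a name plus min/max parameter counts.
using System;
using System.Text.RegularExpressions;

static class AllowedContextEntry
{
    private static readonly Regex s_function =
        new Regex(@"^([a-zA-Z0-9_]+)\(([0-9]+),([0-9]+|MAX)\)$", RegexOptions.Compiled);

    public static bool TryParseFunction(string entry, out string name, out int min, out int max)
    {
        var match = s_function.Match(entry);
        if (!match.Success)
        {
            // Plain entries like "github" or "matrix" are named values, not functions.
            name = null;
            min = max = 0;
            return false;
        }

        name = match.Groups[1].Value;
        min = int.Parse(match.Groups[2].Value);
        max = match.Groups[3].Value == "MAX" ? int.MaxValue : int.Parse(match.Groups[3].Value);
        return true;
    }
}

// Usage: TryParseFunction("hashFiles(1,255)", out var name, out var min, out var max)
// yields true with name == "hashFiles", min == 1, max == 255.
```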

View File

@@ -30,14 +30,14 @@ namespace GitHub.DistributedTask.ObjectTemplating.Tokens
Column = column;
}
[DataMember(Name = "file", EmitDefaultValue = false)]
internal Int32? FileId { get; private set; }
[IgnoreDataMember]
internal Int32? FileId { get; set; }
[DataMember(Name = "line", EmitDefaultValue = false)]
internal Int32? Line { get; private set; }
internal Int32? Line { get; }
[DataMember(Name = "col", EmitDefaultValue = false)]
internal Int32? Column { get; private set; }
internal Int32? Column { get; }
[DataMember(Name = "type", EmitDefaultValue = false)]
internal Int32 Type { get; }

View File

@@ -1,8 +1,5 @@
using System;
using System.Collections.Generic;
using System.Linq;
using GitHub.DistributedTask.Expressions2;
using GitHub.DistributedTask.Expressions2.Sdk;
namespace GitHub.DistributedTask.ObjectTemplating.Tokens
{
@@ -109,43 +106,6 @@ namespace GitHub.DistributedTask.ObjectTemplating.Tokens
throw new ArgumentException($"Error while reading '{objectDescription}'. Unexpected value '{literal.ToString()}'");
}
/// <summary>
/// Traverses the token and checks whether all required expression values
/// and functions are provided.
/// </summary>
public static bool CheckHasRequiredContext(
this TemplateToken token,
IReadOnlyObject expressionValues,
IList<IFunctionInfo> expressionFunctions)
{
var expressionTokens = token.Traverse()
.OfType<BasicExpressionToken>()
.ToArray();
var parser = new ExpressionParser();
foreach (var expressionToken in expressionTokens)
{
var tree = parser.ValidateSyntax(expressionToken.Expression, null);
foreach (var node in tree.Traverse())
{
if (node is NamedValue namedValue)
{
if (expressionValues?.Keys.Any(x => string.Equals(x, namedValue.Name, StringComparison.OrdinalIgnoreCase)) != true)
{
return false;
}
}
else if (node is Function function &&
!ExpressionConstants.WellKnownFunctions.ContainsKey(function.Name) &&
expressionFunctions?.Any(x => string.Equals(x.Name, function.Name, StringComparison.OrdinalIgnoreCase)) != true)
{
return false;
}
}
}
return true;
}
/// <summary>
/// Returns all tokens (depth first)
/// </summary>

View File

@@ -115,12 +115,13 @@ namespace GitHub.DistributedTask.ObjectTemplating.Tokens
Object value,
JsonSerializer serializer)
{
base.WriteJson(writer, value, serializer);
if (value is TemplateToken token)
{
switch (token.Type)
{
case TokenType.Null:
if (token.FileId == null && token.Line == null && token.Column == null)
if (token.Line == null && token.Column == null)
{
writer.WriteNull();
}
@@ -129,17 +130,12 @@ namespace GitHub.DistributedTask.ObjectTemplating.Tokens
writer.WriteStartObject();
writer.WritePropertyName("type");
writer.WriteValue(token.Type);
if (token.FileId != null)
{
writer.WritePropertyName("file");
writer.WriteValue(token.FileId);
}
if (token.Line != null)
{
writer.WritePropertyName("line");
writer.WriteValue(token.Line);
}
if (token.Column != null)
if (token.Line != null)
{
writer.WritePropertyName("col");
writer.WriteValue(token.Column);
@@ -150,7 +146,7 @@ namespace GitHub.DistributedTask.ObjectTemplating.Tokens
case TokenType.Boolean:
var booleanToken = token as BooleanToken;
if (token.FileId == null && token.Line == null && token.Column == null)
if (token.Line == null && token.Column == null)
{
writer.WriteValue(booleanToken.Value);
}
@@ -159,17 +155,12 @@ namespace GitHub.DistributedTask.ObjectTemplating.Tokens
writer.WriteStartObject();
writer.WritePropertyName("type");
writer.WriteValue(token.Type);
if (token.FileId != null)
{
writer.WritePropertyName("file");
writer.WriteValue(token.FileId);
}
if (token.Line != null)
{
writer.WritePropertyName("line");
writer.WriteValue(token.Line);
}
if (token.Column != null)
if (token.Line != null)
{
writer.WritePropertyName("col");
writer.WriteValue(token.Column);
@@ -182,7 +173,7 @@ namespace GitHub.DistributedTask.ObjectTemplating.Tokens
case TokenType.Number:
var numberToken = token as NumberToken;
if (token.FileId == null && token.Line == null && token.Column == null)
if (token.Line == null && token.Column == null)
{
writer.WriteValue(numberToken.Value);
}
@@ -191,17 +182,12 @@ namespace GitHub.DistributedTask.ObjectTemplating.Tokens
writer.WriteStartObject();
writer.WritePropertyName("type");
writer.WriteValue(token.Type);
if (token.FileId != null)
{
writer.WritePropertyName("file");
writer.WriteValue(token.FileId);
}
if (token.Line != null)
{
writer.WritePropertyName("line");
writer.WriteValue(token.Line);
}
if (token.Column != null)
if (token.Line != null)
{
writer.WritePropertyName("col");
writer.WriteValue(token.Column);
@@ -214,7 +200,7 @@ namespace GitHub.DistributedTask.ObjectTemplating.Tokens
case TokenType.String:
var stringToken = token as StringToken;
if (token.FileId == null && token.Line == null && token.Column == null)
if (token.Line == null && token.Column == null)
{
writer.WriteValue(stringToken.Value);
}
@@ -223,17 +209,12 @@ namespace GitHub.DistributedTask.ObjectTemplating.Tokens
writer.WriteStartObject();
writer.WritePropertyName("type");
writer.WriteValue(token.Type);
if (token.FileId != null)
{
writer.WritePropertyName("file");
writer.WriteValue(token.FileId);
}
if (token.Line != null)
{
writer.WritePropertyName("line");
writer.WriteValue(token.Line);
}
if (token.Column != null)
if (token.Line != null)
{
writer.WritePropertyName("col");
writer.WriteValue(token.Column);
@@ -249,17 +230,12 @@ namespace GitHub.DistributedTask.ObjectTemplating.Tokens
writer.WriteStartObject();
writer.WritePropertyName("type");
writer.WriteValue(token.Type);
if (token.FileId != null)
{
writer.WritePropertyName("file");
writer.WriteValue(token.FileId);
}
if (token.Line != null)
{
writer.WritePropertyName("line");
writer.WriteValue(token.Line);
}
if (token.Column != null)
if (token.Line != null)
{
writer.WritePropertyName("col");
writer.WriteValue(token.Column);
@@ -277,17 +253,12 @@ namespace GitHub.DistributedTask.ObjectTemplating.Tokens
writer.WriteStartObject();
writer.WritePropertyName("type");
writer.WriteValue(token.Type);
if (token.FileId != null)
{
writer.WritePropertyName("file");
writer.WriteValue(token.FileId);
}
if (token.Line != null)
{
writer.WritePropertyName("line");
writer.WriteValue(token.Line);
}
if (token.Column != null)
if (token.Line != null)
{
writer.WritePropertyName("col");
writer.WriteValue(token.Column);
@@ -302,17 +273,12 @@ namespace GitHub.DistributedTask.ObjectTemplating.Tokens
writer.WriteStartObject();
writer.WritePropertyName("type");
writer.WriteValue(token.Type);
if (token.FileId != null)
{
writer.WritePropertyName("file");
writer.WriteValue(token.FileId);
}
if (token.Line != null)
{
writer.WritePropertyName("line");
writer.WriteValue(token.Line);
}
if (token.Column != null)
if (token.Line != null)
{
writer.WritePropertyName("col");
writer.WriteValue(token.Column);
@@ -335,17 +301,12 @@ namespace GitHub.DistributedTask.ObjectTemplating.Tokens
writer.WriteStartObject();
writer.WritePropertyName("type");
writer.WriteValue(token.Type);
if (token.FileId != null)
{
writer.WritePropertyName("file");
writer.WriteValue(token.FileId);
}
if (token.Line != null)
{
writer.WritePropertyName("line");
writer.WriteValue(token.Line);
}
if (token.Column != null)
if (token.Line != null)
{
writer.WritePropertyName("col");
writer.WriteValue(token.Column);

View File

@@ -39,10 +39,7 @@ namespace GitHub.DistributedTask.Pipelines
DictionaryContextData contextData,
WorkspaceOptions workspaceOptions,
IEnumerable<JobStep> steps,
IEnumerable<ContextScope> scopes,
IList<String> fileTable,
TemplateToken jobOutputs,
IList<TemplateToken> defaults)
IEnumerable<ContextScope> scopes)
{
this.MessageType = JobRequestMessageTypes.PipelineAgentJobRequest;
this.Plan = plan;
@@ -54,7 +51,6 @@ namespace GitHub.DistributedTask.Pipelines
this.Timeline = timeline;
this.Resources = jobResources;
this.Workspace = workspaceOptions;
this.JobOutputs = jobOutputs;
m_variables = new Dictionary<String, VariableValue>(variables, StringComparer.OrdinalIgnoreCase);
m_maskHints = new List<MaskHint>(maskHints);
@@ -70,11 +66,6 @@ namespace GitHub.DistributedTask.Pipelines
m_environmentVariables = new List<TemplateToken>(environmentVariables);
}
if (defaults?.Count > 0)
{
m_defaults = new List<TemplateToken>(defaults);
}
this.ContextData = new Dictionary<String, PipelineContextData>(StringComparer.OrdinalIgnoreCase);
if (contextData?.Count > 0)
{
@@ -83,11 +74,6 @@ namespace GitHub.DistributedTask.Pipelines
this.ContextData[pair.Key] = pair.Value;
}
}
if (fileTable?.Count > 0)
{
m_fileTable = new List<String>(fileTable);
}
}
[DataMember]
@@ -146,13 +132,6 @@ namespace GitHub.DistributedTask.Pipelines
private set;
}
[DataMember(EmitDefaultValue = false)]
public TemplateToken JobOutputs
{
get;
private set;
}
[DataMember]
public Int64 RequestId
{
@@ -219,21 +198,6 @@ namespace GitHub.DistributedTask.Pipelines
}
}
/// <summary>
/// Gets the hierarchy of defaults to overlay, last wins.
/// </summary>
public IList<TemplateToken> Defaults
{
get
{
if (m_defaults == null)
{
m_defaults = new List<TemplateToken>();
}
return m_defaults;
}
}
/// <summary>
/// Gets the collection of variables associated with the current context.
/// </summary>
@@ -273,21 +237,6 @@ namespace GitHub.DistributedTask.Pipelines
}
}
/// <summary>
/// Gets the table of files used when parsing the pipeline (e.g. yaml files)
/// </summary>
public IList<String> FileTable
{
get
{
if (m_fileTable == null)
{
m_fileTable = new List<String>();
}
return m_fileTable;
}
}
// todo: remove after feature-flag DistributedTask.EvaluateContainerOnRunner is enabled everywhere
public void SetJobSidecarContainers(IDictionary<String, String> value)
{
@@ -396,16 +345,6 @@ namespace GitHub.DistributedTask.Pipelines
m_environmentVariables = null;
}
if (m_defaults?.Count == 0)
{
m_defaults = null;
}
if (m_fileTable?.Count == 0)
{
m_fileTable = null;
}
if (m_maskHints?.Count == 0)
{
m_maskHints = null;
@@ -435,12 +374,6 @@ namespace GitHub.DistributedTask.Pipelines
[DataMember(Name = "EnvironmentVariables", EmitDefaultValue = false)]
private List<TemplateToken> m_environmentVariables;
[DataMember(Name = "Defaults", EmitDefaultValue = false)]
private List<TemplateToken> m_defaults;
[DataMember(Name = "FileTable", EmitDefaultValue = false)]
private List<String> m_fileTable;
[DataMember(Name = "Mask", EmitDefaultValue = false)]
private List<MaskHint> m_maskHints;

View File

@@ -35,19 +35,6 @@ namespace GitHub.DistributedTask.Pipelines.ContextData
throw new ArgumentException($"Unexpected type '{value?.GetType().Name}' encountered while reading '{objectDescription}'. The type '{nameof(DictionaryContextData)}' was expected.");
}
[EditorBrowsable(EditorBrowsableState.Never)]
public static CaseSensitiveDictionaryContextData AssertCaseSensitiveDictionary(
this PipelineContextData value,
String objectDescription)
{
if (value is CaseSensitiveDictionaryContextData dictionary)
{
return dictionary;
}
throw new ArgumentException($"Unexpected type '{value?.GetType().Name}' encountered while reading '{objectDescription}'. The type '{nameof(CaseSensitiveDictionaryContextData)}' was expected.");
}
[EditorBrowsable(EditorBrowsableState.Never)]
public static StringContextData AssertString(
this PipelineContextData value,

View File

@@ -8,14 +8,12 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
{
public const String Always = "always";
public const String BooleanStepsContext = "boolean-steps-context";
public const String BooleanStrategyContext = "boolean-strategy-context";
public const String CancelTimeoutMinutes = "cancel-timeout-minutes";
public const String Cancelled = "cancelled";
public const String Checkout = "checkout";
public const String Clean = "clean";
public const String Container = "container";
public const String ContinueOnError = "continue-on-error";
public const String Defaults = "defaults";
public const String Env = "env";
public const String Event = "event";
public const String EventPattern = "github.event";
@@ -25,18 +23,13 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
public const String FetchDepth = "fetch-depth";
public const String GeneratedId = "generated-id";
public const String GitHub = "github";
public const String HashFiles = "hashFiles";
public const String Id = "id";
public const String If = "if";
public const String Image = "image";
public const String Include = "include";
public const String Inputs = "inputs";
public const String Job = "job";
public const String JobDefaultsRun = "job-defaults-run";
public const String JobIfResult = "job-if-result";
public const String JobOutputs = "job-outputs";
public const String Jobs = "jobs";
public const String Labels = "labels";
public const String Lfs = "lfs";
public const String Matrix = "matrix";
public const String MaxParallel = "max-parallel";
@@ -63,7 +56,6 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
public const String Shell = "shell";
public const String Skipped = "skipped";
public const String StepEnv = "step-env";
public const String StepIfResult = "step-if-result";
public const String Steps = "steps";
public const String StepsScopeInputs = "steps-scope-inputs";
public const String StepsScopeOutputs = "steps-scope-outputs";

View File

@@ -16,20 +16,6 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
{
internal static class PipelineTemplateConverter
{
internal static Boolean ConvertToIfResult(
TemplateContext context,
TemplateToken ifResult)
{
var expression = ifResult.Traverse().FirstOrDefault(x => x is ExpressionToken);
if (expression != null)
{
throw new ArgumentException($"Unexpected type '{expression.GetType().Name}' encountered while reading 'if'.");
}
var evaluationResult = EvaluationResult.CreateIntermediateResult(null, ifResult);
return evaluationResult.IsTruthy;
}
internal static Boolean? ConvertToStepContinueOnError(
TemplateContext context,
TemplateToken token,

View File

@@ -4,7 +4,7 @@ using System.ComponentModel;
using System.Linq;
using System.Threading;
using GitHub.DistributedTask.Expressions2;
using GitHub.DistributedTask.Expressions2.Sdk.Functions;
using GitHub.DistributedTask.Expressions2.Sdk;
using GitHub.DistributedTask.ObjectTemplating;
using GitHub.DistributedTask.ObjectTemplating.Schema;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
@@ -14,16 +14,12 @@ using ITraceWriter = GitHub.DistributedTask.ObjectTemplating.ITraceWriter;
namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
{
/// <summary>
/// Evaluates parts of the workflow DOM. For example, a job strategy or step inputs.
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
public class PipelineTemplateEvaluator
{
public PipelineTemplateEvaluator(
ITraceWriter trace,
TemplateSchema schema,
IList<String> fileTable)
TemplateSchema schema)
{
if (!String.Equals(schema.Version, PipelineTemplateConstants.Workflow_1_0, StringComparison.Ordinal))
{
@@ -32,7 +28,6 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
m_trace = trace;
m_schema = schema;
m_fileTable = fileTable;
}
public Int32 MaxDepth => 50;
@@ -53,14 +48,13 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
public DictionaryContextData EvaluateStepScopeInputs(
TemplateToken token,
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions)
DictionaryContextData contextData)
{
var result = default(DictionaryContextData);
if (token != null && token.Type != TokenType.Null)
{
var context = CreateContext(contextData, expressionFunctions);
var context = CreateContext(contextData);
try
{
token = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.StepsScopeInputs, token, 0, null, omitHeader: true);
@@ -80,14 +74,13 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
public DictionaryContextData EvaluateStepScopeOutputs(
TemplateToken token,
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions)
DictionaryContextData contextData)
{
var result = default(DictionaryContextData);
if (token != null && token.Type != TokenType.Null)
{
var context = CreateContext(contextData, expressionFunctions);
var context = CreateContext(contextData);
try
{
token = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.StepsScopeOutputs, token, 0, null, omitHeader: true);
@@ -107,14 +100,13 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
public Boolean EvaluateStepContinueOnError(
TemplateToken token,
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions)
DictionaryContextData contextData)
{
var result = default(Boolean?);
if (token != null && token.Type != TokenType.Null)
{
var context = CreateContext(contextData, expressionFunctions);
var context = CreateContext(contextData);
try
{
token = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.BooleanStepsContext, token, 0, null, omitHeader: true);
@@ -132,44 +124,16 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
return result ?? false;
}
public String EvaluateStepDisplayName(
TemplateToken token,
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions)
{
var result = default(String);
if (token != null && token.Type != TokenType.Null)
{
var context = CreateContext(contextData, expressionFunctions);
try
{
token = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.StringStepsContext, token, 0, null, omitHeader: true);
context.Errors.Check();
result = PipelineTemplateConverter.ConvertToStepDisplayName(context, token);
}
catch (Exception ex) when (!(ex is TemplateValidationException))
{
context.Errors.Add(ex);
}
context.Errors.Check();
}
return result;
}
public Dictionary<String, String> EvaluateStepEnvironment(
TemplateToken token,
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions,
StringComparer keyComparer)
{
var result = default(Dictionary<String, String>);
if (token != null && token.Type != TokenType.Null)
{
var context = CreateContext(contextData, expressionFunctions);
var context = CreateContext(contextData);
try
{
token = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.StepEnv, token, 0, null, omitHeader: true);
@@ -187,44 +151,15 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
return result ?? new Dictionary<String, String>(keyComparer);
}
public Boolean EvaluateStepIf(
TemplateToken token,
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions,
IEnumerable<KeyValuePair<String, Object>> expressionState)
{
var result = default(Boolean?);
if (token != null && token.Type != TokenType.Null)
{
var context = CreateContext(contextData, expressionFunctions, expressionState);
try
{
token = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.StepIfResult, token, 0, null, omitHeader: true);
context.Errors.Check();
result = PipelineTemplateConverter.ConvertToIfResult(context, token);
}
catch (Exception ex) when (!(ex is TemplateValidationException))
{
context.Errors.Add(ex);
}
context.Errors.Check();
}
return result ?? throw new InvalidOperationException("Step if cannot be null");
}
public Dictionary<String, String> EvaluateStepInputs(
TemplateToken token,
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions)
DictionaryContextData contextData)
{
var result = default(Dictionary<String, String>);
if (token != null && token.Type != TokenType.Null)
{
var context = CreateContext(contextData, expressionFunctions);
var context = CreateContext(contextData);
try
{
token = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.StepWith, token, 0, null, omitHeader: true);
@@ -244,14 +179,13 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
public Int32 EvaluateStepTimeout(
TemplateToken token,
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions)
DictionaryContextData contextData)
{
var result = default(Int32?);
if (token != null && token.Type != TokenType.Null)
{
var context = CreateContext(contextData, expressionFunctions);
var context = CreateContext(contextData);
try
{
token = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.NumberStepsContext, token, 0, null, omitHeader: true);
@@ -271,14 +205,13 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
public JobContainer EvaluateJobContainer(
TemplateToken token,
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions)
DictionaryContextData contextData)
{
var result = default(JobContainer);
if (token != null && token.Type != TokenType.Null)
{
var context = CreateContext(contextData, expressionFunctions);
var context = CreateContext(contextData);
try
{
token = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.Container, token, 0, null, omitHeader: true);
@@ -296,90 +229,15 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
return result;
}
public Dictionary<String, String> EvaluateJobOutput(
TemplateToken token,
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions)
{
var result = default(Dictionary<String, String>);
if (token != null && token.Type != TokenType.Null)
{
var context = CreateContext(contextData, expressionFunctions);
try
{
token = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.JobOutputs, token, 0, null, omitHeader: true);
context.Errors.Check();
result = new Dictionary<String, String>(StringComparer.OrdinalIgnoreCase);
var mapping = token.AssertMapping("outputs");
foreach (var pair in mapping)
{
// Literal key
var key = pair.Key.AssertString("output key");
// Literal value
var value = pair.Value.AssertString("output value");
result[key.Value] = value.Value;
}
}
catch (Exception ex) when (!(ex is TemplateValidationException))
{
context.Errors.Add(ex);
}
context.Errors.Check();
}
return result;
}
public Dictionary<String, String> EvaluateJobDefaultsRun(
TemplateToken token,
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions)
{
var result = default(Dictionary<String, String>);
if (token != null && token.Type != TokenType.Null)
{
var context = CreateContext(contextData, expressionFunctions);
try
{
token = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.JobDefaultsRun, token, 0, null, omitHeader: true);
context.Errors.Check();
result = new Dictionary<String, String>(StringComparer.OrdinalIgnoreCase);
var mapping = token.AssertMapping("defaults run");
foreach (var pair in mapping)
{
// Literal key
var key = pair.Key.AssertString("defaults run key");
// Literal value
var value = pair.Value.AssertString("defaults run value");
result[key.Value] = value.Value;
}
}
catch (Exception ex) when (!(ex is TemplateValidationException))
{
context.Errors.Add(ex);
}
context.Errors.Check();
}
return result;
}
public IList<KeyValuePair<String, JobContainer>> EvaluateJobServiceContainers(
TemplateToken token,
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions)
DictionaryContextData contextData)
{
var result = default(List<KeyValuePair<String, JobContainer>>);
if (token != null && token.Type != TokenType.Null)
{
var context = CreateContext(contextData, expressionFunctions);
var context = CreateContext(contextData);
try
{
token = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.Services, token, 0, null, omitHeader: true);
@@ -397,10 +255,62 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
return result;
}
private TemplateContext CreateContext(
public Boolean TryEvaluateStepDisplayName(
TemplateToken token,
DictionaryContextData contextData,
IList<IFunctionInfo> expressionFunctions,
IEnumerable<KeyValuePair<String, Object>> expressionState = null)
out String stepName)
{
stepName = default(String);
var context = CreateContext(contextData);
if (token != null && token.Type != TokenType.Null)
{
// We should only evaluate basic expressions if we are sure we have context on all the Named Values and functions
// Otherwise return and use a default name
if (token is BasicExpressionToken expressionToken)
{
ExpressionNode root = null;
try
{
root = new ExpressionParser().ValidateSyntax(expressionToken.Expression, null) as ExpressionNode;
}
catch (Exception exception)
{
context.Errors.Add(exception);
context.Errors.Check();
}
foreach (var node in root.Traverse())
{
if (node is NamedValue namedValue && !contextData.ContainsKey(namedValue.Name))
{
return false;
}
else if (node is Function function &&
!context.ExpressionFunctions.Any(item => String.Equals(item.Name, function.Name)) &&
!ExpressionConstants.WellKnownFunctions.ContainsKey(function.Name))
{
return false;
}
}
}
try
{
token = TemplateEvaluator.Evaluate(context, PipelineTemplateConstants.StringStepsContext, token, 0, null, omitHeader: true);
context.Errors.Check();
stepName = PipelineTemplateConverter.ConvertToStepDisplayName(context, token);
}
catch (Exception ex) when (!(ex is TemplateValidationException))
{
context.Errors.Add(ex);
}
context.Errors.Check();
}
return true;
}
private TemplateContext CreateContext(DictionaryContextData contextData)
{
var result = new TemplateContext
{
@@ -414,16 +324,6 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
TraceWriter = m_trace,
};
// Add the file table
if (m_fileTable?.Count > 0)
{
foreach (var file in m_fileTable)
{
result.GetFileId(file);
}
}
// Add named values
if (contextData != null)
{
foreach (var pair in contextData)
@@ -432,60 +332,25 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
}
}
// Add functions
var functionNames = new HashSet<String>(StringComparer.OrdinalIgnoreCase);
if (expressionFunctions?.Count > 0)
{
foreach (var function in expressionFunctions)
{
result.ExpressionFunctions.Add(function);
functionNames.Add(function.Name);
}
}
// Add missing expression values and expression functions.
// This solves the following problems:
// - Compat for new agent against old server (new contexts not sent down in job message)
// - Evaluating early when all referenced contexts are available, even though all allowed
// contexts may not yet be available. For example, evaluating step display name can often
// be performed early.
foreach (var name in s_expressionValueNames)
// Compat for new agent against old server
foreach (var name in s_contextNames)
{
if (!result.ExpressionValues.ContainsKey(name))
{
result.ExpressionValues[name] = null;
}
}
foreach (var name in s_expressionFunctionNames)
{
if (!functionNames.Contains(name))
{
result.ExpressionFunctions.Add(new FunctionInfo<NoOperation>(name, 0, Int32.MaxValue));
}
}
// Add state
if (expressionState != null)
{
foreach (var pair in expressionState)
{
result.State[pair.Key] = pair.Value;
}
}
return result;
}
private readonly ITraceWriter m_trace;
private readonly TemplateSchema m_schema;
private readonly IList<String> m_fileTable;
private readonly String[] s_expressionValueNames = new[]
private readonly String[] s_contextNames = new[]
{
PipelineTemplateConstants.GitHub,
PipelineTemplateConstants.Needs,
PipelineTemplateConstants.Strategy,
PipelineTemplateConstants.Matrix,
PipelineTemplateConstants.Needs,
PipelineTemplateConstants.Secrets,
PipelineTemplateConstants.Steps,
PipelineTemplateConstants.Inputs,
@@ -493,13 +358,5 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
PipelineTemplateConstants.Runner,
PipelineTemplateConstants.Env,
};
private readonly String[] s_expressionFunctionNames = new[]
{
PipelineTemplateConstants.Always,
PipelineTemplateConstants.Cancelled,
PipelineTemplateConstants.Failure,
PipelineTemplateConstants.HashFiles,
PipelineTemplateConstants.Success,
};
}
}

View File

@@ -2,35 +2,25 @@
using System.ComponentModel;
using System.IO;
using System.Reflection;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.ObjectTemplating.Schema;
namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
{
[EditorBrowsable(EditorBrowsableState.Never)]
public static class PipelineTemplateSchemaFactory
public sealed class PipelineTemplateSchemaFactory
{
public static TemplateSchema GetSchema()
public TemplateSchema CreateSchema()
{
if (s_schema == null)
var assembly = Assembly.GetExecutingAssembly();
var json = default(String);
using (var stream = assembly.GetManifestResourceStream("GitHub.DistributedTask.Pipelines.ObjectTemplating.workflow-v1.0.json"))
using (var streamReader = new StreamReader(stream))
{
var assembly = Assembly.GetExecutingAssembly();
var json = default(String);
using (var stream = assembly.GetManifestResourceStream("GitHub.DistributedTask.Pipelines.ObjectTemplating.workflow-v1.0.json"))
using (var streamReader = new StreamReader(stream))
{
json = streamReader.ReadToEnd();
}
var objectReader = new JsonObjectReader(null, json);
var schema = TemplateSchema.Load(objectReader);
Interlocked.CompareExchange(ref s_schema, schema, null);
json = streamReader.ReadToEnd();
}
return s_schema;
var objectReader = new JsonObjectReader(null, json);
return TemplateSchema.Load(objectReader);
}
private static TemplateSchema s_schema;
}
}
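
One of the two variants shown above (the static `GetSchema` form) lazily caches the parsed workflow schema with `Interlocked.CompareExchange`. A minimal sketch of that lock-free lazy-initialization pattern follows; `CachedSchema` and the string payload are stand-ins rather than the runner's types.

```csharp
// Minimal sketch of the lock-free lazy-initialization pattern used by the static
// GetSchema variant above; CachedSchema and its string payload are hypothetical.
using System.Threading;

static class CachedSchema
{
    private static string s_schema;   // stands in for the parsed TemplateSchema

    public static string GetSchema()
    {
        if (s_schema == null)
        {
            var loaded = Load();   // stands in for reading and parsing workflow-v1.0.json
            // First writer wins; concurrent callers may parse twice but all observe one instance.
            Interlocked.CompareExchange(ref s_schema, loaded, null);
        }
        return s_schema;
    }

    private static string Load() => "workflow-v1.0";
}
```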

View File

@@ -1,572 +0,0 @@
using System;
using System.Globalization;
using System.IO;
using System.Linq;
using GitHub.DistributedTask.ObjectTemplating;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
using YamlDotNet.Core;
using YamlDotNet.Core.Events;
namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
{
/// <summary>
/// Converts a YAML file into a TemplateToken
/// </summary>
internal sealed class YamlObjectReader : IObjectReader
{
internal YamlObjectReader(
Int32? fileId,
TextReader input)
{
m_fileId = fileId;
m_parser = new Parser(input);
}
public Boolean AllowLiteral(out LiteralToken value)
{
if (EvaluateCurrent() is Scalar scalar)
{
// Tag specified
if (!String.IsNullOrEmpty(scalar.Tag))
{
// String tag
if (String.Equals(scalar.Tag, c_stringTag, StringComparison.Ordinal))
{
value = new StringToken(m_fileId, scalar.Start.Line, scalar.Start.Column, scalar.Value);
MoveNext();
return true;
}
// Not plain style
if (scalar.Style != ScalarStyle.Plain)
{
throw new NotSupportedException($"The scalar style '{scalar.Style}' on line {scalar.Start.Line} and column {scalar.Start.Column} is not valid with the tag '{scalar.Tag}'");
}
// Boolean, Float, Integer, or Null
switch (scalar.Tag)
{
case c_booleanTag:
value = ParseBoolean(scalar);
break;
case c_floatTag:
value = ParseFloat(scalar);
break;
case c_integerTag:
value = ParseInteger(scalar);
break;
case c_nullTag:
value = ParseNull(scalar);
break;
default:
throw new NotSupportedException($"Unexpected tag '{scalar.Tag}'");
}
MoveNext();
return true;
}
// Plain style, determine type using YAML 1.2 "core" schema https://yaml.org/spec/1.2/spec.html#id2804923
if (scalar.Style == ScalarStyle.Plain)
{
if (MatchNull(scalar, out var nullToken))
{
value = nullToken;
}
else if (MatchBoolean(scalar, out var booleanToken))
{
value = booleanToken;
}
else if (MatchInteger(scalar, out var numberToken) ||
MatchFloat(scalar, out numberToken))
{
value = numberToken;
}
else
{
value = new StringToken(m_fileId, scalar.Start.Line, scalar.Start.Column, scalar.Value);
}
MoveNext();
return true;
}
// Otherwise assume string
value = new StringToken(m_fileId, scalar.Start.Line, scalar.Start.Column, scalar.Value);
MoveNext();
return true;
}
value = default;
return false;
}
public Boolean AllowSequenceStart(out SequenceToken value)
{
if (EvaluateCurrent() is SequenceStart sequenceStart)
{
value = new SequenceToken(m_fileId, sequenceStart.Start.Line, sequenceStart.Start.Column);
MoveNext();
return true;
}
value = default;
return false;
}
public Boolean AllowSequenceEnd()
{
if (EvaluateCurrent() is SequenceEnd)
{
MoveNext();
return true;
}
return false;
}
public Boolean AllowMappingStart(out MappingToken value)
{
if (EvaluateCurrent() is MappingStart mappingStart)
{
value = new MappingToken(m_fileId, mappingStart.Start.Line, mappingStart.Start.Column);
MoveNext();
return true;
}
value = default;
return false;
}
public Boolean AllowMappingEnd()
{
if (EvaluateCurrent() is MappingEnd)
{
MoveNext();
return true;
}
return false;
}
/// <summary>
/// Consumes the last parsing events, which are expected to be DocumentEnd and StreamEnd.
/// </summary>
public void ValidateEnd()
{
if (EvaluateCurrent() is DocumentEnd)
{
MoveNext();
}
else
{
throw new InvalidOperationException("Expected document end parse event");
}
if (EvaluateCurrent() is StreamEnd)
{
MoveNext();
}
else
{
throw new InvalidOperationException("Expected stream end parse event");
}
if (MoveNext())
{
throw new InvalidOperationException("Expected end of parse events");
}
}
/// <summary>
/// Consumes the first parsing events, which are expected to be StreamStart and DocumentStart.
/// </summary>
public void ValidateStart()
{
if (EvaluateCurrent() != null)
{
throw new InvalidOperationException("Unexpected parser state");
}
if (!MoveNext())
{
throw new InvalidOperationException("Expected a parse event");
}
if (EvaluateCurrent() is StreamStart)
{
MoveNext();
}
else
{
throw new InvalidOperationException("Expected stream start parse event");
}
if (EvaluateCurrent() is DocumentStart)
{
MoveNext();
}
else
{
throw new InvalidOperationException("Expected document start parse event");
}
}
private ParsingEvent EvaluateCurrent()
{
if (m_current == null)
{
m_current = m_parser.Current;
if (m_current != null)
{
if (m_current is Scalar scalar)
{
// Verify not using anchors
if (scalar.Anchor != null)
{
throw new InvalidOperationException($"Anchors are not currently supported. Remove the anchor '{scalar.Anchor}'");
}
}
else if (m_current is MappingStart mappingStart)
{
// Verify not using anchors
if (mappingStart.Anchor != null)
{
throw new InvalidOperationException($"Anchors are not currently supported. Remove the anchor '{mappingStart.Anchor}'");
}
}
else if (m_current is SequenceStart sequenceStart)
{
// Verify not using anchors
if (sequenceStart.Anchor != null)
{
throw new InvalidOperationException($"Anchors are not currently supported. Remove the anchor '{sequenceStart.Anchor}'");
}
}
else if (!(m_current is MappingEnd) &&
!(m_current is SequenceEnd) &&
!(m_current is DocumentStart) &&
!(m_current is DocumentEnd) &&
!(m_current is StreamStart) &&
!(m_current is StreamEnd))
{
throw new InvalidOperationException($"Unexpected parsing event type: {m_current.GetType().Name}");
}
}
}
return m_current;
}
private Boolean MoveNext()
{
m_current = null;
return m_parser.MoveNext();
}
private BooleanToken ParseBoolean(Scalar scalar)
{
if (MatchBoolean(scalar, out var token))
{
return token;
}
ThrowInvalidValue(scalar, c_booleanTag); // throws
return default;
}
private NumberToken ParseFloat(Scalar scalar)
{
if (MatchFloat(scalar, out var token))
{
return token;
}
ThrowInvalidValue(scalar, c_floatTag); // throws
return default;
}
private NumberToken ParseInteger(Scalar scalar)
{
if (MatchInteger(scalar, out var token))
{
return token;
}
ThrowInvalidValue(scalar, c_integerTag); // throws
return default;
}
private NullToken ParseNull(Scalar scalar)
{
if (MatchNull(scalar, out var token))
{
return token;
}
ThrowInvalidValue(scalar, c_nullTag); // throws
return default;
}
private Boolean MatchBoolean(
Scalar scalar,
out BooleanToken value)
{
// YAML 1.2 "core" schema https://yaml.org/spec/1.2/spec.html#id2804923
switch (scalar.Value ?? String.Empty)
{
case "true":
case "True":
case "TRUE":
value = new BooleanToken(m_fileId, scalar.Start.Line, scalar.Start.Column, true);
return true;
case "false":
case "False":
case "FALSE":
value = new BooleanToken(m_fileId, scalar.Start.Line, scalar.Start.Column, false);
return true;
}
value = default;
return false;
}
private Boolean MatchFloat(
Scalar scalar,
out NumberToken value)
{
// YAML 1.2 "core" schema https://yaml.org/spec/1.2/spec.html#id2804923
var str = scalar.Value;
if (!String.IsNullOrEmpty(str))
{
// Check for [-+]?(\.inf|\.Inf|\.INF)|\.nan|\.NaN|\.NAN
switch (str)
{
case ".inf":
case ".Inf":
case ".INF":
case "+.inf":
case "+.Inf":
case "+.INF":
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, Double.PositiveInfinity);
return true;
case "-.inf":
case "-.Inf":
case "-.INF":
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, Double.NegativeInfinity);
return true;
case ".nan":
case ".NaN":
case ".NAN":
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, Double.NaN);
return true;
}
// Otherwise check [-+]?(\.[0-9]+|[0-9]+(\.[0-9]*)?)([eE][-+]?[0-9]+)?
// Skip leading sign
var index = str[0] == '-' || str[0] == '+' ? 1 : 0;
// Check for integer portion
var length = str.Length;
var hasInteger = false;
while (index < length && str[index] >= '0' && str[index] <= '9')
{
hasInteger = true;
index++;
}
// Check for decimal point
var hasDot = false;
if (index < length && str[index] == '.')
{
hasDot = true;
index++;
}
// Check for decimal portion
var hasDecimal = false;
while (index < length && str[index] >= '0' && str[index] <= '9')
{
hasDecimal = true;
index++;
}
// Check [-+]?(\.[0-9]+|[0-9]+(\.[0-9]*)?)
if ((hasDot && hasDecimal) || hasInteger)
{
// Check for end
if (index == length)
{
// Try parse
if (Double.TryParse(str, NumberStyles.AllowLeadingSign | NumberStyles.AllowDecimalPoint, CultureInfo.InvariantCulture, out var doubleValue))
{
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, doubleValue);
return true;
}
// Otherwise exceeds range
else
{
ThrowInvalidValue(scalar, c_floatTag); // throws
}
}
// Check [eE][-+]?[0-9]
else if (index < length && (str[index] == 'e' || str[index] == 'E'))
{
index++;
// Skip sign
if (index < length && (str[index] == '-' || str[index] == '+'))
{
index++;
}
// Check for exponent
var hasExponent = false;
while (index < length && str[index] >= '0' && str[index] <= '9')
{
hasExponent = true;
index++;
}
// Check for end
if (hasExponent && index == length)
{
// Try parse
if (Double.TryParse(str, NumberStyles.AllowLeadingSign | NumberStyles.AllowDecimalPoint | NumberStyles.AllowExponent, CultureInfo.InvariantCulture, out var doubleValue))
{
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, (Double)doubleValue);
return true;
}
// Otherwise exceeds range
else
{
ThrowInvalidValue(scalar, c_floatTag); // throws
}
}
}
}
}
value = default;
return false;
}
private Boolean MatchInteger(
Scalar scalar,
out NumberToken value)
{
// YAML 1.2 "core" schema https://yaml.org/spec/1.2/spec.html#id2804923
var str = scalar.Value;
if (!String.IsNullOrEmpty(str))
{
// Check for [0-9]+
var firstChar = str[0];
if (firstChar >= '0' && firstChar <= '9' &&
str.Skip(1).All(x => x >= '0' && x <= '9'))
{
// Try parse
if (Double.TryParse(str, NumberStyles.None, CultureInfo.InvariantCulture, out var doubleValue))
{
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, doubleValue);
return true;
}
// Otherwise exceeds range
ThrowInvalidValue(scalar, c_integerTag); // throws
}
// Check for (-|+)[0-9]+
else if ((firstChar == '-' || firstChar == '+') &&
str.Length > 1 &&
str.Skip(1).All(x => x >= '0' && x <= '9'))
{
// Try parse
if (Double.TryParse(str, NumberStyles.AllowLeadingSign, CultureInfo.InvariantCulture, out var doubleValue))
{
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, doubleValue);
return true;
}
// Otherwise exceeds range
ThrowInvalidValue(scalar, c_integerTag); // throws
}
// Check for 0x[0-9a-fA-F]+
else if (firstChar == '0' &&
str.Length > 2 &&
str[1] == 'x' &&
str.Skip(2).All(x => (x >= '0' && x <= '9') || (x >= 'a' && x <= 'f') || (x >= 'A' && x <= 'F')))
{
// Try parse
if (Int32.TryParse(str.Substring(2), NumberStyles.AllowHexSpecifier, CultureInfo.InvariantCulture, out var integerValue))
{
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, integerValue);
return true;
}
// Otherwise exceeds range
ThrowInvalidValue(scalar, c_integerTag); // throws
}
// Check for 0o[0-9]+
else if (firstChar == '0' &&
str.Length > 2 &&
str[1] == 'o' &&
str.Skip(2).All(x => x >= '0' && x <= '7'))
{
// Try parse
var integerValue = default(Int32);
try
{
integerValue = Convert.ToInt32(str.Substring(2), 8);
}
// Otherwise exceeds range
catch (Exception)
{
ThrowInvalidValue(scalar, c_integerTag); // throws
}
value = new NumberToken(m_fileId, scalar.Start.Line, scalar.Start.Column, integerValue);
return true;
}
}
value = default;
return false;
}
private Boolean MatchNull(
Scalar scalar,
out NullToken value)
{
// YAML 1.2 "core" schema https://yaml.org/spec/1.2/spec.html#id2804923
switch (scalar.Value ?? String.Empty)
{
case "":
case "null":
case "Null":
case "NULL":
case "~":
value = new NullToken(m_fileId, scalar.Start.Line, scalar.Start.Column);
return true;
}
value = default;
return false;
}
private void ThrowInvalidValue(
Scalar scalar,
String tag)
{
throw new NotSupportedException($"The value '{scalar.Value}' on line {scalar.Start.Line} and column {scalar.Start.Column} is invalid for the type '{tag}'");
}
private const String c_booleanTag = "tag:yaml.org,2002:bool";
private const String c_floatTag = "tag:yaml.org,2002:float";
private const String c_integerTag = "tag:yaml.org,2002:int";
private const String c_nullTag = "tag:yaml.org,2002:null";
private const String c_stringTag = "tag:yaml.org,2002:string";
private readonly Int32? m_fileId;
private readonly Parser m_parser;
private ParsingEvent m_current;
}
}
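
The reader above resolves plain-style scalars by trying null, then boolean, then integer, then float, and finally falling back to string, per the YAML 1.2 "core" schema. A simplified, standalone illustration of that decision order follows; `PlainScalar.Classify` is a hypothetical helper and deliberately omits the hex, octal, and `.inf`/`.nan` forms handled above.

```csharp
// Simplified illustration (not the runner's implementation) of plain-scalar
// classification under the YAML 1.2 "core" schema: null, boolean, number, else string.
using System.Globalization;

static class PlainScalar
{
    public static string Classify(string value)
    {
        switch (value ?? string.Empty)
        {
            case "":
            case "null": case "Null": case "NULL": case "~":
                return "null";
            case "true": case "True": case "TRUE":
            case "false": case "False": case "FALSE":
                return "boolean";
        }

        // Covers plain decimal integers and floats only; the full reader above also
        // accepts 0x.. hex, 0o.. octal, and .inf/.nan spellings.
        var styles = NumberStyles.AllowLeadingSign | NumberStyles.AllowDecimalPoint | NumberStyles.AllowExponent;
        if (double.TryParse(value, styles, CultureInfo.InvariantCulture, out _))
        {
            return "number";
        }

        return "string";
    }
}

// Classify("~") == "null", Classify("TRUE") == "boolean",
// Classify("1.5e3") == "number", Classify("on") == "string".
```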

View File

@@ -9,7 +9,6 @@
"properties": {
"on": "any",
"name": "string",
"defaults": "workflow-defaults",
"env": "workflow-env",
"jobs": "jobs"
}
@@ -38,7 +37,6 @@
"steps-scope-input-value": {
"context": [
"github",
"needs",
"strategy",
"matrix",
"secrets",
@@ -66,7 +64,6 @@
"steps-scope-output-value": {
"context": [
"github",
"needs",
"strategy",
"matrix",
"secrets",
@@ -91,7 +88,6 @@
"description": "Default input values for a steps template",
"context": [
"github",
"needs",
"strategy",
"matrix"
],
@@ -114,7 +110,6 @@
"description": "Output values for a steps template",
"context": [
"github",
"needs",
"strategy",
"matrix",
"secrets",
@@ -126,23 +121,6 @@
"string": {}
},
"workflow-defaults": {
"mapping": {
"properties": {
"run": "workflow-defaults-run"
}
}
},
"workflow-defaults-run": {
"mapping": {
"properties": {
"shell": "non-empty-string",
"working-directory": "non-empty-string"
}
}
},
"workflow-env": {
"context": [
"github",
@@ -165,21 +143,16 @@
"mapping": {
"properties": {
"needs": "needs",
"if": "job-if",
"if": "string",
"strategy": "strategy",
"name": "string-strategy-context",
"runs-on": {
"type": "runs-on",
"required": true
},
"runs-on": "runs-on",
"timeout-minutes": "number-strategy-context",
"cancel-timeout-minutes": "number-strategy-context",
"continue-on-error": "boolean-strategy-context",
"continue-on-error": "boolean",
"container": "container",
"services": "services",
"env": "job-env",
"outputs": "job-outputs",
"defaults": "job-defaults",
"steps": "steps"
}
}
@@ -192,41 +165,9 @@
]
},
"job-if": {
"context": [
"github",
"needs",
"always(0,0)",
"failure(0,MAX)",
"cancelled(0,0)",
"success(0,MAX)"
],
"string": {}
},
"job-if-result": {
"context": [
"github",
"needs",
"always(0,0)",
"failure(0,MAX)",
"cancelled(0,0)",
"success(0,MAX)"
],
"one-of": [
"null",
"boolean",
"number",
"string",
"sequence",
"mapping"
]
},
"strategy": {
"context": [
"github",
"needs"
"github"
],
"mapping": {
"properties": {
@@ -291,24 +232,25 @@
"runs-on": {
"context": [
"github",
"needs",
"strategy",
"matrix"
],
"one-of": [
"non-empty-string",
"sequence-of-non-empty-string",
"runs-on-string",
"runs-on-mapping"
]
},
"runs-on-string": {
"string": {
"require-non-empty": true
}
},
"runs-on-mapping": {
"mapping": {
"properties": {
"pool": {
"type": "non-empty-string",
"required": true
}
"pool": "non-empty-string"
}
}
},
@@ -316,10 +258,9 @@
"job-env": {
"context": [
"github",
"needs",
"secrets",
"strategy",
"matrix",
"secrets"
"matrix"
],
"mapping": {
"loose-key-type": "non-empty-string",
@@ -327,37 +268,6 @@
}
},
"job-defaults": {
"mapping": {
"properties": {
"run": "job-defaults-run"
}
}
},
"job-defaults-run": {
"context": [
"github",
"strategy",
"matrix",
"needs",
"env"
],
"mapping": {
"properties": {
"shell": "non-empty-string",
"working-directory": "non-empty-string"
}
}
},
"job-outputs": {
"mapping": {
"loose-key-type": "non-empty-string",
"loose-value-type": "string-runner-context"
}
},
"steps": {
"sequence": {
"item-type": "steps-item"
@@ -391,12 +301,9 @@
"properties": {
"name": "string-steps-context",
"id": "non-empty-string",
"if": "step-if",
"if": "string",
"timeout-minutes": "number-steps-context",
"run": {
"type": "string-steps-context",
"required": true
},
"run": "string-steps-context",
"continue-on-error": "boolean-steps-context",
"env": "step-env",
"working-directory": "string-steps-context",
@@ -410,12 +317,9 @@
"properties": {
"name": "string-steps-context-in-template",
"id": "non-empty-string",
"if": "step-if-in-template",
"if": "string",
"timeout-minutes": "number-steps-context-in-template",
"run": {
"type": "string-steps-context-in-template",
"required": true
},
"run": "string-steps-context-in-template",
"continue-on-error": "boolean-steps-context-in-template",
"env": "step-env-in-template",
"working-directory": "string-steps-context-in-template",
@@ -429,13 +333,10 @@
"properties": {
"name": "string-steps-context",
"id": "non-empty-string",
"if": "step-if",
"if": "string",
"continue-on-error": "boolean-steps-context",
"timeout-minutes": "number-steps-context",
"uses": {
"type": "non-empty-string",
"required": true
},
"uses": "non-empty-string",
"with": "step-with",
"env": "step-env"
}
@@ -447,109 +348,16 @@
"properties": {
"name": "string-steps-context-in-template",
"id": "non-empty-string",
"if": "step-if-in-template",
"if": "string",
"continue-on-error": "boolean-steps-context-in-template",
"timeout-minutes": "number-steps-context-in-template",
"uses": {
"type": "non-empty-string",
"required": true
},
"uses": "non-empty-string",
"with": "step-with-in-template",
"env": "step-env-in-template"
}
}
},
"step-if": {
"context": [
"github",
"needs",
"strategy",
"matrix",
"steps",
"job",
"runner",
"env",
"always(0,0)",
"failure(0,0)",
"cancelled(0,0)",
"success(0,0)",
"hashFiles(1,255)"
],
"string": {}
},
"step-if-in-template": {
"context": [
"github",
"needs",
"strategy",
"matrix",
"steps",
"inputs",
"job",
"runner",
"env",
"always(0,0)",
"failure(0,0)",
"cancelled(0,0)",
"success(0,0)",
"hashFiles(1,255)"
],
"string": {}
},
"step-if-result": {
"context": [
"github",
"strategy",
"matrix",
"steps",
"job",
"runner",
"env",
"always(0,0)",
"failure(0,0)",
"cancelled(0,0)",
"success(0,0)",
"hashFiles(1,255)"
],
"one-of": [
"null",
"boolean",
"number",
"string",
"sequence",
"mapping"
]
},
"step-if-result-in-template": {
"context": [
"github",
"strategy",
"matrix",
"steps",
"inputs",
"job",
"runner",
"env",
"always(0,0)",
"failure(0,0)",
"cancelled(0,0)",
"success(0,0)",
"hashFiles(1,255)"
],
"one-of": [
"null",
"boolean",
"number",
"string",
"sequence",
"mapping"
]
},
"steps-template-reference": {
"mapping": {
"properties": {
@@ -573,7 +381,6 @@
"steps-template-reference-inputs": {
"context": [
"github",
"needs",
"strategy",
"matrix",
"secrets",
@@ -591,7 +398,6 @@
"steps-template-reference-inputs-in-template": {
"context": [
"github",
"needs",
"strategy",
"matrix",
"secrets",
@@ -610,15 +416,13 @@
"step-env": {
"context": [
"github",
"needs",
"strategy",
"matrix",
"secrets",
"steps",
"job",
"runner",
"env",
"hashFiles(1,255)"
"env"
],
"mapping": {
"loose-key-type": "non-empty-string",
@@ -629,7 +433,6 @@
"step-env-in-template": {
"context": [
"github",
"needs",
"strategy",
"matrix",
"secrets",
@@ -637,8 +440,7 @@
"inputs",
"job",
"runner",
"env",
"hashFiles(1,255)"
"env"
],
"mapping": {
"loose-key-type": "non-empty-string",
@@ -649,35 +451,13 @@
"step-with": {
"context": [
"github",
"needs",
"strategy",
"matrix",
"secrets",
"steps",
"job",
"runner",
"env",
"hashFiles(1,255)"
],
"mapping": {
"loose-key-type": "non-empty-string",
"loose-value-type": "string"
}
},
"step-with-in-template": {
"context": [
"github",
"needs",
"strategy",
"matrix",
"secrets",
"steps",
"inputs",
"job",
"runner",
"env",
"hashFiles(1,255)"
"env"
],
"mapping": {
"loose-key-type": "non-empty-string",
@@ -688,7 +468,6 @@
"container": {
"context": [
"github",
"needs",
"strategy",
"matrix"
],
@@ -713,7 +492,6 @@
"services": {
"context": [
"github",
"needs",
"strategy",
"matrix"
],
@@ -726,7 +504,6 @@
"services-container": {
"context": [
"github",
"needs",
"strategy",
"matrix"
],
@@ -739,7 +516,25 @@
"container-env": {
"mapping": {
"loose-key-type": "non-empty-string",
"loose-value-type": "string-runner-context"
"loose-value-type": "string"
}
},
"step-with-in-template": {
"context": [
"github",
"strategy",
"matrix",
"secrets",
"steps",
"inputs",
"job",
"runner",
"env"
],
"mapping": {
"loose-key-type": "non-empty-string",
"loose-value-type": "string"
}
},
@@ -755,20 +550,9 @@
}
},
"boolean-strategy-context": {
"context": [
"github",
"needs",
"strategy",
"matrix"
],
"boolean": {}
},
"number-strategy-context": {
"context": [
"github",
"needs",
"strategy",
"matrix"
],
@@ -778,7 +562,6 @@
"string-strategy-context": {
"context": [
"github",
"needs",
"strategy",
"matrix"
],
@@ -788,15 +571,13 @@
"boolean-steps-context": {
"context": [
"github",
"needs",
"strategy",
"matrix",
"secrets",
"steps",
"job",
"runner",
"env",
"hashFiles(1,255)"
"env"
],
"boolean": {}
},
@@ -804,7 +585,6 @@
"boolean-steps-context-in-template": {
"context": [
"github",
"needs",
"strategy",
"matrix",
"secrets",
@@ -812,8 +592,7 @@
"inputs",
"job",
"runner",
"env",
"hashFiles(1,255)"
"env"
],
"boolean": {}
},
@@ -821,15 +600,13 @@
"number-steps-context": {
"context": [
"github",
"needs",
"strategy",
"matrix",
"secrets",
"steps",
"job",
"runner",
"env",
"hashFiles(1,255)"
"env"
],
"number": {}
},
@@ -837,7 +614,6 @@
"number-steps-context-in-template": {
"context": [
"github",
"needs",
"strategy",
"matrix",
"secrets",
@@ -845,16 +621,14 @@
"inputs",
"job",
"runner",
"env",
"hashFiles(1,255)"
"env"
],
"number": {}
},
"string-runner-context": {
"string-steps-context": {
"context": [
"github",
"needs",
"strategy",
"matrix",
"secrets",
@@ -866,26 +640,9 @@
"string": {}
},
"string-steps-context": {
"context": [
"github",
"needs",
"strategy",
"matrix",
"secrets",
"steps",
"job",
"runner",
"env",
"hashFiles(1,255)"
],
"string": {}
},
"string-steps-context-in-template": {
"context": [
"github",
"needs",
"strategy",
"matrix",
"secrets",
@@ -893,8 +650,7 @@
"inputs",
"job",
"runner",
"env",
"hashFiles(1,255)"
"env"
],
"string": {}
}

View File

@@ -1,59 +0,0 @@
using System.Runtime.Serialization;
using Newtonsoft.Json;
namespace GitHub.DistributedTask.WebApi
{
[DataContract]
public class AgentLabel
{
[JsonConstructor]
public AgentLabel()
{
}
public AgentLabel(string name)
{
this.Name = name;
this.Type = LabelType.System;
}
public AgentLabel(string name, LabelType type)
{
this.Name = name;
this.Type = type;
}
private AgentLabel(AgentLabel labelToBeCloned)
{
this.Id = labelToBeCloned.Id;
this.Name = labelToBeCloned.Name;
this.Type = labelToBeCloned.Type;
}
[DataMember]
public int Id
{
get;
set;
}
[DataMember]
public string Name
{
get;
set;
}
[DataMember]
public LabelType Type
{
get;
set;
}
public AgentLabel Clone()
{
return new AgentLabel(this);
}
}
}
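
The removed file above defines AgentLabel as a small data contract: a Name, a LabelType, and a member-wise Clone. Below is a minimal usage sketch, compiled against the AgentLabel and LabelType definitions shown in this compare; the Program scaffolding and label names are illustrative only, not code from the diff.

```csharp
using System;
using GitHub.DistributedTask.WebApi;

class AgentLabelExample
{
    static void Main()
    {
        // Single-argument constructor defaults the label to LabelType.System.
        var systemLabel = new AgentLabel("self-hosted");

        // Explicitly user-defined label.
        var userLabel = new AgentLabel("gpu", LabelType.User);

        // Clone() copies Id, Name and Type member-wise.
        AgentLabel copy = userLabel.Clone();

        Console.WriteLine($"{systemLabel.Name} ({systemLabel.Type})");
        Console.WriteLine($"{copy.Name} ({copy.Type})");
    }
}
```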

View File

@@ -31,7 +31,7 @@ namespace GitHub.DistributedTask.WebApi
}
protected JobEvent(
String name,
String name,
Guid jobId)
{
this.Name = name;
@@ -123,12 +123,11 @@ namespace GitHub.DistributedTask.WebApi
Int64 requestId,
Guid jobId,
TaskResult result,
Dictionary<String, VariableValue> outputs)
IDictionary<String, VariableValue> outputVariables)
: base(JobEventTypes.JobCompleted, jobId)
{
this.RequestId = requestId;
this.Result = result;
this.Outputs = outputs;
}
[DataMember(EmitDefaultValue = false)]
@@ -144,13 +143,6 @@ namespace GitHub.DistributedTask.WebApi
get;
set;
}
[DataMember(EmitDefaultValue = false)]
public IDictionary<String, VariableValue> Outputs
{
get;
set;
}
}
[DataContract]
@@ -161,9 +153,9 @@ namespace GitHub.DistributedTask.WebApi
}
protected TaskEvent(
string name,
Guid jobId,
Guid taskId)
string name,
Guid jobId,
Guid taskId)
: base(name, jobId)
{
TaskId = taskId;
@@ -193,9 +185,9 @@ namespace GitHub.DistributedTask.WebApi
}
public override Object ReadJson(
JsonReader reader,
Type objectType,
Object existingValue,
JsonReader reader,
Type objectType,
Object existingValue,
JsonSerializer serializer)
{
var eventObject = JObject.Load(reader);
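
The constructor hunk above toggles between a shape that accepts a job-output dictionary (`Dictionary<String, VariableValue> outputs`, stored in an `Outputs` member) and one that accepts `IDictionary<String, VariableValue> outputVariables` with no `Outputs` property. The sketch below targets the outputs-carrying shape. The enclosing class name is not visible in the hunk and is assumed to be JobCompletedEvent from the `JobEventTypes.JobCompleted` base call; the output key and value are invented for illustration, and VariableValue's Value/IsSecret properties are assumed from its data contract.

```csharp
using System;
using System.Collections.Generic;
using GitHub.DistributedTask.WebApi;

class JobCompletedEventSketch
{
    static JobCompletedEvent BuildEvent(long requestId, Guid jobId)
    {
        // Job outputs keyed by output name (illustrative key/value).
        var outputs = new Dictionary<string, VariableValue>(StringComparer.OrdinalIgnoreCase)
        {
            ["artifact-path"] = new VariableValue { Value = "bin/Release", IsSecret = false }
        };

        // Constructor shape from the outputs-carrying side of the hunk above:
        // (requestId, jobId, result, outputs).
        return new JobCompletedEvent(requestId, jobId, TaskResult.Succeeded, outputs);
    }
}
```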

View File

@@ -1,14 +0,0 @@
using System.Runtime.Serialization;
namespace GitHub.DistributedTask.WebApi
{
[DataContract]
public enum LabelType
{
[EnumMember]
System = 0,
[EnumMember]
User = 1
}
}

View File

@@ -51,7 +51,7 @@ namespace GitHub.DistributedTask.WebApi
if (agentToBeCloned.m_labels != null && agentToBeCloned.m_labels.Count > 0)
{
m_labels = new HashSet<AgentLabel>(agentToBeCloned.m_labels);
m_labels = new HashSet<string>(agentToBeCloned.m_labels, StringComparer.OrdinalIgnoreCase);
}
}
@@ -118,13 +118,13 @@ namespace GitHub.DistributedTask.WebApi
/// <summary>
/// The labels of the runner
/// </summary>
public ISet<AgentLabel> Labels
public ISet<string> Labels
{
get
{
if (m_labels == null)
{
m_labels = new HashSet<AgentLabel>();
m_labels = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
}
return m_labels;
}
@@ -164,6 +164,6 @@ namespace GitHub.DistributedTask.WebApi
private PropertiesCollection m_properties;
[DataMember(IsRequired = false, EmitDefaultValue = false, Name = "Labels")]
private HashSet<AgentLabel> m_labels;
private HashSet<string> m_labels;
}
}
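
The TaskAgent hunks above switch the Labels collection between `ISet<AgentLabel>` and a case-insensitive `ISet<string>`. Here is a minimal sketch of what the string-based shape buys, assuming only the `StringComparer.OrdinalIgnoreCase` behavior shown in the diff; the matching logic itself is illustrative, not the service's actual scheduling code.

```csharp
using System;
using System.Collections.Generic;
using System.Linq;

class LabelMatchingSketch
{
    static void Main()
    {
        // Labels stored the way the string-based side of the diff stores them:
        // a HashSet<string> with OrdinalIgnoreCase, so "GPU" and "gpu" are one label.
        var agentLabels = new HashSet<string>(StringComparer.OrdinalIgnoreCase)
        {
            "self-hosted", "Linux", "X64"
        };

        // A job's requested labels match when every one is present on the agent.
        string[] requested = { "self-hosted", "linux" };
        bool eligible = requested.All(agentLabels.Contains);

        Console.WriteLine($"eligible: {eligible}"); // eligible: True
    }
}
```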

View File

@@ -95,7 +95,6 @@ namespace GitHub.DistributedTask.WebApi
Int64 requestId,
Guid lockToken,
DateTime? expiresOn = null,
string orchestrationId = null,
Object userState = null,
CancellationToken cancellationToken = default(CancellationToken))
{
@@ -105,30 +104,7 @@ namespace GitHub.DistributedTask.WebApi
LockedUntil = expiresOn,
};
var additionalHeaders = new Dictionary<string, string>();
if (!string.IsNullOrEmpty(orchestrationId))
{
additionalHeaders["X-VSS-OrchestrationId"] = orchestrationId;
}
HttpMethod httpMethod = new HttpMethod("PATCH");
Guid locationId = new Guid("fc825784-c92a-4299-9221-998a02d1b54f");
object routeValues = new { poolId = poolId, requestId = requestId };
HttpContent content = new ObjectContent<TaskAgentJobRequest>(request, new VssJsonMediaTypeFormatter(true));
List<KeyValuePair<string, string>> queryParams = new List<KeyValuePair<string, string>>();
queryParams.Add("lockToken", lockToken.ToString());
return SendAsync<TaskAgentJobRequest>(
httpMethod,
additionalHeaders,
locationId,
routeValues: routeValues,
version: new ApiResourceVersion(5.1, 1),
queryParameters: queryParams,
userState: userState,
cancellationToken: cancellationToken,
content: content);
return UpdateAgentRequestAsync(poolId, requestId, lockToken, request, userState, cancellationToken);
}
public Task<TaskAgent> ReplaceAgentAsync(
@@ -195,5 +171,5 @@ namespace GitHub.DistributedTask.WebApi
}
private readonly ApiResourceVersion m_currentApiVersion = new ApiResourceVersion(3.0, 1);
}
}
}
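
The removed block above hand-builds a PATCH request: it attaches an `X-VSS-OrchestrationId` header only when an orchestration id is supplied, then sends the body with a `lockToken` query parameter, while the surviving line forwards to `UpdateAgentRequestAsync` instead. The conditional-header part of that pattern is shown in isolation below, as a standalone sketch rather than the client's real method.

```csharp
using System.Collections.Generic;

static class OrchestrationHeaderSketch
{
    // Mirrors the conditional-header logic from the removed block:
    // the X-VSS-OrchestrationId header is attached only when a value is present.
    public static Dictionary<string, string> BuildHeaders(string orchestrationId = null)
    {
        var additionalHeaders = new Dictionary<string, string>();
        if (!string.IsNullOrEmpty(orchestrationId))
        {
            additionalHeaders["X-VSS-OrchestrationId"] = orchestrationId;
        }
        return additionalHeaders;
    }
}
```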

View File

@@ -260,8 +260,5 @@ namespace GitHub.DistributedTask.WebApi
public static readonly Guid CheckpointResourcesLocationId = new Guid(CheckpointResourcesLocationIdString);
public const String CheckpointResourcesResource = "references";
public static readonly Guid RunnerAuthUrl = new Guid("{A82A119C-1E46-44B6-8D75-C82A79CF975B}");
public const string RunnerAuthUrlResource = "authurl";
}
}
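
The constants hunk above pairs a location id (RunnerAuthUrl) with a resource name (RunnerAuthUrlResource), the same id/resource pairing the SendAsync call earlier in this diff routes on. The Guid literal is written in the braced ("B") format, which parses to the same value as the plain form; a tiny, purely illustrative check:

```csharp
using System;

class GuidFormatCheck
{
    static void Main()
    {
        // Braced and plain forms parse to the same Guid value.
        var braced = new Guid("{A82A119C-1E46-44B6-8D75-C82A79CF975B}");
        var plain = new Guid("A82A119C-1E46-44B6-8D75-C82A79CF975B");
        Console.WriteLine(braced == plain); // True
    }
}
```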

View File

@@ -189,5 +189,11 @@ namespace GitHub.Services.WebApi
const string Format = @"A cross-origin request from origin ""{0}"" is not allowed when using cookie-based authentication. An authentication token needs to be provided in the Authorization header of the request.";
return string.Format(CultureInfo.CurrentCulture, Format, arg0);
}
public static string UnknownEntityType(object arg0)
{
const string Format = @"Unknown entityType {0}. Cannot parse.";
return string.Format(CultureInfo.CurrentCulture, Format, arg0);
}
}
}
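
The added UnknownEntityType helper follows the same resource-string pattern as its neighbour: format a fixed message with CurrentCulture and hand it back to the caller. Below is a hedged sketch of that pattern with a hypothetical call site; the exception type and method names are assumptions, not code from this compare.

```csharp
using System;
using System.Globalization;

static class ResourceStringSketch
{
    // Stand-in for the generated resource helper shown in the diff.
    public static string UnknownEntityType(object arg0)
    {
        const string Format = @"Unknown entityType {0}. Cannot parse.";
        return string.Format(CultureInfo.CurrentCulture, Format, arg0);
    }

    // Hypothetical call site: surface the formatted message as an exception.
    public static void ThrowForUnknown(string entityType)
    {
        throw new NotSupportedException(UnknownEntityType(entityType));
    }
}
```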

Some files were not shown because too many files have changed in this diff.