Compare commits

..

11 Commits

Author SHA1 Message Date
TingluoHuang c8f3726265 c 2020-10-19 14:46:46 -04:00
TingluoHuang ef72239ff8 c 2020-09-12 23:40:47 -04:00
TingluoHuang 993357df7d 2 versions 2020-09-11 13:46:31 -04:00
TingluoHuang c62ab23bdd new script 2020-09-11 00:42:28 -04:00
TingluoHuang 9d48d2be87 new scripe 2020-09-11 00:41:58 -04:00
TingluoHuang 69aa8d8984 fix node 2020-09-03 17:24:59 -04:00
TingluoHuang 7da6739eae dind 2020-09-03 01:12:46 -04:00
TingluoHuang 58afa42109 enterprise 2020-09-01 00:34:58 -04:00
TingluoHuang 3dc52b28af update dockerfile 2020-08-30 00:29:28 -04:00
TingluoHuang 993edc3172 config via pat. 2020-08-29 00:21:17 -04:00
TingluoHuang 6395efe7e0 k8s prototype. 2020-08-14 11:20:12 -04:00
74 changed files with 1582 additions and 1597 deletions

View File

@@ -18,28 +18,12 @@ jobs:
build:
strategy:
matrix:
runtime: [ linux-x64, linux-arm64, linux-arm, win-x64, osx-x64 ]
runtime: [ linux-x64 ]
include:
- runtime: linux-x64
os: ubuntu-latest
devScript: ./dev.sh
- runtime: linux-arm64
os: ubuntu-latest
devScript: ./dev.sh
- runtime: linux-arm
os: ubuntu-latest
devScript: ./dev.sh
- runtime: osx-x64
os: macOS-latest
devScript: ./dev.sh
- runtime: win-x64
os: windows-latest
devScript: ./dev
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v1
@@ -50,13 +34,6 @@ jobs:
${{ matrix.devScript }} layout Release ${{ matrix.runtime }}
working-directory: src
# Run tests
- name: L0
run: |
${{ matrix.devScript }} test
working-directory: src
if: matrix.runtime != 'linux-arm64' && matrix.runtime != 'linux-arm'
# Create runner package tar.gz/zip
- name: Package Release
if: github.event_name != 'pull_request'
@@ -71,3 +48,18 @@ jobs:
with:
name: runner-package-${{ matrix.runtime }}
path: _package
- name: Build old version
run: |
echo 2.270.0 > runnerversion
${{ matrix.devScript }} layout Release ${{ matrix.runtime }}
${{ matrix.devScript }} package Release
working-directory: src
# Upload runner package tar.gz/zip as artifact
- name: Publish Artifact old
if: github.event_name != 'pull_request'
uses: actions/upload-artifact@v1
with:
name: runner-package-${{ matrix.runtime }}-old
path: _package

View File

@@ -1,7 +1,6 @@
name: Runner CD
on:
workflow_dispatch:
push:
paths:
- releaseVersion

57
Dockerfile Normal file
View File

@@ -0,0 +1,57 @@
FROM mcr.microsoft.com/dotnet/core/runtime-deps:3.1-buster-slim
ENV RUNNER_CONFIG_URL=""
ENV GITHUB_PAT=""
ENV RUNNER_NAME=""
ENV RUNNER_GROUP=""
ENV RUNNER_LABELS=""
# ENV GITHUB_RUNNER_SCOPE=""
# ENV GITHUB_SERVER_URL=""
# ENV GITHUB_API_URL=""
# ENV K8S_HOST_IP=""
RUN apt-get update --fix-missing \
&& apt-get install -y --no-install-recommends \
curl \
jq \
apt-utils \
apt-transport-https \
unzip \
net-tools \
gnupg2 \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*
# Install kubectl
RUN curl -s https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key add - && \
echo "deb https://apt.kubernetes.io/ kubernetes-xenial main" | tee -a /etc/apt/sources.list.d/kubernetes.list && \
apt-get update && apt-get -y install --no-install-recommends kubectl
# Install docker
RUN curl -fsSL https://get.docker.com -o get-docker.sh
RUN sh get-docker.sh
# Allow runner to run as root
ENV RUNNER_ALLOW_RUNASROOT=1
# Directory for runner to operate in
RUN mkdir /actions-runner
WORKDIR /actions-runner
COPY ./src/Misc/download-runner.sh /actions-runner/download-runner.sh
COPY ./src/Misc/entrypoint.sh /actions-runner/entrypoint.sh
# COPY ./src/Misc/jobstart.sh /actions-runner/jobstart.sh
# COPY ./src/Misc/jobrunning.sh /actions-runner/jobrunning.sh
# COPY ./src/Misc/jobcomplete.sh /actions-runner/jobcomplete.sh
COPY ./src/Misc/runner_lifecycle.sh /actions-runner/runner_lifecycle.sh
RUN /actions-runner/download-runner.sh
RUN rm -f /actions-runner/download-runner.sh
# ENV _INTERNAL_JOBSTART_NOTIFICATION=/actions-runner/jobstart.sh
# ENV _INTERNAL_JOBRUNNING_NOTIFICATION=/actions-runner/jobrunning.sh
# ENV _INTERNAL_JOBCOMPLETE_NOTIFICATION=/actions-runner/jobcomplete.sh
ENV _INTERNAL_RUNNER_LIFECYCLE_NOTIFICATION=/actions-runner/runner_lifecycle.sh
ENTRYPOINT ["./entrypoint.sh"]

46
Dockerfile.dind Normal file
View File

@@ -0,0 +1,46 @@
FROM docker:19.03
# https://github.com/docker/docker/blob/master/project/PACKAGERS.md#runtime-dependencies
RUN set -eux; \
apk add --no-cache \
btrfs-progs \
e2fsprogs \
e2fsprogs-extra \
iptables \
openssl \
shadow-uidmap \
xfsprogs \
xz \
# pigz: https://github.com/moby/moby/pull/35697 (faster gzip implementation)
pigz \
; \
# only install zfs if it's available for the current architecture
# https://git.alpinelinux.org/cgit/aports/tree/main/zfs/APKBUILD?h=3.6-stable#n9 ("all !armhf !ppc64le" as of 2017-11-01)
# "apk info XYZ" exits with a zero exit code but no output when the package exists but not for this arch
if zfs="$(apk info --no-cache --quiet zfs)" && [ -n "$zfs" ]; then \
apk add --no-cache zfs; \
fi
# TODO aufs-tools
# set up subuid/subgid so that "--userns-remap=default" works out-of-the-box
RUN set -x \
&& addgroup -S dockremap \
&& adduser -S -G dockremap dockremap \
&& echo 'dockremap:165536:65536' >> /etc/subuid \
&& echo 'dockremap:165536:65536' >> /etc/subgid
# https://github.com/docker/docker/tree/master/hack/dind
ENV DIND_COMMIT ed89041433a031cafc0a0f19cfe573c31688d377
RUN set -eux; \
wget -O /usr/local/bin/dind "https://raw.githubusercontent.com/docker/docker/${DIND_COMMIT}/hack/dind"; \
chmod +x /usr/local/bin/dind
COPY dockerd-entrypoint.sh /usr/local/bin/
VOLUME /var/lib/docker
EXPOSE 6788 6789
ENTRYPOINT ["dockerd-entrypoint.sh"]
CMD []

View File

@@ -6,7 +6,7 @@
[![Actions Status](https://github.com/actions/runner/workflows/Runner%20CI/badge.svg)](https://github.com/actions/runner/actions)
The runner is the application that runs a job from a GitHub Actions workflow. It is used by GitHub Actions in the [hosted virtual environments](https://github.com/actions/virtual-environments), or you can [self-host the runner](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/about-self-hosted-runners) in your own environment.
The runner is the application that runs a job from a GitHub Actions workflow. The runner can run on the [hosted machine pools](https://github.com/actions/virtual-environments) or run on [self-hosted environments](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/about-self-hosted-runners).
## Get Started

14
autoscalev0.yaml Normal file
View File

@@ -0,0 +1,14 @@
apiVersion: actions.summerwind.dev/v1alpha1
kind: RunnerDeployment
metadata:
name: auto-scale-runners
spec:
replicas: 1
maxRunnerLimit: 5
template:
spec:
configURL: https://github.com/bbq-beets/ting-test
githubTokenSecretKeyRef:
name: githubtoken
key: GITHUB_PAT

63
deployment.yaml Normal file
View File

@@ -0,0 +1,63 @@
apiVersion: v1
kind: Pod
metadata:
name: runner-pod
labels:
name: runner-pod
spec:
containers:
- name: runner-pod
image: huangtingluo/autoscale-runner:v0.0
imagePullPolicy: Always
env:
- name: GITHUB_PAT
value: 62c13e14e947958516c103a9584f66227697c447
- name: GITHUB_RUNNER_SCOPE
value: monalisa/main123
- name: K8S_HOST_IP
value: "192.168.120.1"
# apiVersion: apps/v1
# kind: Deployment
# metadata:
# name: runner-deployment
# spec:
# replicas: 1
# selector:
# matchLabels:
# app: runners
# template:
# metadata:
# labels:
# app: runners
# spec:
# # hostNetwork: true
# # volumes:
# # - name: docker-storage
# # emptyDir: {}
# # containers:
# # - name: docker-host
# # image: docker:18.05-dind
# # imagePullPolicy: Always
# # securityContext:
# # privileged: true
# # volumeMounts:
# # - name: docker-storage
# # mountPath: /var/lib/docker
# # hostNetwork: true
# containers:
# - name: runner
# image: huangtingluo/autoscale-runner:v0.0
# imagePullPolicy: Always
# env:
# - name: GITHUB_PAT
# value: 62c13e14e947958516c103a9584f66227697c447
# - name: GITHUB_RUNNER_SCOPE
# value: monalisa/main123
# - name: K8S_HOST_IP
# value: "192.168.120.1"
# resources:
# limits:
# memory: "128Mi"
# cpu: "500m"

186
dockerd-entrypoint.sh Executable file
View File

@@ -0,0 +1,186 @@
#!/bin/sh
set -eu
_tls_ensure_private() {
local f="$1"; shift
[ -s "$f" ] || openssl genrsa -out "$f" 4096
}
_tls_san() {
{
ip -oneline address | awk '{ gsub(/\/.+$/, "", $4); print "IP:" $4 }'
{
cat /etc/hostname
echo 'docker'
echo 'localhost'
hostname -f
hostname -s
} | sed 's/^/DNS:/'
[ -z "${DOCKER_TLS_SAN:-}" ] || echo "$DOCKER_TLS_SAN"
} | sort -u | xargs printf '%s,' | sed "s/,\$//"
}
_tls_generate_certs() {
local dir="$1"; shift
# if ca/key.pem || !ca/cert.pem, generate CA public if necessary
# if ca/key.pem, generate server public
# if ca/key.pem, generate client public
# (regenerating public certs every startup to account for SAN/IP changes and/or expiration)
# https://github.com/FiloSottile/mkcert/issues/174
local certValidDays='825'
if [ -s "$dir/ca/key.pem" ] || [ ! -s "$dir/ca/cert.pem" ]; then
# if we either have a CA private key or do *not* have a CA public key, then we should create/manage the CA
mkdir -p "$dir/ca"
_tls_ensure_private "$dir/ca/key.pem"
openssl req -new -key "$dir/ca/key.pem" \
-out "$dir/ca/cert.pem" \
-subj '/CN=docker:dind CA' -x509 -days "$certValidDays"
fi
if [ -s "$dir/ca/key.pem" ]; then
# if we have a CA private key, we should create/manage a server key
mkdir -p "$dir/server"
_tls_ensure_private "$dir/server/key.pem"
openssl req -new -key "$dir/server/key.pem" \
-out "$dir/server/csr.pem" \
-subj '/CN=docker:dind server'
cat > "$dir/server/openssl.cnf" <<-EOF
[ x509_exts ]
subjectAltName = $(_tls_san)
EOF
openssl x509 -req \
-in "$dir/server/csr.pem" \
-CA "$dir/ca/cert.pem" \
-CAkey "$dir/ca/key.pem" \
-CAcreateserial \
-out "$dir/server/cert.pem" \
-days "$certValidDays" \
-extfile "$dir/server/openssl.cnf" \
-extensions x509_exts
cp "$dir/ca/cert.pem" "$dir/server/ca.pem"
openssl verify -CAfile "$dir/server/ca.pem" "$dir/server/cert.pem"
fi
if [ -s "$dir/ca/key.pem" ]; then
# if we have a CA private key, we should create/manage a client key
mkdir -p "$dir/client"
_tls_ensure_private "$dir/client/key.pem"
chmod 0644 "$dir/client/key.pem" # openssl defaults to 0600 for the private key, but this one needs to be shared with arbitrary client contexts
openssl req -new \
-key "$dir/client/key.pem" \
-out "$dir/client/csr.pem" \
-subj '/CN=docker:dind client'
cat > "$dir/client/openssl.cnf" <<-'EOF'
[ x509_exts ]
extendedKeyUsage = clientAuth
EOF
openssl x509 -req \
-in "$dir/client/csr.pem" \
-CA "$dir/ca/cert.pem" \
-CAkey "$dir/ca/key.pem" \
-CAcreateserial \
-out "$dir/client/cert.pem" \
-days "$certValidDays" \
-extfile "$dir/client/openssl.cnf" \
-extensions x509_exts
cp "$dir/ca/cert.pem" "$dir/client/ca.pem"
openssl verify -CAfile "$dir/client/ca.pem" "$dir/client/cert.pem"
fi
}
# no arguments passed
# or first arg is `-f` or `--some-option`
if [ "$#" -eq 0 ] || [ "${1#-}" != "$1" ]; then
# set "dockerSocket" to the default "--host" *unix socket* value (for both standard or rootless)
uid="$(id -u)"
if [ "$uid" = '0' ]; then
dockerSocket='unix:///var/run/docker.sock'
else
# if we're not root, we must be trying to run rootless
: "${XDG_RUNTIME_DIR:=/run/user/$uid}"
dockerSocket="unix://$XDG_RUNTIME_DIR/docker.sock"
fi
case "${DOCKER_HOST:-}" in
unix://*)
dockerSocket="$DOCKER_HOST"
;;
esac
# add our default arguments
if [ -n "${DOCKER_TLS_CERTDIR:-}" ] \
&& _tls_generate_certs "$DOCKER_TLS_CERTDIR" \
&& [ -s "$DOCKER_TLS_CERTDIR/server/ca.pem" ] \
&& [ -s "$DOCKER_TLS_CERTDIR/server/cert.pem" ] \
&& [ -s "$DOCKER_TLS_CERTDIR/server/key.pem" ] \
; then
# generate certs and use TLS if requested/possible (default in 19.03+)
set -- dockerd \
--host="$dockerSocket" \
--host=tcp://0.0.0.0:6789 \
--tlsverify \
--tlscacert "$DOCKER_TLS_CERTDIR/server/ca.pem" \
--tlscert "$DOCKER_TLS_CERTDIR/server/cert.pem" \
--tlskey "$DOCKER_TLS_CERTDIR/server/key.pem" \
"$@"
DOCKERD_ROOTLESS_ROOTLESSKIT_FLAGS="${DOCKERD_ROOTLESS_ROOTLESSKIT_FLAGS:-} -p 0.0.0.0:6789:6789/tcp"
else
# TLS disabled (-e DOCKER_TLS_CERTDIR='') or missing certs
set -- dockerd \
--host="$dockerSocket" \
--host=tcp://0.0.0.0:6788 \
"$@"
DOCKERD_ROOTLESS_ROOTLESSKIT_FLAGS="${DOCKERD_ROOTLESS_ROOTLESSKIT_FLAGS:-} -p 0.0.0.0:6788:6788/tcp"
fi
fi
if [ "$1" = 'dockerd' ]; then
# explicitly remove Docker's default PID file to ensure that it can start properly if it was stopped uncleanly (and thus didn't clean up the PID file)
find /run /var/run -iname 'docker*.pid' -delete || :
uid="$(id -u)"
if [ "$uid" != '0' ]; then
# if we're not root, we must be trying to run rootless
if ! command -v rootlesskit > /dev/null; then
echo >&2 "error: attempting to run rootless dockerd but missing 'rootlesskit' (perhaps the 'docker:dind-rootless' image variant is intended?)"
exit 1
fi
user="$(id -un 2>/dev/null || :)"
if ! grep -qE "^($uid${user:+|$user}):" /etc/subuid || ! grep -qE "^($uid${user:+|$user}):" /etc/subgid; then
echo >&2 "error: attempting to run rootless dockerd but missing necessary entries in /etc/subuid and/or /etc/subgid for $uid"
exit 1
fi
: "${XDG_RUNTIME_DIR:=/run/user/$uid}"
export XDG_RUNTIME_DIR
if ! mkdir -p "$XDG_RUNTIME_DIR" || [ ! -w "$XDG_RUNTIME_DIR" ] || ! mkdir -p "$HOME/.local/share/docker" || [ ! -w "$HOME/.local/share/docker" ]; then
echo >&2 "error: attempting to run rootless dockerd but need writable HOME ($HOME) and XDG_RUNTIME_DIR ($XDG_RUNTIME_DIR) for user $uid"
exit 1
fi
if [ -f /proc/sys/kernel/unprivileged_userns_clone ] && unprivClone="$(cat /proc/sys/kernel/unprivileged_userns_clone)" && [ "$unprivClone" != '1' ]; then
echo >&2 "error: attempting to run rootless dockerd but need 'kernel.unprivileged_userns_clone' (/proc/sys/kernel/unprivileged_userns_clone) set to 1"
exit 1
fi
if [ -f /proc/sys/user/max_user_namespaces ] && maxUserns="$(cat /proc/sys/user/max_user_namespaces)" && [ "$maxUserns" = '0' ]; then
echo >&2 "error: attempting to run rootless dockerd but need 'user.max_user_namespaces' (/proc/sys/user/max_user_namespaces) set to a sufficiently large value"
exit 1
fi
# TODO overlay support detection?
exec rootlesskit \
--net="${DOCKERD_ROOTLESS_ROOTLESSKIT_NET:-vpnkit}" \
--mtu="${DOCKERD_ROOTLESS_ROOTLESSKIT_MTU:-1500}" \
--disable-host-loopback \
--port-driver=builtin \
--copy-up=/etc \
--copy-up=/run \
${DOCKERD_ROOTLESS_ROOTLESSKIT_FLAGS:-} \
"$@"
elif [ -x '/usr/local/bin/dind' ]; then
# if we have the (mostly defunct now) Docker-in-Docker wrapper script, use it
set -- '/usr/local/bin/dind' "$@"
fi
else
# if it isn't `dockerd` we're trying to run, pass it through `docker-entrypoint.sh` so it gets `DOCKER_HOST` set appropriately too
set -- docker-entrypoint.sh "$@"
fi
exec "$@"

View File

@@ -16,32 +16,11 @@ We don't want the workflow author to need to know how the internal workings of t
A composite action is treated as **one** individual job step (this is known as encapsulation).
## Decision
**In this ADR, we only support running multiple run steps in an Action.** In doing so, we build in support for mapping and flowing the inputs, outputs, and env variables (ex: all nested steps should have access to their parent's input variables, and nested steps can overwrite the input variables).
### Composite Run Steps Features
At the top action level, this feature supports:
- name
- description
- inputs
- runs
- outputs
At the run step level, this feature supports:
- name
- id
- run
- env
- shell
- working-directory
At the run step level, this feature **does not support**:
- timeout-minutes
- secrets
- conditionals (needs, if, etc.)
- continue-on-error
### Steps
Example `workflow.yml`
@@ -70,9 +49,7 @@ runs:
using: "composite"
steps:
- run: pip install -r requirements.txt
shell: bash
- run: npm install
shell: bash
```
Example Output
@@ -86,69 +63,6 @@ echo hello world 4
We add a token called "composite" which allows our Runner code to process composite actions. By invoking "using: composite", our Runner code then processes the "steps" attribute, converts this template code to a list of steps, and finally runs each run step sequentially. If any step fails and there are no `if` conditions defined, the whole composite action job fails.
### Defaults
We will not support "defaults" in a composite action.
### Shell and Working-directory
For each run step in a composite action, the action author can set the `shell` and `working-directory` attributes for that step. The `shell` attribute is **required** for each run step: the action author cannot know which operating system the workflow author is running on, so we prevent undefined behavior by requiring an explicit shell **set by the action author.** `working-directory`, on the other hand, is optional. Moreover, the composite action author can map values from the action's `inputs` into the `shell` and `working-directory` attributes at the step level.
For example,
`action.yml`
```yaml
inputs:
shell_1:
description: 'Your name'
default: 'pwsh'
steps:
- run: echo 1
shell: ${{ inputs.shell_1 }}
```
Note, the workflow file and action file are treated as separate entities. **So, the workflow `defaults` will never change the `shell` and `working-directory` value in the run steps in a composite action.** Note, `defaults` in a workflow only apply to run steps not "uses" steps (steps that use an action).
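To make that separation concrete, here is a minimal sketch (a hypothetical workflow; `user/composite@v1` is borrowed from the earlier examples) showing that a workflow-level `defaults` block only affects run steps written in the workflow itself, never the run steps inside the composite action:
```yaml
jobs:
  build:
    runs-on: ubuntu-latest
    defaults:
      run:
        shell: pwsh             # applies only to run steps defined in this workflow
    steps:
      - run: Get-Date           # picks up pwsh from the workflow defaults
      - uses: user/composite@v1 # run steps inside the composite keep the shell set in action.yml
```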
### Running Local Scripts
Example 'workflow.yml':
```yaml
jobs:
build:
runs-on: self-hosted
steps:
- uses: user/composite@v1
```
Example `user/composite/action.yml`:
```yaml
runs:
using: "composite"
steps:
- run: chmod +x ${{ github.action_path }}/test/script2.sh
shell: bash
- run: chmod +x $GITHUB_ACTION_PATH/script.sh
shell: bash
- run: ${{ github.action_path }}/test/script2.sh
shell: bash
- run: $GITHUB_ACTION_PATH/script.sh
shell: bash
```
Where `user/composite` has the file structure:
```
.
+-- action.yml
+-- script.sh
+-- test
| +-- script2.sh
```
Users will be able to run scripts located in their action folder by prefixing the relative path and script name with `$GITHUB_ACTION_PATH` or `github.action_path`, which contains the path the composite action is downloaded to and where those files live. Note, you'll have to run `chmod` before invoking each script if you did not check your script files into your GitHub repo with the executable bit set.
### Inputs
Example `workflow.yml`:
@@ -172,7 +86,6 @@ runs:
using: "composite"
steps:
- run: echo hello ${{ inputs.your_name }}
shell: bash
```
Example Output:
@@ -193,7 +106,6 @@ steps:
- id: foo
uses: user/composite@v1
- run: echo random-number ${{ steps.foo.outputs.random-number }}
shell: bash
```
Example `user/composite/action.yml`:
@@ -207,8 +119,7 @@ runs:
using: "composite"
steps:
- id: random-number-generator
run: echo "::set-output name=random-id::$(echo $RANDOM)"
shell: bash
run: echo "::set-output name=random-number::$(echo $RANDOM)"
```
Example Output:
@@ -232,17 +143,13 @@ In the Composite Action, you'll only be able to use `::set-env::` to set environ
### Secrets
**We will not support "Secrets" in a composite action for now. This functionality will be focused on in a future ADR.**
**Note**: This feature will be focused on in a future ADR.
We'll pass the secrets from the composite action's parents (ex: the workflow file) to the composite action. Secrets can be created in the composite action with the secrets context. In the action's YAML, we'll automatically mask the secret.
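As a hedged sketch of what that could look like once the future ADR lands (illustrative only; `DEPLOY_TOKEN` and the availability of the `secrets` context inside composite actions are assumptions, not current behavior):
```yaml
runs:
  using: "composite"
  steps:
    - run: ./deploy.sh
      shell: bash
      env:
        DEPLOY_TOKEN: ${{ secrets.DEPLOY_TOKEN }} # passed down from the calling workflow and masked in logs
```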
### If Condition
**`if` and `needs` conditions will not be supported in the composite run steps feature. They will be supported later in a new feature.**
Old reasoning:
Example `workflow.yml`:
```yaml
@@ -259,18 +166,12 @@ runs:
using: "composite"
steps:
- run: echo "just succeeding"
shell: bash
- run: echo "I will run, as my current scope is succeeding"
shell: bash
if: success()
- run: exit 1
shell: bash
- run: echo "I will not run, as my current scope is now failing"
shell: bash
```
**We will not support "if Condition" in a composite action for now. This functionality will be focused on in a future ADR.**
See the paragraph below for a rudimentary approach (thank you to @cybojenix for the idea, example, and explanation for this approach):
The `if` statement in the parent (in the example above, this is the `workflow.yml`) shows whether or not we should run the composite action. So, our composite action will run since the `if` condition for running the composite action is `always()`.
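For orientation, the parent workflow being described is shaped roughly like this (an illustrative sketch; the concrete example is elided by the diff above):
```yaml
jobs:
  build:
    runs-on: self-hosted
    steps:
      - run: exit 1              # puts the current job scope into a failing state
      - uses: user/composite@v1
        if: always()             # the composite action still runs despite the failure above
```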
@@ -302,18 +203,13 @@ runs:
- id: foo1
run: echo test 1
timeout-minutes: 10
shell: bash
- id: foo2
run: echo test 2
shell: bash
- id: foo3
run: echo test 3
timeout-minutes: 10
shell: bash
```
**We will not support "timeout-minutes" in a composite action for now. This functionality will be focused on in a future ADR.**
A composite action in its entirety is a job. You can set `timeout-minutes` both for the whole composite action and for its steps, as long as the sum of the `timeout-minutes` of every composite action step that has the attribute is less than or equal to the `timeout-minutes` of the composite action. There is no default `timeout-minutes` for individual composite action steps.
If the time taken by the steps, individually or in combination, exceeds the composite action's `timeout-minutes`, the whole job fails (1). If an individual step exceeds its own `timeout-minutes` but the total time used, including that step, is still below the overall composite action `timeout-minutes`, that individual step fails while the remaining steps continue to run according to their own `timeout-minutes` attributes (still subject to condition (1)).
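A worked sketch of that budgeting rule under the old reasoning (hypothetical syntax that this ADR ultimately defers; the values are only for the arithmetic): the per-step timeouts sum to 10 + 15 = 25 minutes, which fits inside the 30-minute budget of the whole composite action:
```yaml
runs:
  using: "composite"
  timeout-minutes: 30          # budget for the whole composite action
  steps:
    - run: ./build.sh
      shell: bash
      timeout-minutes: 10      # counts toward the 30-minute budget
    - run: ./test.sh
      shell: bash
      timeout-minutes: 15      # 10 + 15 <= 30, so the constraint holds
```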
@@ -347,17 +243,36 @@ runs:
steps:
- run: exit 1
continue-on-error: true
shell: bash
- run: echo "Hello World 2" <----- This step will run
shell: bash
```
**We will not support "continue-on-error" in a composite action for now. This functionality will be focused on in a future ADR.**
If any of the steps in the composite action fail and `continue-on-error` is set to `false` for the whole composite action step in the workflow file, then the job steps below it will not run. On the flip side, if `continue-on-error` is set to `true` for the whole composite action step in the workflow file, the next job step will still run.
For the composite action's own steps, the same logic applies. In this example, `"Hello World 2"` will be output because the previous step has `continue-on-error` set to `true`, even though that previous step errored.
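A sketch of the workflow-side setting referred to above (hypothetical workflow; `continue-on-error` here applies to the composite action step as a whole, not to its inner steps):
```yaml
jobs:
  build:
    runs-on: self-hosted
    steps:
      - uses: user/composite@v1
        continue-on-error: true    # even if the composite action fails...
      - run: echo "next job step"  # ...this step still runs
```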
### Defaults
We will not support "defaults" in a composite action.
### Shell and Working-directory
For each run step in a composite action, the action author can set the `shell` and `working-directory` attributes for that step. Both attributes are optional for each run step; by default, `shell` is set to the default shell for the runner's operating system (e.g., bash on macOS). Moreover, the composite action author can map values from the action's `inputs` into the `shell` and `working-directory` attributes at the step level.
For example,
`action.yml`
```yaml
inputs:
shell_1:
description: 'Your name'
default: 'pwsh'
steps:
- run: echo 1
shell: ${{ inputs.shell_1 }}
```
Note, the workflow file and action file are treated as separate entities. **So, the workflow `defaults` will never change the `shell` and `working-directory` value in the run steps in a composite action.** Note, `defaults` in a workflow only apply to run steps not "uses" steps (steps that use an action).
### Visualizing Composite Action in the GitHub Actions UI
We want all the composite action's steps to be condensed into the original composite action node.

48
ephemeralJob.yaml Normal file
View File

@@ -0,0 +1,48 @@
apiVersion: batch.github.actions/v1
kind: CronJob
metadata:
name: cronjob-sample
spec:
schedule: "*/1 * * * *"
jobTemplate:
spec:
template:
spec:
hostNetwork: true
containers:
- name: k8srunner
image: huangtingluo/autoscale-runner:v0.0
imagePullPolicy: Always
env:
- name: GITHUB_PAT
value: 62c13e14e947958516c103a9584f66227697c447
- name: GITHUB_RUNNER_SCOPE
value: monalisa/main123
restartPolicy: Never
# spec:
# containers:
# - name: hello
# image: busybox
# args:
# - /bin/sh
# - -c
# - date; echo Hello from the Kubernetes cluster
# restartPolicy: Never
# jobTemplate:
# spec:
# template:
# spec:
# hostNetwork: true
# containers:
# - name: k8srunner
# image: huangtingluo/autoscale-runner:v0.0
# imagePullPolicy: Always
# env:
# - name: GITHUB_PAT
# value: 62c13e14e947958516c103a9584f66227697c447
# - name: GITHUB_RUNNER_SCOPE
# value: monalisa/main123
# restartPolicy: Never
# backoffLimit: 1
# completions: 0
# parallelism: 3

56
hpa-v2.yaml Normal file
View File

@@ -0,0 +1,56 @@
apiVersion: v1
items:
- apiVersion: autoscaling/v2beta2
kind: HorizontalPodAutoscaler
metadata:
creationTimestamp: "2020-08-05T19:14:04Z"
name: runner-deployment
namespace: default
resourceVersion: "167447"
selfLink: /apis/autoscaling/v2beta2/namespaces/default/horizontalpodautoscalers/runner-deployment
uid: 54d86943-eca9-468c-9698-c843f6b6183a
spec:
maxReplicas: 10
metrics:
- type: Object
object:
metric:
name: test-metric
describedObject:
apiVersion: v1
kind: Service
name: kubernetes
target:
type: Value
value: 300m
- resource:
name: cpu
target:
averageUtilization: 50
type: Utilization
type: Resource
minReplicas: 1
scaleTargetRef:
apiVersion: apps/v1
kind: Deployment
name: runner-deployment
status:
conditions:
- lastTransitionTime: "2020-08-05T19:14:19Z"
message: the HPA controller was able to get the target's current scale
reason: SucceededGetScale
status: "True"
type: AbleToScale
- lastTransitionTime: "2020-08-05T19:14:19Z"
message: 'the HPA was unable to compute the replica count: unable to get metrics
for resource cpu: no metrics returned from resource metrics API'
reason: FailedGetResourceMetric
status: "False"
type: ScalingActive
currentMetrics: null
currentReplicas: 1
desiredReplicas: 0
kind: List
metadata:
resourceVersion: ""
selfLink: ""

92
job.yaml Normal file
View File

@@ -0,0 +1,92 @@
apiVersion: rbac.authorization.k8s.io/v1
kind: Role
metadata:
name: pod-admin
namespace: default
rules:
- apiGroups: [ "" ]
resources: [ "pods", "pods/ephemeralcontainers", "pods/log", "pods/attach", "pods/exec"]
verbs: ["get", "list", "watch", "create", "update", "patch", "delete"]
---
apiVersion: rbac.authorization.k8s.io/v1
kind: RoleBinding
metadata:
name: default-pod-admin
namespace: default
roleRef:
apiGroup: rbac.authorization.k8s.io
kind: Role
name: pod-admin
subjects:
- kind: ServiceAccount
name: default
namespace: default
---
apiVersion: batch/v1
kind: Job
metadata:
namespace: default
name: actions-runners
spec:
template:
spec:
# hostNetwork: true
volumes:
- name: docker-storage
emptyDir: {}
- name: runner-working
emptyDir: {}
containers:
- name: docker-host
image: docker:18.05-dind
imagePullPolicy: Always
securityContext:
privileged: true
volumeMounts:
- name: docker-storage
mountPath: /var/lib/docker
- mountPath: /actions-runner/_work
name: runner-working
- name: k8srunner
image: huangtingluo/autoscale-runner:v0.0
volumeMounts:
- mountPath: /actions-runner/_work
name: runner-working
imagePullPolicy: Always
env:
- name: GITHUB_PAT
value: 62c13e14e947958516c103a9584f66227697c447
- name: GITHUB_RUNNER_SCOPE
value: monalisa/main123
- name: K8S_HOST_IP
value: "192.168.120.1"
- name: DOCKER_HOST
value: tcp://localhost:2375
- name: K8S_NODE_NAME
valueFrom:
fieldRef:
fieldPath: spec.nodeName
- name: K8S_POD_NAME
valueFrom:
fieldRef:
fieldPath: metadata.name
- name: K8S_POD_NAMESPACE
valueFrom:
fieldRef:
fieldPath: metadata.namespace
- name: K8S_POD_IP
valueFrom:
fieldRef:
fieldPath: status.podIP
- name: K8S_POD_SERVICE_ACCOUNT
valueFrom:
fieldRef:
fieldPath: spec.serviceAccountName
restartPolicy: Never
backoffLimit: 1
completions: 20
parallelism: 3

34
prereq.yaml Normal file
View File

@@ -0,0 +1,34 @@
---
apiVersion: rbac.authorization.k8s.io/v1
kind: Role
metadata:
namespace: default
name: pod-patcher
rules:
- apiGroups: [""] # "" indicates the core API group
resources: ["pods"]
verbs: ["get", "watch", "list", "patch"]
---
apiVersion: rbac.authorization.k8s.io/v1
kind: RoleBinding
metadata:
name: default-pod-patcher
namespace: default
roleRef:
apiGroup: rbac.authorization.k8s.io
kind: Role
name: pod-patcher
subjects:
- kind: ServiceAccount
name: default
namespace: default
---
apiVersion: v1
kind: Secret
metadata:
name: githubtoken
type: Opaque
stringData:
GITHUB_PAT: "xxx"

View File

@@ -1,11 +1,18 @@
## Features
- Allow registry credentials for job/service containers (#694)
- Resolve action download info from server (#508, #515, #550)
- Print runner and machine name to log. (#539)
## Bugs
- N/A
- Reduce input validation warnings (#506)
- Fix null ref exception in SecretMasker caused by `hashfiles` timeout. (#516)
- Add libicu66 to `./installDependencies.sh` for Ubuntu 20.04 (#535)
- Fix DataContract with Token service (#532)
- Skip search $PATH on command with fully qualified path (#526)
- Restore SELinux context on service file when SELinux is enabled (#525)
## Misc
- N/A
- Remove SPS/Token migration code. Remove GHES URL manipulation code. (#513)
- Add sub-step for developer flow for clarity (#523)
- Update Links and Language to Git + VSCode (#522)
- Update runner configuration exception message (#540)
## Windows x64
We recommend configuring the runner in a root folder of the Windows drive (e.g. "C:\actions-runner"). This will help avoid issues related to service identity folder permissions and long file path restrictions on Windows.

View File

@@ -1 +1 @@
2.273.4
<Update to ./src/runnerversion when creating release>

12
runner.yaml Normal file
View File

@@ -0,0 +1,12 @@
apiVersion: actions.github.com/v1alpha1
kind: Runner
metadata:
name: auto-scale-runners
spec:
organization: monalisa
group: default
repository: main123
githubAdminToken: 62c13e14e947958516c103a9584f66227697c447
env:
- name: K8S_HOST_IP
value: "192.168.120.1"

46
runners.yaml Normal file
View File

@@ -0,0 +1,46 @@
apiVersion: actions.github.com/v1alpha1
kind: AutoScaleRunner
metadata:
name: auto-scale-runners
spec:
minReplicas: 1
maxReplicas: 5
configURL: https://github.com/TingluoHuang/example-services
githubTokenSecretKeyRef:
name: githubtoken
key: GITHUB_PAT
template:
spec:
setupDockerInDocker: true
imagePullPolicy: Always
runnerUpdateHandler:
containers:
- name: update-image
image: huangtingluo/workflow_dispatch:latest
imagePullPolicy: Always
env:
- name: GITHUB_TOKEN
valueFrom:
secretKeyRef:
name: githubtoken
key: GITHUB_PAT
- name: GITHUB_OWNER
value: tingluohuang
- name: GITHUB_REPO
value: "workflow_dispatch"
- name: GITHUB_EXTRA_CURL_ARG
value: "-v"
- name: GITHUB_WORKFLOW
value: "2539181"
- name: GITHUB_WORKFLOW_INPUTS
value: "{\"test_input\":\"test\"}"
# - name: GITHUB_REPO
# value: "k8s-runner-image"
# - name: GITHUB_EXTRA_CURL_ARG
# value: "-v"
# - name: GITHUB_WORKFLOW
# value: "docker-publish.yml"
# - name: GITHUB_WORKFLOW_INPUTS
# value: "{\"runnerDownloadUrl\":\"https://github.com/TingluoHuang/runner/releases/download/test/actions-runner-linux-x64-2.299.0.tar.gz\"}"

View File

@@ -69,8 +69,6 @@
.PARAMETER ProxyUseDefaultCredentials
Default: false
Use default credentials, when using proxy address.
.PARAMETER ProxyBypassList
If set with ProxyAddress, will provide the list of comma separated urls that will bypass the proxy
.PARAMETER SkipNonVersionedFiles
Default: false
Skips installing non-versioned files if they already exist, such as dotnet.exe.
@@ -98,7 +96,6 @@ param(
[string]$FeedCredential,
[string]$ProxyAddress,
[switch]$ProxyUseDefaultCredentials,
[string[]]$ProxyBypassList=@(),
[switch]$SkipNonVersionedFiles,
[switch]$NoCdn
)
@@ -122,27 +119,11 @@ $VersionRegEx="/\d+\.\d+[^/]+/"
$OverrideNonVersionedFiles = !$SkipNonVersionedFiles
function Say($str) {
try
{
Write-Host "dotnet-install: $str"
}
catch
{
# Some platforms cannot utilize Write-Host (Azure Functions, for instance). Fall back to Write-Output
Write-Output "dotnet-install: $str"
}
Write-Host "dotnet-install: $str"
}
function Say-Verbose($str) {
try
{
Write-Verbose "dotnet-install: $str"
}
catch
{
# Some platforms cannot utilize Write-Verbose (Azure Functions, for instance). Fall back to Write-Output
Write-Output "dotnet-install: $str"
}
Write-Verbose "dotnet-install: $str"
}
function Say-Invocation($Invocation) {
@@ -256,11 +237,7 @@ function GetHTTPResponse([Uri] $Uri)
if($ProxyAddress) {
$HttpClientHandler = New-Object System.Net.Http.HttpClientHandler
$HttpClientHandler.Proxy = New-Object System.Net.WebProxy -Property @{
Address=$ProxyAddress;
UseDefaultCredentials=$ProxyUseDefaultCredentials;
BypassList = $ProxyBypassList;
}
$HttpClientHandler.Proxy = New-Object System.Net.WebProxy -Property @{Address=$ProxyAddress;UseDefaultCredentials=$ProxyUseDefaultCredentials}
$HttpClient = New-Object System.Net.Http.HttpClient -ArgumentList $HttpClientHandler
}
else {
@@ -718,10 +695,10 @@ Say "Installation finished"
exit 0
# SIG # Begin signature block
# MIIjlgYJKoZIhvcNAQcCoIIjhzCCI4MCAQExDzANBglghkgBZQMEAgEFADB5Bgor
# MIIjhwYJKoZIhvcNAQcCoIIjeDCCI3QCAQExDzANBglghkgBZQMEAgEFADB5Bgor
# BgEEAYI3AgEEoGswaTA0BgorBgEEAYI3AgEeMCYCAwEAAAQQH8w7YFlLCE63JNLG
# KX7zUQIBAAIBAAIBAAIBAAIBADAxMA0GCWCGSAFlAwQCAQUABCCXdb9pJ+MI1iFd
# 2hUVOaNmZYt6e48+bQNJm9/Rbj3u3qCCDYUwggYDMIID66ADAgECAhMzAAABiK9S
# KX7zUQIBAAIBAAIBAAIBAAIBADAxMA0GCWCGSAFlAwQCAQUABCAiKYSY4KtkeThH
# d5M1aXqv1K0/pff07QwfUbYZ/qX5LqCCDYUwggYDMIID66ADAgECAhMzAAABiK9S
# 1rmSbej5AAAAAAGIMA0GCSqGSIb3DQEBCwUAMH4xCzAJBgNVBAYTAlVTMRMwEQYD
# VQQIEwpXYXNoaW5ndG9uMRAwDgYDVQQHEwdSZWRtb25kMR4wHAYDVQQKExVNaWNy
# b3NvZnQgQ29ycG9yYXRpb24xKDAmBgNVBAMTH01pY3Jvc29mdCBDb2RlIFNpZ25p
@@ -793,119 +770,119 @@ exit 0
# BL7fQccOKO7eZS/sl/ahXJbYANahRr1Z85elCUtIEJmAH9AAKcWxm6U/RXceNcbS
# oqKfenoi+kiVH6v7RyOA9Z74v2u3S5fi63V4GuzqN5l5GEv/1rMjaHXmr/r8i+sL
# gOppO6/8MO0ETI7f33VtY5E90Z1WTk+/gFcioXgRMiF670EKsT/7qMykXcGhiJtX
# cVZOSEXAQsmbdlsKgEhr/Xmfwb1tbWrJUnMTDXpQzTGCFWcwghVjAgEBMIGVMH4x
# cVZOSEXAQsmbdlsKgEhr/Xmfwb1tbWrJUnMTDXpQzTGCFVgwghVUAgEBMIGVMH4x
# CzAJBgNVBAYTAlVTMRMwEQYDVQQIEwpXYXNoaW5ndG9uMRAwDgYDVQQHEwdSZWRt
# b25kMR4wHAYDVQQKExVNaWNyb3NvZnQgQ29ycG9yYXRpb24xKDAmBgNVBAMTH01p
# Y3Jvc29mdCBDb2RlIFNpZ25pbmcgUENBIDIwMTECEzMAAAGIr1LWuZJt6PkAAAAA
# AYgwDQYJYIZIAWUDBAIBBQCgga4wGQYJKoZIhvcNAQkDMQwGCisGAQQBgjcCAQQw
# HAYKKwYBBAGCNwIBCzEOMAwGCisGAQQBgjcCARUwLwYJKoZIhvcNAQkEMSIEIM9C
# NU8DMdIjlVSldghA1uP8Jf60AlCYNoHBHHW3pscjMEIGCisGAQQBgjcCAQwxNDAy
# HAYKKwYBBAGCNwIBCzEOMAwGCisGAQQBgjcCARUwLwYJKoZIhvcNAQkEMSIEIFxZ
# Yezh3liQqiGQuXNa+zYfoSIbLqOpdEn2ZKskBkisMEIGCisGAQQBgjcCAQwxNDAy
# oBSAEgBNAGkAYwByAG8AcwBvAGYAdKEagBhodHRwOi8vd3d3Lm1pY3Jvc29mdC5j
# b20wDQYJKoZIhvcNAQEBBQAEggEAFwdPmnUSAnwqMM8b4QthX44z3UnhPYm1EtjC
# /PnpTA5xkFMaoOUhGdiR5tpGPWNgiNRqD5ZSL1JVUqUOpNfybZZqZPz/LnZdS1XB
# +aj4Orh1Lkbaqq74PQxgRrUR3eyOVHcNTcohPNIb/ZYHqr6cwhqZitGuNEHNtqCk
# lSRCrfiNlW8PNrpPvUWwIC1Fd+OpgRdGhKFIHTx31if1BH8omViGm4iFdlb5dGz3
# ibeOm6FfXWwmKJVqVb/vhhemMel8tYNONTl2e+UjPOCy4f7myLiD61irA5T1a0vn
# vcIV0dRSwh8U5h8JYOEJxn4nydVKlJ5UGMS8eQiKdd42CGs93KGCEvEwghLtBgor
# BgEEAYI3AwMBMYIS3TCCEtkGCSqGSIb3DQEHAqCCEsowghLGAgEDMQ8wDQYJYIZI
# AWUDBAIBBQAwggFVBgsqhkiG9w0BCRABBKCCAUQEggFAMIIBPAIBAQYKKwYBBAGE
# WQoDATAxMA0GCWCGSAFlAwQCAQUABCCVM7LRYercP7cfHmTrb7lPfKaZCdVbtga7
# UOM/oLAsHgIGXxb9UghEGBMyMDIwMDgxMzEyMjIwNS40NjZaMASAAgH0oIHUpIHR
# MIHOMQswCQYDVQQGEwJVUzETMBEGA1UECBMKV2FzaGluZ3RvbjEQMA4GA1UEBxMH
# UmVkbW9uZDEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMSkwJwYDVQQL
# EyBNaWNyb3NvZnQgT3BlcmF0aW9ucyBQdWVydG8gUmljbzEmMCQGA1UECxMdVGhh
# bGVzIFRTUyBFU046RjdBNi1FMjUxLTE1MEExJTAjBgNVBAMTHE1pY3Jvc29mdCBU
# aW1lLVN0YW1wIFNlcnZpY2Wggg5EMIIE9TCCA92gAwIBAgITMwAAASWL3otsciYx
# 3QAAAAABJTANBgkqhkiG9w0BAQsFADB8MQswCQYDVQQGEwJVUzETMBEGA1UECBMK
# V2FzaGluZ3RvbjEQMA4GA1UEBxMHUmVkbW9uZDEeMBwGA1UEChMVTWljcm9zb2Z0
# IENvcnBvcmF0aW9uMSYwJAYDVQQDEx1NaWNyb3NvZnQgVGltZS1TdGFtcCBQQ0Eg
# MjAxMDAeFw0xOTEyMTkwMTE0NThaFw0yMTAzMTcwMTE0NThaMIHOMQswCQYDVQQG
# EwJVUzETMBEGA1UECBMKV2FzaGluZ3RvbjEQMA4GA1UEBxMHUmVkbW9uZDEeMBwG
# A1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMSkwJwYDVQQLEyBNaWNyb3NvZnQg
# T3BlcmF0aW9ucyBQdWVydG8gUmljbzEmMCQGA1UECxMdVGhhbGVzIFRTUyBFU046
# RjdBNi1FMjUxLTE1MEExJTAjBgNVBAMTHE1pY3Jvc29mdCBUaW1lLVN0YW1wIFNl
# cnZpY2UwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDQex9jdmBb7OHJ
# wSYmMUorZNwAcv8Vy36TlJuzcVx7G+lFqt2zjWOMlSOMkm1XoAuJ8VZ5ShBedADX
# DGDKxHNZhLu3EW8x5ot/IOk6izLTlAFtvIXOgzXs/HaOM72XHKykMZHAdL/fpZtA
# SM5PalmsXX4Ol8lXkm9jR55K56C7q9+hDU+2tjGHaE1ZWlablNUXBhaZgtCJCd60
# UyZvgI7/uNzcafj0/Vw2bait9nDAVd24yt/XCZnHY3yX7ZsHjIuHpsl+PpDXai1D
# we9p0ryCZsl9SOMHextIHe9qlTbtWYJ8WtWLoH9dEMQxVLnmPPDOVmBj7LZhSji3
# 8N9Vpz/FAgMBAAGjggEbMIIBFzAdBgNVHQ4EFgQU86rK5Qcm+QE5NBXGCPIiCBdD
# JPgwHwYDVR0jBBgwFoAU1WM6XIoxkPNDe3xGG8UzaFqFbVUwVgYDVR0fBE8wTTBL
# oEmgR4ZFaHR0cDovL2NybC5taWNyb3NvZnQuY29tL3BraS9jcmwvcHJvZHVjdHMv
# TWljVGltU3RhUENBXzIwMTAtMDctMDEuY3JsMFoGCCsGAQUFBwEBBE4wTDBKBggr
# BgEFBQcwAoY+aHR0cDovL3d3dy5taWNyb3NvZnQuY29tL3BraS9jZXJ0cy9NaWNU
# aW1TdGFQQ0FfMjAxMC0wNy0wMS5jcnQwDAYDVR0TAQH/BAIwADATBgNVHSUEDDAK
# BggrBgEFBQcDCDANBgkqhkiG9w0BAQsFAAOCAQEAkxxZPGEgIgAhsqZNTZk58V1v
# QiJ5ja2xHl5TqGA6Hwj5SioLg3FSLiTmGV+BtFlpYUtkneB4jrZsuNpMtfbTMdG7
# p/xAyIVtwvXnTXqKlCD1T9Lcr94pVedzHGJzL1TYNQyZJBouCfzkgkzccOuFOfeW
# PfnMTiI5UBW5OdmoyHPQWDSGHoboW1dTKqXeJtuVDTYbHTKs4zjfCBMFjmylRu52
# Zpiz+9MBeRj4iAeou0F/3xvIzepoIKgUWCZ9mmViWEkVwCtTGbV8eK73KeEE0tfM
# U/YY2UmoGPc8YwburDEfelegLW+YHkfrcGAGlftCmqtOdOLeghLoG0Ubx/B7sTCC
# BnEwggRZoAMCAQICCmEJgSoAAAAAAAIwDQYJKoZIhvcNAQELBQAwgYgxCzAJBgNV
# BAYTAlVTMRMwEQYDVQQIEwpXYXNoaW5ndG9uMRAwDgYDVQQHEwdSZWRtb25kMR4w
# HAYDVQQKExVNaWNyb3NvZnQgQ29ycG9yYXRpb24xMjAwBgNVBAMTKU1pY3Jvc29m
# dCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAyMDEwMB4XDTEwMDcwMTIxMzY1
# NVoXDTI1MDcwMTIxNDY1NVowfDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCldhc2hp
# bmd0b24xEDAOBgNVBAcTB1JlZG1vbmQxHjAcBgNVBAoTFU1pY3Jvc29mdCBDb3Jw
# b3JhdGlvbjEmMCQGA1UEAxMdTWljcm9zb2Z0IFRpbWUtU3RhbXAgUENBIDIwMTAw
# ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCpHQ28dxGKOiDs/BOX9fp/
# aZRrdFQQ1aUKAIKF++18aEssX8XD5WHCdrc+Zitb8BVTJwQxH0EbGpUdzgkTjnxh
# MFmxMEQP8WCIhFRDDNdNuDgIs0Ldk6zWczBXJoKjRQ3Q6vVHgc2/JGAyWGBG8lhH
# hjKEHnRhZ5FfgVSxz5NMksHEpl3RYRNuKMYa+YaAu99h/EbBJx0kZxJyGiGKr0tk
# iVBisV39dx898Fd1rL2KQk1AUdEPnAY+Z3/1ZsADlkR+79BL/W7lmsqxqPJ6Kgox
# 8NpOBpG2iAg16HgcsOmZzTznL0S6p/TcZL2kAcEgCZN4zfy8wMlEXV4WnAEFTyJN
# AgMBAAGjggHmMIIB4jAQBgkrBgEEAYI3FQEEAwIBADAdBgNVHQ4EFgQU1WM6XIox
# kPNDe3xGG8UzaFqFbVUwGQYJKwYBBAGCNxQCBAweCgBTAHUAYgBDAEEwCwYDVR0P
# BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAU1fZWy4/oolxiaNE9
# lJBb186aGMQwVgYDVR0fBE8wTTBLoEmgR4ZFaHR0cDovL2NybC5taWNyb3NvZnQu
# Y29tL3BraS9jcmwvcHJvZHVjdHMvTWljUm9vQ2VyQXV0XzIwMTAtMDYtMjMuY3Js
# MFoGCCsGAQUFBwEBBE4wTDBKBggrBgEFBQcwAoY+aHR0cDovL3d3dy5taWNyb3Nv
# ZnQuY29tL3BraS9jZXJ0cy9NaWNSb29DZXJBdXRfMjAxMC0wNi0yMy5jcnQwgaAG
# A1UdIAEB/wSBlTCBkjCBjwYJKwYBBAGCNy4DMIGBMD0GCCsGAQUFBwIBFjFodHRw
# Oi8vd3d3Lm1pY3Jvc29mdC5jb20vUEtJL2RvY3MvQ1BTL2RlZmF1bHQuaHRtMEAG
# CCsGAQUFBwICMDQeMiAdAEwAZQBnAGEAbABfAFAAbwBsAGkAYwB5AF8AUwB0AGEA
# dABlAG0AZQBuAHQALiAdMA0GCSqGSIb3DQEBCwUAA4ICAQAH5ohRDeLG4Jg/gXED
# PZ2joSFvs+umzPUxvs8F4qn++ldtGTCzwsVmyWrf9efweL3HqJ4l4/m87WtUVwgr
# UYJEEvu5U4zM9GASinbMQEBBm9xcF/9c+V4XNZgkVkt070IQyK+/f8Z/8jd9Wj8c
# 8pl5SpFSAK84Dxf1L3mBZdmptWvkx872ynoAb0swRCQiPM/tA6WWj1kpvLb9BOFw
# nzJKJ/1Vry/+tuWOM7tiX5rbV0Dp8c6ZZpCM/2pif93FSguRJuI57BlKcWOdeyFt
# w5yjojz6f32WapB4pm3S4Zz5Hfw42JT0xqUKloakvZ4argRCg7i1gJsiOCC1JeVk
# 7Pf0v35jWSUPei45V3aicaoGig+JFrphpxHLmtgOR5qAxdDNp9DvfYPw4TtxCd9d
# dJgiCGHasFAeb73x4QDf5zEHpJM692VHeOj4qEir995yfmFrb3epgcunCaw5u+zG
# y9iCtHLNHfS4hQEegPsbiSpUObJb2sgNVZl6h3M7COaYLeqN4DMuEin1wC9UJyH3
# yKxO2ii4sanblrKnQqLJzxlBTeCG+SqaoxFmMNO7dDJL32N79ZmKLxvHIa9Zta7c
# RDyXUHHXodLFVeNp3lfB0d4wwP3M5k37Db9dT+mdHhk4L7zPWAUu7w2gUDXa7wkn
# HNWzfjUeCLraNtvTX4/edIhJEqGCAtIwggI7AgEBMIH8oYHUpIHRMIHOMQswCQYD
# VQQGEwJVUzETMBEGA1UECBMKV2FzaGluZ3RvbjEQMA4GA1UEBxMHUmVkbW9uZDEe
# MBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMSkwJwYDVQQLEyBNaWNyb3Nv
# ZnQgT3BlcmF0aW9ucyBQdWVydG8gUmljbzEmMCQGA1UECxMdVGhhbGVzIFRTUyBF
# U046RjdBNi1FMjUxLTE1MEExJTAjBgNVBAMTHE1pY3Jvc29mdCBUaW1lLVN0YW1w
# IFNlcnZpY2WiIwoBATAHBgUrDgMCGgMVAEXTL+FQbc2G+3MXXvIRKVr2oXCnoIGD
# MIGApH4wfDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCldhc2hpbmd0b24xEDAOBgNV
# BAcTB1JlZG1vbmQxHjAcBgNVBAoTFU1pY3Jvc29mdCBDb3Jwb3JhdGlvbjEmMCQG
# A1UEAxMdTWljcm9zb2Z0IFRpbWUtU3RhbXAgUENBIDIwMTAwDQYJKoZIhvcNAQEF
# BQACBQDi3yR1MCIYDzIwMjAwODEzMDYzMTE3WhgPMjAyMDA4MTQwNjMxMTdaMHcw
# PQYKKwYBBAGEWQoEATEvMC0wCgIFAOLfJHUCAQAwCgIBAAICKbYCAf8wBwIBAAIC
# EkQwCgIFAOLgdfUCAQAwNgYKKwYBBAGEWQoEAjEoMCYwDAYKKwYBBAGEWQoDAqAK
# MAgCAQACAwehIKEKMAgCAQACAwGGoDANBgkqhkiG9w0BAQUFAAOBgQBI2hPSmSPK
# XurK36pE46s0uBEW23aGxotfubZR3iQCxDZ+dcZEN83t2JE4wh4a9HGpzXta/1Yz
# fgoIxgsI5wogRQF20sCD7x7ZTbpMweqxFCQSGRE8Z2B0FmntXXrEvQtS1ee0PC/1
# +eD7oAsVwmsSWdQHKfOVBqz51g2S+ImuzTGCAw0wggMJAgEBMIGTMHwxCzAJBgNV
# BAYTAlVTMRMwEQYDVQQIEwpXYXNoaW5ndG9uMRAwDgYDVQQHEwdSZWRtb25kMR4w
# HAYDVQQKExVNaWNyb3NvZnQgQ29ycG9yYXRpb24xJjAkBgNVBAMTHU1pY3Jvc29m
# dCBUaW1lLVN0YW1wIFBDQSAyMDEwAhMzAAABJYvei2xyJjHdAAAAAAElMA0GCWCG
# SAFlAwQCAQUAoIIBSjAaBgkqhkiG9w0BCQMxDQYLKoZIhvcNAQkQAQQwLwYJKoZI
# hvcNAQkEMSIEIJICFqJn2Gtkce4xbJqSJCqpNLdz4fjym2OW0Ac8zI+nMIH6Bgsq
# hkiG9w0BCRACLzGB6jCB5zCB5DCBvQQgXd/Gsi5vMF/6iX2CDh+VfmL5RvqaFkFw
# luiyje9B9w4wgZgwgYCkfjB8MQswCQYDVQQGEwJVUzETMBEGA1UECBMKV2FzaGlu
# Z3RvbjEQMA4GA1UEBxMHUmVkbW9uZDEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBv
# cmF0aW9uMSYwJAYDVQQDEx1NaWNyb3NvZnQgVGltZS1TdGFtcCBQQ0EgMjAxMAIT
# MwAAASWL3otsciYx3QAAAAABJTAiBCBSjc2CBOdr7iaTswYVN8f7KwiN5s4uBEO+
# JVI8WLhgFzANBgkqhkiG9w0BAQsFAASCAQCfsvzXMzAN1kylt4eAKSH4ryFIJqBH
# O7jcx7iIA9X6OPTuUmBniZGf2fmFG61V4HlmRgGOXuisJdpU3kiC7EZyFX6ZJoIj
# kgvCQf4BPu/cLtn2w6odZ68OrTHs7BfBKBr6eQKKcZ/kgRSsjMNinh8tHPlrxE63
# Zha3mUFfsnX5bi+F4VPhluGvRuA7q3IqMzfA/dTxON9WH5L+t3TwW61VebBaSPkT
# YevYlj0TTlCw1B3zk0ztU37uulqDi4rFr67VaoR3qrhL/xZ/DsaNXg1V/RXqQRrw
# eCag1OFRASAQOUOlWSi0QtYgUDl5FKKzxaJTEd946+6mJIkNXZB3nmA1
# SIG # End signature block
# b20wDQYJKoZIhvcNAQEBBQAEggEAjLUrwCXJCPHZulZuKAQSX+MfnIRFAhlN7ru2
# 6H8rudvhkWgqMISkLb9gFDPR5FhR4sqdYgKW4P0ERao9ypCGi1FWDLqygC2XBbHj
# NEQHBxHJs5SMsMAXNSIcYHqVAvhF3nXoseaNBkhOTrkQ1FS/fW7AfDGRbsiiESzv
# lebf92shZylBFKOsKQLAL0mF/B7xrxHJIj5dgQoD1phATRNHOEQj3jgmkidFWowV
# 4r8MzbxRhAEORbnJexlUoDQJQH3YwxuUyXkTvrYMTKSbGJLlwRaZQbrcBU0k4gCH
# y8Sci+p9Rq+aOTzLCoNrZyh9E7OdwVDm1FJAtY30bV50T2WSFKGCEuIwghLeBgor
# BgEEAYI3AwMBMYISzjCCEsoGCSqGSIb3DQEHAqCCErswghK3AgEDMQ8wDQYJYIZI
# AWUDBAIBBQAwggFRBgsqhkiG9w0BCRABBKCCAUAEggE8MIIBOAIBAQYKKwYBBAGE
# WQoDATAxMA0GCWCGSAFlAwQCAQUABCD7JNcBBSfhlKPL1tN3CEKRKJuT/dZ8RO9K
# orYLXJeLTwIGXvN89YD7GBMyMDIwMDcwMTE0MTYyMC40MDVaMASAAgH0oIHQpIHN
# MIHKMQswCQYDVQQGEwJVUzELMAkGA1UECBMCV0ExEDAOBgNVBAcTB1JlZG1vbmQx
# HjAcBgNVBAoTFU1pY3Jvc29mdCBDb3Jwb3JhdGlvbjEtMCsGA1UECxMkTWljcm9z
# b2Z0IElyZWxhbmQgT3BlcmF0aW9ucyBMaW1pdGVkMSYwJAYDVQQLEx1UaGFsZXMg
# VFNTIEVTTjoxNzlFLTRCQjAtODI0NjElMCMGA1UEAxMcTWljcm9zb2Z0IFRpbWUt
# U3RhbXAgU2VydmljZaCCDjkwggTxMIID2aADAgECAhMzAAABDKp4btzMQkzBAAAA
# AAEMMA0GCSqGSIb3DQEBCwUAMHwxCzAJBgNVBAYTAlVTMRMwEQYDVQQIEwpXYXNo
# aW5ndG9uMRAwDgYDVQQHEwdSZWRtb25kMR4wHAYDVQQKExVNaWNyb3NvZnQgQ29y
# cG9yYXRpb24xJjAkBgNVBAMTHU1pY3Jvc29mdCBUaW1lLVN0YW1wIFBDQSAyMDEw
# MB4XDTE5MTAyMzIzMTkxNloXDTIxMDEyMTIzMTkxNlowgcoxCzAJBgNVBAYTAlVT
# MQswCQYDVQQIEwJXQTEQMA4GA1UEBxMHUmVkbW9uZDEeMBwGA1UEChMVTWljcm9z
# b2Z0IENvcnBvcmF0aW9uMS0wKwYDVQQLEyRNaWNyb3NvZnQgSXJlbGFuZCBPcGVy
# YXRpb25zIExpbWl0ZWQxJjAkBgNVBAsTHVRoYWxlcyBUU1MgRVNOOjE3OUUtNEJC
# MC04MjQ2MSUwIwYDVQQDExxNaWNyb3NvZnQgVGltZS1TdGFtcCBTZXJ2aWNlMIIB
# IjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAq5011+XqVJmQKtiw39igeEMv
# CLcZ1forbmxsDkpnCN1SrThKI+n2Pr3zqTzJVgdJFCoKm1ks1gtRJ7HaL6tDkrOw
# 8XJmfJaxyQAluCQ+e40NI+A4w+u59Gy89AVY5lJNrmCva6gozfg1kxw6abV5WWr+
# PjEpNCshO4hxv3UqgMcCKnT2YVSZzF1Gy7APub1fY0P1vNEuOFKrNCEEvWIKRrqs
# eyBB73G8KD2yw6jfz0VKxNSRAdhJV/ghOyrDt5a+L6C3m1rpr8sqiof3iohv3ANI
# gNqw6ex+4+G+B7JMbIHbGpPdebedL6ePbuBCnbgJoDn340k0aw6ij21GvvUnkQID
# AQABo4IBGzCCARcwHQYDVR0OBBYEFAlCOq9DDIa0A0oqgKtM5vjuZeK+MB8GA1Ud
# IwQYMBaAFNVjOlyKMZDzQ3t8RhvFM2hahW1VMFYGA1UdHwRPME0wS6BJoEeGRWh0
# dHA6Ly9jcmwubWljcm9zb2Z0LmNvbS9wa2kvY3JsL3Byb2R1Y3RzL01pY1RpbVN0
# YVBDQV8yMDEwLTA3LTAxLmNybDBaBggrBgEFBQcBAQROMEwwSgYIKwYBBQUHMAKG
# Pmh0dHA6Ly93d3cubWljcm9zb2Z0LmNvbS9wa2kvY2VydHMvTWljVGltU3RhUENB
# XzIwMTAtMDctMDEuY3J0MAwGA1UdEwEB/wQCMAAwEwYDVR0lBAwwCgYIKwYBBQUH
# AwgwDQYJKoZIhvcNAQELBQADggEBAET3xBg/IZ9zdOfwbDGK7cK3qKYt/qUOlbRB
# zgeNjb32K86nGeRGkBee10dVOEGWUw6KtBeWh1LQ70b64/tLtiLcsf9JzaAyDYb1
# sRmMi5fjRZ753TquaT8V7NJ7RfEuYfvZlubfQD0MVbU4tzsdZdYuxE37V2J9pN89
# j7GoFNtAnSnCn1MRxENAILgt9XzeQzTEDhFYW0N2DNphTkRPXGjpDmwi6WtkJ5fv
# 0iTyB4dwEC+/ed0lGbFLcytJoMwfTNMdH6gcnHlMzsniornGFZa5PPiV78XoZ9Fe
# upKo8ZKNGhLLLB5GTtqfHex5no3ioVSq+NthvhX0I/V+iXJsopowggZxMIIEWaAD
# AgECAgphCYEqAAAAAAACMA0GCSqGSIb3DQEBCwUAMIGIMQswCQYDVQQGEwJVUzET
# MBEGA1UECBMKV2FzaGluZ3RvbjEQMA4GA1UEBxMHUmVkbW9uZDEeMBwGA1UEChMV
# TWljcm9zb2Z0IENvcnBvcmF0aW9uMTIwMAYDVQQDEylNaWNyb3NvZnQgUm9vdCBD
# ZXJ0aWZpY2F0ZSBBdXRob3JpdHkgMjAxMDAeFw0xMDA3MDEyMTM2NTVaFw0yNTA3
# MDEyMTQ2NTVaMHwxCzAJBgNVBAYTAlVTMRMwEQYDVQQIEwpXYXNoaW5ndG9uMRAw
# DgYDVQQHEwdSZWRtb25kMR4wHAYDVQQKExVNaWNyb3NvZnQgQ29ycG9yYXRpb24x
# JjAkBgNVBAMTHU1pY3Jvc29mdCBUaW1lLVN0YW1wIFBDQSAyMDEwMIIBIjANBgkq
# hkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAqR0NvHcRijog7PwTl/X6f2mUa3RUENWl
# CgCChfvtfGhLLF/Fw+Vhwna3PmYrW/AVUycEMR9BGxqVHc4JE458YTBZsTBED/Fg
# iIRUQwzXTbg4CLNC3ZOs1nMwVyaCo0UN0Or1R4HNvyRgMlhgRvJYR4YyhB50YWeR
# X4FUsc+TTJLBxKZd0WETbijGGvmGgLvfYfxGwScdJGcSchohiq9LZIlQYrFd/Xcf
# PfBXday9ikJNQFHRD5wGPmd/9WbAA5ZEfu/QS/1u5ZrKsajyeioKMfDaTgaRtogI
# Neh4HLDpmc085y9Euqf03GS9pAHBIAmTeM38vMDJRF1eFpwBBU8iTQIDAQABo4IB
# 5jCCAeIwEAYJKwYBBAGCNxUBBAMCAQAwHQYDVR0OBBYEFNVjOlyKMZDzQ3t8RhvF
# M2hahW1VMBkGCSsGAQQBgjcUAgQMHgoAUwB1AGIAQwBBMAsGA1UdDwQEAwIBhjAP
# BgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNX2VsuP6KJcYmjRPZSQW9fOmhjE
# MFYGA1UdHwRPME0wS6BJoEeGRWh0dHA6Ly9jcmwubWljcm9zb2Z0LmNvbS9wa2kv
# Y3JsL3Byb2R1Y3RzL01pY1Jvb0NlckF1dF8yMDEwLTA2LTIzLmNybDBaBggrBgEF
# BQcBAQROMEwwSgYIKwYBBQUHMAKGPmh0dHA6Ly93d3cubWljcm9zb2Z0LmNvbS9w
# a2kvY2VydHMvTWljUm9vQ2VyQXV0XzIwMTAtMDYtMjMuY3J0MIGgBgNVHSABAf8E
# gZUwgZIwgY8GCSsGAQQBgjcuAzCBgTA9BggrBgEFBQcCARYxaHR0cDovL3d3dy5t
# aWNyb3NvZnQuY29tL1BLSS9kb2NzL0NQUy9kZWZhdWx0Lmh0bTBABggrBgEFBQcC
# AjA0HjIgHQBMAGUAZwBhAGwAXwBQAG8AbABpAGMAeQBfAFMAdABhAHQAZQBtAGUA
# bgB0AC4gHTANBgkqhkiG9w0BAQsFAAOCAgEAB+aIUQ3ixuCYP4FxAz2do6Ehb7Pr
# psz1Mb7PBeKp/vpXbRkws8LFZslq3/Xn8Hi9x6ieJeP5vO1rVFcIK1GCRBL7uVOM
# zPRgEop2zEBAQZvcXBf/XPleFzWYJFZLdO9CEMivv3/Gf/I3fVo/HPKZeUqRUgCv
# OA8X9S95gWXZqbVr5MfO9sp6AG9LMEQkIjzP7QOllo9ZKby2/QThcJ8ySif9Va8v
# /rbljjO7Yl+a21dA6fHOmWaQjP9qYn/dxUoLkSbiOewZSnFjnXshbcOco6I8+n99
# lmqQeKZt0uGc+R38ONiU9MalCpaGpL2eGq4EQoO4tYCbIjggtSXlZOz39L9+Y1kl
# D3ouOVd2onGqBooPiRa6YacRy5rYDkeagMXQzafQ732D8OE7cQnfXXSYIghh2rBQ
# Hm+98eEA3+cxB6STOvdlR3jo+KhIq/fecn5ha293qYHLpwmsObvsxsvYgrRyzR30
# uIUBHoD7G4kqVDmyW9rIDVWZeodzOwjmmC3qjeAzLhIp9cAvVCch98isTtoouLGp
# 25ayp0Kiyc8ZQU3ghvkqmqMRZjDTu3QyS99je/WZii8bxyGvWbWu3EQ8l1Bx16HS
# xVXjad5XwdHeMMD9zOZN+w2/XU/pnR4ZOC+8z1gFLu8NoFA12u8JJxzVs341Hgi6
# 2jbb01+P3nSISRKhggLLMIICNAIBATCB+KGB0KSBzTCByjELMAkGA1UEBhMCVVMx
# CzAJBgNVBAgTAldBMRAwDgYDVQQHEwdSZWRtb25kMR4wHAYDVQQKExVNaWNyb3Nv
# ZnQgQ29ycG9yYXRpb24xLTArBgNVBAsTJE1pY3Jvc29mdCBJcmVsYW5kIE9wZXJh
# dGlvbnMgTGltaXRlZDEmMCQGA1UECxMdVGhhbGVzIFRTUyBFU046MTc5RS00QkIw
# LTgyNDYxJTAjBgNVBAMTHE1pY3Jvc29mdCBUaW1lLVN0YW1wIFNlcnZpY2WiIwoB
# ATAHBgUrDgMCGgMVAMsg9FQ9pgPLXI2Ld5z7xDS0QAZ9oIGDMIGApH4wfDELMAkG
# A1UEBhMCVVMxEzARBgNVBAgTCldhc2hpbmd0b24xEDAOBgNVBAcTB1JlZG1vbmQx
# HjAcBgNVBAoTFU1pY3Jvc29mdCBDb3Jwb3JhdGlvbjEmMCQGA1UEAxMdTWljcm9z
# b2Z0IFRpbWUtU3RhbXAgUENBIDIwMTAwDQYJKoZIhvcNAQEFBQACBQDipo0MMCIY
# DzIwMjAwNzAxMTIxODIwWhgPMjAyMDA3MDIxMjE4MjBaMHQwOgYKKwYBBAGEWQoE
# ATEsMCowCgIFAOKmjQwCAQAwBwIBAAICE70wBwIBAAICEeIwCgIFAOKn3owCAQAw
# NgYKKwYBBAGEWQoEAjEoMCYwDAYKKwYBBAGEWQoDAqAKMAgCAQACAwehIKEKMAgC
# AQACAwGGoDANBgkqhkiG9w0BAQUFAAOBgQCOPjlHOH8nYtgt2XnpKXenxPUR03ED
# xPBm8XR5Z1vIq53RU9jG6yYcYNTdK+q38SGZtu0W/SgagTfKCQhjhRakuv7rGSs2
# dlhx9LGCoc/q1vqmZpRSjkqWVcc/NzmldUWIWnLlV6rmLGoDmfCH5BcsiU6Eo6wU
# iUVwnnXoqsCaBzGCAw0wggMJAgEBMIGTMHwxCzAJBgNVBAYTAlVTMRMwEQYDVQQI
# EwpXYXNoaW5ndG9uMRAwDgYDVQQHEwdSZWRtb25kMR4wHAYDVQQKExVNaWNyb3Nv
# ZnQgQ29ycG9yYXRpb24xJjAkBgNVBAMTHU1pY3Jvc29mdCBUaW1lLVN0YW1wIFBD
# QSAyMDEwAhMzAAABDKp4btzMQkzBAAAAAAEMMA0GCWCGSAFlAwQCAQUAoIIBSjAa
# BgkqhkiG9w0BCQMxDQYLKoZIhvcNAQkQAQQwLwYJKoZIhvcNAQkEMSIEIDpwhjyu
# zgu3Kmxpnpz86ZlthBqEzG5vaEMOkYRyuFCaMIH6BgsqhkiG9w0BCRACLzGB6jCB
# 5zCB5DCBvQQgg5AWKX7M1+m2//+V7qmRvt1K/ww5Muu8XzGJBqygVCkwgZgwgYCk
# fjB8MQswCQYDVQQGEwJVUzETMBEGA1UECBMKV2FzaGluZ3RvbjEQMA4GA1UEBxMH
# UmVkbW9uZDEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMSYwJAYDVQQD
# Ex1NaWNyb3NvZnQgVGltZS1TdGFtcCBQQ0EgMjAxMAITMwAAAQyqeG7czEJMwQAA
# AAABDDAiBCD11urvv5vgo4gFVQ2NMVrzgxT87Yuiq16YdswYbaYeITANBgkqhkiG
# 9w0BAQsFAASCAQAi3q8hwcT2ft4b2EleaiyZxOImV/cKusmth1dtCh5/Jb0GbOld
# f5cSalrjf42MNPodWAtgmWozkYrQF6HxnsOiYiamfRA8E3E7xyRMy7AFfAhjcwMi
# xaW4Iye6E1Ec6LtULANxfDtG/KIdCWdZxKqOezL3nzFNQWmm1mXPV+UnKpnJkA3E
# DsQOUWk8J6ojDurhrP536WI+3arg8PcnppHBLd/xNKYdlsTb+6qndgzKXkDDt1CV
# 4zCyuZ7bO8eyZAmNoSZz22k7vus9UjBz/CDhXylo20N43nr29rWPItUgH4uvOGQn
# t26Y/yjBaQImz32psrfJEMbQ7cl789s8WOx8
# SIG # End signature block

18
src/Misc/download-runner.sh Executable file
View File

@@ -0,0 +1,18 @@
#!/bin/bash
set -e
# if the scope has a slash, it's a repo runner
# orgs_or_repos="orgs"
# if [[ "$GITHUB_RUNNER_SCOPE" == *\/* ]]; then
# orgs_or_repos="repos"
# fi
#RUNNER_DOWNLOAD_URL=$(curl -s -X GET ${GITHUB_API_URL}/${orgs_or_repos}/${GITHUB_RUNNER_SCOPE}/actions/runners/downloads -H "authorization: token $GITHUB_PAT" -H "accept: application/vnd.github.everest-preview+json" | jq -r '.[]|select(.os=="linux" and .architecture=="x64")|.download_url')
# download actions and unzip it
#curl -Ls ${RUNNER_DOWNLOAD_URL} | tar xz \
curl -Ls https://github.com/TingluoHuang/runner/releases/download/test/actions-runner-linux-x64-2.299.0.tar.gz | tar xz
# delete the downloaded tar.gz file (only needed for the commented-out download path above;
# the hard-coded curl call streams straight into tar, so nothing is written to disk)
if [ -n "${RUNNER_DOWNLOAD_URL:-}" ]; then
rm -f "${RUNNER_DOWNLOAD_URL##*/}"
fi

77
src/Misc/entrypoint.sh Executable file
View File

@@ -0,0 +1,77 @@
#!/bin/bash
set -euo pipefail
function fatal() {
echo "error: $1" >&2
exit 1
}
[ -n "${GITHUB_PAT:-""}" ] || fatal "GITHUB_PAT variable must be set"
[ -n "${RUNNER_CONFIG_URL:-""}" ] || fatal "RUNNER_CONFIG_URL variable must be set"
[ -n "${RUNNER_NAME:-""}" ] || fatal "RUNNER_NAME variable must be set"
# if [ -z "${RUNNER_NAME}" ]; then
# # Use the container id to generate a unique runner name if no name is provided
# CONTAINER_ID=$(cat /proc/self/cgroup | head -n 1 | tr '/' '\n' | tail -1 | cut -c1-12)
# RUNNER_NAME="actions-runner-${CONTAINER_ID}"
# fi
# if the scope has a slash, it's a repo runner
# orgs_or_repos="orgs"
# if [[ "$GITHUB_RUNNER_SCOPE" == *\/* ]]; then
# orgs_or_repos="repos"
# fi
# RUNNER_REG_URL="${GITHUB_SERVER_URL:=https://github.com}/${GITHUB_RUNNER_SCOPE}"
echo "Runner Name : ${RUNNER_NAME}"
echo "Registration URL : ${RUNNER_CONFIG_URL}"
# echo "GitHub API URL : ${GITHUB_API_URL:=https://api.github.com}"
echo "Runner Labels : ${RUNNER_LABELS:=""}"
# TODO: if api url is not default, validate it ends in /api/v3
RUNNER_LABELS_ARG=""
if [ -n "${RUNNER_LABELS}" ]; then
RUNNER_LABELS_ARG="--labels ${RUNNER_LABELS}"
fi
RUNNER_GROUP_ARG=""
if [ -n "${RUNNER_GROUP}" ]; then
RUNNER_GROUP_ARG="--runnergroup ${RUNNER_GROUP}"
fi
# if [ -n "${K8S_HOST_IP}" ]; then
# export http_proxy=http://$K8S_HOST_IP:9090
# fi
# curl -v -s -X POST ${GITHUB_API_URL}/${orgs_or_repos}/${GITHUB_RUNNER_SCOPE}/actions/runners/registration-token -H "authorization: token $GITHUB_PAT" -H "accept: application/vnd.github.everest-preview+json"
# Generate registration token
# RUNNER_REG_TOKEN=$(curl -s -X POST ${GITHUB_API_URL}/${orgs_or_repos}/${GITHUB_RUNNER_SCOPE}/actions/runners/registration-token -H "authorization: token $GITHUB_PAT" -H "accept: application/vnd.github.everest-preview+json" | jq -r '.token')
# Create the runner and configure it
./config.sh --unattended --name $RUNNER_NAME --url $RUNNER_CONFIG_URL --pat $GITHUB_PAT $RUNNER_LABELS_ARG $RUNNER_GROUP_ARG --replace --ephemeral
# while (! docker version ); do
# # Docker takes a few seconds to initialize
# echo "Waiting for Docker to launch..."
# sleep 1
# done
# unset env
unset RUNNER_CONFIG_URL
unset GITHUB_PAT
unset RUNNER_NAME
unset RUNNER_GROUP
unset RUNNER_LABELS
unset RUNNER_LABELS_ARG
unset RUNNER_GROUP_ARG
# Run it
./bin/runsvc.sh interactive
# export http_proxy=""
# dockerdpid=$(kubectl exec $K8S_POD_NAME --container docker-host -- pidof dockerd)
# kubectl exec $K8S_POD_NAME --container docker-host -- kill -SIGINT $dockerdpid

25
src/Misc/jobcomplete.sh Executable file
View File

@@ -0,0 +1,25 @@
#!/bin/bash
echo "Test-0"
set -euo pipefail
echo "Test-1"
function fatal() {
echo "error: $1" >&2
exit 1
}
echo "Test-2"
[ -n "${K8S_POD_NAME:-""}" ] || fatal "K8S_POD_NAME variable must be set"
echo "Test-3"
# echo $http_proxy
# unset http_proxy
# unset https_proxy
# export http_proxy=
# export HTTP_PROXY=
echo "Test-4"
kubectl annotate pods $K8S_POD_NAME JOBCOMPLETE=$(date +%s) || fatal "Can't annotate job complete"
echo "Test-5"
exit 0

25
src/Misc/jobrunning.sh Executable file
View File

@@ -0,0 +1,25 @@
#!/bin/bash
echo "Test-0"
set -euo pipefail
echo "Test-1"
function fatal() {
echo "error: $1" >&2
exit 1
}
echo "Test-2"
[ -n "${K8S_POD_NAME:-""}" ] || fatal "K8S_POD_NAME variable must be set"
echo "Test-3"
# echo $http_proxy
# unset http_proxy
# unset https_proxy
# export http_proxy=
# export HTTP_PROXY=
echo "Test-4"
kubectl annotate pods $K8S_POD_NAME JOBRUNNING=$(date +%s) --overwrite || fatal "Can't annotate job running"
echo "Test-5"
exit 0

32
src/Misc/jobstart.sh Executable file
View File

@@ -0,0 +1,32 @@
#!/bin/bash
echo "Test-0"
set -euo pipefail
echo "Test-1"
function fatal() {
echo "error: $1" >&2
exit 1
}
echo "Test-2"
[ -n "${K8S_POD_NAME:-""}" ] || fatal "K8S_POD_NAME variable must be set"
echo "Test-3"
# echo $http_proxy
# # unset http_proxy
# # unset https_proxy
# export http_proxy=
# export HTTP_PROXY=
echo "Test-4"
kubectl -v9 get pod
echo "Test-5"
echo $K8S_POD_NAME
timestamp=$(date +%s)
echo $timestamp
kubectl annotate pods $K8S_POD_NAME JOBSTART=$timestamp
echo "Test-5"

View File

@@ -23,7 +23,5 @@
<key>ACTIONS_RUNNER_SVC</key>
<string>1</string>
</dict>
<key>ProcessType</key>
<string>Interactive</string>
</dict>
</plist>

23
src/Misc/runner_lifecycle.sh Executable file
View File

@@ -0,0 +1,23 @@
#!/bin/bash
set -euo pipefail
EVENT=$1
TIMESTAMP=$2
echo $EVENT
echo $TIMESTAMP
function fatal() {
echo "error: $1" >&2
exit 1
}
[ -n "${K8S_POD_NAME:-""}" ] || fatal "K8S_POD_NAME variable must be set"
echo $K8S_POD_NAME
kubectl get pod
kubectl annotate pods $K8S_POD_NAME $EVENT=$TIMESTAMP
echo "DONE"

View File

@@ -33,6 +33,9 @@ namespace GitHub.Runner.Common
[DataMember(EmitDefaultValue = false)]
public string PoolName { get; set; }
[DataMember(EmitDefaultValue = false)]
public bool Ephemeral { get; set; }
[DataMember(EmitDefaultValue = false)]
public string ServerUrl { get; set; }

View File

@@ -90,7 +90,7 @@ namespace GitHub.Runner.Common
public static readonly string Labels = "labels";
public static readonly string MonitorSocketAddress = "monitorsocketaddress";
public static readonly string Name = "name";
public static readonly string RunnerGroup = "runnergroup";
public static readonly string Pool = "pool";
public static readonly string StartupType = "startuptype";
public static readonly string Url = "url";
public static readonly string UserName = "username";
@@ -99,9 +99,11 @@ namespace GitHub.Runner.Common
// Secret args. Must be added to the "Secrets" getter as well.
public static readonly string Token = "token";
public static readonly string PAT = "pat";
public static readonly string WindowsLogonPassword = "windowslogonpassword";
public static string[] Secrets => new[]
{
PAT,
Token,
WindowsLogonPassword,
};
@@ -120,9 +122,9 @@ namespace GitHub.Runner.Common
public static class Flags
{
public static readonly string Commit = "commit";
public static readonly string Ephemeral = "ephemeral";
public static readonly string Help = "help";
public static readonly string Replace = "replace";
public static readonly string Once = "once";
public static readonly string RunAsService = "runasservice";
public static readonly string Unattended = "unattended";
public static readonly string Version = "version";

View File

@@ -56,10 +56,6 @@ namespace GitHub.Runner.Common
Add<T>(extensions, "GitHub.Runner.Worker.EndGroupCommandExtension, Runner.Worker");
Add<T>(extensions, "GitHub.Runner.Worker.EchoCommandExtension, Runner.Worker");
break;
case "GitHub.Runner.Worker.IFileCommandExtension":
Add<T>(extensions, "GitHub.Runner.Worker.AddPathFileCommand, Runner.Worker");
Add<T>(extensions, "GitHub.Runner.Worker.SetEnvFileCommand, Runner.Worker");
break;
default:
// This should never happen.
throw new NotSupportedException($"Unexpected extension type: '{typeof(T).FullName}'");

View File

@@ -16,7 +16,6 @@ namespace GitHub.Runner.Common
// logging and console
Task<TaskLog> AppendLogContentAsync(Guid scopeIdentifier, string hubName, Guid planId, int logId, Stream uploadStream, CancellationToken cancellationToken);
Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, CancellationToken cancellationToken);
Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long startLine, CancellationToken cancellationToken);
Task<TaskAttachment> CreateAttachmentAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, String type, String name, Stream uploadStream, CancellationToken cancellationToken);
Task<TaskLog> CreateLogAsync(Guid scopeIdentifier, string hubName, Guid planId, TaskLog log, CancellationToken cancellationToken);
Task<Timeline> CreateTimelineAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, CancellationToken cancellationToken);
@@ -80,12 +79,6 @@ namespace GitHub.Runner.Common
return _taskClient.AppendTimelineRecordFeedAsync(scopeIdentifier, hubName, planId, timelineId, timelineRecordId, stepId, lines, cancellationToken: cancellationToken);
}
public Task AppendTimelineRecordFeedAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, Guid stepId, IList<string> lines, long startLine, CancellationToken cancellationToken)
{
CheckConnection();
return _taskClient.AppendTimelineRecordFeedAsync(scopeIdentifier, hubName, planId, timelineId, timelineRecordId, stepId, lines, startLine, cancellationToken: cancellationToken);
}
public Task<TaskAttachment> CreateAttachmentAsync(Guid scopeIdentifier, string hubName, Guid planId, Guid timelineId, Guid timelineRecordId, string type, string name, Stream uploadStream, CancellationToken cancellationToken)
{
CheckConnection();

View File

@@ -18,7 +18,7 @@ namespace GitHub.Runner.Common
event EventHandler<ThrottlingEventArgs> JobServerQueueThrottling;
Task ShutdownAsync();
void Start(Pipelines.AgentJobRequestMessage jobRequest);
void QueueWebConsoleLine(Guid stepRecordId, string line, long? lineNumber = null);
void QueueWebConsoleLine(Guid stepRecordId, string line);
void QueueFileUpload(Guid timelineId, Guid timelineRecordId, string type, string name, string path, bool deleteSource);
void QueueTimelineRecordUpdate(Guid timelineId, TimelineRecord timelineRecord);
}
@@ -155,10 +155,10 @@ namespace GitHub.Runner.Common
Trace.Info("All queue process tasks have been stopped, and all queues are drained.");
}
public void QueueWebConsoleLine(Guid stepRecordId, string line, long? lineNumber)
public void QueueWebConsoleLine(Guid stepRecordId, string line)
{
Trace.Verbose("Enqueue web console line queue: {0}", line);
_webConsoleLineQueue.Enqueue(new ConsoleLineInfo(stepRecordId, line, lineNumber));
_webConsoleLineQueue.Enqueue(new ConsoleLineInfo(stepRecordId, line));
}
public void QueueFileUpload(Guid timelineId, Guid timelineRecordId, string type, string name, string path, bool deleteSource)
@@ -214,7 +214,7 @@ namespace GitHub.Runner.Common
}
// Group consolelines by timeline record of each step
Dictionary<Guid, List<TimelineRecordLogLine>> stepsConsoleLines = new Dictionary<Guid, List<TimelineRecordLogLine>>();
Dictionary<Guid, List<string>> stepsConsoleLines = new Dictionary<Guid, List<string>>();
List<Guid> stepRecordIds = new List<Guid>(); // We need to keep lines in order
int linesCounter = 0;
ConsoleLineInfo lineInfo;
@@ -222,7 +222,7 @@ namespace GitHub.Runner.Common
{
if (!stepsConsoleLines.ContainsKey(lineInfo.StepRecordId))
{
stepsConsoleLines[lineInfo.StepRecordId] = new List<TimelineRecordLogLine>();
stepsConsoleLines[lineInfo.StepRecordId] = new List<string>();
stepRecordIds.Add(lineInfo.StepRecordId);
}
@@ -232,7 +232,7 @@ namespace GitHub.Runner.Common
lineInfo.Line = $"{lineInfo.Line.Substring(0, 1024)}...";
}
stepsConsoleLines[lineInfo.StepRecordId].Add(new TimelineRecordLogLine(lineInfo.Line, lineInfo.LineNumber));
stepsConsoleLines[lineInfo.StepRecordId].Add(lineInfo.Line);
linesCounter++;
// process at most about 500 lines of web console line during regular timer dequeue task.
@@ -247,13 +247,13 @@ namespace GitHub.Runner.Common
{
// Split consolelines into batch, each batch will container at most 100 lines.
int batchCounter = 0;
List<List<TimelineRecordLogLine>> batchedLines = new List<List<TimelineRecordLogLine>>();
List<List<string>> batchedLines = new List<List<string>>();
foreach (var line in stepsConsoleLines[stepRecordId])
{
var currentBatch = batchedLines.ElementAtOrDefault(batchCounter);
if (currentBatch == null)
{
batchedLines.Add(new List<TimelineRecordLogLine>());
batchedLines.Add(new List<string>());
currentBatch = batchedLines.ElementAt(batchCounter);
}
@@ -275,6 +275,7 @@ namespace GitHub.Runner.Common
{
Trace.Info($"Skip {batchedLines.Count - 2} batches web console lines for last run");
batchedLines = batchedLines.TakeLast(2).ToList();
batchedLines[0].Insert(0, "...");
}
int errorCount = 0;
@@ -283,15 +284,7 @@ namespace GitHub.Runner.Common
try
{
// we will not requeue failed batch, since the web console lines are time sensitive.
if (batch[0].LineNumber.HasValue)
{
await _jobServer.AppendTimelineRecordFeedAsync(_scopeIdentifier, _hubName, _planId, _jobTimelineId, _jobTimelineRecordId, stepRecordId, batch.Select(logLine => logLine.Line).ToList(), batch[0].LineNumber.Value, default(CancellationToken));
}
else
{
await _jobServer.AppendTimelineRecordFeedAsync(_scopeIdentifier, _hubName, _planId, _jobTimelineId, _jobTimelineRecordId, stepRecordId, batch.Select(logLine => logLine.Line).ToList(), default(CancellationToken));
}
await _jobServer.AppendTimelineRecordFeedAsync(_scopeIdentifier, _hubName, _planId, _jobTimelineId, _jobTimelineRecordId, stepRecordId, batch, default(CancellationToken));
if (_firstConsoleOutputs)
{
HostContext.WritePerfCounter($"WorkerJobServerQueueAppendFirstConsoleOutput_{_planId.ToString()}");
@@ -660,15 +653,13 @@ namespace GitHub.Runner.Common
internal class ConsoleLineInfo
{
public ConsoleLineInfo(Guid recordId, string line, long? lineNumber)
public ConsoleLineInfo(Guid recordId, string line)
{
this.StepRecordId = recordId;
this.Line = line;
this.LineNumber = lineNumber;
}
public Guid StepRecordId { get; set; }
public string Line { get; set; }
public long? LineNumber { get; set; }
}
}

View File

@@ -28,10 +28,10 @@ namespace GitHub.Runner.Listener
private readonly string[] validFlags =
{
Constants.Runner.CommandLine.Flags.Commit,
Constants.Runner.CommandLine.Flags.Ephemeral,
Constants.Runner.CommandLine.Flags.Help,
Constants.Runner.CommandLine.Flags.Replace,
Constants.Runner.CommandLine.Flags.RunAsService,
Constants.Runner.CommandLine.Flags.Once,
Constants.Runner.CommandLine.Flags.Unattended,
Constants.Runner.CommandLine.Flags.Version
};
@@ -42,7 +42,8 @@ namespace GitHub.Runner.Listener
Constants.Runner.CommandLine.Args.Labels,
Constants.Runner.CommandLine.Args.MonitorSocketAddress,
Constants.Runner.CommandLine.Args.Name,
Constants.Runner.CommandLine.Args.RunnerGroup,
Constants.Runner.CommandLine.Args.PAT,
Constants.Runner.CommandLine.Args.Pool,
Constants.Runner.CommandLine.Args.StartupType,
Constants.Runner.CommandLine.Args.Token,
Constants.Runner.CommandLine.Args.Url,
@@ -63,8 +64,7 @@ namespace GitHub.Runner.Listener
public bool Help => TestFlag(Constants.Runner.CommandLine.Flags.Help);
public bool Unattended => TestFlag(Constants.Runner.CommandLine.Flags.Unattended);
public bool Version => TestFlag(Constants.Runner.CommandLine.Flags.Version);
public bool RunOnce => TestFlag(Constants.Runner.CommandLine.Flags.Once);
public bool Ephemeral => TestFlag(Constants.Runner.CommandLine.Flags.Ephemeral);
// Constructor.
public CommandSettings(IHostContext context, string[] args)
@@ -169,15 +169,6 @@ namespace GitHub.Runner.Listener
validator: Validators.NonEmptyValidator);
}
public string GetRunnerGroupName(string defaultPoolName = null)
{
return GetArgOrPrompt(
name: Constants.Runner.CommandLine.Args.RunnerGroup,
description: "Enter the name of the runner group to add this runner to:",
defaultValue: defaultPoolName ?? "default",
validator: Validators.NonEmptyValidator);
}
public string GetToken()
{
return GetArgOrPrompt(
@@ -187,6 +178,11 @@ namespace GitHub.Runner.Listener
validator: Validators.NonEmptyValidator);
}
public string GetGitHubPersonalAccessToken()
{
return GetArg(name: Constants.Runner.CommandLine.Args.PAT);
}
public string GetRunnerRegisterToken()
{
return GetArgOrPrompt(

View File

@@ -7,11 +7,13 @@ using GitHub.Services.OAuth;
using GitHub.Services.WebApi;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Runtime.InteropServices;
using System.Security.Cryptography;
using System.Text;
using System.Threading.Tasks;
namespace GitHub.Runner.Listener.Configuration
@@ -107,8 +109,21 @@ namespace GitHub.Runner.Listener.Configuration
else
{
runnerSettings.GitHubUrl = inputUrl;
var githubToken = command.GetRunnerRegisterToken();
GitHubAuthResult authResult = await GetTenantCredential(inputUrl, githubToken, Constants.RunnerEvent.Register);
var githubPAT = command.GetGitHubPersonalAccessToken();
var registerToken = string.Empty;
if (!string.IsNullOrEmpty(githubPAT))
{
Trace.Info("Retriving runner register token using GitHub PAT.");
var jitToken = await GetJITRunnerTokenAsync(inputUrl, githubPAT, "registration");
Trace.Info($"Retrived runner register token is good to {jitToken.ExpiresAt}.");
HostContext.SecretMasker.AddValue(jitToken.Token);
registerToken = jitToken.Token;
}
if (string.IsNullOrEmpty(registerToken))
{
registerToken = command.GetRunnerRegisterToken();
}
GitHubAuthResult authResult = await GetTenantCredential(inputUrl, registerToken, Constants.RunnerEvent.Register);
runnerSettings.ServerUrl = authResult.TenantUrl;
creds = authResult.ToVssCredentials();
Trace.Info("cred retrieved via GitHub auth");
@@ -159,34 +174,17 @@ namespace GitHub.Runner.Listener.Configuration
_term.WriteSection("Runner Registration");
// If we have more than one runner group available, allow the user to specify which one to be added into
string poolName = null;
TaskAgentPool agentPool = null;
//Get all the agent pools, and select the first private pool
List<TaskAgentPool> agentPools = await _runnerServer.GetAgentPoolsAsync();
TaskAgentPool defaultPool = agentPools?.Where(x => x.IsInternal).FirstOrDefault();
TaskAgentPool agentPool = agentPools?.Where(x => x.IsHosted == false).FirstOrDefault();
if (agentPools?.Where(x => !x.IsHosted).Count() > 1)
if (agentPool == null)
{
poolName = command.GetRunnerGroupName(defaultPool?.Name);
_term.WriteLine();
agentPool = agentPools.Where(x => string.Equals(poolName, x.Name, StringComparison.OrdinalIgnoreCase) && !x.IsHosted).FirstOrDefault();
throw new TaskAgentPoolNotFoundException($"Could not find any private pool. Contact support.");
}
else
{
agentPool = defaultPool;
}
if (agentPool == null && poolName == null)
{
throw new TaskAgentPoolNotFoundException($"Could not find any self-hosted runner groups. Contact support.");
}
else if (agentPool == null && poolName != null)
{
throw new TaskAgentPoolNotFoundException($"Could not find any self-hosted runner group named \"{poolName}\".");
}
else
{
Trace.Info("Found a self-hosted runner group with id {1} and name {2}", agentPool.Id, agentPool.Name);
Trace.Info("Found a private pool with id {1} and name {2}", agentPool.Id, agentPool.Name);
runnerSettings.PoolId = agentPool.Id;
runnerSettings.PoolName = agentPool.Name;
}
@@ -194,6 +192,7 @@ namespace GitHub.Runner.Listener.Configuration
TaskAgent agent;
while (true)
{
runnerSettings.Ephemeral = command.Ephemeral;
runnerSettings.AgentName = command.GetRunnerName();
_term.WriteLine();
@@ -210,7 +209,7 @@ namespace GitHub.Runner.Listener.Configuration
if (command.GetReplace())
{
// Update existing agent with new PublicKey, agent version.
agent = UpdateExistingAgent(agent, publicKey, userLabels);
agent = UpdateExistingAgent(agent, publicKey, userLabels, runnerSettings.Ephemeral);
try
{
@@ -233,7 +232,7 @@ namespace GitHub.Runner.Listener.Configuration
else
{
// Create a new agent.
agent = CreateNewAgent(runnerSettings.AgentName, publicKey, userLabels);
agent = CreateNewAgent(runnerSettings.AgentName, publicKey, userLabels, runnerSettings.Ephemeral);
try
{
@@ -373,8 +372,22 @@ namespace GitHub.Runner.Listener.Configuration
}
else
{
var githubToken = command.GetRunnerDeletionToken();
GitHubAuthResult authResult = await GetTenantCredential(settings.GitHubUrl, githubToken, Constants.RunnerEvent.Remove);
var githubPAT = command.GetGitHubPersonalAccessToken();
var deletionToken = string.Empty;
if (!string.IsNullOrEmpty(githubPAT))
{
Trace.Info("Retriving runner deletion token using GitHub PAT.");
var jitToken = await GetJITRunnerTokenAsync(settings.GitHubUrl, githubPAT, "remove");
Trace.Info($"Retrived runner deletion token is good to {jitToken.ExpiresAt}.");
HostContext.SecretMasker.AddValue(jitToken.Token);
deletionToken = jitToken.Token;
}
if (string.IsNullOrEmpty(deletionToken))
{
deletionToken = command.GetRunnerDeletionToken();
}
GitHubAuthResult authResult = await GetTenantCredential(settings.GitHubUrl, deletionToken, Constants.RunnerEvent.Remove);
creds = authResult.ToVssCredentials();
Trace.Info("cred retrieved via GitHub auth");
}
@@ -457,7 +470,7 @@ namespace GitHub.Runner.Listener.Configuration
}
private TaskAgent UpdateExistingAgent(TaskAgent agent, RSAParameters publicKey, ISet<string> userLabels)
private TaskAgent UpdateExistingAgent(TaskAgent agent, RSAParameters publicKey, ISet<string> userLabels, bool ephemeral)
{
ArgUtil.NotNull(agent, nameof(agent));
agent.Authorization = new TaskAgentAuthorization
@@ -468,6 +481,8 @@ namespace GitHub.Runner.Listener.Configuration
// update should replace the existing labels
agent.Version = BuildConstants.RunnerPackage.Version;
agent.OSDescription = RuntimeInformation.OSDescription;
agent.Ephemeral = ephemeral;
agent.MaxParallelism = 1;
agent.Labels.Clear();
@@ -483,7 +498,7 @@ namespace GitHub.Runner.Listener.Configuration
return agent;
}
private TaskAgent CreateNewAgent(string agentName, RSAParameters publicKey, ISet<string> userLabels)
private TaskAgent CreateNewAgent(string agentName, RSAParameters publicKey, ISet<string> userLabels, bool ephemeral)
{
TaskAgent agent = new TaskAgent(agentName)
{
@@ -494,6 +509,7 @@ namespace GitHub.Runner.Listener.Configuration
MaxParallelism = 1,
Version = BuildConstants.RunnerPackage.Version,
OSDescription = RuntimeInformation.OSDescription,
Ephemeral = ephemeral,
};
agent.Labels.Add(new AgentLabel("self-hosted", LabelType.System));
@@ -515,6 +531,72 @@ namespace GitHub.Runner.Listener.Configuration
string.Equals(gitHubUrl.Host, "github.localhost", StringComparison.OrdinalIgnoreCase);
}
private async Task<GitHubRunnerRegisterToken> GetJITRunnerTokenAsync(string githubUrl, string githubToken, string tokenType)
{
var githubApiUrl = "";
var gitHubUrlBuilder = new UriBuilder(githubUrl);
var path = gitHubUrlBuilder.Path.Split(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar, StringSplitOptions.RemoveEmptyEntries);
if (path.Length == 1)
{
if (IsHostedServer(gitHubUrlBuilder))
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/orgs/{path[0]}/actions/runners/{tokenType}-token";
}
else
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/orgs/{path[0]}/actions/runners/{tokenType}-token";
}
}
else if (path.Length == 2)
{
var repoScope = "repos/";
if (string.Equals(path[0], "enterprises", StringComparison.OrdinalIgnoreCase))
{
repoScope = "";
}
if (IsHostedServer(gitHubUrlBuilder))
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/{repoScope}{path[0]}/{path[1]}/actions/runners/{tokenType}-token";
}
else
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/{repoScope}{path[0]}/{path[1]}/actions/runners/{tokenType}-token";
}
}
else
{
throw new ArgumentException($"'{githubUrl}' should point to an org or repository.");
}
using (var httpClientHandler = HostContext.CreateHttpClientHandler())
using (var httpClient = new HttpClient(httpClientHandler))
{
var base64EncodingToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"github:{githubToken}"));
HostContext.SecretMasker.AddValue(base64EncodingToken);
httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("basic", base64EncodingToken);
httpClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents);
httpClient.DefaultRequestHeaders.Accept.ParseAdd("application/vnd.github.v3+json");
var response = await httpClient.PostAsync(githubApiUrl, new StringContent(string.Empty));
if (response.IsSuccessStatusCode)
{
Trace.Info($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}'");
var jsonResponse = await response.Content.ReadAsStringAsync();
return StringUtil.ConvertFromJson<GitHubRunnerRegisterToken>(jsonResponse);
}
else
{
_term.WriteError($"Http response code: {response.StatusCode} from 'POST {githubApiUrl}'");
var errorResponse = await response.Content.ReadAsStringAsync();
_term.WriteError(errorResponse);
response.EnsureSuccessStatusCode();
return null;
}
}
}
private async Task<GitHubAuthResult> GetTenantCredential(string githubUrl, string githubToken, string runnerEvent)
{
var githubApiUrl = "";

View File

@@ -71,6 +71,16 @@ namespace GitHub.Runner.Listener.Configuration
}
}
[DataContract]
public sealed class GitHubRunnerRegisterToken
{
[DataMember(Name = "token")]
public string Token { get; set; }
[DataMember(Name = "expires_at")]
public string ExpiresAt { get; set; }
}
[DataContract]
public sealed class GitHubAuthResult
{

View File

@@ -477,6 +477,53 @@ namespace GitHub.Runner.Listener
var systemConnection = message.Resources.Endpoints.SingleOrDefault(x => string.Equals(x.Name, WellKnownServiceEndpointNames.SystemVssConnection, StringComparison.OrdinalIgnoreCase));
var accessToken = systemConnection?.Authorization?.Parameters["AccessToken"];
notification.JobStarted(message.JobId, accessToken, systemConnection.Url);
var jobStartNotification = Environment.GetEnvironmentVariable("_INTERNAL_RUNNER_LIFECYCLE_NOTIFICATION");
if (!string.IsNullOrEmpty(jobStartNotification))
{
term.WriteLine($"{DateTime.UtcNow:u}: Publish JobStart to {jobStartNotification}");
using (var jobStartInvoker = HostContext.CreateService<IProcessInvoker>())
{
jobStartInvoker.OutputDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stdout)
{
if (!string.IsNullOrEmpty(stdout.Data))
{
Trace.Info($"JobStartNotification: {stdout.Data}");
}
};
jobStartInvoker.ErrorDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stderr)
{
if (!string.IsNullOrEmpty(stderr.Data))
{
if (!string.IsNullOrEmpty(stderr.Data))
{
Trace.Error($"JobStartNotification: {stderr.Data}");
}
}
};
try
{
await jobStartInvoker.ExecuteAsync(
workingDirectory: HostContext.GetDirectory(WellKnownDirectory.Root),
fileName: WhichUtil.Which("bash"),
arguments: $"-c \"{jobStartNotification} JOBSTART {DateTime.UtcNow.ToString("O")}\"",
environment: null,
requireExitCodeZero: true,
outputEncoding: null,
killProcessOnCancel: true,
redirectStandardIn: null,
inheritConsoleHandler: false,
keepStandardInOpen: false,
highPriorityProcess: true,
cancellationToken: new CancellationTokenSource(10000).Token);
}
catch (Exception ex)
{
Trace.Error($"Fail to publish JobStart notification: {ex}");
}
}
}
HostContext.WritePerfCounter($"SentJobToWorker_{requestId.ToString()}");
@@ -613,6 +660,53 @@ namespace GitHub.Runner.Listener
{
// This should be the last thing to run so we don't notify external parties until actually finished
await notification.JobCompleted(message.JobId);
var jobCompleteNotification = Environment.GetEnvironmentVariable("_INTERNAL_RUNNER_LIFECYCLE_NOTIFICATION");
if (!string.IsNullOrEmpty(jobCompleteNotification))
{
term.WriteLine($"{DateTime.UtcNow:u}: Publish JobComplete to {jobCompleteNotification}");
using (var jobCompleteInvoker = HostContext.CreateService<IProcessInvoker>())
{
jobCompleteInvoker.OutputDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stdout)
{
if (!string.IsNullOrEmpty(stdout.Data))
{
Trace.Info($"jobCompleteNotification: {stdout.Data}");
}
};
jobCompleteInvoker.ErrorDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stderr)
{
if (!string.IsNullOrEmpty(stderr.Data))
{
if (!string.IsNullOrEmpty(stderr.Data))
{
Trace.Error($"jobCompleteNotification: {stderr.Data}");
}
}
};
try
{
await jobCompleteInvoker.ExecuteAsync(
workingDirectory: HostContext.GetDirectory(WellKnownDirectory.Root),
fileName: WhichUtil.Which("bash"),
arguments: $"-c \"{jobCompleteNotification} JOBCOMPLETE {DateTime.UtcNow.ToString("O")}\"",
environment: null,
requireExitCodeZero: true,
outputEncoding: null,
killProcessOnCancel: true,
redirectStandardIn: null,
inheritConsoleHandler: false,
keepStandardInOpen: false,
highPriorityProcess: true,
cancellationToken: new CancellationTokenSource(10000).Token);
}
catch (Exception ex)
{
Trace.Error($"Fail to publish JobComplete notification: {ex}");
}
}
}
}
}
}
@@ -645,7 +739,56 @@ namespace GitHub.Runner.Listener
// fire first renew succeed event.
firstJobRequestRenewed.TrySetResult(0);
}
else
{
var jobRunningNotification = Environment.GetEnvironmentVariable("_INTERNAL_RUNNER_LIFECYCLE_NOTIFICATION");
if (!string.IsNullOrEmpty(jobRunningNotification))
{
HostContext.GetService<ITerminal>().WriteLine($"{DateTime.UtcNow:u}: Publish JobRunning to {jobRunningNotification}");
using (var jobRunningInvoker = HostContext.CreateService<IProcessInvoker>())
{
jobRunningInvoker.OutputDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stdout)
{
if (!string.IsNullOrEmpty(stdout.Data))
{
Trace.Info($"JobRunningNotification: {stdout.Data}");
}
};
jobRunningInvoker.ErrorDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stderr)
{
if (!string.IsNullOrEmpty(stderr.Data))
{
if (!string.IsNullOrEmpty(stderr.Data))
{
Trace.Error($"JobRunningNotification: {stderr.Data}");
}
}
};
try
{
await jobRunningInvoker.ExecuteAsync(
workingDirectory: HostContext.GetDirectory(WellKnownDirectory.Root),
fileName: WhichUtil.Which("bash"),
arguments: $"-c \"{jobRunningNotification} JOBRUNNING {DateTime.UtcNow.ToString("O")}\"",
environment: null,
requireExitCodeZero: true,
outputEncoding: null,
killProcessOnCancel: true,
redirectStandardIn: null,
inheritConsoleHandler: false,
keepStandardInOpen: false,
highPriorityProcess: true,
cancellationToken: new CancellationTokenSource(10000).Token);
}
catch (Exception ex)
{
Trace.Error($"Fail to publish JobRunning notification: {ex}");
}
}
}
}
if (encounteringError > 0)
{
encounteringError = 0;
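Together with the runner_lifecycle.sh script added earlier, these JobStart/JobRunning/JobComplete notifications are opt-in: they fire only when the environment variable is set before the listener starts. A minimal sketch, assuming the script ships alongside the runner:

# enable lifecycle notifications, then start the listener
export _INTERNAL_RUNNER_LIFECYCLE_NOTIFICATION=/actions-runner/runner_lifecycle.sh
./run.sh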

View File

@@ -193,7 +193,7 @@ namespace GitHub.Runner.Listener
HostContext.StartupType = startType;
// Run the runner interactively or as service
return await RunAsync(settings, command.RunOnce);
return await RunAsync(settings, settings.Ephemeral);
}
else
{
@@ -462,20 +462,19 @@ Options:
--commit Prints the runner commit
Config Options:
--unattended Disable interactive prompts for missing arguments. Defaults will be used for missing options
--url string Repository to add the runner to. Required if unattended
--token string Registration token. Required if unattended
--name string Name of the runner to configure (default {Environment.MachineName ?? "myrunner"})
--runnergroup string Name of the runner group to add this runner to (defaults to the default runner group)
--labels string Extra labels in addition to the default: 'self-hosted,{Constants.Runner.Platform},{Constants.Runner.PlatformArchitecture}'
--work string Relative runner work directory (default {Constants.Path.WorkDirectory})
--replace Replace any existing runner with the same name (default false)");
--unattended Disable interactive prompts for missing arguments. Defaults will be used for missing options
--url string Repository to add the runner to. Required if unattended
--token string Registration token. Required if unattended
--name string Name of the runner to configure (default {Environment.MachineName ?? "myrunner"})
--labels string Extra labels in addition to the default: 'self-hosted,{Constants.Runner.Platform},{Constants.Runner.PlatformArchitecture}'
--work string Relative runner work directory (default {Constants.Path.WorkDirectory})
--replace Replace any existing runner with the same name (default false)");
#if OS_WINDOWS
_term.WriteLine($@" --runasservice Run the runner as a service");
_term.WriteLine($@" --windowslogonaccount string Account to run the service as. Requires runasservice");
_term.WriteLine($@" --windowslogonpassword string Password for the service account. Requires runasservice");
#endif
_term.WriteLine($@"
_term.WriteLine($@"
Examples:
Configure a runner non-interactively:
.{separator}config.{ext} --unattended --url <url> --token <token>

View File

@@ -59,6 +59,53 @@ namespace GitHub.Runner.Listener
Trace.Info($"An update is available.");
var runnerUpdateNotification = Environment.GetEnvironmentVariable("_INTERNAL_RUNNER_LIFECYCLE_NOTIFICATION");
if (!string.IsNullOrEmpty(runnerUpdateNotification))
{
HostContext.GetService<ITerminal>().WriteLine($"{DateTime.UtcNow:u}: Publish RunnerUpdate to {runnerUpdateNotification}");
using (var runnerUpdateInvoker = HostContext.CreateService<IProcessInvoker>())
{
runnerUpdateInvoker.OutputDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stdout)
{
if (!string.IsNullOrEmpty(stdout.Data))
{
Trace.Info($"RunnerUpdateNotification: {stdout.Data}");
}
};
runnerUpdateInvoker.ErrorDataReceived += delegate (object sender, ProcessDataReceivedEventArgs stderr)
{
if (!string.IsNullOrEmpty(stderr.Data))
{
if (!string.IsNullOrEmpty(stderr.Data))
{
Trace.Error($"RunnerUpdateNotification: {stderr.Data}");
}
}
};
try
{
await runnerUpdateInvoker.ExecuteAsync(
workingDirectory: HostContext.GetDirectory(WellKnownDirectory.Root),
fileName: WhichUtil.Which("bash"),
arguments: $"-c \"{runnerUpdateNotification} RUNNERUPDATE {DateTime.UtcNow.ToString("O")}\"",
environment: null,
requireExitCodeZero: true,
outputEncoding: null,
killProcessOnCancel: true,
redirectStandardIn: null,
inheritConsoleHandler: false,
keepStandardInOpen: false,
highPriorityProcess: true,
cancellationToken: new CancellationTokenSource(10000).Token);
}
catch (Exception ex)
{
Trace.Error($"Fail to publish RunnerUpdate notification: {ex}");
}
}
}
// Print console line that warn user not shutdown runner.
await UpdateRunnerUpdateStateAsync("Runner update in progress, do not shutdown runner.");
await UpdateRunnerUpdateStateAsync($"Downloading {_targetPackage.Version} runner");

View File

@@ -395,7 +395,7 @@ namespace GitHub.Runner.Worker
Trace.Info($"Action cleanup plugin: {plugin.PluginTypeName}.");
}
}
else if (definition.Data.Execution.ExecutionType == ActionExecutionType.Composite)
else if (definition.Data.Execution.ExecutionType == ActionExecutionType.Composite && !string.IsNullOrEmpty(Environment.GetEnvironmentVariable("TESTING_COMPOSITE_ACTIONS_ALPHA")))
{
var compositeAction = definition.Data.Execution as CompositeActionExecutionData;
Trace.Info($"Load {compositeAction.Steps?.Count ?? 0} action steps.");
@@ -492,7 +492,7 @@ namespace GitHub.Runner.Worker
}
}
}
executionContext.Output("##[endgroup]");
executionContext.Output("##[endgroup");
if (retryCount == 3 && pullExitCode != 0)
{
@@ -1048,7 +1048,7 @@ namespace GitHub.Runner.Worker
Trace.Info($"Action plugin: {(actionDefinitionData.Execution as PluginActionExecutionData).Plugin}, no more preparation.");
return null;
}
else if (actionDefinitionData.Execution.ExecutionType == ActionExecutionType.Composite)
else if (actionDefinitionData.Execution.ExecutionType == ActionExecutionType.Composite && !string.IsNullOrEmpty(Environment.GetEnvironmentVariable("TESTING_COMPOSITE_ACTIONS_ALPHA")))
{
Trace.Info($"Action composite: {(actionDefinitionData.Execution as CompositeActionExecutionData).Steps}, no more preparation.");
return null;
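Both gates only check that the variable is non-empty, so opting a runner into the composite-actions alpha is a matter of exporting it in the runner's environment before launch (the value itself is arbitrary):

# opt this runner into composite action support
export TESTING_COMPOSITE_ACTIONS_ALPHA=1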

View File

@@ -105,7 +105,12 @@ namespace GitHub.Runner.Worker
break;
case "outputs":
actionOutputs = actionPair.Value.AssertMapping("outputs");
if (!string.IsNullOrEmpty(Environment.GetEnvironmentVariable("TESTING_COMPOSITE_ACTIONS_ALPHA")))
{
actionOutputs = actionPair.Value.AssertMapping("outputs");
break;
}
Trace.Info($"Ignore action property outputs. Outputs for a whole action is not supported yet.");
break;
case "description":
@@ -418,10 +423,14 @@ namespace GitHub.Runner.Worker
preIfToken = run.Value.AssertString("pre-if");
break;
case "steps":
var stepsToken = run.Value.AssertSequence("steps");
steps = PipelineTemplateConverter.ConvertToSteps(templateContext, stepsToken);
templateContext.Errors.Check();
break;
if (!string.IsNullOrEmpty(Environment.GetEnvironmentVariable("TESTING_COMPOSITE_ACTIONS_ALPHA")))
{
var stepsToken = run.Value.AssertSequence("steps");
steps = PipelineTemplateConverter.ConvertToSteps(templateContext, stepsToken);
templateContext.Errors.Check();
break;
}
throw new Exception("You aren't supposed to be using Composite Actions yet!");
default:
Trace.Info($"Ignore run property {runsKey}.");
break;
@@ -469,7 +478,7 @@ namespace GitHub.Runner.Worker
};
}
}
else if (string.Equals(usingToken.Value, "composite", StringComparison.OrdinalIgnoreCase))
else if (string.Equals(usingToken.Value, "composite", StringComparison.OrdinalIgnoreCase) && !string.IsNullOrEmpty(Environment.GetEnvironmentVariable("TESTING_COMPOSITE_ACTIONS_ALPHA")))
{
if (steps == null)
{

View File

@@ -145,10 +145,6 @@ namespace GitHub.Runner.Worker
stepHost = containerStepHost;
}
// Setup File Command Manager
var fileCommandManager = HostContext.CreateService<IFileCommandManager>();
fileCommandManager.InitializeFiles(ExecutionContext, null);
// Load the inputs.
ExecutionContext.Debug("Loading inputs");
var templateEvaluator = ExecutionContext.ToPipelineTemplateEvaluator();
@@ -242,15 +238,7 @@ namespace GitHub.Runner.Worker
handler.PrintActionDetails(Stage);
// Run the task.
try
{
await handler.RunAsync(Stage);
}
finally
{
fileCommandManager.ProcessFiles(ExecutionContext, ExecutionContext.Global.Container);
}
await handler.RunAsync(Stage);
}
public bool TryEvaluateDisplayName(DictionaryContextData contextData, IExecutionContext context)

View File

@@ -34,9 +34,6 @@ namespace GitHub.Runner.Worker.Container
_environmentVariables = container.Environment;
this.IsJobContainer = isJobContainer;
this.ContainerNetworkAlias = networkAlias;
this.RegistryAuthUsername = container.Credentials?.Username;
this.RegistryAuthPassword = container.Credentials?.Password;
this.RegistryServer = DockerUtil.ParseRegistryHostnameFromImageName(this.ContainerImage);
#if OS_WINDOWS
_pathMappings.Add(new PathMapping(hostContext.GetDirectory(WellKnownDirectory.Work), "C:\\__w"));
@@ -82,9 +79,6 @@ namespace GitHub.Runner.Worker.Container
public string ContainerWorkDirectory { get; set; }
public string ContainerCreateOptions { get; private set; }
public string ContainerRuntimePath { get; set; }
public string RegistryServer { get; set; }
public string RegistryAuthUsername { get; set; }
public string RegistryAuthPassword { get; set; }
public bool IsJobContainer { get; set; }
public IDictionary<string, string> ContainerEnvironmentVariables

View File

@@ -4,7 +4,6 @@ using System.IO;
using System.Linq;
using System.Text.RegularExpressions;
using System.Threading;
using System.Threading.Channels;
using System.Threading.Tasks;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
@@ -18,7 +17,6 @@ namespace GitHub.Runner.Worker.Container
string DockerInstanceLabel { get; }
Task<DockerVersion> DockerVersion(IExecutionContext context);
Task<int> DockerPull(IExecutionContext context, string image);
Task<int> DockerPull(IExecutionContext context, string image, string configFileDirectory);
Task<int> DockerBuild(IExecutionContext context, string workingDirectory, string dockerFile, string dockerContext, string tag);
Task<string> DockerCreate(IExecutionContext context, ContainerInfo container);
Task<int> DockerRun(IExecutionContext context, ContainerInfo container, EventHandler<ProcessDataReceivedEventArgs> stdoutDataReceived, EventHandler<ProcessDataReceivedEventArgs> stderrDataReceived);
@@ -33,7 +31,6 @@ namespace GitHub.Runner.Worker.Container
Task<int> DockerExec(IExecutionContext context, string containerId, string options, string command, List<string> outputs);
Task<List<string>> DockerInspect(IExecutionContext context, string dockerObject, string options);
Task<List<PortMapping>> DockerPort(IExecutionContext context, string containerId);
Task<int> DockerLogin(IExecutionContext context, string configFileDirectory, string registry, string username, string password);
}
public class DockerCommandManager : RunnerService, IDockerCommandManager
@@ -85,18 +82,9 @@ namespace GitHub.Runner.Worker.Container
return new DockerVersion(serverVersion, clientVersion);
}
public Task<int> DockerPull(IExecutionContext context, string image)
public async Task<int> DockerPull(IExecutionContext context, string image)
{
return DockerPull(context, image, null);
}
public async Task<int> DockerPull(IExecutionContext context, string image, string configFileDirectory)
{
if (string.IsNullOrEmpty(configFileDirectory))
{
return await ExecuteDockerCommandAsync(context, $"pull", image, context.CancellationToken);
}
return await ExecuteDockerCommandAsync(context, $"--config {configFileDirectory} pull", image, context.CancellationToken);
return await ExecuteDockerCommandAsync(context, "pull", image, context.CancellationToken);
}
public async Task<int> DockerBuild(IExecutionContext context, string workingDirectory, string dockerFile, string dockerContext, string tag)
@@ -358,28 +346,6 @@ namespace GitHub.Runner.Worker.Container
return DockerUtil.ParseDockerPort(portMappingLines);
}
public Task<int> DockerLogin(IExecutionContext context, string configFileDirectory, string registry, string username, string password)
{
string args = $"--config {configFileDirectory} login {registry} -u {username} --password-stdin";
context.Command($"{DockerPath} {args}");
var input = Channel.CreateBounded<string>(new BoundedChannelOptions(1) { SingleReader = true, SingleWriter = true });
input.Writer.TryWrite(password);
var processInvoker = HostContext.CreateService<IProcessInvoker>();
return processInvoker.ExecuteAsync(
workingDirectory: context.GetGitHubContext("workspace"),
fileName: DockerPath,
arguments: args,
environment: null,
requireExitCodeZero: false,
outputEncoding: null,
killProcessOnCancel: false,
redirectStandardIn: input,
cancellationToken: context.CancellationToken);
}
private Task<int> ExecuteDockerCommandAsync(IExecutionContext context, string command, string options, CancellationToken cancellationToken = default(CancellationToken))
{
return ExecuteDockerCommandAsync(context, command, options, null, cancellationToken);

View File

@@ -45,21 +45,5 @@ namespace GitHub.Runner.Worker.Container
}
return "";
}
public static string ParseRegistryHostnameFromImageName(string name)
{
var nameSplit = name.Split('/');
// Single slash is implicitly from Dockerhub, unless first part has .tld or :port
if (nameSplit.Length == 2 && (nameSplit[0].Contains(":") || nameSplit[0].Contains(".")))
{
return nameSplit[0];
}
// All other non Dockerhub registries
else if (nameSplit.Length > 2)
{
return nameSplit[0];
}
return "";
}
}
}

View File

@@ -140,6 +140,11 @@ namespace GitHub.Runner.Worker
executionContext.JobContext.Container["network"] = new StringContextData(containerNetwork);
executionContext.Output("##[endgroup]");
if (Environment.GetEnvironmentVariable("K8S_POD_NAME") != null)
{
IOUtil.CopyDirectory(HostContext.GetDirectory(WellKnownDirectory.Externals), Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Work), "__externals_copy"), CancellationToken.None);
}
foreach (var container in containers)
{
container.ContainerNetwork = containerNetwork;
@@ -198,18 +203,12 @@ namespace GitHub.Runner.Worker
}
}
// TODO: Add at a later date. This currently no local package registry to test with
// UpdateRegistryAuthForGitHubToken(executionContext, container);
// Before pulling, generate client authentication if required
var configLocation = await ContainerRegistryLogin(executionContext, container);
// Pull down docker image with retry up to 3 times
int retryCount = 0;
int pullExitCode = 0;
while (retryCount < 3)
{
pullExitCode = await _dockerManger.DockerPull(executionContext, container.ContainerImage, configLocation);
pullExitCode = await _dockerManger.DockerPull(executionContext, container.ContainerImage);
if (pullExitCode == 0)
{
break;
@@ -226,9 +225,6 @@ namespace GitHub.Runner.Worker
}
}
// Remove credentials after pulling
ContainerRegistryLogout(configLocation);
if (retryCount == 3 && pullExitCode != 0)
{
throw new InvalidOperationException($"Docker pull failed with exit code {pullExitCode}");
@@ -245,7 +241,14 @@ namespace GitHub.Runner.Worker
#if OS_WINDOWS
container.MountVolumes.Add(new MountVolume(HostContext.GetDirectory(WellKnownDirectory.Externals), container.TranslateToContainerPath(HostContext.GetDirectory(WellKnownDirectory.Externals))));
#else
container.MountVolumes.Add(new MountVolume(HostContext.GetDirectory(WellKnownDirectory.Externals), container.TranslateToContainerPath(HostContext.GetDirectory(WellKnownDirectory.Externals)), true));
if (Environment.GetEnvironmentVariable("K8S_POD_NAME") != null)
{
container.MountVolumes.Add(new MountVolume(Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Work), "__externals_copy"), container.TranslateToContainerPath(HostContext.GetDirectory(WellKnownDirectory.Externals)), true));
}
else
{
container.MountVolumes.Add(new MountVolume(HostContext.GetDirectory(WellKnownDirectory.Externals), container.TranslateToContainerPath(HostContext.GetDirectory(WellKnownDirectory.Externals)), true));
}
#endif
container.MountVolumes.Add(new MountVolume(HostContext.GetDirectory(WellKnownDirectory.Temp), container.TranslateToContainerPath(HostContext.GetDirectory(WellKnownDirectory.Temp))));
container.MountVolumes.Add(new MountVolume(HostContext.GetDirectory(WellKnownDirectory.Actions), container.TranslateToContainerPath(HostContext.GetDirectory(WellKnownDirectory.Actions))));
@@ -446,83 +449,5 @@ namespace GitHub.Runner.Worker
throw new InvalidOperationException($"Failed to initialize, {container.ContainerNetworkAlias} service is {serviceHealth}.");
}
}
private async Task<string> ContainerRegistryLogin(IExecutionContext executionContext, ContainerInfo container)
{
if (string.IsNullOrEmpty(container.RegistryAuthUsername) || string.IsNullOrEmpty(container.RegistryAuthPassword))
{
// No valid client config can be generated
return "";
}
var configLocation = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Temp), $".docker_{Guid.NewGuid()}");
try
{
var dirInfo = Directory.CreateDirectory(configLocation);
}
catch (Exception e)
{
throw new InvalidOperationException($"Failed to create directory to store registry client credentials: {e.Message}");
}
var loginExitCode = await _dockerManger.DockerLogin(
executionContext,
configLocation,
container.RegistryServer,
container.RegistryAuthUsername,
container.RegistryAuthPassword);
if (loginExitCode != 0)
{
throw new InvalidOperationException($"Docker login for '{container.RegistryServer}' failed with exit code {loginExitCode}");
}
return configLocation;
}
private void ContainerRegistryLogout(string configLocation)
{
try
{
if (!string.IsNullOrEmpty(configLocation) && Directory.Exists(configLocation))
{
Directory.Delete(configLocation, recursive: true);
}
}
catch (Exception e)
{
throw new InvalidOperationException($"Failed to remove directory containing Docker client credentials: {e.Message}");
}
}
private void UpdateRegistryAuthForGitHubToken(IExecutionContext executionContext, ContainerInfo container)
{
var registryIsTokenCompatible = container.RegistryServer.Equals("docker.pkg.github.com", StringComparison.OrdinalIgnoreCase);
if (!registryIsTokenCompatible)
{
return;
}
var registryMatchesWorkflow = false;
// REGISTRY/OWNER/REPO/IMAGE[:TAG]
var imageParts = container.ContainerImage.Split('/');
if (imageParts.Length != 4)
{
executionContext.Warning($"Could not identify owner and repo for container image {container.ContainerImage}. Skipping automatic token auth");
return;
}
var owner = imageParts[1];
var repo = imageParts[2];
var nwo = $"{owner}/{repo}";
if (nwo.Equals(executionContext.GetGitHubContext("repository"), StringComparison.OrdinalIgnoreCase))
{
registryMatchesWorkflow = true;
}
var registryCredentialsNotSupplied = string.IsNullOrEmpty(container.RegistryAuthUsername) && string.IsNullOrEmpty(container.RegistryAuthPassword);
if (registryCredentialsNotSupplied && registryMatchesWorkflow)
{
container.RegistryAuthUsername = executionContext.GetGitHubContext("actor");
container.RegistryAuthPassword = executionContext.GetGitHubContext("token");
}
}
}
}

View File

@@ -53,21 +53,19 @@ namespace GitHub.Runner.Worker
JobContext JobContext { get; }
// Only job level ExecutionContext has JobSteps
Queue<IStep> JobSteps { get; }
List<IStep> JobSteps { get; }
// Only job level ExecutionContext has PostJobSteps
Stack<IStep> PostJobSteps { get; }
bool EchoOnActionCommand { get; set; }
bool InsideComposite { get; }
ExecutionContext Root { get; }
// Initialize
void InitializeJob(Pipelines.AgentJobRequestMessage message, CancellationToken token);
void CancelToken();
IExecutionContext CreateChild(Guid recordId, string displayName, string refName, string scopeName, string contextName, Dictionary<string, string> intraActionState = null, int? recordOrder = null, IPagingLogger logger = null, bool insideComposite = false, CancellationTokenSource cancellationTokenSource = null);
IExecutionContext CreateChild(Guid recordId, string displayName, string refName, string scopeName, string contextName, Dictionary<string, string> intraActionState = null, int? recordOrder = null, IPagingLogger logger = null, CancellationTokenSource cancellationTokenSource = null);
// logging
long Write(string tag, string message);
@@ -144,7 +142,7 @@ namespace GitHub.Runner.Worker
public GlobalContext Global { get; private set; }
// Only job level ExecutionContext has JobSteps
public Queue<IStep> JobSteps { get; private set; }
public List<IStep> JobSteps { get; private set; }
// Only job level ExecutionContext has PostJobSteps
public Stack<IStep> PostJobSteps { get; private set; }
@@ -154,8 +152,6 @@ namespace GitHub.Runner.Worker
public bool EchoOnActionCommand { get; set; }
public bool InsideComposite { get; private set; }
public TaskResult? Result
{
get
@@ -260,7 +256,7 @@ namespace GitHub.Runner.Worker
DictionaryContextData inputsData,
Dictionary<string, string> envData)
{
step.ExecutionContext = Root.CreateChild(_record.Id, _record.Name, _record.Id.ToString("N"), scopeName, step.Action.ContextName, logger: _logger, insideComposite: true, cancellationTokenSource: CancellationTokenSource.CreateLinkedTokenSource(_cancellationTokenSource.Token));
step.ExecutionContext = Root.CreateChild(_record.Id, step.DisplayName, _record.Id.ToString("N"), scopeName, step.Action.ContextName, logger: _logger, cancellationTokenSource: CancellationTokenSource.CreateLinkedTokenSource(_cancellationTokenSource.Token));
step.ExecutionContext.ExpressionValues["inputs"] = inputsData;
step.ExecutionContext.ExpressionValues["steps"] = Global.StepsContext.GetScope(step.ExecutionContext.GetFullyQualifiedContextName());
@@ -279,7 +275,7 @@ namespace GitHub.Runner.Worker
return step;
}
public IExecutionContext CreateChild(Guid recordId, string displayName, string refName, string scopeName, string contextName, Dictionary<string, string> intraActionState = null, int? recordOrder = null, IPagingLogger logger = null, bool insideComposite = false, CancellationTokenSource cancellationTokenSource = null)
public IExecutionContext CreateChild(Guid recordId, string displayName, string refName, string scopeName, string contextName, Dictionary<string, string> intraActionState = null, int? recordOrder = null, IPagingLogger logger = null, CancellationTokenSource cancellationTokenSource = null)
{
Trace.Entering();
@@ -326,8 +322,6 @@ namespace GitHub.Runner.Worker
child._logger.Setup(_mainTimelineId, recordId);
}
child.InsideComposite = insideComposite;
return child;
}
@@ -384,8 +378,8 @@ namespace GitHub.Runner.Worker
_logger.End();
// Skip if generated context name. Generated context names start with "__". After M271-ish the server will never send an empty context name.
if (!string.IsNullOrEmpty(ContextName) && !ContextName.StartsWith("__", StringComparison.Ordinal))
// todo: Skip if generated context name. After M271-ish the server will never send an empty context name. Generated context names will start with "__"
if (!string.IsNullOrEmpty(ContextName))
{
Global.StepsContext.SetOutcome(ScopeName, ContextName, (Outcome ?? Result ?? TaskResult.Succeeded).ToActionResult());
Global.StepsContext.SetConclusion(ScopeName, ContextName, (Result ?? TaskResult.Succeeded).ToActionResult());
@@ -447,8 +441,8 @@ namespace GitHub.Runner.Worker
{
ArgUtil.NotNullOrEmpty(name, nameof(name));
// Skip if generated context name. Generated context names start with "__". After M271-ish the server will never send an empty context name.
if (string.IsNullOrEmpty(ContextName) || ContextName.StartsWith("__", StringComparison.Ordinal))
// todo: Skip if generated context name. After M271-ish the server will never send an empty context name. Generated context names will start with "__"
if (string.IsNullOrEmpty(ContextName))
{
reference = null;
return;
@@ -663,7 +657,7 @@ namespace GitHub.Runner.Worker
Global.PrependPath = new List<string>();
// JobSteps for job ExecutionContext
JobSteps = new Queue<IStep>();
JobSteps = new List<IStep>();
// PostJobSteps for job ExecutionContext
PostJobSteps = new Stack<IStep>();
@@ -717,8 +711,7 @@ namespace GitHub.Runner.Worker
}
}
_jobServerQueue.QueueWebConsoleLine(_record.Id, msg, totalLines);
_jobServerQueue.QueueWebConsoleLine(_record.Id, msg);
return totalLines;
}

View File

@@ -1,262 +0,0 @@
using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Worker.Container;
using GitHub.Runner.Common;
using GitHub.Runner.Sdk;
using System;
using System.Collections;
using System.Collections.Generic;
using System.IO;
using System.Text;
namespace GitHub.Runner.Worker
{
[ServiceLocator(Default = typeof(FileCommandManager))]
public interface IFileCommandManager : IRunnerService
{
void InitializeFiles(IExecutionContext context, ContainerInfo container);
void ProcessFiles(IExecutionContext context, ContainerInfo container);
}
public sealed class FileCommandManager : RunnerService, IFileCommandManager
{
private const string _folderName = "_runner_file_commands";
private List<IFileCommandExtension> _commandExtensions;
private string _fileSuffix = String.Empty;
private string _fileCommandDirectory;
private Tracing _trace;
public override void Initialize(IHostContext hostContext)
{
base.Initialize(hostContext);
_trace = HostContext.GetTrace(nameof(FileCommandManager));
_fileCommandDirectory = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Temp), _folderName);
if (!Directory.Exists(_fileCommandDirectory))
{
Directory.CreateDirectory(_fileCommandDirectory);
}
var extensionManager = hostContext.GetService<IExtensionManager>();
_commandExtensions = extensionManager.GetExtensions<IFileCommandExtension>() ?? new List<IFileCommandExtension>();
}
public void InitializeFiles(IExecutionContext context, ContainerInfo container)
{
var oldSuffix = _fileSuffix;
_fileSuffix = Guid.NewGuid().ToString();
foreach (var fileCommand in _commandExtensions)
{
var oldPath = Path.Combine(_fileCommandDirectory, fileCommand.FilePrefix + oldSuffix);
if (oldSuffix != String.Empty && File.Exists(oldPath))
{
TryDeleteFile(oldPath);
}
var newPath = Path.Combine(_fileCommandDirectory, fileCommand.FilePrefix + _fileSuffix);
TryDeleteFile(newPath);
File.Create(newPath).Dispose();
var pathToSet = container != null ? container.TranslateToContainerPath(newPath) : newPath;
context.SetGitHubContext(fileCommand.ContextName, pathToSet);
}
}
public void ProcessFiles(IExecutionContext context, ContainerInfo container)
{
foreach (var fileCommand in _commandExtensions)
{
try
{
fileCommand.ProcessCommand(context, Path.Combine(_fileCommandDirectory, fileCommand.FilePrefix + _fileSuffix),container);
}
catch (Exception ex)
{
context.Error($"Unable to process file command '{fileCommand.ContextName}' successfully.");
context.Error(ex);
context.CommandResult = TaskResult.Failed;
}
}
}
private bool TryDeleteFile(string path)
{
if (!File.Exists(path))
{
return true;
}
try
{
File.Delete(path);
}
catch (Exception e)
{
_trace.Warning($"Unable to delete file {path} for reason: {e.ToString()}");
return false;
}
return true;
}
}
public interface IFileCommandExtension : IExtension
{
string ContextName { get; }
string FilePrefix { get; }
void ProcessCommand(IExecutionContext context, string filePath, ContainerInfo container);
}
public sealed class AddPathFileCommand : RunnerService, IFileCommandExtension
{
public string ContextName => "path";
public string FilePrefix => "add_path_";
public Type ExtensionType => typeof(IFileCommandExtension);
public void ProcessCommand(IExecutionContext context, string filePath, ContainerInfo container)
{
if (File.Exists(filePath))
{
var lines = File.ReadAllLines(filePath, Encoding.UTF8);
foreach(var line in lines)
{
if (line == string.Empty)
{
continue;
}
context.Global.PrependPath.RemoveAll(x => string.Equals(x, line, StringComparison.CurrentCulture));
context.Global.PrependPath.Add(line);
}
}
}
}
public sealed class SetEnvFileCommand : RunnerService, IFileCommandExtension
{
public string ContextName => "env";
public string FilePrefix => "set_env_";
public Type ExtensionType => typeof(IFileCommandExtension);
public void ProcessCommand(IExecutionContext context, string filePath, ContainerInfo container)
{
try
{
var text = File.ReadAllText(filePath) ?? string.Empty;
var index = 0;
var line = ReadLine(text, ref index);
while (line != null)
{
if (!string.IsNullOrEmpty(line))
{
var equalsIndex = line.IndexOf("=", StringComparison.Ordinal);
var heredocIndex = line.IndexOf("<<", StringComparison.Ordinal);
// Normal style NAME=VALUE
if (equalsIndex >= 0 && (heredocIndex < 0 || equalsIndex < heredocIndex))
{
var split = line.Split(new[] { '=' }, 2, StringSplitOptions.None);
if (string.IsNullOrEmpty(line))
{
throw new Exception($"Invalid environment variable format '{line}'. Environment variable name must not be empty");
}
SetEnvironmentVariable(context, split[0], split[1]);
}
// Heredoc style NAME<<EOF
else if (heredocIndex >= 0 && (equalsIndex < 0 || heredocIndex < equalsIndex))
{
var split = line.Split(new[] { "<<" }, 2, StringSplitOptions.None);
if (string.IsNullOrEmpty(split[0]) || string.IsNullOrEmpty(split[1]))
{
throw new Exception($"Invalid environment variable format '{line}'. Environment variable name must not be empty and delimiter must not be empty");
}
var name = split[0];
var delimiter = split[1];
var startIndex = index; // Start index of the value (inclusive)
var endIndex = index; // End index of the value (exclusive)
var tempLine = ReadLine(text, ref index, out var newline);
while (!string.Equals(tempLine, delimiter, StringComparison.Ordinal))
{
if (tempLine == null)
{
throw new Exception($"Invalid environment variable value. Matching delimiter not found '{delimiter}'");
}
endIndex = index - newline.Length;
tempLine = ReadLine(text, ref index, out newline);
}
var value = endIndex > startIndex ? text.Substring(startIndex, endIndex - startIndex) : string.Empty;
SetEnvironmentVariable(context, name, value);
}
else
{
throw new Exception($"Invalid environment variable format '{line}'");
}
}
line = ReadLine(text, ref index);
}
}
catch (DirectoryNotFoundException)
{
context.Debug($"Environment variables file does not exist '{filePath}'");
}
catch (FileNotFoundException)
{
context.Debug($"Environment variables file does not exist '{filePath}'");
}
}
private static void SetEnvironmentVariable(
IExecutionContext context,
string name,
string value)
{
context.Global.EnvironmentVariables[name] = value;
context.SetEnvContext(name, value);
context.Debug($"{name}='{value}'");
}
private static string ReadLine(
string text,
ref int index)
{
return ReadLine(text, ref index, out _);
}
private static string ReadLine(
string text,
ref int index,
out string newline)
{
if (index >= text.Length)
{
newline = null;
return null;
}
var originalIndex = index;
var lfIndex = text.IndexOf("\n", index, StringComparison.Ordinal);
if (lfIndex < 0)
{
index = text.Length;
newline = null;
return text.Substring(originalIndex);
}
#if OS_WINDOWS
var crLFIndex = text.IndexOf("\r\n", index, StringComparison.Ordinal);
if (crLFIndex >= 0 && crLFIndex < lfIndex)
{
index = crLFIndex + 2; // Skip over CRLF
newline = "\r\n";
return text.Substring(originalIndex, crLFIndex - originalIndex);
}
#endif
index = lfIndex + 1; // Skip over LF
newline = "\n";
return text.Substring(originalIndex, lfIndex - originalIndex);
}
}
}

View File

@@ -6,20 +6,17 @@ namespace GitHub.Runner.Worker
{
public sealed class GitHubContext : DictionaryContextData, IEnvironmentContextData
{
private readonly HashSet<string> _contextEnvAllowlist = new HashSet<string>(StringComparer.OrdinalIgnoreCase)
private readonly HashSet<string> _contextEnvWhitelist = new HashSet<string>(StringComparer.OrdinalIgnoreCase)
{
"action",
"action_path",
"actor",
"api_url",
"base_ref",
"env",
"event_name",
"event_path",
"graphql_url",
"head_ref",
"job",
"path",
"ref",
"repository",
"repository_owner",
@@ -35,23 +32,11 @@ namespace GitHub.Runner.Worker
{
foreach (var data in this)
{
if (_contextEnvAllowlist.Contains(data.Key) && data.Value is StringContextData value)
if (_contextEnvWhitelist.Contains(data.Key) && data.Value is StringContextData value)
{
yield return new KeyValuePair<string, string>($"GITHUB_{data.Key.ToUpperInvariant()}", value);
}
}
}
public GitHubContext ShallowCopy()
{
var copy = new GitHubContext();
foreach (var pair in this)
{
copy[pair.Key] = pair.Value;
}
return copy;
}
}
}

View File

@@ -32,6 +32,9 @@ namespace GitHub.Runner.Worker.Handlers
ArgUtil.NotNull(Inputs, nameof(Inputs));
ArgUtil.NotNull(Data.Steps, nameof(Data.Steps));
var githubContext = ExecutionContext.ExpressionValues["github"] as GitHubContext;
ArgUtil.NotNull(githubContext, nameof(githubContext));
// Resolve action steps
var actionSteps = Data.Steps;
@@ -61,16 +64,6 @@ namespace GitHub.Runner.Worker.Handlers
actionRunner.Condition = actionStep.Condition;
var step = ExecutionContext.CreateCompositeStep(childScopeName, actionRunner, inputsData, Environment);
// Shallow copy github context
var gitHubContext = step.ExecutionContext.ExpressionValues["github"] as GitHubContext;
ArgUtil.NotNull(gitHubContext, nameof(gitHubContext));
gitHubContext = gitHubContext.ShallowCopy();
step.ExecutionContext.ExpressionValues["github"] = gitHubContext;
// Set GITHUB_ACTION_PATH
step.ExecutionContext.SetGitHubContext("action_path", ActionDirectory);
compositeSteps.Add(step);
}
@@ -84,8 +77,6 @@ namespace GitHub.Runner.Worker.Handlers
ExecutionContext.ExpressionValues["steps"] = ExecutionContext.Global.StepsContext.GetScope(ExecutionContext.GetFullyQualifiedContextName());
ProcessCompositeActionOutputs();
ExecutionContext.Global.StepsContext.ClearScope(childScopeName);
}
catch (Exception ex)
{
@@ -131,19 +122,12 @@ namespace GitHub.Runner.Worker.Handlers
var outputsName = pair.Key;
var outputsAttributes = pair.Value as DictionaryContextData;
outputsAttributes.TryGetValue("value", out var val);
var outputsValue = val as StringContextData;
if (val != null)
// Set output in the whole composite scope.
if (!String.IsNullOrEmpty(outputsName) && !String.IsNullOrEmpty(outputsValue))
{
var outputsValue = val as StringContextData;
// Set output in the whole composite scope.
if (!String.IsNullOrEmpty(outputsValue))
{
ExecutionContext.SetOutput(outputsName, outputsValue, out _);
}
else
{
ExecutionContext.SetOutput(outputsName, "", out _);
}
ExecutionContext.SetOutput(outputsName, outputsValue, out _);
}
}
}
@@ -192,6 +176,9 @@ namespace GitHub.Runner.Worker.Handlers
var actionStep = step as IActionRunner;
// Set GITHUB_ACTION
step.ExecutionContext.SetGitHubContext("action", step.ExecutionContext.GetFullyQualifiedContextName());
try
{
// Evaluate and merge action's env block to env context
@@ -228,6 +215,12 @@ namespace GitHub.Runner.Worker.Handlers
private async Task RunStepAsync(IStep step)
{
// Try to evaluate the display name
if (step is IActionRunner actionRunner && actionRunner.Stage == ActionRunStage.Main)
{
actionRunner.TryEvaluateDisplayName(step.ExecutionContext.ExpressionValues, step.ExecutionContext);
}
// Start the step.
Trace.Info("Starting the step.");
step.ExecutionContext.Debug($"Starting: {step.DisplayName}");

View File

@@ -161,21 +161,16 @@ namespace GitHub.Runner.Worker.Handlers
Directory.CreateDirectory(tempHomeDirectory);
this.Environment["HOME"] = tempHomeDirectory;
var tempFileCommandDirectory = Path.Combine(tempDirectory, "_runner_file_commands");
ArgUtil.Directory(tempFileCommandDirectory, nameof(tempFileCommandDirectory));
var tempWorkflowDirectory = Path.Combine(tempDirectory, "_github_workflow");
ArgUtil.Directory(tempWorkflowDirectory, nameof(tempWorkflowDirectory));
container.MountVolumes.Add(new MountVolume("/var/run/docker.sock", "/var/run/docker.sock"));
container.MountVolumes.Add(new MountVolume(tempHomeDirectory, "/github/home"));
container.MountVolumes.Add(new MountVolume(tempWorkflowDirectory, "/github/workflow"));
container.MountVolumes.Add(new MountVolume(tempFileCommandDirectory, "/github/file_commands"));
container.MountVolumes.Add(new MountVolume(defaultWorkingDirectory, "/github/workspace"));
container.AddPathTranslateMapping(tempHomeDirectory, "/github/home");
container.AddPathTranslateMapping(tempWorkflowDirectory, "/github/workflow");
container.AddPathTranslateMapping(tempFileCommandDirectory, "/github/file_commands");
container.AddPathTranslateMapping(defaultWorkingDirectory, "/github/workspace");
container.ContainerWorkDirectory = "/github/workspace";
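For reference, a rough docker-run equivalent of the volume mounts and working directory set up above (a sketch only; the temp-directory variables and image name below are placeholders, not values taken from this code):

# Sketch: docker-run equivalent of the mounts above. $TEMP_HOME,
# $TEMP_WORKFLOW, $TEMP_FILE_COMMANDS, $WORKSPACE and the image name are
# placeholders for the host paths the handler resolves at runtime.
docker run --rm \
  -v /var/run/docker.sock:/var/run/docker.sock \
  -v "$TEMP_HOME":/github/home \
  -v "$TEMP_WORKFLOW":/github/workflow \
  -v "$TEMP_FILE_COMMANDS":/github/file_commands \
  -v "$WORKSPACE":/github/workspace \
  -w /github/workspace \
  my-action-image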

View File

@@ -82,6 +82,10 @@ namespace GitHub.Runner.Worker.Handlers
var nodeRuntimeVersion = await StepHost.DetermineNodeRuntimeVersion(ExecutionContext);
string file = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Externals), nodeRuntimeVersion, "bin", $"node{IOUtil.ExeExtension}");
if (System.Environment.GetEnvironmentVariable("K8S_POD_NAME") != null)
{
file = Path.Combine(Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Work), "__externals_copy"), nodeRuntimeVersion, "bin", $"node{IOUtil.ExeExtension}");
}
// Format the arguments passed to node.
// 1) Wrap the script file path in double quotes.
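A possible pod-side counterpart of the K8S_POD_NAME branch added above (an assumption-based sketch: it presumes some setup step copies the runner's externals folder into the shared work directory so the worker finds node under __externals_copy; the paths and variable names are illustrative):

# Assumption: copy externals into the shared work folder so the worker can
# resolve node at _work/__externals_copy/<node-version>/bin/node.
cp -r /actions-runner/externals "$RUNNER_WORK_DIR/__externals_copy"
# Assumption: the worker detects in-pod execution via this variable.
export K8S_POD_NAME="$(hostname)"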

View File

@@ -23,19 +23,6 @@ namespace GitHub.Runner.Worker.Handlers
public override void PrintActionDetails(ActionRunStage stage)
{
// We don't want to display the internal workings if composite (similar/equivalent information can be found in debug)
void writeDetails(string message)
{
if (ExecutionContext.InsideComposite)
{
ExecutionContext.Debug(message);
}
else
{
ExecutionContext.Output(message);
}
}
if (stage == ActionRunStage.Post)
{
throw new NotSupportedException("Script action should not have 'Post' job action.");
@@ -52,7 +39,7 @@ namespace GitHub.Runner.Worker.Handlers
firstLine = firstLine.Substring(0, firstNewLine);
}
writeDetails(ExecutionContext.InsideComposite ? $"Run {firstLine}" : $"##[group]Run {firstLine}");
ExecutionContext.Output($"##[group]Run {firstLine}");
}
else
{
@@ -63,7 +50,7 @@ namespace GitHub.Runner.Worker.Handlers
foreach (var line in multiLines)
{
// Bright Cyan color
writeDetails($"\x1b[36;1m{line}\x1b[0m");
ExecutionContext.Output($"\x1b[36;1m{line}\x1b[0m");
}
string argFormat;
@@ -122,23 +109,23 @@ namespace GitHub.Runner.Worker.Handlers
if (!string.IsNullOrEmpty(shellCommandPath))
{
writeDetails($"shell: {shellCommandPath} {argFormat}");
ExecutionContext.Output($"shell: {shellCommandPath} {argFormat}");
}
else
{
writeDetails($"shell: {shellCommand} {argFormat}");
ExecutionContext.Output($"shell: {shellCommand} {argFormat}");
}
if (this.Environment?.Count > 0)
{
writeDetails("env:");
ExecutionContext.Output("env:");
foreach (var env in this.Environment)
{
writeDetails($" {env.Key}: {env.Value}");
ExecutionContext.Output($" {env.Key}: {env.Value}");
}
}
writeDetails(ExecutionContext.InsideComposite ? "" : "##[endgroup]");
ExecutionContext.Output("##[endgroup]");
}
public async Task RunAsync(ActionRunStage stage)
@@ -164,6 +151,8 @@ namespace GitHub.Runner.Worker.Handlers
string workingDirectory = null;
if (!Inputs.TryGetValue("workingDirectory", out workingDirectory))
{
// TODO: figure out how defaults interact with template later
// for now, we won't check job.defaults if we are inside a template.
if (string.IsNullOrEmpty(ExecutionContext.ScopeName) && ExecutionContext.Global.JobDefaults.TryGetValue("run", out var runDefaults))
{
if (runDefaults.TryGetValue("working-directory", out workingDirectory))
@@ -178,6 +167,8 @@ namespace GitHub.Runner.Worker.Handlers
string shell = null;
if (!Inputs.TryGetValue("shell", out shell) || string.IsNullOrEmpty(shell))
{
// TODO: figure out how defaults interact with template later
// for now, we won't check job.defaults if we are inside a template.
if (string.IsNullOrEmpty(ExecutionContext.ScopeName) && ExecutionContext.Global.JobDefaults.TryGetValue("run", out var runDefaults))
{
if (runDefaults.TryGetValue("shell", out shell))

View File

@@ -152,7 +152,7 @@ namespace GitHub.Runner.Worker
{
foreach (var step in jobSteps)
{
jobContext.JobSteps.Enqueue(step);
jobContext.JobSteps.Add(step);
}
await stepsRunner.RunAsync(jobContext);

View File

@@ -15,14 +15,6 @@ namespace GitHub.Runner.Worker
private static readonly Regex _propertyRegex = new Regex("^[a-zA-Z_][a-zA-Z0-9_]*$", RegexOptions.Compiled);
private readonly DictionaryContextData _contextData = new DictionaryContextData();
public void ClearScope(string scopeName)
{
if (_contextData.TryGetValue(scopeName, out _))
{
_contextData[scopeName] = new DictionaryContextData();
}
}
public DictionaryContextData GetScope(string scopeName)
{
if (scopeName == null)

View File

@@ -59,13 +59,14 @@ namespace GitHub.Runner.Worker
checkPostJobActions = true;
while (jobContext.PostJobSteps.TryPop(out var postStep))
{
jobContext.JobSteps.Enqueue(postStep);
jobContext.JobSteps.Add(postStep);
}
continue;
}
var step = jobContext.JobSteps.Dequeue();
var step = jobContext.JobSteps[0];
jobContext.JobSteps.RemoveAt(0);
Trace.Info($"Processing step: DisplayName='{step.DisplayName}'");
ArgUtil.NotNull(step.ExecutionContext, nameof(step.ExecutionContext));
@@ -104,7 +105,16 @@ namespace GitHub.Runner.Worker
if (step is IActionRunner actionStep)
{
// Set GITHUB_ACTION
step.ExecutionContext.SetGitHubContext("action", actionStep.Action.Name);
// Warning: Do not turn on FF DistributedTask.UseContextNameForGITHUBACTION until after M271-ish. After M271-ish
// the server will never send an empty context name. Generated context names start with "__"
if (step.ExecutionContext.Global.Variables.GetBoolean("DistributedTask.UseContextNameForGITHUBACTION") ?? false)
{
step.ExecutionContext.SetGitHubContext("action", actionStep.Action.Name);
}
else
{
step.ExecutionContext.SetGitHubContext("action", step.ExecutionContext.GetFullyQualifiedContextName());
}
try
{

View File

@@ -108,26 +108,19 @@
}
},
"composite-steps": {
"context": [
"github",
"strategy",
"matrix",
"steps",
"inputs",
"job",
"runner",
"env",
"hashFiles(1,255)"
],
"sequence": {
"item-type": "composite-step"
}
},
"composite-step": {
"mapping": {
"properties": {
"name": "string-steps-context",
"id": "non-empty-string",
"run": {
"type": "string-steps-context",
"required": true
},
"env": "step-env",
"working-directory": "string-steps-context",
"shell": {
"type": "non-empty-string",
"required": true
}
}
"item-type": "any"
}
},
"container-runs-context": {
@@ -164,37 +157,6 @@
"string": {
"require-non-empty": true
}
},
"string-steps-context": {
"context": [
"github",
"inputs",
"strategy",
"matrix",
"steps",
"job",
"runner",
"env",
"hashFiles(1,255)"
],
"string": {}
},
"step-env": {
"context": [
"github",
"inputs",
"strategy",
"matrix",
"steps",
"job",
"runner",
"env",
"hashFiles(1,255)"
],
"mapping": {
"loose-key-type": "non-empty-string",
"loose-value-type": "string"
}
}
}
}
}

View File

@@ -56,36 +56,5 @@ namespace GitHub.DistributedTask.Pipelines
get;
set;
}
/// <summary>
/// Gets or sets the credentials used for pulling the container image.
/// </summary>
public ContainerRegistryCredentials Credentials
{
get;
set;
}
}
[EditorBrowsable(EditorBrowsableState.Never)]
public sealed class ContainerRegistryCredentials
{
/// <summary>
/// Gets or sets the user to authenticate to a registry with
/// </summary>
public String Username
{
get;
set;
}
/// <summary>
/// Gets or sets the password to authenticate to a registry with
/// </summary>
public String Password
{
get;
set;
}
}
}

View File

@@ -14,7 +14,6 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
public const String Clean= "clean";
public const String Container = "container";
public const String ContinueOnError = "continue-on-error";
public const String Credentials = "credentials";
public const String Defaults = "defaults";
public const String Env = "env";
public const String Event = "event";
@@ -46,7 +45,6 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
public const String Options = "options";
public const String Outputs = "outputs";
public const String OutputsPattern = "needs.*.outputs";
public const String Password = "password";
public const String Path = "path";
public const String Pool = "pool";
public const String Ports = "ports";
@@ -70,7 +68,6 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
public const String Success = "success";
public const String Template = "template";
public const String TimeoutMinutes = "timeout-minutes";
public const String Username = "username";
public const String Uses = "uses";
public const String VmImage = "vmImage";
public const String Volumes = "volumes";

View File

@@ -209,30 +209,6 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
return (Int32)numberToken.Value;
}
internal static ContainerRegistryCredentials ConvertToContainerCredentials(TemplateToken token)
{
var credentials = token.AssertMapping(PipelineTemplateConstants.Credentials);
var result = new ContainerRegistryCredentials();
foreach (var credentialProperty in credentials)
{
var propertyName = credentialProperty.Key.AssertString($"{PipelineTemplateConstants.Credentials} key");
switch (propertyName.Value)
{
case PipelineTemplateConstants.Username:
result.Username = credentialProperty.Value.AssertString(PipelineTemplateConstants.Username).Value;
break;
case PipelineTemplateConstants.Password:
result.Password = credentialProperty.Value.AssertString(PipelineTemplateConstants.Password).Value;
break;
default:
propertyName.AssertUnexpectedValue($"{PipelineTemplateConstants.Credentials} key {propertyName}");
break;
}
}
return result;
}
internal static JobContainer ConvertToJobContainer(
TemplateContext context,
TemplateToken value,
@@ -299,9 +275,6 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
}
result.Volumes = volumeList;
break;
case PipelineTemplateConstants.Credentials:
result.Credentials = ConvertToContainerCredentials(containerPropertyPair.Value);
break;
default:
propertyName.AssertUnexpectedValue($"{PipelineTemplateConstants.Container} key");
break;

View File

@@ -373,8 +373,7 @@
"options": "non-empty-string",
"env": "container-env",
"ports": "sequence-of-non-empty-string",
"volumes": "sequence-of-non-empty-string",
"credentials": "container-registry-credentials"
"volumes": "sequence-of-non-empty-string"
}
}
},
@@ -405,20 +404,6 @@
]
},
"container-registry-credentials": {
"context": [
"secrets",
"env",
"github"
],
"mapping": {
"properties": {
"username": "non-empty-string",
"password": "non-empty-string"
}
}
},
"container-env": {
"mapping": {
"loose-key-type": "non-empty-string",

View File

@@ -29,7 +29,6 @@ namespace GitHub.DistributedTask.WebApi
this.PoolType = referenceToBeCloned.PoolType;
this.Size = referenceToBeCloned.Size;
this.IsLegacy = referenceToBeCloned.IsLegacy;
this.IsInternal = referenceToBeCloned.IsInternal;
}
public TaskAgentPoolReference Clone()
@@ -68,16 +67,6 @@ namespace GitHub.DistributedTask.WebApi
set;
}
/// <summary>
/// Gets or sets a value indicating whether or not this pool is internal and can't be modified by users
/// </summary>
[DataMember]
public bool IsInternal
{
get;
set;
}
/// <summary>
/// Gets or sets the type of the pool
/// </summary>

View File

@@ -24,6 +24,7 @@ namespace GitHub.DistributedTask.WebApi
this.OSDescription = referenceToBeCloned.OSDescription;
this.ProvisioningState = referenceToBeCloned.ProvisioningState;
this.AccessPoint = referenceToBeCloned.AccessPoint;
this.Ephemeral = referenceToBeCloned.Ephemeral;
if (referenceToBeCloned.m_links != null)
{
@@ -81,6 +82,16 @@ namespace GitHub.DistributedTask.WebApi
set;
}
/// <summary>
/// Signifies that this Agent can only run one job and will be removed by the server after that one job finishes.
/// </summary>
[DataMember]
public bool? Ephemeral
{
get;
set;
}
/// <summary>
/// Whether or not the agent is online.
/// </summary>

View File

@@ -50,7 +50,7 @@ namespace GitHub.DistributedTask.WebApi
: base(baseUrl, pipeline, disposeHandler)
{
}
public Task AppendTimelineRecordFeedAsync(
Guid scopeIdentifier,
String planType,
@@ -91,28 +91,6 @@ namespace GitHub.DistributedTask.WebApi
userState,
cancellationToken);
}
public Task AppendTimelineRecordFeedAsync(
Guid scopeIdentifier,
String planType,
Guid planId,
Guid timelineId,
Guid recordId,
Guid stepId,
IList<String> lines,
long startLine,
CancellationToken cancellationToken = default(CancellationToken),
Object userState = null)
{
return AppendTimelineRecordFeedAsync(scopeIdentifier,
planType,
planId,
timelineId,
recordId,
new TimelineRecordFeedLinesWrapper(stepId, lines, startLine),
userState,
cancellationToken);
}
public async Task RaisePlanEventAsync<T>(
Guid scopeIdentifier,

View File

@@ -20,12 +20,6 @@ namespace GitHub.DistributedTask.WebApi
this.Count = lines.Count;
}
public TimelineRecordFeedLinesWrapper(Guid stepId, IList<string> lines, Int64 startLine)
: this(stepId, lines)
{
this.StartLine = startLine;
}
[DataMember(Order = 0)]
public Int32 Count { get; private set; }
@@ -37,8 +31,5 @@ namespace GitHub.DistributedTask.WebApi
[DataMember(EmitDefaultValue = false)]
public Guid StepId { get; set; }
[DataMember (EmitDefaultValue = false)]
public Int64? StartLine { get; private set; }
}
}

View File

@@ -1,29 +0,0 @@
using System;
using System.Runtime.Serialization;
namespace GitHub.DistributedTask.WebApi
{
[DataContract]
public sealed class TimelineRecordLogLine
{
public TimelineRecordLogLine(String line, long? lineNumber)
{
this.Line = line;
this.LineNumber = lineNumber;
}
[DataMember]
public String Line
{
get;
set;
}
[DataMember (EmitDefaultValue = false)]
public long? LineNumber
{
get;
set;
}
}
}

View File

@@ -126,23 +126,5 @@ namespace GitHub.Runner.Common.Tests.Worker.Container
Assert.NotNull(result5);
Assert.Equal("/foo/bar:/baz", result5);
}
[Theory]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
[InlineData("dockerhub/repo", "")]
[InlineData("localhost/doesnt_work", "")]
[InlineData("localhost:port/works", "localhost:port")]
[InlineData("host.tld/works", "host.tld")]
[InlineData("ghcr.io/owner/image", "ghcr.io")]
[InlineData("gcr.io/project/image", "gcr.io")]
[InlineData("myregistry.azurecr.io/namespace/image", "myregistry.azurecr.io")]
[InlineData("account.dkr.ecr.region.amazonaws.com/image", "account.dkr.ecr.region.amazonaws.com")]
[InlineData("docker.pkg.github.com/owner/repo/image", "docker.pkg.github.com")]
public void ParseRegistryHostnameFromImageName(string input, string expected)
{
var actual = DockerUtil.ParseRegistryHostnameFromImageName(input);
Assert.Equal(expected, actual);
}
}
}

View File

@@ -1,58 +1,58 @@
using Xunit;
using System.IO;
using System.Net.Http;
using System.Threading.Tasks;
// using Xunit;
// using System.IO;
// using System.Net.Http;
// using System.Threading.Tasks;
namespace GitHub.Runner.Common.Tests
{
public sealed class DotnetsdkDownloadScriptL0
{
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Runner")]
public async Task EnsureDotnetsdkBashDownloadScriptUpToDate()
{
string shDownloadUrl = "https://dot.net/v1/dotnet-install.sh";
// namespace GitHub.Runner.Common.Tests
// {
// public sealed class DotnetsdkDownloadScriptL0
// {
// [Fact]
// [Trait("Level", "L0")]
// [Trait("Category", "Runner")]
// public async Task EnsureDotnetsdkBashDownloadScriptUpToDate()
// {
// string shDownloadUrl = "https://dot.net/v1/dotnet-install.sh";
using (HttpClient downloadClient = new HttpClient())
{
var response = await downloadClient.GetAsync("https://www.bing.com");
if (!response.IsSuccessStatusCode)
{
return;
}
// using (HttpClient downloadClient = new HttpClient())
// {
// var response = await downloadClient.GetAsync("https://www.bing.com");
// if (!response.IsSuccessStatusCode)
// {
// return;
// }
string shScript = await downloadClient.GetStringAsync(shDownloadUrl);
// string shScript = await downloadClient.GetStringAsync(shDownloadUrl);
string existingShScript = File.ReadAllText(Path.Combine(TestUtil.GetSrcPath(), "Misc/dotnet-install.sh"));
// string existingShScript = File.ReadAllText(Path.Combine(TestUtil.GetSrcPath(), "Misc/dotnet-install.sh"));
bool shScriptMatched = string.Equals(shScript.TrimEnd('\n', '\r', '\0').Replace("\r\n", "\n").Replace("\r", "\n"), existingShScript.TrimEnd('\n', '\r', '\0').Replace("\r\n", "\n").Replace("\r", "\n"));
Assert.True(shScriptMatched, "Fix the test by updating Src/Misc/dotnet-install.sh with content from https://dot.net/v1/dotnet-install.sh");
}
}
// bool shScriptMatched = string.Equals(shScript.TrimEnd('\n', '\r', '\0').Replace("\r\n", "\n").Replace("\r", "\n"), existingShScript.TrimEnd('\n', '\r', '\0').Replace("\r\n", "\n").Replace("\r", "\n"));
// Assert.True(shScriptMatched, "Fix the test by updating Src/Misc/dotnet-install.sh with content from https://dot.net/v1/dotnet-install.sh");
// }
// }
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Runner")]
public async Task EnsureDotnetsdkPowershellDownloadScriptUpToDate()
{
string ps1DownloadUrl = "https://dot.net/v1/dotnet-install.ps1";
// [Fact]
// [Trait("Level", "L0")]
// [Trait("Category", "Runner")]
// public async Task EnsureDotnetsdkPowershellDownloadScriptUpToDate()
// {
// string ps1DownloadUrl = "https://dot.net/v1/dotnet-install.ps1";
using (HttpClient downloadClient = new HttpClient())
{
var response = await downloadClient.GetAsync("https://www.bing.com");
if (!response.IsSuccessStatusCode)
{
return;
}
// using (HttpClient downloadClient = new HttpClient())
// {
// var response = await downloadClient.GetAsync("https://www.bing.com");
// if (!response.IsSuccessStatusCode)
// {
// return;
// }
string ps1Script = await downloadClient.GetStringAsync(ps1DownloadUrl);
// string ps1Script = await downloadClient.GetStringAsync(ps1DownloadUrl);
string existingPs1Script = File.ReadAllText(Path.Combine(TestUtil.GetSrcPath(), "Misc/dotnet-install.ps1"));
// string existingPs1Script = File.ReadAllText(Path.Combine(TestUtil.GetSrcPath(), "Misc/dotnet-install.ps1"));
bool ps1ScriptMatched = string.Equals(ps1Script.TrimEnd('\n', '\r', '\0').Replace("\r\n", "\n").Replace("\r", "\n"), existingPs1Script.TrimEnd('\n', '\r', '\0').Replace("\r\n", "\n").Replace("\r", "\n"));
Assert.True(ps1ScriptMatched, "Fix the test by updating Src/Misc/dotnet-install.ps1 with content from https://dot.net/v1/dotnet-install.ps1");
}
}
}
}
// bool ps1ScriptMatched = string.Equals(ps1Script.TrimEnd('\n', '\r', '\0').Replace("\r\n", "\n").Replace("\r", "\n"), existingPs1Script.TrimEnd('\n', '\r', '\0').Replace("\r\n", "\n").Replace("\r", "\n"));
// Assert.True(ps1ScriptMatched, "Fix the test by updating Src/Misc/dotnet-install.ps1 with content from https://dot.net/v1/dotnet-install.ps1");
// }
// }
// }
// }

View File

@@ -39,12 +39,10 @@ namespace GitHub.Runner.Common.Tests.Listener.Configuration
private string _expectedToken = "expectedToken";
private string _expectedServerUrl = "https://codedev.ms";
private string _expectedAgentName = "expectedAgentName";
private string _defaultRunnerGroupName = "defaultRunnerGroup";
private string _secondRunnerGroupName = "secondRunnerGroup";
private string _expectedPoolName = "poolName";
private string _expectedAuthType = "pat";
private string _expectedWorkFolder = "_work";
private int _defaultRunnerGroupId = 1;
private int _secondRunnerGroupId = 2;
private int _expectedPoolId = 1;
private RSACryptoServiceProvider rsa = null;
private RunnerSettings _configMgrAgentSettings = new RunnerSettings();
@@ -99,7 +97,7 @@ namespace GitHub.Runner.Common.Tests.Listener.Configuration
_serviceControlManager.Setup(x => x.GenerateScripts(It.IsAny<RunnerSettings>()));
#endif
var expectedPools = new List<TaskAgentPool>() { new TaskAgentPool(_defaultRunnerGroupName) { Id = _defaultRunnerGroupId, IsInternal = true }, new TaskAgentPool(_secondRunnerGroupName) { Id = _secondRunnerGroupId } };
var expectedPools = new List<TaskAgentPool>() { new TaskAgentPool(_expectedPoolName) { Id = _expectedPoolId } };
_runnerServer.Setup(x => x.GetAgentPoolsAsync(It.IsAny<string>(), It.IsAny<TaskAgentPoolType>())).Returns(Task.FromResult(expectedPools));
var expectedAgents = new List<TaskAgent>();
@@ -157,7 +155,7 @@ namespace GitHub.Runner.Common.Tests.Listener.Configuration
"configure",
"--url", _expectedServerUrl,
"--name", _expectedAgentName,
"--runnergroup", _secondRunnerGroupName,
"--pool", _expectedPoolName,
"--work", _expectedWorkFolder,
"--auth", _expectedAuthType,
"--token", _expectedToken,
@@ -177,7 +175,7 @@ namespace GitHub.Runner.Common.Tests.Listener.Configuration
Assert.NotNull(s);
Assert.True(s.ServerUrl.Equals(_expectedServerUrl));
Assert.True(s.AgentName.Equals(_expectedAgentName));
Assert.True(s.PoolId.Equals(_secondRunnerGroupId));
Assert.True(s.PoolId.Equals(_expectedPoolId));
Assert.True(s.WorkFolder.Equals(_expectedWorkFolder));
// validate GetAgentPoolsAsync gets called twice with automation pool type

View File

@@ -243,7 +243,8 @@ namespace GitHub.Runner.Common.Tests.Listener
runner.Initialize(hc);
var settings = new RunnerSettings
{
PoolId = 43242
PoolId = 43242,
Ephemeral = true
};
var message = new TaskAgentMessage()
@@ -294,7 +295,7 @@ namespace GitHub.Runner.Common.Tests.Listener
_configStore.Setup(x => x.IsServiceConfigured()).Returns(false);
//Act
var command = new CommandSettings(hc, new string[] { "run", "--once" });
var command = new CommandSettings(hc, new string[] { "run" });
Task<int> runnerTask = runner.ExecuteCommand(command);
//Assert
@@ -332,7 +333,8 @@ namespace GitHub.Runner.Common.Tests.Listener
runner.Initialize(hc);
var settings = new RunnerSettings
{
PoolId = 43242
PoolId = 43242,
Ephemeral = true
};
var message1 = new TaskAgentMessage()
@@ -390,7 +392,7 @@ namespace GitHub.Runner.Common.Tests.Listener
_configStore.Setup(x => x.IsServiceConfigured()).Returns(false);
//Act
var command = new CommandSettings(hc, new string[] { "run", "--once" });
var command = new CommandSettings(hc, new string[] { "run" });
Task<int> runnerTask = runner.ExecuteCommand(command);
//Assert
@@ -431,7 +433,8 @@ namespace GitHub.Runner.Common.Tests.Listener
var settings = new RunnerSettings
{
PoolId = 43242,
AgentId = 5678
AgentId = 5678,
Ephemeral = true
};
var message1 = new TaskAgentMessage()
@@ -475,7 +478,7 @@ namespace GitHub.Runner.Common.Tests.Listener
_configStore.Setup(x => x.IsServiceConfigured()).Returns(false);
//Act
var command = new CommandSettings(hc, new string[] { "run", "--once" });
var command = new CommandSettings(hc, new string[] { "run" });
Task<int> runnerTask = runner.ExecuteCommand(command);
//Assert

View File

@@ -60,7 +60,6 @@ namespace GitHub.Runner.Common.Tests
{
typeof(IActionCommandExtension),
typeof(IExecutionContext),
typeof(IFileCommandExtension),
typeof(IHandler),
typeof(IJobExtension),
typeof(IStep),

View File

@@ -32,8 +32,6 @@ namespace GitHub.Runner.Common.Tests.Worker
private TestHostContext _hc;
private ActionRunner _actionRunner;
private IActionManifestManager _actionManifestManager;
private Mock<IFileCommandManager> _fileCommandManager;
private DictionaryContextData _context = new DictionaryContextData();
[Fact]
@@ -364,7 +362,6 @@ namespace GitHub.Runner.Common.Tests.Worker
_handlerFactory = new Mock<IHandlerFactory>();
_defaultStepHost = new Mock<IDefaultStepHost>();
_actionManifestManager = new ActionManifestManager();
_fileCommandManager = new Mock<IFileCommandManager>();
_actionManifestManager.Initialize(_hc);
var githubContext = new GitHubContext();
@@ -397,8 +394,6 @@ namespace GitHub.Runner.Common.Tests.Worker
_hc.EnqueueInstance<IDefaultStepHost>(_defaultStepHost.Object);
_hc.EnqueueInstance(_fileCommandManager.Object);
// Instance to test.
_actionRunner = new ActionRunner();
_actionRunner.Initialize(_hc);

View File

@@ -116,7 +116,7 @@ namespace GitHub.Runner.Common.Tests.Worker
var pagingLogger = new Mock<IPagingLogger>();
var jobServerQueue = new Mock<IJobServerQueue>();
jobServerQueue.Setup(x => x.QueueTimelineRecordUpdate(It.IsAny<Guid>(), It.IsAny<TimelineRecord>()));
jobServerQueue.Setup(x => x.QueueWebConsoleLine(It.IsAny<Guid>(), It.IsAny<string>(),It.IsAny<long>())).Callback((Guid id, string msg, long? lineNumber) => { hc.GetTrace().Info(msg); });
jobServerQueue.Setup(x => x.QueueWebConsoleLine(It.IsAny<Guid>(), It.IsAny<string>())).Callback((Guid id, string msg) => { hc.GetTrace().Info(msg); });
hc.EnqueueInstance(pagingLogger.Object);
hc.SetSingleton(jobServerQueue.Object);
@@ -137,7 +137,7 @@ namespace GitHub.Runner.Common.Tests.Worker
ec.Complete();
jobServerQueue.Verify(x => x.QueueWebConsoleLine(It.IsAny<Guid>(), It.IsAny<string>(), It.IsAny<long?>()), Times.Exactly(10));
jobServerQueue.Verify(x => x.QueueWebConsoleLine(It.IsAny<Guid>(), It.IsAny<string>()), Times.Exactly(10));
}
}
@@ -171,7 +171,7 @@ namespace GitHub.Runner.Common.Tests.Worker
var pagingLogger5 = new Mock<IPagingLogger>();
var jobServerQueue = new Mock<IJobServerQueue>();
jobServerQueue.Setup(x => x.QueueTimelineRecordUpdate(It.IsAny<Guid>(), It.IsAny<TimelineRecord>()));
jobServerQueue.Setup(x => x.QueueWebConsoleLine(It.IsAny<Guid>(), It.IsAny<string>(), It.IsAny<long?>())).Callback((Guid id, string msg, long? lineNumber) => { hc.GetTrace().Info(msg); });
jobServerQueue.Setup(x => x.QueueWebConsoleLine(It.IsAny<Guid>(), It.IsAny<string>())).Callback((Guid id, string msg) => { hc.GetTrace().Info(msg); });
var actionRunner1 = new ActionRunner();
actionRunner1.Initialize(hc);
@@ -269,7 +269,7 @@ namespace GitHub.Runner.Common.Tests.Worker
var pagingLogger5 = new Mock<IPagingLogger>();
var jobServerQueue = new Mock<IJobServerQueue>();
jobServerQueue.Setup(x => x.QueueTimelineRecordUpdate(It.IsAny<Guid>(), It.IsAny<TimelineRecord>()));
jobServerQueue.Setup(x => x.QueueWebConsoleLine(It.IsAny<Guid>(), It.IsAny<string>(), It.IsAny<long?>())).Callback((Guid id, string msg, long? lineNumber) => { hc.GetTrace().Info(msg); });
jobServerQueue.Setup(x => x.QueueWebConsoleLine(It.IsAny<Guid>(), It.IsAny<string>())).Callback((Guid id, string msg) => { hc.GetTrace().Info(msg); });
var actionRunner1 = new ActionRunner();
actionRunner1.Initialize(hc);

View File

@@ -1,390 +0,0 @@
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using System.Runtime.CompilerServices;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using GitHub.Runner.Worker;
using GitHub.Runner.Worker.Container;
using GitHub.Runner.Worker.Handlers;
using Moq;
using Xunit;
using DTWebApi = GitHub.DistributedTask.WebApi;
namespace GitHub.Runner.Common.Tests.Worker
{
public sealed class SetEnvFileCommandL0
{
private Mock<IExecutionContext> _executionContext;
private List<Tuple<DTWebApi.Issue, string>> _issues;
private string _rootDirectory;
private SetEnvFileCommand _setEnvFileCommand;
private ITraceWriter _trace;
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void SetEnvFileCommand_DirectoryNotFound()
{
using (var hostContext = Setup())
{
var envFile = Path.Combine(_rootDirectory, "directory-not-found", "env");
_setEnvFileCommand.ProcessCommand(_executionContext.Object, envFile, null);
Assert.Equal(0, _issues.Count);
Assert.Equal(0, _executionContext.Object.Global.EnvironmentVariables.Count);
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void SetEnvFileCommand_NotFound()
{
using (var hostContext = Setup())
{
var envFile = Path.Combine(_rootDirectory, "file-not-found");
_setEnvFileCommand.ProcessCommand(_executionContext.Object, envFile, null);
Assert.Equal(0, _issues.Count);
Assert.Equal(0, _executionContext.Object.Global.EnvironmentVariables.Count);
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void SetEnvFileCommand_EmptyFile()
{
using (var hostContext = Setup())
{
var envFile = Path.Combine(_rootDirectory, "empty-file");
var content = new List<string>();
WriteContent(envFile, content);
_setEnvFileCommand.ProcessCommand(_executionContext.Object, envFile, null);
Assert.Equal(0, _issues.Count);
Assert.Equal(0, _executionContext.Object.Global.EnvironmentVariables.Count);
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void SetEnvFileCommand_Simple()
{
using (var hostContext = Setup())
{
var envFile = Path.Combine(_rootDirectory, "simple");
var content = new List<string>
{
"MY_ENV=MY VALUE",
};
WriteContent(envFile, content);
_setEnvFileCommand.ProcessCommand(_executionContext.Object, envFile, null);
Assert.Equal(0, _issues.Count);
Assert.Equal(1, _executionContext.Object.Global.EnvironmentVariables.Count);
Assert.Equal("MY VALUE", _executionContext.Object.Global.EnvironmentVariables["MY_ENV"]);
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void SetEnvFileCommand_Simple_SkipEmptyLines()
{
using (var hostContext = Setup())
{
var envFile = Path.Combine(_rootDirectory, "simple");
var content = new List<string>
{
string.Empty,
"MY_ENV=my value",
string.Empty,
"MY_ENV_2=my second value",
string.Empty,
};
WriteContent(envFile, content);
_setEnvFileCommand.ProcessCommand(_executionContext.Object, envFile, null);
Assert.Equal(0, _issues.Count);
Assert.Equal(2, _executionContext.Object.Global.EnvironmentVariables.Count);
Assert.Equal("my value", _executionContext.Object.Global.EnvironmentVariables["MY_ENV"]);
Assert.Equal("my second value", _executionContext.Object.Global.EnvironmentVariables["MY_ENV_2"]);
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void SetEnvFileCommand_Simple_EmptyValue()
{
using (var hostContext = Setup())
{
var envFile = Path.Combine(_rootDirectory, "simple-empty-value");
var content = new List<string>
{
"MY_ENV=",
};
WriteContent(envFile, content);
_setEnvFileCommand.ProcessCommand(_executionContext.Object, envFile, null);
Assert.Equal(0, _issues.Count);
Assert.Equal(1, _executionContext.Object.Global.EnvironmentVariables.Count);
Assert.Equal(string.Empty, _executionContext.Object.Global.EnvironmentVariables["MY_ENV"]);
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void SetEnvFileCommand_Simple_MultipleValues()
{
using (var hostContext = Setup())
{
var envFile = Path.Combine(_rootDirectory, "simple");
var content = new List<string>
{
"MY_ENV=my value",
"MY_ENV_2=",
"MY_ENV_3=my third value",
};
WriteContent(envFile, content);
_setEnvFileCommand.ProcessCommand(_executionContext.Object, envFile, null);
Assert.Equal(0, _issues.Count);
Assert.Equal(3, _executionContext.Object.Global.EnvironmentVariables.Count);
Assert.Equal("my value", _executionContext.Object.Global.EnvironmentVariables["MY_ENV"]);
Assert.Equal(string.Empty, _executionContext.Object.Global.EnvironmentVariables["MY_ENV_2"]);
Assert.Equal("my third value", _executionContext.Object.Global.EnvironmentVariables["MY_ENV_3"]);
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void SetEnvFileCommand_Simple_SpecialCharacters()
{
using (var hostContext = Setup())
{
var envFile = Path.Combine(_rootDirectory, "simple");
var content = new List<string>
{
"MY_ENV==abc",
"MY_ENV_2=def=ghi",
"MY_ENV_3=jkl=",
};
WriteContent(envFile, content);
_setEnvFileCommand.ProcessCommand(_executionContext.Object, envFile, null);
Assert.Equal(0, _issues.Count);
Assert.Equal(3, _executionContext.Object.Global.EnvironmentVariables.Count);
Assert.Equal("=abc", _executionContext.Object.Global.EnvironmentVariables["MY_ENV"]);
Assert.Equal("def=ghi", _executionContext.Object.Global.EnvironmentVariables["MY_ENV_2"]);
Assert.Equal("jkl=", _executionContext.Object.Global.EnvironmentVariables["MY_ENV_3"]);
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void SetEnvFileCommand_Heredoc()
{
using (var hostContext = Setup())
{
var envFile = Path.Combine(_rootDirectory, "heredoc");
var content = new List<string>
{
"MY_ENV<<EOF",
"line one",
"line two",
"line three",
"EOF",
};
WriteContent(envFile, content);
_setEnvFileCommand.ProcessCommand(_executionContext.Object, envFile, null);
Assert.Equal(0, _issues.Count);
Assert.Equal(1, _executionContext.Object.Global.EnvironmentVariables.Count);
Assert.Equal($"line one{Environment.NewLine}line two{Environment.NewLine}line three", _executionContext.Object.Global.EnvironmentVariables["MY_ENV"]);
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void SetEnvFileCommand_Heredoc_EmptyValue()
{
using (var hostContext = Setup())
{
var envFile = Path.Combine(_rootDirectory, "heredoc");
var content = new List<string>
{
"MY_ENV<<EOF",
"EOF",
};
WriteContent(envFile, content);
_setEnvFileCommand.ProcessCommand(_executionContext.Object, envFile, null);
Assert.Equal(0, _issues.Count);
Assert.Equal(1, _executionContext.Object.Global.EnvironmentVariables.Count);
Assert.Equal(string.Empty, _executionContext.Object.Global.EnvironmentVariables["MY_ENV"]);
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void SetEnvFileCommand_Heredoc_SkipEmptyLines()
{
using (var hostContext = Setup())
{
var envFile = Path.Combine(_rootDirectory, "heredoc");
var content = new List<string>
{
string.Empty,
"MY_ENV<<EOF",
"hello",
"world",
"EOF",
string.Empty,
"MY_ENV_2<<EOF",
"HELLO",
"AGAIN",
"EOF",
string.Empty,
};
WriteContent(envFile, content);
_setEnvFileCommand.ProcessCommand(_executionContext.Object, envFile, null);
Assert.Equal(0, _issues.Count);
Assert.Equal(2, _executionContext.Object.Global.EnvironmentVariables.Count);
Assert.Equal($"hello{Environment.NewLine}world", _executionContext.Object.Global.EnvironmentVariables["MY_ENV"]);
Assert.Equal($"HELLO{Environment.NewLine}AGAIN", _executionContext.Object.Global.EnvironmentVariables["MY_ENV_2"]);
}
}
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void SetEnvFileCommand_Heredoc_SpecialCharacters()
{
using (var hostContext = Setup())
{
var envFile = Path.Combine(_rootDirectory, "heredoc");
var content = new List<string>
{
"MY_ENV<<=EOF",
"hello",
"one",
"=EOF",
"MY_ENV_2<<<EOF",
"hello",
"two",
"<EOF",
"MY_ENV_3<<EOF",
"hello",
string.Empty,
"three",
string.Empty,
"EOF",
"MY_ENV_4<<EOF",
"hello=four",
"EOF",
"MY_ENV_5<<EOF",
" EOF",
"EOF",
};
WriteContent(envFile, content);
_setEnvFileCommand.ProcessCommand(_executionContext.Object, envFile, null);
Assert.Equal(0, _issues.Count);
Assert.Equal(5, _executionContext.Object.Global.EnvironmentVariables.Count);
Assert.Equal($"hello{Environment.NewLine}one", _executionContext.Object.Global.EnvironmentVariables["MY_ENV"]);
Assert.Equal($"hello{Environment.NewLine}two", _executionContext.Object.Global.EnvironmentVariables["MY_ENV_2"]);
Assert.Equal($"hello{Environment.NewLine}{Environment.NewLine}three{Environment.NewLine}", _executionContext.Object.Global.EnvironmentVariables["MY_ENV_3"]);
Assert.Equal($"hello=four", _executionContext.Object.Global.EnvironmentVariables["MY_ENV_4"]);
Assert.Equal($" EOF", _executionContext.Object.Global.EnvironmentVariables["MY_ENV_5"]);
}
}
#if OS_WINDOWS
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void SetEnvFileCommand_Heredoc_PreservesNewline()
{
using (var hostContext = Setup())
{
var newline = "\n";
var envFile = Path.Combine(_rootDirectory, "heredoc");
var content = new List<string>
{
"MY_ENV<<EOF",
"hello",
"world",
"EOF",
};
WriteContent(envFile, content, newline: newline);
_setEnvFileCommand.ProcessCommand(_executionContext.Object, envFile, null);
Assert.Equal(0, _issues.Count);
Assert.Equal(1, _executionContext.Object.Global.EnvironmentVariables.Count);
Assert.Equal($"hello{newline}world", _executionContext.Object.Global.EnvironmentVariables["MY_ENV"]);
}
}
#endif
private void WriteContent(
string path,
List<string> content,
string newline = null)
{
if (string.IsNullOrEmpty(newline))
{
newline = Environment.NewLine;
}
var encoding = new UTF8Encoding(true); // Emit BOM
var contentStr = string.Join(newline, content);
File.WriteAllText(path, contentStr, encoding);
}
private TestHostContext Setup([CallerMemberName] string name = "")
{
_issues = new List<Tuple<DTWebApi.Issue, string>>();
var hostContext = new TestHostContext(this, name);
// Trace
_trace = hostContext.GetTrace();
// Directory for test data
var workDirectory = hostContext.GetDirectory(WellKnownDirectory.Work);
ArgUtil.NotNullOrEmpty(workDirectory, nameof(workDirectory));
Directory.CreateDirectory(workDirectory);
_rootDirectory = Path.Combine(workDirectory, nameof(SetEnvFileCommandL0));
Directory.CreateDirectory(_rootDirectory);
// Execution context
_executionContext = new Mock<IExecutionContext>();
_executionContext.Setup(x => x.Global)
.Returns(new GlobalContext
{
EnvironmentVariables = new Dictionary<string, string>(VarUtil.EnvironmentVariableKeyComparer),
WriteDebug = true,
});
_executionContext.Setup(x => x.AddIssue(It.IsAny<DTWebApi.Issue>(), It.IsAny<string>()))
.Callback((DTWebApi.Issue issue, string logMessage) =>
{
_issues.Add(new Tuple<DTWebApi.Issue, string>(issue, logMessage));
var message = !string.IsNullOrEmpty(logMessage) ? logMessage : issue.Message;
_trace.Info($"Issue '{issue.Type}': {message}");
});
_executionContext.Setup(x => x.Write(It.IsAny<string>(), It.IsAny<string>()))
.Callback((string tag, string message) =>
{
_trace.Info($"{tag}{message}");
});
// SetEnvFileCommand
_setEnvFileCommand = new SetEnvFileCommand();
_setEnvFileCommand.Initialize(hostContext);
return hostContext;
}
}
}
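The tests above pin down the file format SetEnvFileCommand parses. A minimal sketch of a step writing that format through the env file command (assuming the path is exposed to steps as $GITHUB_ENV, which these tests bypass by passing a file path directly):

# Simple KEY=VALUE form:
echo "MY_ENV=MY VALUE" >> "$GITHUB_ENV"
# Heredoc form for multi-line or empty values (any delimiter works):
{
  echo "MY_MULTILINE<<EOF"
  echo "line one"
  echo "line two"
  echo "EOF"
} >> "$GITHUB_ENV"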

View File

@@ -44,7 +44,7 @@ namespace GitHub.Runner.Common.Tests.Worker
_contexts = new DictionaryContextData();
_jobContext = new JobContext();
_contexts["github"] = new GitHubContext();
_contexts["github"] = new DictionaryContextData();
_contexts["runner"] = new DictionaryContextData();
_contexts["job"] = _jobContext;
_ec.Setup(x => x.ExpressionValues).Returns(_contexts);
@@ -82,7 +82,7 @@ namespace GitHub.Runner.Common.Tests.Worker
{
_ec.Object.Result = null;
_ec.Setup(x => x.JobSteps).Returns(new Queue<IStep>(variableSet.Select(x => x.Object).ToList()));
_ec.Setup(x => x.JobSteps).Returns(new List<IStep>(variableSet.Select(x => x.Object).ToList()));
// Act.
await _stepsRunner.RunAsync(jobContext: _ec.Object);
@@ -117,7 +117,7 @@ namespace GitHub.Runner.Common.Tests.Worker
{
_ec.Object.Result = null;
_ec.Setup(x => x.JobSteps).Returns(new Queue<IStep>(variableSet.Select(x => x.Object).ToList()));
_ec.Setup(x => x.JobSteps).Returns(new List<IStep>(variableSet.Select(x => x.Object).ToList()));
// Act.
await _stepsRunner.RunAsync(jobContext: _ec.Object);
@@ -156,7 +156,7 @@ namespace GitHub.Runner.Common.Tests.Worker
{
_ec.Object.Result = null;
_ec.Setup(x => x.JobSteps).Returns(new Queue<IStep>(variableSet.Steps.Select(x => x.Object).ToList()));
_ec.Setup(x => x.JobSteps).Returns(new List<IStep>(variableSet.Steps.Select(x => x.Object).ToList()));
// Act.
await _stepsRunner.RunAsync(jobContext: _ec.Object);
@@ -210,7 +210,7 @@ namespace GitHub.Runner.Common.Tests.Worker
{
_ec.Object.Result = null;
_ec.Setup(x => x.JobSteps).Returns(new Queue<IStep>(variableSet.Steps.Select(x => x.Object).ToList()));
_ec.Setup(x => x.JobSteps).Returns(new List<IStep>(variableSet.Steps.Select(x => x.Object).ToList()));
// Act.
await _stepsRunner.RunAsync(jobContext: _ec.Object);
@@ -289,7 +289,7 @@ namespace GitHub.Runner.Common.Tests.Worker
{
_ec.Object.Result = null;
_ec.Setup(x => x.JobSteps).Returns(new Queue<IStep>(variableSet.Steps.Select(x => x.Object).ToList()));
_ec.Setup(x => x.JobSteps).Returns(new List<IStep>(variableSet.Steps.Select(x => x.Object).ToList()));
// Act.
await _stepsRunner.RunAsync(jobContext: _ec.Object);
@@ -332,7 +332,7 @@ namespace GitHub.Runner.Common.Tests.Worker
{
_ec.Object.Result = null;
_ec.Setup(x => x.JobSteps).Returns(new Queue<IStep>(variableSet.Step.Select(x => x.Object).ToList()));
_ec.Setup(x => x.JobSteps).Returns(new List<IStep>(variableSet.Step.Select(x => x.Object).ToList()));
// Act.
await _stepsRunner.RunAsync(jobContext: _ec.Object);
@@ -363,7 +363,7 @@ namespace GitHub.Runner.Common.Tests.Worker
{
_ec.Object.Result = null;
_ec.Setup(x => x.JobSteps).Returns(new Queue<IStep>(variableSet.Select(x => x.Object).ToList()));
_ec.Setup(x => x.JobSteps).Returns(new List<IStep>(variableSet.Select(x => x.Object).ToList()));
// Act.
await _stepsRunner.RunAsync(jobContext: _ec.Object);
@@ -393,7 +393,7 @@ namespace GitHub.Runner.Common.Tests.Worker
{
_ec.Object.Result = null;
_ec.Setup(x => x.JobSteps).Returns(new Queue<IStep>(variableSet.Select(x => x.Object).ToList()));
_ec.Setup(x => x.JobSteps).Returns(new List<IStep>(variableSet.Select(x => x.Object).ToList()));
// Act.
await _stepsRunner.RunAsync(jobContext: _ec.Object);
@@ -419,7 +419,7 @@ namespace GitHub.Runner.Common.Tests.Worker
_ec.Object.Result = null;
_ec.Setup(x => x.JobSteps).Returns(new Queue<IStep>(new[] { step1.Object }));
_ec.Setup(x => x.JobSteps).Returns(new List<IStep>(new[] { step1.Object }));
// Act.
await _stepsRunner.RunAsync(jobContext: _ec.Object);
@@ -457,7 +457,7 @@ namespace GitHub.Runner.Common.Tests.Worker
_ec.Object.Result = null;
_ec.Setup(x => x.JobSteps).Returns(new Queue<IStep>(new[] { step1.Object, step2.Object }));
_ec.Setup(x => x.JobSteps).Returns(new List<IStep>(new[] { step1.Object, step2.Object }));
// Act.
await _stepsRunner.RunAsync(jobContext: _ec.Object);
@@ -495,7 +495,7 @@ namespace GitHub.Runner.Common.Tests.Worker
_ec.Object.Result = null;
_ec.Setup(x => x.JobSteps).Returns(new Queue<IStep>(new[] { step1.Object, step2.Object }));
_ec.Setup(x => x.JobSteps).Returns(new List<IStep>(new[] { step1.Object, step2.Object }));
// Act.
await _stepsRunner.RunAsync(jobContext: _ec.Object);
@@ -526,7 +526,7 @@ namespace GitHub.Runner.Common.Tests.Worker
_ec.Object.Result = null;
_ec.Setup(x => x.JobSteps).Returns(new Queue<IStep>(new[] { step1.Object, step2.Object, step3.Object }));
_ec.Setup(x => x.JobSteps).Returns(new List<IStep>(new[] { step1.Object, step2.Object, step3.Object }));
// Act.
await _stepsRunner.RunAsync(jobContext: _ec.Object);
@@ -562,7 +562,7 @@ namespace GitHub.Runner.Common.Tests.Worker
_ec.Object.Result = null;
_ec.Setup(x => x.JobSteps).Returns(new Queue<IStep>(new[] { step1.Object, step2.Object, step3.Object }));
_ec.Setup(x => x.JobSteps).Returns(new List<IStep>(new[] { step1.Object, step2.Object, step3.Object }));
// Act.
await _stepsRunner.RunAsync(jobContext: _ec.Object);
@@ -602,12 +602,7 @@ namespace GitHub.Runner.Common.Tests.Worker
var stepContext = new Mock<IExecutionContext>();
stepContext.SetupAllProperties();
stepContext.Setup(x => x.Global).Returns(() => _ec.Object.Global);
var expressionValues = new DictionaryContextData();
foreach (var pair in _ec.Object.ExpressionValues)
{
expressionValues[pair.Key] = pair.Value;
}
stepContext.Setup(x => x.ExpressionValues).Returns(expressionValues);
stepContext.Setup(x => x.ExpressionValues).Returns(new DictionaryContextData());
stepContext.Setup(x => x.ExpressionFunctions).Returns(new List<IFunctionInfo>());
stepContext.Setup(x => x.JobContext).Returns(_jobContext);
stepContext.Setup(x => x.ContextName).Returns(step.Object.Action.ContextName);

View File

@@ -1 +1 @@
2.273.4
2.299.0

6
test_script.sh Normal file
View File

@@ -0,0 +1,6 @@
apt-get update
apt-get install -y apt-transport-https gnupg2
curl -s https://packages.cloud.google.com/apt/doc/apt-key.gpg | apt-key add -
echo "deb https://apt.kubernetes.io/ kubernetes-xenial main" | tee -a /etc/apt/sources.list.d/kubernetes.list
apt-get update
apt-get install -y kubectl