mirror of
https://github.com/actions/runner-container-hooks.git
synced 2025-12-13 16:16:46 +00:00
Initial Commit
This commit is contained in:
4
.eslintignore
Normal file
4
.eslintignore
Normal file
@@ -0,0 +1,4 @@
|
||||
dist/
|
||||
lib/
|
||||
node_modules/
|
||||
**/tests/**
|
||||
56
.eslintrc.json
Normal file
56
.eslintrc.json
Normal file
@@ -0,0 +1,56 @@
|
||||
{
|
||||
"plugins": ["@typescript-eslint"],
|
||||
"extends": ["plugin:github/recommended"],
|
||||
"parser": "@typescript-eslint/parser",
|
||||
"parserOptions": {
|
||||
"ecmaVersion": 9,
|
||||
"sourceType": "module",
|
||||
"project": "./tsconfig.json"
|
||||
},
|
||||
"rules": {
|
||||
"eslint-comments/no-use": "off",
|
||||
"import/no-namespace": "off",
|
||||
"no-constant-condition": "off",
|
||||
"no-unused-vars": "off",
|
||||
"i18n-text/no-en": "off",
|
||||
"@typescript-eslint/no-unused-vars": "error",
|
||||
"@typescript-eslint/explicit-member-accessibility": ["error", {"accessibility": "no-public"}],
|
||||
"@typescript-eslint/no-require-imports": "error",
|
||||
"@typescript-eslint/array-type": "error",
|
||||
"@typescript-eslint/await-thenable": "error",
|
||||
"camelcase": "off",
|
||||
"@typescript-eslint/explicit-function-return-type": ["error", {"allowExpressions": true}],
|
||||
"@typescript-eslint/func-call-spacing": ["error", "never"],
|
||||
"@typescript-eslint/no-array-constructor": "error",
|
||||
"@typescript-eslint/no-empty-interface": "error",
|
||||
"@typescript-eslint/no-explicit-any": "warn",
|
||||
"@typescript-eslint/no-extraneous-class": "error",
|
||||
"@typescript-eslint/no-floating-promises": "error",
|
||||
"@typescript-eslint/no-for-in-array": "error",
|
||||
"@typescript-eslint/no-inferrable-types": "error",
|
||||
"@typescript-eslint/no-misused-new": "error",
|
||||
"@typescript-eslint/no-namespace": "error",
|
||||
"@typescript-eslint/no-non-null-assertion": "warn",
|
||||
"@typescript-eslint/no-unnecessary-qualifier": "error",
|
||||
"@typescript-eslint/no-unnecessary-type-assertion": "error",
|
||||
"@typescript-eslint/no-useless-constructor": "error",
|
||||
"@typescript-eslint/no-var-requires": "error",
|
||||
"@typescript-eslint/prefer-for-of": "warn",
|
||||
"@typescript-eslint/prefer-function-type": "warn",
|
||||
"@typescript-eslint/prefer-includes": "error",
|
||||
"@typescript-eslint/prefer-string-starts-ends-with": "error",
|
||||
"@typescript-eslint/promise-function-async": "error",
|
||||
"@typescript-eslint/require-array-sort-compare": "error",
|
||||
"@typescript-eslint/restrict-plus-operands": "error",
|
||||
"semi": "off",
|
||||
"@typescript-eslint/semi": ["error", "never"],
|
||||
"@typescript-eslint/type-annotation-spacing": "error",
|
||||
"@typescript-eslint/unbound-method": "error",
|
||||
"no-shadow": "off",
|
||||
"@typescript-eslint/no-shadow": ["error"]
|
||||
},
|
||||
"env": {
|
||||
"node": true,
|
||||
"es6": true
|
||||
}
|
||||
}
|
||||
26
.github/workflows/build.yaml
vendored
Normal file
26
.github/workflows/build.yaml
vendored
Normal file
@@ -0,0 +1,26 @@
|
||||
name: CI - Build & Test
|
||||
on:
|
||||
pull_request:
|
||||
branches:
|
||||
- '*'
|
||||
paths-ignore:
|
||||
- '**.md'
|
||||
workflow_dispatch:
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- run: npm install
|
||||
name: Install dependencies
|
||||
- run: npm run bootstrap
|
||||
name: Bootstrap the packages
|
||||
- run: npm run build-all
|
||||
name: Build packages
|
||||
- run: npm run format-check
|
||||
- name: Check linter
|
||||
run: |
|
||||
npm run lint
|
||||
git diff --exit-code
|
||||
- name: Run tests
|
||||
run: npm run test
|
||||
57
.github/workflows/release.yaml
vendored
Normal file
57
.github/workflows/release.yaml
vendored
Normal file
@@ -0,0 +1,57 @@
|
||||
name: CD - Release new version
|
||||
on:
|
||||
workflow_dispatch:
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- run: npm install
|
||||
name: Install dependencies
|
||||
- run: npm run bootstrap
|
||||
name: Bootstrap the packages
|
||||
- run: npm run build-all
|
||||
name: Build packages
|
||||
- uses: actions/github-script@v6
|
||||
id: releaseNotes
|
||||
with:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
script: |
|
||||
const fs = require('fs');
|
||||
const hookVersion = require('./package.json').version
|
||||
var releaseNotes = fs.readFileSync('${{ github.workspace }}/releaseNotes.md', 'utf8').replace(/<HOOK_VERSION>/g, hookVersion)
|
||||
console.log(releaseNotes)
|
||||
core.setOutput('version', hookVersion);
|
||||
core.setOutput('note', releaseNotes);
|
||||
- name: Zip up releases
|
||||
run: |
|
||||
zip -r -j actions-runner-hooks-docker-${{ steps.releaseNotes.outputs.version }}.zip packages/docker/dist
|
||||
zip -r -j actions-runner-hooks-k8s-${{ steps.releaseNotes.outputs.version }}.zip packages/k8s/dist
|
||||
- uses: actions/create-release@v1
|
||||
id: createRelease
|
||||
name: Create ${{ steps.releaseNotes.outputs.version }} Hook Release
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
tag_name: "v${{ steps.releaseNotes.outputs.version }}"
|
||||
release_name: "v${{ steps.releaseNotes.outputs.version }}"
|
||||
body: |
|
||||
${{ steps.releaseNotes.outputs.note }}
|
||||
- name: Upload K8s hooks
|
||||
uses: actions/upload-release-asset@v1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/actions-runner-hooks-k8s-${{ steps.releaseNotes.outputs.version }}.zip
|
||||
asset_name: actions-runner-hooks-k8s-${{ steps.releaseNotes.outputs.version }}.zip
|
||||
asset_content_type: application/octet-stream
|
||||
- name: Upload docker hooks
|
||||
uses: actions/upload-release-asset@v1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ steps.createRelease.outputs.upload_url }}
|
||||
asset_path: ${{ github.workspace }}/actions-runner-hooks-docker-${{ steps.releaseNotes.outputs.version }}.zip
|
||||
asset_name: actions-runner-hooks-docker-${{ steps.releaseNotes.outputs.version }}.zip
|
||||
asset_content_type: application/octet-stream
|
||||
4
.gitignore
vendored
Normal file
4
.gitignore
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
node_modules/
|
||||
lib/
|
||||
dist/
|
||||
**/tests/_temp/**
|
||||
3
.prettierignore
Normal file
3
.prettierignore
Normal file
@@ -0,0 +1,3 @@
|
||||
dist/
|
||||
lib/
|
||||
node_modules/
|
||||
11
.prettierrc.json
Normal file
11
.prettierrc.json
Normal file
@@ -0,0 +1,11 @@
|
||||
{
|
||||
"printWidth": 80,
|
||||
"tabWidth": 2,
|
||||
"useTabs": false,
|
||||
"semi": false,
|
||||
"singleQuote": true,
|
||||
"trailingComma": "none",
|
||||
"bracketSpacing": true,
|
||||
"arrowParens": "avoid",
|
||||
"parser": "typescript"
|
||||
}
|
||||
1
CODEOWNERS
Normal file
1
CODEOWNERS
Normal file
@@ -0,0 +1 @@
|
||||
* @actions/actions-runtime
|
||||
76
CODE_OF_CONDUCT.MD
Normal file
76
CODE_OF_CONDUCT.MD
Normal file
@@ -0,0 +1,76 @@
|
||||
# Contributor Covenant Code of Conduct
|
||||
|
||||
## Our Pledge
|
||||
|
||||
In the interest of fostering an open and welcoming environment, we as
|
||||
contributors and maintainers pledge to make participation in our project and
|
||||
our community a harassment-free experience for everyone, regardless of age, body
|
||||
size, disability, ethnicity, sex characteristics, gender identity and expression,
|
||||
level of experience, education, socio-economic status, nationality, personal
|
||||
appearance, race, religion, or sexual identity and orientation.
|
||||
|
||||
## Our Standards
|
||||
|
||||
Examples of behavior that contributes to creating a positive environment
|
||||
include:
|
||||
|
||||
* Using welcoming and inclusive language
|
||||
* Being respectful of differing viewpoints and experiences
|
||||
* Gracefully accepting constructive criticism
|
||||
* Focusing on what is best for the community
|
||||
* Showing empathy towards other community members
|
||||
|
||||
Examples of unacceptable behavior by participants include:
|
||||
|
||||
* The use of sexualized language or imagery and unwelcome sexual attention or
|
||||
advances
|
||||
* Trolling, insulting/derogatory comments, and personal or political attacks
|
||||
* Public or private harassment
|
||||
* Publishing others' private information, such as a physical or electronic
|
||||
address, without explicit permission
|
||||
* Other conduct which could reasonably be considered inappropriate in a
|
||||
professional setting
|
||||
|
||||
## Our Responsibilities
|
||||
|
||||
Project maintainers are responsible for clarifying the standards of acceptable
|
||||
behavior and are expected to take appropriate and fair corrective action in
|
||||
response to any instances of unacceptable behavior.
|
||||
|
||||
Project maintainers have the right and responsibility to remove, edit, or
|
||||
reject comments, commits, code, wiki edits, issues, and other contributions
|
||||
that are not aligned to this Code of Conduct, or to ban temporarily or
|
||||
permanently any contributor for other behaviors that they deem inappropriate,
|
||||
threatening, offensive, or harmful.
|
||||
|
||||
## Scope
|
||||
|
||||
This Code of Conduct applies within all project spaces, and it also applies when
|
||||
an individual is representing the project or its community in public spaces.
|
||||
Examples of representing a project or community include using an official
|
||||
project e-mail address, posting via an official social media account, or acting
|
||||
as an appointed representative at an online or offline event. Representation of
|
||||
a project may be further defined and clarified by project maintainers.
|
||||
|
||||
## Enforcement
|
||||
|
||||
Instances of abusive, harassing, or otherwise unacceptable behavior may be
|
||||
reported by contacting the project team at opensource@github.com. All
|
||||
complaints will be reviewed and investigated and will result in a response that
|
||||
is deemed necessary and appropriate to the circumstances. The project team is
|
||||
obligated to maintain confidentiality with regard to the reporter of an incident.
|
||||
Further details of specific enforcement policies may be posted separately.
|
||||
|
||||
Project maintainers who do not follow or enforce the Code of Conduct in good
|
||||
faith may face temporary or permanent repercussions as determined by other
|
||||
members of the project's leadership.
|
||||
|
||||
## Attribution
|
||||
|
||||
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
|
||||
available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
|
||||
|
||||
[homepage]: https://www.contributor-covenant.org
|
||||
|
||||
For answers to common questions about this code of conduct, see
|
||||
https://www.contributor-covenant.org/faq
|
||||
34
CONTRIBUTING.md
Normal file
34
CONTRIBUTING.md
Normal file
@@ -0,0 +1,34 @@
|
||||
# Basic setup
|
||||
You'll need a runner compatible with hooks, a repository with container workflows to which you can register the runner and the hooks from this repository.
|
||||
|
||||
|
||||
|
||||
## Getting Started
|
||||
- Run ` npm install && npm run bootstrap` to setup your environment and install all the needed packages
|
||||
- Run `npm run lint` and `npm run format` to ensure your charges will pass CI
|
||||
- Run `npm run build-all` to build and test end to end.
|
||||
|
||||
|
||||
## E2E
|
||||
- You'll need a runner compatible with hooks, a repository with container workflows to which you can register the runner and the hooks from this repository.
|
||||
- See [the runner contributing.md](../../github/CONTRIBUTING.MD) for how to get started with runner development.
|
||||
- Build your hook using `npm run build`
|
||||
- Enable the hooks by setting `ACTIONS_RUNNER_CONTAINER_HOOK=./packages/{libraryname}/dist/index.js` file generated by [ncc](https://github.com/vercel/ncc)
|
||||
- Configure your self hosted runner against the a repository you have admin access
|
||||
- Run a workflow with a container job, for example
|
||||
```
|
||||
name: myjob
|
||||
on:
|
||||
workflow_dispatch:
|
||||
jobs:
|
||||
my_job:
|
||||
runs-on: self-hosted
|
||||
services:
|
||||
redis:
|
||||
image: redis
|
||||
container:
|
||||
image: alpine:3.15
|
||||
options: --cpus 1
|
||||
steps:
|
||||
- run: pwd
|
||||
```
|
||||
21
LICENSE.txt
Normal file
21
LICENSE.txt
Normal file
@@ -0,0 +1,21 @@
|
||||
MIT License
|
||||
|
||||
Copyright GitHub
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
31
README.md
Normal file
31
README.md
Normal file
@@ -0,0 +1,31 @@
|
||||
## Runner Container Hooks
|
||||
The Runner Container Hooks repo provides a set of packages that implement the container hooks feature in the [actions/runner](https://github.com/actions/runner). These can be used as is, or you can use them as a guide to implement your own hooks.
|
||||
|
||||
More information on how to implement your own hooks can be found in the [adr](https://github.com/actions/runner/pull/1891). The `examples` folder provides example inputs for each hook.
|
||||
|
||||
## Background
|
||||
|
||||
Three projects are included in the `packages` folder
|
||||
- k8s: A kubernetes hook implementation that spins up pods dynamically to run a job. More details can be found in the [readme](./packages/k8s/README.md)
|
||||
- docker: A hook implementation of the runner's docker implementation. More details can be found in the [readme](./packages/docker/README.md)
|
||||
- hooklib: a shared library which contains typescript definitions and utilities that the other projects consume
|
||||
|
||||
### Requirements
|
||||
|
||||
We welcome contributions. See [how to contribute to get started](./CONTRIBUTING.md).
|
||||
|
||||
## License
|
||||
|
||||
This project is licensed under the terms of the MIT open source license. Please refer to [MIT](./LICENSE.md) for the full terms.
|
||||
|
||||
## Maintainers
|
||||
|
||||
See the [Codeowners](./CODEOWNERS)
|
||||
|
||||
## Support
|
||||
|
||||
Find a bug? Please file an issue in this repository using the issue templates.
|
||||
|
||||
## Code of Conduct
|
||||
|
||||
See our [Code of Conduct](./CODE_OF_CONDUCT.MD)
|
||||
31
SECURITY.MD
Normal file
31
SECURITY.MD
Normal file
@@ -0,0 +1,31 @@
|
||||
Thanks for helping make GitHub safe for everyone.
|
||||
|
||||
## Security
|
||||
|
||||
GitHub takes the security of our software products and services seriously, including all of the open source code repositories managed through our GitHub organizations, such as [GitHub](https://github.com/GitHub).
|
||||
|
||||
Even though [open source repositories are outside of the scope of our bug bounty program](https://bounty.github.com/index.html#scope) and therefore not eligible for bounty rewards, we will ensure that your finding gets passed along to the appropriate maintainers for remediation.
|
||||
|
||||
## Reporting Security Issues
|
||||
|
||||
If you believe you have found a security vulnerability in any GitHub-owned repository, please report it to us through coordinated disclosure.
|
||||
|
||||
**Please do not report security vulnerabilities through public GitHub issues, discussions, or pull requests.**
|
||||
|
||||
Instead, please send an email to opensource-security[@]github.com.
|
||||
|
||||
Please include as much of the information listed below as you can to help us better understand and resolve the issue:
|
||||
|
||||
* The type of issue (e.g., buffer overflow, SQL injection, or cross-site scripting)
|
||||
* Full paths of source file(s) related to the manifestation of the issue
|
||||
* The location of the affected source code (tag/branch/commit or direct URL)
|
||||
* Any special configuration required to reproduce the issue
|
||||
* Step-by-step instructions to reproduce the issue
|
||||
* Proof-of-concept or exploit code (if possible)
|
||||
* Impact of the issue, including how an attacker might exploit the issue
|
||||
|
||||
This information will help us triage your report more quickly.
|
||||
|
||||
## Policy
|
||||
|
||||
See [GitHub's Safe Harbor Policy](https://docs.github.com/en/github/site-policy/github-bug-bounty-program-legal-safe-harbor#1-safe-harbor-terms)
|
||||
96
examples/prepare-job.json
Normal file
96
examples/prepare-job.json
Normal file
@@ -0,0 +1,96 @@
|
||||
{
|
||||
"command": "prepare_job",
|
||||
"responseFile": "/users/thboop/runner/_work/{guid}.json",
|
||||
"state": {},
|
||||
"args": {
|
||||
"container": {
|
||||
"image": "node:14.16",
|
||||
"workingDirectory": "/__w/thboop-test2/thboop-test2",
|
||||
"createOptions": "--cpus 1",
|
||||
"environmentVariables": {
|
||||
"NODE_ENV": "development"
|
||||
},
|
||||
"userMountVolumes": [
|
||||
{
|
||||
"sourceVolumePath": "my_docker_volume",
|
||||
"targetVolumePath": "/volume_mount",
|
||||
"readOnly": false
|
||||
}
|
||||
],
|
||||
"systemMountVolumes": [
|
||||
{
|
||||
"sourceVolumePath": "/var/run/docker.sock",
|
||||
"targetVolumePath": "/var/run/docker.sock",
|
||||
"readOnly": false
|
||||
},
|
||||
{
|
||||
"sourceVolumePath": "//Users/thomas/git/runner/_layout/_work",
|
||||
"targetVolumePath": "/__w",
|
||||
"readOnly": false
|
||||
},
|
||||
{
|
||||
"sourceVolumePath": "//Users/thomas/git/runner/_layout/externals",
|
||||
"targetVolumePath": "/__e",
|
||||
"readOnly": true
|
||||
},
|
||||
{
|
||||
"sourceVolumePath": "//Users/thomas/git/runner/_layout/_work/_temp",
|
||||
"targetVolumePath": "/__w/_temp",
|
||||
"readOnly": false
|
||||
},
|
||||
{
|
||||
"sourceVolumePath": "//Users/thomas/git/runner/_layout/_work/_actions",
|
||||
"targetVolumePath": "/__w/_actions",
|
||||
"readOnly": false
|
||||
},
|
||||
{
|
||||
"sourceVolumePath": "//Users/thomas/git/runner/_layout/_work/_tool",
|
||||
"targetVolumePath": "/__w/_tool",
|
||||
"readOnly": false
|
||||
},
|
||||
{
|
||||
"sourceVolumePath": "//Users/thomas/git/runner/_layout/_work/_temp/_github_home",
|
||||
"targetVolumePath": "/github/home",
|
||||
"readOnly": false
|
||||
},
|
||||
{
|
||||
"sourceVolumePath": "//Users/thomas/git/runner/_layout/_work/_temp/_github_workflow",
|
||||
"targetVolumePath": "/github/workflow",
|
||||
"readOnly": false
|
||||
}
|
||||
],
|
||||
"registry": {
|
||||
"username": "foo",
|
||||
"password": "bar",
|
||||
"serverUrl": "https://index.docker.io/v1"
|
||||
},
|
||||
"portMappings": [
|
||||
"80:8080"
|
||||
]
|
||||
},
|
||||
"services": [
|
||||
{
|
||||
"contextName": "redis",
|
||||
"image": "redis",
|
||||
"createOptions": "--cpus 1",
|
||||
"environmentVariables": {},
|
||||
"userMountVolumes": [
|
||||
{
|
||||
"sourceVolumePath": "/var/run/docker.sock",
|
||||
"targetVolumePath": "/var/run/docker.sock",
|
||||
"readOnly": false
|
||||
}
|
||||
],
|
||||
"portMappings": [
|
||||
"8080:80",
|
||||
"8088:8080"
|
||||
],
|
||||
"registry": {
|
||||
"username": "foo",
|
||||
"password": "bar",
|
||||
"serverUrl": "https://index.docker.io/v1"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
77
examples/run-container-step.json
Normal file
77
examples/run-container-step.json
Normal file
@@ -0,0 +1,77 @@
|
||||
{
|
||||
"command": "run_container_step",
|
||||
"responseFile": null,
|
||||
"state": {
|
||||
"network": "github_network_53269bd575974817b43f4733536b200c",
|
||||
"container": "82e8219701fe096a35941d869cf8d71af1d943b5d3bdd718850fb87ac3042480",
|
||||
"services": {
|
||||
"redis": "60972d9aa486605e66b0dad4abb638dc3d9116f566579e418166eedb8abb9105"
|
||||
}
|
||||
},
|
||||
"args": {
|
||||
"image": "node:14.16",
|
||||
"dockerfile": null,
|
||||
"entryPointArgs": [
|
||||
"-c",
|
||||
"echo \"hello world2\""
|
||||
],
|
||||
"entryPoint": "bash",
|
||||
"workingDirectory": "/__w/thboop-test2/thboop-test2",
|
||||
"createOptions": "--cpus 1",
|
||||
"environmentVariables": {
|
||||
"NODE_ENV": "development"
|
||||
},
|
||||
"prependPath": [
|
||||
"/foo/bar",
|
||||
"bar/foo"
|
||||
],
|
||||
"userMountVolumes": [
|
||||
{
|
||||
"sourceVolumePath": "my_docker_volume",
|
||||
"targetVolumePath": "/volume_mount",
|
||||
"readOnly": false
|
||||
}
|
||||
],
|
||||
"systemMountVolumes": [
|
||||
{
|
||||
"sourceVolumePath": "//Users/thomas/git/runner/_layout/_work",
|
||||
"targetVolumePath": "/__w",
|
||||
"readOnly": false
|
||||
},
|
||||
{
|
||||
"sourceVolumePath": "//Users/thomas/git/runner/_layout/externals",
|
||||
"targetVolumePath": "/__e",
|
||||
"readOnly": true
|
||||
},
|
||||
{
|
||||
"sourceVolumePath": "//Users/thomas/git/runner/_layout/_work/_temp",
|
||||
"targetVolumePath": "/__w/_temp",
|
||||
"readOnly": false
|
||||
},
|
||||
{
|
||||
"sourceVolumePath": "//Users/thomas/git/runner/_layout/_work/_actions",
|
||||
"targetVolumePath": "/__w/_actions",
|
||||
"readOnly": false
|
||||
},
|
||||
{
|
||||
"sourceVolumePath": "//Users/thomas/git/runner/_layout/_work/_tool",
|
||||
"targetVolumePath": "/__w/_tool",
|
||||
"readOnly": false
|
||||
},
|
||||
{
|
||||
"sourceVolumePath": "//Users/thomas/git/runner/_layout/_work/_temp/_github_home",
|
||||
"targetVolumePath": "/github/home",
|
||||
"readOnly": false
|
||||
},
|
||||
{
|
||||
"sourceVolumePath": "//Users/thomas/git/runner/_layout/_work/_temp/_github_workflow",
|
||||
"targetVolumePath": "/github/workflow",
|
||||
"readOnly": false
|
||||
}
|
||||
],
|
||||
"registry": null,
|
||||
"portMappings": [
|
||||
"8080:8080"
|
||||
]
|
||||
}
|
||||
}
|
||||
26
examples/run-script-step.json
Normal file
26
examples/run-script-step.json
Normal file
@@ -0,0 +1,26 @@
|
||||
{
|
||||
"command": "run_script_step",
|
||||
"responseFile": null,
|
||||
"state": {
|
||||
"network": "github_network_53269bd575974817b43f4733536b200c",
|
||||
"container": "82e8219701fe096a35941d869cf8d71af1d943b5d3bdd718850fb87ac3042480",
|
||||
"services": {
|
||||
"redis": "60972d9aa486605e66b0dad4abb638dc3d9116f566579e418166eedb8abb9105"
|
||||
}
|
||||
},
|
||||
"args": {
|
||||
"entryPointArgs": [
|
||||
"-c",
|
||||
"echo \"hello world\""
|
||||
],
|
||||
"entryPoint": "bash",
|
||||
"environmentVariables": {
|
||||
"NODE_ENV": "development"
|
||||
},
|
||||
"prependPath": [
|
||||
"/foo/bar",
|
||||
"bar/foo"
|
||||
],
|
||||
"workingDirectory": "/__w/thboop-test2/thboop-test2"
|
||||
}
|
||||
}
|
||||
4530
package-lock.json
generated
Normal file
4530
package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
36
package.json
Normal file
36
package.json
Normal file
@@ -0,0 +1,36 @@
|
||||
{
|
||||
"name": "hooks",
|
||||
"version": "0.1.0",
|
||||
"description": "Three projects are included - k8s: a kubernetes hook implementation that spins up pods dynamically to run a job - docker: A hook implementation of the runner's docker implementation - A hook lib, which contains shared typescript definitions and utilities that the other packages consume",
|
||||
"main": "",
|
||||
"directories": {
|
||||
"doc": "docs"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "npm run test --prefix packages/docker",
|
||||
"bootstrap": "npm install --prefix packages/hooklib && npm install --prefix packages/k8s && npm install --prefix packages/docker",
|
||||
"format": "prettier --write '**/*.ts'",
|
||||
"format-check": "prettier --check '**/*.ts'",
|
||||
"lint": "eslint packages/**/*.ts",
|
||||
"build-all": "npm run build --prefix packages/hooklib && npm run build --prefix packages/k8s && npm run build --prefix packages/docker"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/actions/runner-container-hooks.git"
|
||||
},
|
||||
"author": "",
|
||||
"license": "MIT",
|
||||
"bugs": {
|
||||
"url": "https://github.com/actions/runner-container-hooks/issues"
|
||||
},
|
||||
"homepage": "https://github.com/actions/runner-container-hooks#readme",
|
||||
"devDependencies": {
|
||||
"@types/jest": "^27.5.1",
|
||||
"@types/node": "^17.0.23",
|
||||
"@typescript-eslint/parser": "^5.18.0",
|
||||
"eslint": "^8.12.0",
|
||||
"eslint-plugin-github": "^4.3.6",
|
||||
"prettier": "^2.6.2",
|
||||
"typescript": "^4.6.3"
|
||||
}
|
||||
}
|
||||
10
packages/docker/README.md
Normal file
10
packages/docker/README.md
Normal file
@@ -0,0 +1,10 @@
|
||||
# Docker Hooks
|
||||
|
||||
## Description
|
||||
This implementation mirrors the original docker implementation in the [Actions Runner](https://github.com/actions/runner).
|
||||
|
||||
Feel free to fork this repo and modify it in order to customize that implementation
|
||||
|
||||
## Pre-requisites
|
||||
The `GITHUB_WORKSPACE` env will be set to the GitHub Workspace. This is done automatically by the actions runner, but may need to be done manually when testing
|
||||
The docker cli is installed on the machine, and docker is running.
|
||||
13
packages/docker/jest.config.js
Normal file
13
packages/docker/jest.config.js
Normal file
@@ -0,0 +1,13 @@
|
||||
// eslint-disable-next-line import/no-commonjs
|
||||
module.exports = {
|
||||
clearMocks: true,
|
||||
moduleFileExtensions: ['js', 'ts'],
|
||||
testEnvironment: 'node',
|
||||
testMatch: ['**/*-test.ts'],
|
||||
testRunner: 'jest-circus/runner',
|
||||
transform: {
|
||||
'^.+\\.ts$': 'ts-jest'
|
||||
},
|
||||
setupFilesAfterEnv: ['./jest.setup.js'],
|
||||
verbose: true
|
||||
}
|
||||
1
packages/docker/jest.setup.js
Normal file
1
packages/docker/jest.setup.js
Normal file
@@ -0,0 +1 @@
|
||||
jest.setTimeout(90000)
|
||||
9269
packages/docker/package-lock.json
generated
Normal file
9269
packages/docker/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
29
packages/docker/package.json
Normal file
29
packages/docker/package.json
Normal file
@@ -0,0 +1,29 @@
|
||||
{
|
||||
"name": "dockerhooks",
|
||||
"version": "0.1.0",
|
||||
"description": "",
|
||||
"main": "lib/index.js",
|
||||
"scripts": {
|
||||
"test": "jest --runInBand",
|
||||
"build": "npx tsc && npx ncc build"
|
||||
},
|
||||
"author": "",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@actions/core": "^1.6.0",
|
||||
"@actions/exec": "^1.1.1",
|
||||
"hooklib": "file:../hooklib",
|
||||
"uuid": "^8.3.2"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/jest": "^27.4.1",
|
||||
"@types/node": "^17.0.23",
|
||||
"@typescript-eslint/parser": "^5.18.0",
|
||||
"@vercel/ncc": "^0.33.4",
|
||||
"jest": "^27.5.1",
|
||||
"ts-jest": "^27.1.4",
|
||||
"ts-node": "^10.7.0",
|
||||
"tsconfig-paths": "^3.14.1",
|
||||
"typescript": "^4.6.3"
|
||||
}
|
||||
}
|
||||
9
packages/docker/src/dockerCommands/constants.ts
Normal file
9
packages/docker/src/dockerCommands/constants.ts
Normal file
@@ -0,0 +1,9 @@
|
||||
export function getRunnerLabel(): string {
|
||||
const name = process.env.RUNNER_NAME
|
||||
if (!name) {
|
||||
throw new Error(
|
||||
"'RUNNER_NAME' env is required, please contact your self hosted runner administrator"
|
||||
)
|
||||
}
|
||||
return Buffer.from(name).toString('hex')
|
||||
}
|
||||
413
packages/docker/src/dockerCommands/container.ts
Normal file
413
packages/docker/src/dockerCommands/container.ts
Normal file
@@ -0,0 +1,413 @@
|
||||
import * as core from '@actions/core'
|
||||
import * as fs from 'fs'
|
||||
import {
|
||||
ContainerInfo,
|
||||
JobContainerInfo,
|
||||
RunContainerStepArgs,
|
||||
ServiceContainerInfo,
|
||||
StepContainerInfo
|
||||
} from 'hooklib/lib'
|
||||
import path from 'path'
|
||||
import { env } from 'process'
|
||||
import { v4 as uuidv4 } from 'uuid'
|
||||
import { runDockerCommand, RunDockerCommandOptions } from '../utils'
|
||||
import { getRunnerLabel } from './constants'
|
||||
|
||||
export async function createContainer(
|
||||
args: ContainerInfo,
|
||||
name: string,
|
||||
network: string
|
||||
): Promise<ContainerMetadata> {
|
||||
if (!args.image) {
|
||||
throw new Error('Image was expected')
|
||||
}
|
||||
|
||||
const dockerArgs: string[] = ['create']
|
||||
dockerArgs.push(`--label=${getRunnerLabel()}`)
|
||||
dockerArgs.push(`--network=${network}`)
|
||||
if ((args as ServiceContainerInfo)?.contextName) {
|
||||
dockerArgs.push(
|
||||
`--network-alias=${(args as ServiceContainerInfo)?.contextName}`
|
||||
)
|
||||
}
|
||||
|
||||
dockerArgs.push('--name', name)
|
||||
|
||||
if (args?.portMappings?.length) {
|
||||
for (const portMapping of args.portMappings) {
|
||||
dockerArgs.push('-p', portMapping)
|
||||
}
|
||||
}
|
||||
if (args.createOptions) {
|
||||
dockerArgs.push(...args.createOptions.split(' '))
|
||||
}
|
||||
|
||||
if (args.environmentVariables) {
|
||||
for (const [key, value] of Object.entries(args.environmentVariables)) {
|
||||
dockerArgs.push('-e')
|
||||
if (!value) {
|
||||
dockerArgs.push(`"${key}"`)
|
||||
} else {
|
||||
dockerArgs.push(`"${key}=${value}"`)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const mountVolumes = [
|
||||
...(args.userMountVolumes || []),
|
||||
...((args as JobContainerInfo | StepContainerInfo).systemMountVolumes || [])
|
||||
]
|
||||
for (const mountVolume of mountVolumes) {
|
||||
dockerArgs.push(
|
||||
`-v=${mountVolume.sourceVolumePath}:${mountVolume.targetVolumePath}`
|
||||
)
|
||||
}
|
||||
if (args.entryPoint) {
|
||||
dockerArgs.push(`--entrypoint`)
|
||||
dockerArgs.push(args.entryPoint)
|
||||
}
|
||||
|
||||
dockerArgs.push(args.image)
|
||||
if (args.entryPointArgs) {
|
||||
for (const entryPointArg of args.entryPointArgs) {
|
||||
dockerArgs.push(entryPointArg)
|
||||
}
|
||||
}
|
||||
|
||||
const id = (await runDockerCommand(dockerArgs)).trim()
|
||||
if (!id) {
|
||||
throw new Error('Could not read id from docker command')
|
||||
}
|
||||
const response: ContainerMetadata = { id, image: args.image }
|
||||
if (network) {
|
||||
response.network = network
|
||||
}
|
||||
response.ports = []
|
||||
|
||||
if ((args as ServiceContainerInfo).contextName) {
|
||||
response['contextName'] = (args as ServiceContainerInfo).contextName
|
||||
}
|
||||
return response
|
||||
}
|
||||
|
||||
export async function containerPull(
|
||||
image: string,
|
||||
configLocation: string
|
||||
): Promise<void> {
|
||||
const dockerArgs: string[] = ['pull']
|
||||
if (configLocation) {
|
||||
dockerArgs.push('--config')
|
||||
dockerArgs.push(configLocation)
|
||||
}
|
||||
dockerArgs.push(image)
|
||||
for (let i = 0; i < 3; i++) {
|
||||
try {
|
||||
await runDockerCommand(dockerArgs)
|
||||
return
|
||||
} catch {
|
||||
core.info(`docker pull failed on attempt: ${i + 1}`)
|
||||
}
|
||||
}
|
||||
throw new Error('Exiting docker pull after 3 failed attempts')
|
||||
}
|
||||
|
||||
export async function containerStart(id: string): Promise<void> {
|
||||
const dockerArgs: string[] = ['start']
|
||||
dockerArgs.push(`${id}`)
|
||||
await runDockerCommand(dockerArgs)
|
||||
}
|
||||
|
||||
export async function containerStop(id: string | string[]): Promise<void> {
|
||||
const dockerArgs: string[] = ['stop']
|
||||
if (Array.isArray(id)) {
|
||||
for (const v of id) {
|
||||
dockerArgs.push(v)
|
||||
}
|
||||
} else {
|
||||
dockerArgs.push(id)
|
||||
}
|
||||
await runDockerCommand(dockerArgs)
|
||||
}
|
||||
|
||||
export async function containerRemove(id: string | string[]): Promise<void> {
|
||||
const dockerArgs: string[] = ['rm']
|
||||
dockerArgs.push('--force')
|
||||
if (Array.isArray(id)) {
|
||||
for (const v of id) {
|
||||
dockerArgs.push(v)
|
||||
}
|
||||
} else {
|
||||
dockerArgs.push(id)
|
||||
}
|
||||
await runDockerCommand(dockerArgs)
|
||||
}
|
||||
|
||||
export async function containerBuild(
|
||||
args: RunContainerStepArgs,
|
||||
tag: string
|
||||
): Promise<void> {
|
||||
const context = path.dirname(`${env.GITHUB_WORKSPACE}/${args.dockerfile}`)
|
||||
const dockerArgs: string[] = ['build']
|
||||
dockerArgs.push('-t', tag)
|
||||
dockerArgs.push('-f', `${env.GITHUB_WORKSPACE}/${args.dockerfile}`)
|
||||
dockerArgs.push(context)
|
||||
// TODO: figure out build working directory
|
||||
await runDockerCommand(dockerArgs, {
|
||||
workingDir: args['buildWorkingDirectory']
|
||||
})
|
||||
}
|
||||
|
||||
export async function containerLogs(id: string): Promise<void> {
|
||||
const dockerArgs: string[] = ['logs']
|
||||
dockerArgs.push('--details')
|
||||
dockerArgs.push(id)
|
||||
await runDockerCommand(dockerArgs)
|
||||
}
|
||||
|
||||
export async function containerNetworkRemove(network: string): Promise<void> {
|
||||
const dockerArgs: string[] = ['network']
|
||||
dockerArgs.push('rm')
|
||||
dockerArgs.push(network)
|
||||
await runDockerCommand(dockerArgs)
|
||||
}
|
||||
|
||||
export async function containerPrune(): Promise<void> {
|
||||
const dockerPSArgs: string[] = [
|
||||
'ps',
|
||||
'--all',
|
||||
'--quiet',
|
||||
'--no-trunc',
|
||||
'--filter',
|
||||
`label=${getRunnerLabel()}`
|
||||
]
|
||||
|
||||
const res = (await runDockerCommand(dockerPSArgs)).trim()
|
||||
if (res) {
|
||||
await containerRemove(res.split('\n'))
|
||||
}
|
||||
}
|
||||
|
||||
async function containerHealthStatus(id: string): Promise<ContainerHealth> {
|
||||
const dockerArgs = [
|
||||
'inspect',
|
||||
'--format="{{if .Config.Healthcheck}}{{print .State.Health.Status}}{{end}}"',
|
||||
id
|
||||
]
|
||||
const result = (await runDockerCommand(dockerArgs)).trim().replace(/"/g, '')
|
||||
if (
|
||||
result === ContainerHealth.Healthy ||
|
||||
result === ContainerHealth.Starting ||
|
||||
result === ContainerHealth.Unhealthy
|
||||
) {
|
||||
return result
|
||||
}
|
||||
|
||||
return ContainerHealth.None
|
||||
}
|
||||
|
||||
export async function healthCheck({
|
||||
id,
|
||||
image
|
||||
}: ContainerMetadata): Promise<void> {
|
||||
let health = await containerHealthStatus(id)
|
||||
if (health === ContainerHealth.None) {
|
||||
core.info(
|
||||
`Healthcheck is not set for container ${image}, considered as ${ContainerHealth.Healthy}`
|
||||
)
|
||||
return
|
||||
}
|
||||
|
||||
let tries = 1
|
||||
while (health === ContainerHealth.Starting && tries < 13) {
|
||||
const backOffSeconds = Math.pow(2, tries)
|
||||
core.info(
|
||||
`Container '${image}' is '${health}', retry in ${backOffSeconds} seconds`
|
||||
)
|
||||
await new Promise(resolve => setTimeout(resolve, 1000 * backOffSeconds))
|
||||
tries++
|
||||
health = await containerHealthStatus(id)
|
||||
}
|
||||
|
||||
if (health !== ContainerHealth.Healthy) {
|
||||
throw new String(
|
||||
`Container '${image}' is unhealthy with status '${health}'`
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
export async function containerPorts(id: string): Promise<string[]> {
|
||||
const dockerArgs = ['port', id]
|
||||
const portMappings = (await runDockerCommand(dockerArgs)).trim()
|
||||
return portMappings.split('\n')
|
||||
}
|
||||
|
||||
export async function registryLogin(args): Promise<string> {
|
||||
if (!args.registry) {
|
||||
return ''
|
||||
}
|
||||
const credentials = {
|
||||
username: args.registry.username,
|
||||
password: args.registry.password
|
||||
}
|
||||
|
||||
const configLocation = `${env.RUNNER_TEMP}/.docker_${uuidv4()}`
|
||||
fs.mkdirSync(configLocation)
|
||||
try {
|
||||
await dockerLogin(configLocation, args.registry.serverUrl, credentials)
|
||||
} catch (error) {
|
||||
fs.rmdirSync(configLocation, { recursive: true })
|
||||
throw error
|
||||
}
|
||||
return configLocation
|
||||
}
|
||||
|
||||
export async function registryLogout(configLocation: string): Promise<void> {
|
||||
if (configLocation) {
|
||||
await dockerLogout(configLocation)
|
||||
fs.rmdirSync(configLocation, { recursive: true })
|
||||
}
|
||||
}
|
||||
|
||||
async function dockerLogin(
|
||||
configLocation: string,
|
||||
registry: string,
|
||||
credentials: { username: string; password: string }
|
||||
): Promise<void> {
|
||||
const credentialsArgs =
|
||||
credentials.username && credentials.password
|
||||
? ['-u', credentials.username, '--password-stdin']
|
||||
: []
|
||||
|
||||
const dockerArgs = [
|
||||
'--config',
|
||||
configLocation,
|
||||
'login',
|
||||
...credentialsArgs,
|
||||
registry
|
||||
]
|
||||
|
||||
const options: RunDockerCommandOptions =
|
||||
credentials.username && credentials.password
|
||||
? {
|
||||
input: Buffer.from(credentials.password, 'utf-8')
|
||||
}
|
||||
: {}
|
||||
|
||||
await runDockerCommand(dockerArgs, options)
|
||||
}
|
||||
|
||||
async function dockerLogout(configLocation: string): Promise<void> {
|
||||
const dockerArgs = ['--config', configLocation, 'logout']
|
||||
await runDockerCommand(dockerArgs)
|
||||
}
|
||||
|
||||
/**
 * Executes one workflow step inside an already-running job container via
 * `docker exec -i`, forwarding the step's working directory, environment
 * variables, entry point and entry point arguments.
 *
 * @param args step arguments: workingDirectory, environmentVariables,
 *   entryPoint, entryPointArgs (untyped here; shape assumed from usage —
 *   TODO confirm against RunScriptStepArgs)
 * @param containerId id of the job container to exec into
 */
export async function containerExecStep(
  args,
  containerId: string
): Promise<void> {
  const dockerArgs: string[] = ['exec', '-i']
  dockerArgs.push(`--workdir=${args.workingDirectory}`)
  for (const [key, value] of Object.entries(args['environmentVariables'])) {
    dockerArgs.push('-e')
    // NOTE(review): these args are passed as an argv array (no shell), so
    // the surrounding double quotes become literal characters in the value
    // docker receives — presumably unintended; verify whether the quotes
    // should be dropped.
    if (!value) {
      // `-e "KEY"` (no value) forwards KEY from the current environment.
      dockerArgs.push(`"${key}"`)
    } else {
      dockerArgs.push(`"${key}=${value}"`)
    }
  }

  // Todo figure out prepend path and update it here
  // (we need to pass path in as -e Path={fullpath}) where {fullpath is the prepend path added to the current containers path}

  // Container id must precede the command; entryPoint plus its args form
  // the command line executed inside the container.
  dockerArgs.push(containerId)
  dockerArgs.push(args.entryPoint)
  for (const entryPointArg of args.entryPointArgs) {
    dockerArgs.push(entryPointArg)
  }
  await runDockerCommand(dockerArgs)
}
|
||||
|
||||
/**
 * Runs a container step to completion with `docker run --rm`, attached to
 * the job network, labelled for later pruning, with user/system volumes
 * mounted and step environment variables forwarded.
 *
 * Side effect: copies each step environment variable into this process's
 * environment (see comment below) so values are inherited by docker rather
 * than appearing on the command line.
 *
 * @param args step args (image must be set; entryPoint/entryPointArgs,
 *   createOptions, mounts and env vars are optional)
 * @param name container name to assign
 * @param network docker network to attach
 * @throws Error when args.image is missing
 */
export async function containerRun(
  args: RunContainerStepArgs,
  name: string,
  network: string
): Promise<void> {
  if (!args.image) {
    throw new Error('expected image to be set')
  }
  const dockerArgs: string[] = ['run', '--rm']

  dockerArgs.push('--name', name)
  dockerArgs.push(`--workdir=${args.workingDirectory}`)
  dockerArgs.push(`--label=${getRunnerLabel()}`)
  dockerArgs.push(`--network=${network}`)

  if (args.createOptions) {
    // NOTE(review): naive split — createOptions containing quoted values
    // with spaces will be broken apart; confirm acceptable for hook input.
    dockerArgs.push(...args.createOptions.split(' '))
  }
  if (args.environmentVariables) {
    for (const [key, value] of Object.entries(args.environmentVariables)) {
      // Pass in this way to avoid printing secrets
      // (`-e KEY` with no value makes docker read KEY from our env).
      env[key] = value ?? undefined
      dockerArgs.push('-e')
      dockerArgs.push(key)
    }
  }

  // User mounts first, then system mounts; each becomes
  // `-v source:target[:ro]`.
  const mountVolumes = [
    ...(args.userMountVolumes || []),
    ...(args.systemMountVolumes || [])
  ]
  for (const mountVolume of mountVolumes) {
    dockerArgs.push(`-v`)
    dockerArgs.push(
      `${mountVolume.sourceVolumePath}:${mountVolume.targetVolumePath}${
        mountVolume.readOnly ? ':ro' : ''
      }`
    )
  }

  if (args['entryPoint']) {
    dockerArgs.push(`--entrypoint`)
    dockerArgs.push(args['entryPoint'])
  }
  // Image must come after all options; entry point args follow the image.
  dockerArgs.push(args.image)
  if (args.entryPointArgs) {
    for (const entryPointArg of args.entryPointArgs) {
      dockerArgs.push(entryPointArg)
    }
  }

  await runDockerCommand(dockerArgs)
}
|
||||
|
||||
export async function isContainerAlpine(containerId: string): Promise<boolean> {
|
||||
const dockerArgs: string[] = [
|
||||
'exec',
|
||||
containerId,
|
||||
'sh',
|
||||
'-c',
|
||||
"[ $(cat /etc/*release* | grep -i -e '^ID=*alpine*' -c) != 0 ] || exit 1"
|
||||
]
|
||||
try {
|
||||
await runDockerCommand(dockerArgs)
|
||||
return true
|
||||
} catch {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
// Health states reported by `docker inspect` for containers with a
// HEALTHCHECK; None is our sentinel for "no healthcheck defined".
enum ContainerHealth {
  Starting = 'starting',
  Healthy = 'healthy',
  Unhealthy = 'unhealthy',
  None = 'none'
}
|
||||
|
||||
// Metadata collected for each created container; serialized into the
// prepare-job response file's state/context sections.
export interface ContainerMetadata {
  // docker container id
  id: string
  // image reference the container was created from
  image: string
  // docker network the container is attached to, if any
  network?: string
  // raw `docker port` mapping lines, e.g. '80/tcp -> 0.0.0.0:8080'
  ports?: string[]
  // service alias/context name, present for service containers only
  contextName?: string
}
|
||||
2
packages/docker/src/dockerCommands/index.ts
Normal file
2
packages/docker/src/dockerCommands/index.ts
Normal file
@@ -0,0 +1,2 @@
|
||||
export * from './container'
|
||||
export * from './network'
|
||||
26
packages/docker/src/dockerCommands/network.ts
Normal file
26
packages/docker/src/dockerCommands/network.ts
Normal file
@@ -0,0 +1,26 @@
|
||||
import { runDockerCommand } from '../utils'
|
||||
import { getRunnerLabel } from './constants'
|
||||
|
||||
export async function networkCreate(networkName): Promise<void> {
|
||||
const dockerArgs: string[] = ['network', 'create']
|
||||
dockerArgs.push('--label')
|
||||
dockerArgs.push(getRunnerLabel())
|
||||
dockerArgs.push(networkName)
|
||||
await runDockerCommand(dockerArgs)
|
||||
}
|
||||
|
||||
export async function networkRemove(networkName): Promise<void> {
|
||||
const dockerArgs: string[] = ['network']
|
||||
dockerArgs.push('rm')
|
||||
dockerArgs.push(networkName)
|
||||
await runDockerCommand(dockerArgs)
|
||||
}
|
||||
|
||||
export async function networkPrune(): Promise<void> {
|
||||
const dockerArgs: string[] = ['network']
|
||||
dockerArgs.push('prune')
|
||||
dockerArgs.push('--force')
|
||||
dockerArgs.push(`--filter`)
|
||||
dockerArgs.push(`label=${getRunnerLabel()}`)
|
||||
await runDockerCommand(dockerArgs)
|
||||
}
|
||||
21
packages/docker/src/hooks/cleanup-job.ts
Normal file
21
packages/docker/src/hooks/cleanup-job.ts
Normal file
@@ -0,0 +1,21 @@
|
||||
import {
|
||||
containerRemove,
|
||||
containerNetworkRemove
|
||||
} from '../dockerCommands/container'
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
export async function cleanupJob(args, state, responseFile): Promise<void> {
|
||||
const containerIds: string[] = []
|
||||
if (state?.container) {
|
||||
containerIds.push(state.container)
|
||||
}
|
||||
if (state?.services) {
|
||||
containerIds.push(state.services)
|
||||
}
|
||||
if (containerIds.length > 0) {
|
||||
await containerRemove(containerIds)
|
||||
}
|
||||
if (state.network) {
|
||||
await containerNetworkRemove(state.network)
|
||||
}
|
||||
}
|
||||
4
packages/docker/src/hooks/index.ts
Normal file
4
packages/docker/src/hooks/index.ts
Normal file
@@ -0,0 +1,4 @@
|
||||
export * from './cleanup-job'
|
||||
export * from './prepare-job'
|
||||
export * from './run-script-step'
|
||||
export * from './run-container-step'
|
||||
205
packages/docker/src/hooks/prepare-job.ts
Normal file
205
packages/docker/src/hooks/prepare-job.ts
Normal file
@@ -0,0 +1,205 @@
|
||||
import * as core from '@actions/core'
|
||||
import { ContextPorts, PrepareJobArgs, writeToResponseFile } from 'hooklib/lib'
|
||||
import { exit } from 'process'
|
||||
import { v4 as uuidv4 } from 'uuid'
|
||||
import {
|
||||
ContainerMetadata,
|
||||
containerPorts,
|
||||
containerPrune,
|
||||
containerPull,
|
||||
containerStart,
|
||||
createContainer,
|
||||
healthCheck,
|
||||
isContainerAlpine,
|
||||
registryLogin,
|
||||
registryLogout
|
||||
} from '../dockerCommands/container'
|
||||
import { networkCreate, networkPrune } from '../dockerCommands/network'
|
||||
import { sanitize } from '../utils'
|
||||
|
||||
export async function prepareJob(
|
||||
args: PrepareJobArgs,
|
||||
responseFile
|
||||
): Promise<void> {
|
||||
await containerPrune()
|
||||
await networkPrune()
|
||||
|
||||
const container = args.container
|
||||
const services = args.services
|
||||
|
||||
if (!container?.image && !services?.length) {
|
||||
core.info('No containers exist, skipping hook invocation')
|
||||
exit(0)
|
||||
}
|
||||
const networkName = generateNetworkName()
|
||||
// Create network
|
||||
await networkCreate(networkName)
|
||||
|
||||
// Create Job Container
|
||||
let containerMetadata: ContainerMetadata | undefined = undefined
|
||||
if (!container?.image) {
|
||||
core.info('No job container provided, skipping')
|
||||
} else {
|
||||
setupContainer(container)
|
||||
|
||||
const configLocation = await registryLogin(container.registry)
|
||||
try {
|
||||
await containerPull(container.image, configLocation)
|
||||
} finally {
|
||||
await registryLogout(configLocation)
|
||||
}
|
||||
containerMetadata = await createContainer(
|
||||
container,
|
||||
generateContainerName(container.image),
|
||||
networkName
|
||||
)
|
||||
if (!containerMetadata?.id) {
|
||||
throw new Error('Failed to create container')
|
||||
}
|
||||
await containerStart(containerMetadata?.id)
|
||||
}
|
||||
|
||||
// Create Service Containers
|
||||
const servicesMetadata: ContainerMetadata[] = []
|
||||
if (!services?.length) {
|
||||
core.info('No service containers provided, skipping')
|
||||
} else {
|
||||
for (const service of services) {
|
||||
const configLocation = await registryLogin(service.registry)
|
||||
try {
|
||||
await containerPull(service.image, configLocation)
|
||||
} finally {
|
||||
await registryLogout(configLocation)
|
||||
}
|
||||
|
||||
setupContainer(service)
|
||||
const response = await createContainer(
|
||||
service,
|
||||
generateContainerName(service.image),
|
||||
networkName
|
||||
)
|
||||
servicesMetadata.push(response)
|
||||
await containerStart(response.id)
|
||||
}
|
||||
}
|
||||
|
||||
if (
|
||||
(container && !containerMetadata?.id) ||
|
||||
(services?.length && servicesMetadata.some(s => !s.id))
|
||||
) {
|
||||
throw new Error(
|
||||
`Not all containers are started correctly ${
|
||||
containerMetadata?.id
|
||||
}, ${servicesMetadata.map(e => e.id).join(',')}`
|
||||
)
|
||||
}
|
||||
|
||||
const isAlpine = await isContainerAlpine(containerMetadata!.id)
|
||||
|
||||
if (containerMetadata?.id) {
|
||||
containerMetadata.ports = await containerPorts(containerMetadata.id)
|
||||
}
|
||||
if (servicesMetadata?.length) {
|
||||
for (const serviceMetadata of servicesMetadata) {
|
||||
serviceMetadata.ports = await containerPorts(serviceMetadata.id)
|
||||
}
|
||||
}
|
||||
|
||||
const healthChecks: Promise<void>[] = [healthCheck(containerMetadata!)]
|
||||
for (const service of servicesMetadata) {
|
||||
healthChecks.push(healthCheck(service))
|
||||
}
|
||||
try {
|
||||
await Promise.all(healthChecks)
|
||||
core.info('All services are healthy')
|
||||
} catch (error) {
|
||||
core.error(`Failed to initialize containers, ${error}`)
|
||||
throw new Error(`Failed to initialize containers, ${error}`)
|
||||
}
|
||||
|
||||
generateResponseFile(
|
||||
responseFile,
|
||||
networkName,
|
||||
containerMetadata,
|
||||
servicesMetadata,
|
||||
isAlpine
|
||||
)
|
||||
}
|
||||
|
||||
function generateResponseFile(
|
||||
responseFile: string,
|
||||
networkName: string,
|
||||
containerMetadata?: ContainerMetadata,
|
||||
servicesMetadata?: ContainerMetadata[],
|
||||
isAlpine = false
|
||||
): void {
|
||||
// todo figure out if we are alpine
|
||||
const response = {
|
||||
state: { network: networkName },
|
||||
context: {},
|
||||
isAlpine
|
||||
}
|
||||
if (containerMetadata) {
|
||||
response.state['container'] = containerMetadata.id
|
||||
const contextMeta = JSON.parse(JSON.stringify(containerMetadata))
|
||||
if (containerMetadata.ports) {
|
||||
contextMeta.ports = transformDockerPortsToContextPorts(containerMetadata)
|
||||
}
|
||||
response.context['container'] = contextMeta
|
||||
|
||||
if (containerMetadata.ports) {
|
||||
response.context['container'].ports =
|
||||
transformDockerPortsToContextPorts(containerMetadata)
|
||||
}
|
||||
}
|
||||
if (servicesMetadata && servicesMetadata.length > 0) {
|
||||
response.state['services'] = []
|
||||
response.context['services'] = []
|
||||
for (const meta of servicesMetadata) {
|
||||
response.state['services'].push(meta.id)
|
||||
const contextMeta = JSON.parse(JSON.stringify(meta))
|
||||
if (contextMeta.ports) {
|
||||
contextMeta.ports = transformDockerPortsToContextPorts(contextMeta)
|
||||
}
|
||||
response.context['services'].push(contextMeta)
|
||||
}
|
||||
}
|
||||
writeToResponseFile(responseFile, JSON.stringify(response))
|
||||
}
|
||||
|
||||
// Overrides the container's entry point with `tail -f /dev/null` so the
// container idles indefinitely and stays alive for later `docker exec`
// steps. Mutates the passed container definition in place.
function setupContainer(container): void {
  container.entryPointArgs = [`-f`, `/dev/null`]
  container.entryPoint = 'tail'
}
|
||||
|
||||
function generateNetworkName(): string {
|
||||
return `github_network_${uuidv4()}`
|
||||
}
|
||||
|
||||
function generateContainerName(container): string {
|
||||
const randomAlias = uuidv4().replace(/-/g, '')
|
||||
const randomSuffix = uuidv4().substring(0, 6)
|
||||
return `${randomAlias}_${sanitize(container.image)}_${randomSuffix}`
|
||||
}
|
||||
|
||||
function transformDockerPortsToContextPorts(
|
||||
meta: ContainerMetadata
|
||||
): ContextPorts {
|
||||
// ex: '80/tcp -> 0.0.0.0:80'
|
||||
const re = /^(\d+)\/(\w+)? -> (.*):(\d+)$/
|
||||
const contextPorts: ContextPorts = {}
|
||||
|
||||
if (meta.ports) {
|
||||
for (const port of meta.ports) {
|
||||
const matches = port.match(re)
|
||||
if (!matches) {
|
||||
throw new Error(
|
||||
'Container ports could not match the regex: "^(\\d+)\\/(\\w+)? -> (.*):(\\d+)$"'
|
||||
)
|
||||
}
|
||||
contextPorts[matches[1]] = matches[matches.length - 1]
|
||||
}
|
||||
}
|
||||
|
||||
return contextPorts
|
||||
}
|
||||
39
packages/docker/src/hooks/run-container-step.ts
Normal file
39
packages/docker/src/hooks/run-container-step.ts
Normal file
@@ -0,0 +1,39 @@
|
||||
import {
|
||||
containerBuild,
|
||||
registryLogin,
|
||||
registryLogout,
|
||||
containerPull,
|
||||
containerRun
|
||||
} from '../dockerCommands'
|
||||
import { v4 as uuidv4 } from 'uuid'
|
||||
import * as core from '@actions/core'
|
||||
import { RunContainerStepArgs } from 'hooklib/lib'
|
||||
import { getRunnerLabel } from '../dockerCommands/constants'
|
||||
|
||||
export async function runContainerStep(
|
||||
args: RunContainerStepArgs,
|
||||
state
|
||||
): Promise<void> {
|
||||
const tag = generateBuildTag() // for docker build
|
||||
if (!args.image) {
|
||||
core.error('expected an image')
|
||||
} else {
|
||||
if (args.dockerfile) {
|
||||
await containerBuild(args, tag)
|
||||
args.image = tag
|
||||
} else {
|
||||
const configLocation = await registryLogin(args)
|
||||
try {
|
||||
await containerPull(args.image, configLocation)
|
||||
} finally {
|
||||
await registryLogout(configLocation)
|
||||
}
|
||||
}
|
||||
}
|
||||
// container will get pruned at the end of the job based on the label, no need to cleanup here
|
||||
await containerRun(args, tag.split(':')[1], state.network)
|
||||
}
|
||||
|
||||
function generateBuildTag(): string {
|
||||
return `${getRunnerLabel()}:${uuidv4().substring(0, 6)}`
|
||||
}
|
||||
9
packages/docker/src/hooks/run-script-step.ts
Normal file
9
packages/docker/src/hooks/run-script-step.ts
Normal file
@@ -0,0 +1,9 @@
|
||||
import { RunScriptStepArgs } from 'hooklib/lib'
|
||||
import { containerExecStep } from '../dockerCommands'
|
||||
|
||||
/**
 * Runs a script step by exec-ing it inside the job container recorded in
 * prepare-job's state (thin wrapper over containerExecStep).
 *
 * @param args script step arguments
 * @param state prepare-job state; `state.container` is the job container id
 */
export async function runScriptStep(
  args: RunScriptStepArgs,
  state
): Promise<void> {
  await containerExecStep(args, state.container)
}
|
||||
48
packages/docker/src/index.ts
Normal file
48
packages/docker/src/index.ts
Normal file
@@ -0,0 +1,48 @@
|
||||
import * as core from '@actions/core'
|
||||
import {
|
||||
Command,
|
||||
getInputFromStdin,
|
||||
PrepareJobArgs,
|
||||
RunContainerStepArgs,
|
||||
RunScriptStepArgs
|
||||
} from 'hooklib/lib'
|
||||
import { exit } from 'process'
|
||||
import {
|
||||
cleanupJob,
|
||||
prepareJob,
|
||||
runContainerStep,
|
||||
runScriptStep
|
||||
} from './hooks'
|
||||
|
||||
async function run(): Promise<void> {
|
||||
const input = await getInputFromStdin()
|
||||
|
||||
const args = input['args']
|
||||
const command = input['command']
|
||||
const responseFile = input['responseFile']
|
||||
const state = input['state']
|
||||
|
||||
try {
|
||||
switch (command) {
|
||||
case Command.PrepareJob:
|
||||
await prepareJob(args as PrepareJobArgs, responseFile)
|
||||
return exit(0)
|
||||
case Command.CleanupJob:
|
||||
await cleanupJob(null, state, null)
|
||||
return exit(0)
|
||||
case Command.RunScriptStep:
|
||||
await runScriptStep(args as RunScriptStepArgs, state)
|
||||
return exit(0)
|
||||
case Command.RunContainerStep:
|
||||
await runContainerStep(args as RunContainerStepArgs, state)
|
||||
return exit(0)
|
||||
default:
|
||||
throw new Error(`Command not recognized: ${command}`)
|
||||
}
|
||||
} catch (error) {
|
||||
core.error(`${error}`)
|
||||
exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
void run()
|
||||
56
packages/docker/src/utils.ts
Normal file
56
packages/docker/src/utils.ts
Normal file
@@ -0,0 +1,56 @@
|
||||
/* eslint-disable @typescript-eslint/no-var-requires */
|
||||
/* eslint-disable @typescript-eslint/no-require-imports */
|
||||
/* eslint-disable import/no-commonjs */
|
||||
import * as core from '@actions/core'
|
||||
// Import this way otherwise typescript has errors
|
||||
const exec = require('@actions/exec')
|
||||
|
||||
// Options forwarded to @actions/exec when running the docker CLI.
export interface RunDockerCommandOptions {
  // working directory for the docker process
  workingDir?: string
  // bytes written to docker's stdin (e.g. password for --password-stdin)
  input?: Buffer
}
|
||||
|
||||
export async function runDockerCommand(
|
||||
args: string[],
|
||||
options?: RunDockerCommandOptions
|
||||
): Promise<string> {
|
||||
const pipes = await exec.getExecOutput('docker', args, options)
|
||||
if (pipes.exitCode !== 0) {
|
||||
core.error(`Docker failed with exit code ${pipes.exitCode}`)
|
||||
return Promise.reject(pipes.stderr)
|
||||
}
|
||||
return Promise.resolve(pipes.stdout)
|
||||
}
|
||||
|
||||
export function sanitize(val: string): string {
|
||||
if (!val || typeof val !== 'string') {
|
||||
return ''
|
||||
}
|
||||
const newNameBuilder: string[] = []
|
||||
for (let i = 0; i < val.length; i++) {
|
||||
const char = val.charAt(i)
|
||||
if (!newNameBuilder.length) {
|
||||
if (isAlpha(char)) {
|
||||
newNameBuilder.push(char)
|
||||
}
|
||||
} else {
|
||||
if (isAlpha(char) || isNumeric(char) || char === '_') {
|
||||
newNameBuilder.push(char)
|
||||
}
|
||||
}
|
||||
}
|
||||
return newNameBuilder.join('')
|
||||
}
|
||||
|
||||
// isAlpha accepts single character and checks if
|
||||
// that character is [a-zA-Z]
|
||||
function isAlpha(val: string): boolean {
|
||||
return (
|
||||
val.length === 1 &&
|
||||
((val >= 'a' && val <= 'z') || (val >= 'A' && val <= 'Z'))
|
||||
)
|
||||
}
|
||||
|
||||
function isNumeric(val: string): boolean {
|
||||
return val.length === 1 && val >= '0' && val <= '9'
|
||||
}
|
||||
62
packages/docker/tests/cleanup-job-test.ts
Normal file
62
packages/docker/tests/cleanup-job-test.ts
Normal file
@@ -0,0 +1,62 @@
|
||||
import { prepareJob, cleanupJob } from '../src/hooks'
|
||||
import { v4 as uuidv4 } from 'uuid'
|
||||
import * as fs from 'fs'
|
||||
import * as path from 'path'
|
||||
import TestSetup from './test-setup'
|
||||
|
||||
// Integration test: runs prepareJob for real (requires a working docker
// daemon), then verifies cleanupJob removes everything without throwing.
const prepareJobInputPath = path.resolve(
  `${__dirname}/../../../examples/prepare-job.json`
)

// Unique scratch directory per test run for the response file.
const tmpOutputDir = `${__dirname}/${uuidv4()}`

let prepareJobOutputPath: string
let prepareJobData: any

let testSetup: TestSetup

// Health-check backoff uses real setTimeout; fake timers would stall it.
jest.useRealTimers()

describe('cleanup job', () => {
  beforeAll(() => {
    fs.mkdirSync(tmpOutputDir, { recursive: true })
  })

  afterAll(() => {
    fs.rmSync(tmpOutputDir, { recursive: true })
  })

  beforeEach(async () => {
    const prepareJobRawData = fs.readFileSync(prepareJobInputPath, 'utf8')
    prepareJobData = JSON.parse(prepareJobRawData.toString())

    prepareJobOutputPath = `${tmpOutputDir}/prepare-job-output-${uuidv4()}.json`
    fs.writeFileSync(prepareJobOutputPath, '')

    testSetup = new TestSetup()
    testSetup.initialize()

    // Point the example job's mounts/workdir at the per-test scratch setup.
    prepareJobData.args.container.userMountVolumes = testSetup.userMountVolumes
    prepareJobData.args.container.systemMountVolumes =
      testSetup.systemMountVolumes
    prepareJobData.args.container.workingDirectory = testSetup.workingDirectory

    await prepareJob(prepareJobData.args, prepareJobOutputPath)
  })

  afterEach(() => {
    fs.rmSync(prepareJobOutputPath, { force: true })
    testSetup.teardown()
  })

  it('should cleanup successfully', async () => {
    const prepareJobOutputContent = fs.readFileSync(
      prepareJobOutputPath,
      'utf-8'
    )
    const parsedPrepareJobOutput = JSON.parse(prepareJobOutputContent)
    await expect(
      cleanupJob(prepareJobData.args, parsedPrepareJobOutput.state, null)
    ).resolves.not.toThrow()
  })
})
|
||||
14
packages/docker/tests/container-pull-test.ts
Normal file
14
packages/docker/tests/container-pull-test.ts
Normal file
@@ -0,0 +1,14 @@
|
||||
import { containerPull } from '../src/dockerCommands'
|
||||
|
||||
// containerPull retries with real delays; keep real timers.
jest.useRealTimers()

// Integration test: requires docker and network access to a registry.
describe('container pull', () => {
  it('should fail', async () => {
    // Non-existent image: all 3 pull attempts fail and containerPull throws.
    const arg = { image: 'doesNotExist' }
    await expect(containerPull(arg.image, '')).rejects.toThrow()
  })
  it('should succeed', async () => {
    const arg = { image: 'ubuntu:latest' }
    await expect(containerPull(arg.image, '')).resolves.not.toThrow()
  })
})
|
||||
117
packages/docker/tests/e2e-test.ts
Normal file
117
packages/docker/tests/e2e-test.ts
Normal file
@@ -0,0 +1,117 @@
|
||||
import {
|
||||
prepareJob,
|
||||
cleanupJob,
|
||||
runScriptStep,
|
||||
runContainerStep
|
||||
} from '../src/hooks'
|
||||
import * as fs from 'fs'
|
||||
import * as path from 'path'
|
||||
import { v4 as uuidv4 } from 'uuid'
|
||||
import TestSetup from './test-setup'
|
||||
|
||||
// End-to-end tests exercising the full hook sequence
// (prepare -> script step -> container step -> cleanup) against a real
// docker daemon, driven by the example JSON payloads.
const prepareJobJson = fs.readFileSync(
  path.resolve(__dirname + '/../../../examples/prepare-job.json'),
  'utf8'
)

const containerStepJson = fs.readFileSync(
  path.resolve(__dirname + '/../../../examples/run-container-step.json'),
  'utf8'
)

// Per-run scratch directory for response files and generated Dockerfiles.
const tmpOutputDir = `${__dirname}/_temp/${uuidv4()}`

let prepareJobData: any
let scriptStepJson: any
let scriptStepData: any
let containerStepData: any

let prepareJobOutputFilePath: string

let testSetup: TestSetup

describe('e2e', () => {
  beforeAll(() => {
    fs.mkdirSync(tmpOutputDir, { recursive: true })
  })

  afterAll(() => {
    fs.rmSync(tmpOutputDir, { recursive: true })
  })

  beforeEach(() => {
    // init dirs
    testSetup = new TestSetup()
    testSetup.initialize()

    // Re-parse the example payloads each time and rebase all mounts and
    // working directories onto the fresh per-test setup.
    prepareJobData = JSON.parse(prepareJobJson)
    prepareJobData.args.container.userMountVolumes = testSetup.userMountVolumes
    prepareJobData.args.container.systemMountVolumes =
      testSetup.systemMountVolumes
    prepareJobData.args.container.workingDirectory = testSetup.workingDirectory

    scriptStepJson = fs.readFileSync(
      path.resolve(__dirname + '/../../../examples/run-script-step.json'),
      'utf8'
    )
    scriptStepData = JSON.parse(scriptStepJson)
    scriptStepData.args.workingDirectory = testSetup.workingDirectory

    containerStepData = JSON.parse(containerStepJson)
    containerStepData.args.workingDirectory = testSetup.workingDirectory
    containerStepData.args.userMountVolumes = testSetup.userMountVolumes
    containerStepData.args.systemMountVolumes = testSetup.systemMountVolumes

    prepareJobOutputFilePath = `${tmpOutputDir}/prepare-job-output-${uuidv4()}.json`
    fs.writeFileSync(prepareJobOutputFilePath, '')
  })

  afterEach(() => {
    fs.rmSync(prepareJobOutputFilePath, { force: true })
    testSetup.teardown()
  })

  it('should prepare job, then run script step, then run container step then cleanup', async () => {
    await expect(
      prepareJob(prepareJobData.args, prepareJobOutputFilePath)
    ).resolves.not.toThrow()
    // Later hooks consume the state produced by prepareJob's response file.
    let rawState = fs.readFileSync(prepareJobOutputFilePath, 'utf-8')
    let resp = JSON.parse(rawState)
    await expect(
      runScriptStep(scriptStepData.args, resp.state)
    ).resolves.not.toThrow()
    await expect(
      runContainerStep(containerStepData.args, resp.state)
    ).resolves.not.toThrow()
    await expect(cleanupJob(resp, resp.state, null)).resolves.not.toThrow()
  })

  it('should prepare job, then run script step, then run container step with Dockerfile then cleanup', async () => {
    await expect(
      prepareJob(prepareJobData.args, prepareJobOutputFilePath)
    ).resolves.not.toThrow()
    let rawState = fs.readFileSync(prepareJobOutputFilePath, 'utf-8')
    let resp = JSON.parse(rawState)
    await expect(
      runScriptStep(scriptStepData.args, resp.state)
    ).resolves.not.toThrow()

    // Build-from-Dockerfile path: write a minimal idling image definition
    // into the scratch dir and point GITHUB_WORKSPACE at it.
    const dockerfilePath = `${tmpOutputDir}/Dockerfile`
    fs.writeFileSync(
      dockerfilePath,
      `FROM ubuntu:latest
ENV TEST=test
ENTRYPOINT [ "tail", "-f", "/dev/null" ]
`
    )
    const containerStepDataCopy = JSON.parse(JSON.stringify(containerStepData))
    process.env.GITHUB_WORKSPACE = tmpOutputDir
    containerStepDataCopy.args.dockerfile = 'Dockerfile'
    containerStepDataCopy.args.context = '.'
    console.log(containerStepDataCopy.args)
    await expect(
      runContainerStep(containerStepDataCopy.args, resp.state)
    ).resolves.not.toThrow()
    await expect(cleanupJob(resp, resp.state, null)).resolves.not.toThrow()
  })
})
|
||||
103
packages/docker/tests/prepare-job-test.ts
Normal file
103
packages/docker/tests/prepare-job-test.ts
Normal file
@@ -0,0 +1,103 @@
|
||||
import * as fs from 'fs'
|
||||
import { v4 as uuidv4 } from 'uuid'
|
||||
import { prepareJob } from '../src/hooks'
|
||||
import TestSetup from './test-setup'
|
||||
|
||||
// Health-check backoff uses real setTimeout; fake timers would stall it.
jest.useRealTimers()

let prepareJobOutputPath: string
let prepareJobData: any
// Per-run scratch directory for response files.
const tmpOutputDir = `${__dirname}/_temp/${uuidv4()}`
const prepareJobInputPath = `${__dirname}/../../../examples/prepare-job.json`

let testSetup: TestSetup

// Integration tests: require a working docker daemon.
describe('prepare job', () => {
  beforeAll(() => {
    fs.mkdirSync(tmpOutputDir, { recursive: true })
  })

  afterAll(() => {
    fs.rmSync(tmpOutputDir, { recursive: true })
  })

  beforeEach(async () => {
    testSetup = new TestSetup()
    testSetup.initialize()

    let prepareJobRawData = fs.readFileSync(prepareJobInputPath, 'utf8')
    prepareJobData = JSON.parse(prepareJobRawData.toString())

    // Rebase the example payload's mounts/workdir onto the test setup.
    prepareJobData.args.container.userMountVolumes = testSetup.userMountVolumes
    prepareJobData.args.container.systemMountVolumes =
      testSetup.systemMountVolumes
    prepareJobData.args.container.workingDirectory = testSetup.workingDirectory

    prepareJobOutputPath = `${tmpOutputDir}/prepare-job-output-${uuidv4()}.json`
    fs.writeFileSync(prepareJobOutputPath, '')
  })

  afterEach(() => {
    testSetup.teardown()
  })

  it('should not throw', async () => {
    await expect(
      prepareJob(prepareJobData.args, prepareJobOutputPath)
    ).resolves.not.toThrow()

    expect(() => fs.readFileSync(prepareJobOutputPath, 'utf-8')).not.toThrow()
  })

  it('should have JSON output written to a file', async () => {
    await prepareJob(prepareJobData.args, prepareJobOutputPath)
    const prepareJobOutputContent = fs.readFileSync(
      prepareJobOutputPath,
      'utf-8'
    )
    expect(() => JSON.parse(prepareJobOutputContent)).not.toThrow()
  })

  it('should have context written to a file', async () => {
    await prepareJob(prepareJobData.args, prepareJobOutputPath)
    const prepareJobOutputContent = fs.readFileSync(
      prepareJobOutputPath,
      'utf-8'
    )
    const parsedPrepareJobOutput = JSON.parse(prepareJobOutputContent)
    expect(parsedPrepareJobOutput.context).toBeDefined()
  })

  it('should have container ids written to file', async () => {
    await prepareJob(prepareJobData.args, prepareJobOutputPath)
    const prepareJobOutputContent = fs.readFileSync(
      prepareJobOutputPath,
      'utf-8'
    )
    const parsedPrepareJobOutput = JSON.parse(prepareJobOutputContent)

    // Full (no-trunc) docker container ids are lowercase hex strings.
    expect(parsedPrepareJobOutput.context.container.id).toBeDefined()
    expect(typeof parsedPrepareJobOutput.context.container.id).toBe('string')
    expect(parsedPrepareJobOutput.context.container.id).toMatch(/^[0-9a-f]+$/)
  })

  it('should have ports for context written in form [containerPort]:[hostPort]', async () => {
    await prepareJob(prepareJobData.args, prepareJobOutputPath)
    const prepareJobOutputContent = fs.readFileSync(
      prepareJobOutputPath,
      'utf-8'
    )
    const parsedPrepareJobOutput = JSON.parse(prepareJobOutputContent)

    // Port maps are keyed by container port with host-port values
    // (see transformDockerPortsToContextPorts); expected values come from
    // the example prepare-job.json payload.
    const mainContainerPorts = parsedPrepareJobOutput.context.container.ports
    expect(mainContainerPorts['8080']).toBe('80')

    const redisService = parsedPrepareJobOutput.context.services.find(
      s => s.image === 'redis'
    )

    const redisServicePorts = redisService.ports
    expect(redisServicePorts['80']).toBe('8080')
    expect(redisServicePorts['8080']).toBe('8088')
  })
})
|
||||
112
packages/docker/tests/test-setup.ts
Normal file
112
packages/docker/tests/test-setup.ts
Normal file
@@ -0,0 +1,112 @@
|
||||
import * as fs from 'fs'
|
||||
import { v4 as uuidv4 } from 'uuid'
|
||||
import { env } from 'process'
|
||||
import { Mount } from 'hooklib'
|
||||
|
||||
export default class TestSetup {
|
||||
private testdir: string
|
||||
private runnerMockDir: string
|
||||
private runnerMockSubdirs = {
|
||||
work: '_work',
|
||||
externals: 'externals',
|
||||
workTemp: '_work/_temp',
|
||||
workActions: '_work/_actions',
|
||||
workTool: '_work/_tool',
|
||||
githubHome: '_work/_temp/_github_home',
|
||||
githubWorkflow: '_work/_temp/_github_workflow'
|
||||
}
|
||||
|
||||
private readonly projectName = 'example'
|
||||
|
||||
constructor() {
|
||||
this.testdir = `${__dirname}/_temp/${uuidv4()}`
|
||||
this.runnerMockDir = `${this.testdir}/runner/_layout`
|
||||
}
|
||||
|
||||
private get allTestDirectories() {
|
||||
const resp = [this.testdir, this.runnerMockDir]
|
||||
|
||||
for (const [key, value] of Object.entries(this.runnerMockSubdirs)) {
|
||||
resp.push(`${this.runnerMockDir}/${value}`)
|
||||
}
|
||||
|
||||
resp.push(
|
||||
`${this.runnerMockDir}/_work/${this.projectName}/${this.projectName}`
|
||||
)
|
||||
|
||||
return resp
|
||||
}
|
||||
|
||||
public initialize(): void {
|
||||
for (const dir of this.allTestDirectories) {
|
||||
fs.mkdirSync(dir, { recursive: true })
|
||||
}
|
||||
env['RUNNER_NAME'] = 'test'
|
||||
env[
|
||||
'RUNNER_TEMP'
|
||||
] = `${this.runnerMockDir}/${this.runnerMockSubdirs.workTemp}`
|
||||
}
|
||||
|
||||
public teardown(): void {
|
||||
fs.rmdirSync(this.testdir, { recursive: true })
|
||||
}
|
||||
|
||||
public get userMountVolumes(): Mount[] {
|
||||
return [
|
||||
{
|
||||
sourceVolumePath: 'my_docker_volume',
|
||||
targetVolumePath: '/volume_mount',
|
||||
readOnly: false
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
public get systemMountVolumes(): Mount[] {
|
||||
return [
|
||||
{
|
||||
sourceVolumePath: '/var/run/docker.sock',
|
||||
targetVolumePath: '/var/run/docker.sock',
|
||||
readOnly: false
|
||||
},
|
||||
{
|
||||
sourceVolumePath: `${this.runnerMockDir}/${this.runnerMockSubdirs.work}`,
|
||||
targetVolumePath: '/__w',
|
||||
readOnly: false
|
||||
},
|
||||
{
|
||||
sourceVolumePath: `${this.runnerMockDir}/${this.runnerMockSubdirs.externals}`,
|
||||
targetVolumePath: '/__e',
|
||||
readOnly: true
|
||||
},
|
||||
{
|
||||
sourceVolumePath: `${this.runnerMockDir}/${this.runnerMockSubdirs.workTemp}`,
|
||||
targetVolumePath: '/__w/_temp',
|
||||
readOnly: false
|
||||
},
|
||||
{
|
||||
sourceVolumePath: `${this.runnerMockDir}/${this.runnerMockSubdirs.workActions}`,
|
||||
targetVolumePath: '/__w/_actions',
|
||||
readOnly: false
|
||||
},
|
||||
{
|
||||
sourceVolumePath: `${this.runnerMockDir}/${this.runnerMockSubdirs.workTool}`,
|
||||
targetVolumePath: '/__w/_tool',
|
||||
readOnly: false
|
||||
},
|
||||
{
|
||||
sourceVolumePath: `${this.runnerMockDir}/${this.runnerMockSubdirs.githubHome}`,
|
||||
targetVolumePath: '/github/home',
|
||||
readOnly: false
|
||||
},
|
||||
{
|
||||
sourceVolumePath: `${this.runnerMockDir}/${this.runnerMockSubdirs.githubWorkflow}`,
|
||||
targetVolumePath: '/github/workflow',
|
||||
readOnly: false
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
public get workingDirectory(): string {
|
||||
return `/__w/${this.projectName}/${this.projectName}`
|
||||
}
|
||||
}
|
||||
12
packages/docker/tests/utils-test.ts
Normal file
12
packages/docker/tests/utils-test.ts
Normal file
@@ -0,0 +1,12 @@
|
||||
import { sanitize } from '../src/utils'
|
||||
|
||||
// Tests for the docker package's sanitize() helper, which strips
// characters that are not valid in docker object names.
describe('Utilities', () => {
  it('should return sanitized image name', () => {
    // ':' is removed from the image tag separator.
    expect(sanitize('ubuntu:latest')).toBe('ubuntulatest')
  })

  it('should return the same string', () => {
    // Alphanumerics and underscores pass through untouched.
    const validStr = 'teststr8_one'
    expect(sanitize(validStr)).toBe(validStr)
  })
})
|
||||
11
packages/docker/tsconfig.json
Normal file
11
packages/docker/tsconfig.json
Normal file
@@ -0,0 +1,11 @@
|
||||
{
|
||||
"extends": "../../tsconfig.json",
|
||||
"compilerOptions": {
|
||||
"baseUrl": "./",
|
||||
"outDir": "./lib",
|
||||
"rootDir": "./src"
|
||||
},
|
||||
"include": [
|
||||
"./src"
|
||||
]
|
||||
}
|
||||
4350
packages/hooklib/package-lock.json
generated
Normal file
4350
packages/hooklib/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
28
packages/hooklib/package.json
Normal file
28
packages/hooklib/package.json
Normal file
@@ -0,0 +1,28 @@
|
||||
{
|
||||
"name": "hooklib",
|
||||
"version": "0.1.0",
|
||||
"description": "",
|
||||
"main": "lib/index.js",
|
||||
"types": "index.d.ts",
|
||||
"scripts": {
|
||||
"test": "echo \"Error: no test specified\" && exit 1",
|
||||
"build": "tsc",
|
||||
"format": "prettier --write '**/*.ts'",
|
||||
"format-check": "prettier --check '**/*.ts'",
|
||||
"lint": "eslint src/**/*.ts"
|
||||
},
|
||||
"author": "",
|
||||
"license": "MIT",
|
||||
"devDependencies": {
|
||||
"@types/node": "^17.0.23",
|
||||
"@typescript-eslint/parser": "^5.18.0",
|
||||
"@zeit/ncc": "^0.22.3",
|
||||
"eslint": "^8.12.0",
|
||||
"eslint-plugin-github": "^4.3.6",
|
||||
"prettier": "^2.6.2",
|
||||
"typescript": "^4.6.3"
|
||||
},
|
||||
"dependencies": {
|
||||
"@actions/core": "^1.6.0"
|
||||
}
|
||||
}
|
||||
2
packages/hooklib/src/index.ts
Normal file
2
packages/hooklib/src/index.ts
Normal file
@@ -0,0 +1,2 @@
|
||||
export * from './interfaces'
|
||||
export * from './utils'
|
||||
99
packages/hooklib/src/interfaces.ts
Normal file
99
packages/hooklib/src/interfaces.ts
Normal file
@@ -0,0 +1,99 @@
|
||||
export enum Command {
|
||||
PrepareJob = 'prepare_job',
|
||||
CleanupJob = 'cleanup_job',
|
||||
RunContainerStep = 'run_container_step',
|
||||
RunScriptStep = 'run_script_step'
|
||||
}
|
||||
|
||||
export interface HookData {
|
||||
command: Command
|
||||
responseFile: string
|
||||
args?: PrepareJobArgs | RunContainerStepArgs | RunScriptStepArgs
|
||||
state?: { [key: string]: any }
|
||||
}
|
||||
|
||||
export interface PrepareJobArgs {
|
||||
container?: JobContainerInfo
|
||||
services?: ServiceContainerInfo[]
|
||||
}
|
||||
|
||||
export type RunContainerStepArgs = StepContainerInfo
|
||||
|
||||
export interface RunScriptStepArgs {
|
||||
entryPoint: string
|
||||
entryPointArgs: string[]
|
||||
environmentVariables?: { [key: string]: string }
|
||||
prependPath?: string[]
|
||||
workingDirectory: string
|
||||
}
|
||||
|
||||
export interface ContainerInfo {
|
||||
image?: string
|
||||
entryPoint?: string
|
||||
entryPointArgs?: string[]
|
||||
createOptions?: string
|
||||
environmentVariables?: { [key: string]: string }
|
||||
userMountVolumes?: Mount[]
|
||||
registry?: Registry
|
||||
portMappings?: string[]
|
||||
}
|
||||
|
||||
export interface ServiceContainerInfo extends ContainerInfo {
|
||||
contextName: string
|
||||
image: string
|
||||
}
|
||||
|
||||
export interface JobContainerInfo extends ContainerInfo {
|
||||
image: string
|
||||
workingDirectory: string
|
||||
systemMountVolumes: Mount[]
|
||||
}
|
||||
|
||||
export interface StepContainerInfo extends ContainerInfo {
|
||||
prependPath?: string[]
|
||||
workingDirectory: string
|
||||
dockerfile?: string
|
||||
systemMountVolumes: Mount[]
|
||||
}
|
||||
|
||||
export interface Mount {
|
||||
sourceVolumePath: string
|
||||
targetVolumePath: string
|
||||
readOnly: boolean
|
||||
}
|
||||
|
||||
export interface Registry {
|
||||
username?: string
|
||||
password?: string
|
||||
serverUrl: string
|
||||
}
|
||||
|
||||
export enum Protocol {
|
||||
TCP = 'tcp',
|
||||
UDP = 'udp'
|
||||
}
|
||||
|
||||
export enum PodPhase {
|
||||
PENDING = 'Pending',
|
||||
RUNNING = 'Running',
|
||||
SUCCEEDED = 'Succeded',
|
||||
FAILED = 'Failed',
|
||||
UNKNOWN = 'Unknown'
|
||||
}
|
||||
|
||||
export interface PrepareJobResponse {
|
||||
state?: object
|
||||
context?: ContainerContext
|
||||
services?: { [key: string]: ContainerContext }
|
||||
alpine: boolean
|
||||
}
|
||||
|
||||
export interface ContainerContext {
|
||||
id?: string
|
||||
network?: string
|
||||
ports?: { [key: string]: string }
|
||||
}
|
||||
|
||||
export interface ContextPorts {
|
||||
[source: string]: string // source -> target
|
||||
}
|
||||
44
packages/hooklib/src/utils.ts
Normal file
44
packages/hooklib/src/utils.ts
Normal file
@@ -0,0 +1,44 @@
|
||||
import * as core from '@actions/core'
|
||||
import * as events from 'events'
|
||||
import * as fs from 'fs'
|
||||
import * as os from 'os'
|
||||
import * as readline from 'readline'
|
||||
import { HookData } from './interfaces'
|
||||
|
||||
export async function getInputFromStdin(): Promise<HookData> {
|
||||
let input = ''
|
||||
|
||||
const rl = readline.createInterface({
|
||||
input: process.stdin
|
||||
})
|
||||
|
||||
rl.on('line', line => {
|
||||
core.debug(`Line from STDIN: ${line}`)
|
||||
input = line
|
||||
})
|
||||
await events.default.once(rl, 'close')
|
||||
const inputJson = JSON.parse(input)
|
||||
return inputJson as HookData
|
||||
}
|
||||
|
||||
export function writeToResponseFile(filePath: string, message: any): void {
|
||||
if (!filePath) {
|
||||
throw new Error(`Expected file path`)
|
||||
}
|
||||
if (!fs.existsSync(filePath)) {
|
||||
throw new Error(`Missing file at path: ${filePath}`)
|
||||
}
|
||||
|
||||
fs.appendFileSync(filePath, `${toCommandValue(message)}${os.EOL}`, {
|
||||
encoding: 'utf8'
|
||||
})
|
||||
}
|
||||
|
||||
function toCommandValue(input: any): string {
|
||||
if (input === null || input === undefined) {
|
||||
return ''
|
||||
} else if (typeof input === 'string' || input instanceof String) {
|
||||
return input as string
|
||||
}
|
||||
return JSON.stringify(input)
|
||||
}
|
||||
11
packages/hooklib/tsconfig.json
Normal file
11
packages/hooklib/tsconfig.json
Normal file
@@ -0,0 +1,11 @@
|
||||
{
|
||||
"extends": "../../tsconfig.json",
|
||||
"compilerOptions": {
|
||||
"baseUrl": "./",
|
||||
"outDir": "./lib",
|
||||
"rootDir": "./src"
|
||||
},
|
||||
"include": [
|
||||
"./src"
|
||||
]
|
||||
}
|
||||
12
packages/k8s/README.md
Normal file
12
packages/k8s/README.md
Normal file
@@ -0,0 +1,12 @@
|
||||
# K8s Hooks
|
||||
|
||||
## Description
|
||||
This implementation provides a way to dynamically spin up jobs to run container workflows, rather than relying on the default docker implementation. It is meant to be used when the runner itself is running in k8s, for example when using the [Actions Runner Controller](https://github.com/actions-runner-controller/actions-runner-controller)
|
||||
|
||||
## Pre-requisites
|
||||
Some things are expected to be set when using these hooks
|
||||
- The runner itself should be running in a pod, with a service account with the following permissions
|
||||
- The `ACTIONS_RUNNER_REQUIRE_JOB_CONTAINER=true` should be set to true
|
||||
- The `ACTIONS_RUNNER_POD_NAME` env should be set to the name of the pod
|
||||
- The runner pod should map a persistent volume claim into the `_work` directory
|
||||
- The `ACTIONS_RUNNER_CLAIM_NAME` should be set to the persistent volume claim that contains the runner's working directory
|
||||
13
packages/k8s/jest.config.js
Normal file
13
packages/k8s/jest.config.js
Normal file
@@ -0,0 +1,13 @@
|
||||
// eslint-disable-next-line import/no-commonjs
|
||||
module.exports = {
|
||||
clearMocks: true,
|
||||
moduleFileExtensions: ['js', 'ts'],
|
||||
testEnvironment: 'node',
|
||||
testMatch: ['**/*-test.ts'],
|
||||
testRunner: 'jest-circus/runner',
|
||||
transform: {
|
||||
'^.+\\.ts$': 'ts-jest'
|
||||
},
|
||||
setupFilesAfterEnv: ['./jest.setup.js'],
|
||||
verbose: true
|
||||
}
|
||||
1
packages/k8s/jest.setup.js
Normal file
1
packages/k8s/jest.setup.js
Normal file
@@ -0,0 +1 @@
|
||||
jest.setTimeout(90000)
|
||||
9076
packages/k8s/package-lock.json
generated
Normal file
9076
packages/k8s/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
30
packages/k8s/package.json
Normal file
30
packages/k8s/package.json
Normal file
@@ -0,0 +1,30 @@
|
||||
{
|
||||
"name": "kubehooks",
|
||||
"version": "0.1.0",
|
||||
"description": "",
|
||||
"main": "lib/index.js",
|
||||
"scripts": {
|
||||
"test": "jest --runInBand",
|
||||
"build": "tsc && npx ncc build",
|
||||
"format": "prettier --write '**/*.ts'",
|
||||
"format-check": "prettier --check '**/*.ts'",
|
||||
"lint": "eslint src/**/*.ts"
|
||||
},
|
||||
"author": "",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@actions/core": "^1.6.0",
|
||||
"@actions/exec": "^1.1.1",
|
||||
"@actions/io": "^1.1.2",
|
||||
"@kubernetes/client-node": "^0.16.3",
|
||||
"hooklib": "file:../hooklib"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/jest": "^27.4.1",
|
||||
"@types/node": "^17.0.23",
|
||||
"@vercel/ncc": "^0.33.4",
|
||||
"jest": "^27.5.1",
|
||||
"ts-jest": "^27.1.4",
|
||||
"typescript": "^4.6.3"
|
||||
}
|
||||
}
|
||||
5
packages/k8s/src/hooks/cleanup-job.ts
Normal file
5
packages/k8s/src/hooks/cleanup-job.ts
Normal file
@@ -0,0 +1,5 @@
|
||||
import { podPrune } from '../k8s'
|
||||
|
||||
// cleanup_job hook: deletes any pods this runner instance created
// (labelled with the runner pod name) so nothing leaks between jobs.
export async function cleanupJob(): Promise<void> {
  await podPrune()
}
|
||||
58
packages/k8s/src/hooks/constants.ts
Normal file
58
packages/k8s/src/hooks/constants.ts
Normal file
@@ -0,0 +1,58 @@
|
||||
import { v4 as uuidv4 } from 'uuid'
|
||||
|
||||
// Name of the pod the runner itself is executing in; must be provided
// by the deployment (see the README pre-requisites).
export function getRunnerPodName(): string {
  const name = process.env.ACTIONS_RUNNER_POD_NAME
  if (!name) {
    throw new Error(
      "'ACTIONS_RUNNER_POD_NAME' env is required, please contact your self hosted runner administrator"
    )
  }
  return name
}

// Deterministic name for the job pod: runner pod name truncated so
// the '-workflow' suffix still fits within the pod-name length limit.
export function getJobPodName(): string {
  return `${getRunnerPodName().substring(
    0,
    MAX_POD_NAME_LENGTH - '-workflow'.length
  )}-workflow`
}

// Unique name for a container-step pod: truncated runner pod name plus
// '-step-' and a random suffix, again kept within the length limit.
export function getStepPodName(): string {
  return `${getRunnerPodName().substring(
    0,
    MAX_POD_NAME_LENGTH - ('-step'.length + STEP_POD_NAME_SUFFIX_LENGTH)
  )}-step-${uuidv4().substring(0, STEP_POD_NAME_SUFFIX_LENGTH)}`
}

// PersistentVolumeClaim backing the runner's _work directory; must be
// provided by the deployment (see the README pre-requisites).
export function getVolumeClaimName(): string {
  const name = process.env.ACTIONS_RUNNER_CLAIM_NAME
  if (!name) {
    throw new Error(
      "'ACTIONS_RUNNER_CLAIM_NAME' is required, please contact your self hosted runner administrator"
    )
  }
  return name
}

// 63 is the Kubernetes limit for pod names (DNS label length).
const MAX_POD_NAME_LENGTH = 63
const STEP_POD_NAME_SUFFIX_LENGTH = 8
export const JOB_CONTAINER_NAME = 'job'
|
||||
|
||||
// Label ('runner-pod=<runner pod name>') applied to every resource
// this hook creates, so cleanup can find and delete them.
export class RunnerInstanceLabel {
  runnerhook: string
  constructor() {
    // NOTE(review): the `as string` cast hides a possibly-undefined
    // env var; getRunnerPodName() validates the same variable — confirm
    // whether this should reuse it.
    this.runnerhook = process.env.ACTIONS_RUNNER_POD_NAME as string
  }

  // Label key, constant for all runner instances.
  get key(): string {
    return 'runner-pod'
  }

  // Label value: this runner's pod name.
  get value(): string {
    return this.runnerhook
  }

  // 'key=value' form, usable directly as a k8s label selector.
  toString(): string {
    return `runner-pod=${this.runnerhook}`
  }
}
|
||||
4
packages/k8s/src/hooks/index.ts
Normal file
4
packages/k8s/src/hooks/index.ts
Normal file
@@ -0,0 +1,4 @@
|
||||
export * from './cleanup-job'
|
||||
export * from './prepare-job'
|
||||
export * from './run-script-step'
|
||||
export * from './run-container-step'
|
||||
197
packages/k8s/src/hooks/prepare-job.ts
Normal file
197
packages/k8s/src/hooks/prepare-job.ts
Normal file
@@ -0,0 +1,197 @@
|
||||
import * as core from '@actions/core'
|
||||
import * as io from '@actions/io'
|
||||
import * as k8s from '@kubernetes/client-node'
|
||||
import {
|
||||
ContextPorts,
|
||||
PodPhase,
|
||||
prepareJobArgs,
|
||||
writeToResponseFile
|
||||
} from 'hooklib'
|
||||
import path from 'path'
|
||||
import {
|
||||
containerPorts,
|
||||
createPod,
|
||||
isAuthPermissionsOK,
|
||||
isPodContainerAlpine,
|
||||
namespace,
|
||||
podPrune,
|
||||
requiredPermissions,
|
||||
waitForPodPhases
|
||||
} from '../k8s'
|
||||
import {
|
||||
containerVolumes,
|
||||
DEFAULT_CONTAINER_ENTRY_POINT,
|
||||
DEFAULT_CONTAINER_ENTRY_POINT_ARGS
|
||||
} from '../k8s/utils'
|
||||
import { JOB_CONTAINER_NAME } from './constants'
|
||||
|
||||
export async function prepareJob(
|
||||
args: prepareJobArgs,
|
||||
responseFile
|
||||
): Promise<void> {
|
||||
await podPrune()
|
||||
if (!(await isAuthPermissionsOK())) {
|
||||
throw new Error(
|
||||
`The Service account needs the following permissions ${JSON.stringify(
|
||||
requiredPermissions
|
||||
)} on the pod resource in the '${namespace}' namespace. Please contact your self hosted runner administrator.`
|
||||
)
|
||||
}
|
||||
await copyExternalsToRoot()
|
||||
let container: k8s.V1Container | undefined = undefined
|
||||
if (args.container?.image) {
|
||||
core.info(`Using image '${args.container.image}' for job image`)
|
||||
container = createPodSpec(args.container, JOB_CONTAINER_NAME, true)
|
||||
}
|
||||
|
||||
let services: k8s.V1Container[] = []
|
||||
if (args.services?.length) {
|
||||
services = args.services.map(service => {
|
||||
core.info(`Adding service '${service.image}' to pod definition`)
|
||||
return createPodSpec(service, service.image.split(':')[0])
|
||||
})
|
||||
}
|
||||
if (!container && !services?.length) {
|
||||
throw new Error('No containers exist, skipping hook invocation')
|
||||
}
|
||||
let createdPod: k8s.V1Pod | undefined = undefined
|
||||
try {
|
||||
createdPod = await createPod(container, services, args.registry)
|
||||
} catch (err) {
|
||||
await podPrune()
|
||||
throw new Error(`failed to create job pod: ${err}`)
|
||||
}
|
||||
|
||||
if (!createdPod?.metadata?.name) {
|
||||
throw new Error('created pod should have metadata.name')
|
||||
}
|
||||
|
||||
try {
|
||||
await waitForPodPhases(
|
||||
createdPod.metadata.name,
|
||||
new Set([PodPhase.RUNNING]),
|
||||
new Set([PodPhase.PENDING])
|
||||
)
|
||||
} catch (err) {
|
||||
await podPrune()
|
||||
throw new Error(`Pod failed to come online with error: ${err}`)
|
||||
}
|
||||
|
||||
core.info('Pod is ready for traffic')
|
||||
|
||||
let isAlpine = false
|
||||
try {
|
||||
isAlpine = await isPodContainerAlpine(
|
||||
createdPod.metadata.name,
|
||||
JOB_CONTAINER_NAME
|
||||
)
|
||||
} catch (err) {
|
||||
throw new Error(`Failed to determine if the pod is alpine: ${err}`)
|
||||
}
|
||||
|
||||
generateResponseFile(responseFile, createdPod, isAlpine)
|
||||
}
|
||||
|
||||
function generateResponseFile(
|
||||
responseFile: string,
|
||||
appPod: k8s.V1Pod,
|
||||
isAlpine
|
||||
): void {
|
||||
const response = {
|
||||
state: {},
|
||||
context: {},
|
||||
isAlpine
|
||||
}
|
||||
|
||||
const mainContainer = appPod.spec?.containers?.find(
|
||||
c => c.name === JOB_CONTAINER_NAME
|
||||
)
|
||||
if (mainContainer) {
|
||||
const mainContainerContextPorts: ContextPorts = {}
|
||||
if (mainContainer?.ports) {
|
||||
for (const port of mainContainer.ports) {
|
||||
mainContainerContextPorts[port.containerPort] =
|
||||
mainContainerContextPorts.hostPort
|
||||
}
|
||||
}
|
||||
|
||||
response.context['container'] = {
|
||||
image: mainContainer.image,
|
||||
ports: mainContainerContextPorts
|
||||
}
|
||||
}
|
||||
|
||||
const serviceContainers = appPod.spec?.containers.filter(
|
||||
c => c.name !== JOB_CONTAINER_NAME
|
||||
)
|
||||
if (serviceContainers?.length) {
|
||||
response.context['services'] = serviceContainers.map(c => {
|
||||
if (!c.ports) {
|
||||
return
|
||||
}
|
||||
|
||||
const ctxPorts: ContextPorts = {}
|
||||
for (const port of c.ports) {
|
||||
ctxPorts[port.containerPort] = port.hostPort
|
||||
}
|
||||
|
||||
return {
|
||||
image: c.image,
|
||||
ports: ctxPorts
|
||||
}
|
||||
})
|
||||
}
|
||||
writeToResponseFile(responseFile, JSON.stringify(response))
|
||||
}
|
||||
|
||||
async function copyExternalsToRoot(): Promise<void> {
|
||||
const workspace = process.env['RUNNER_WORKSPACE']
|
||||
if (workspace) {
|
||||
await io.cp(
|
||||
path.join(workspace, '../../externals'),
|
||||
path.join(workspace, '../externals'),
|
||||
{ force: true, recursive: true, copySourceDirectory: false }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
function createPodSpec(
|
||||
container,
|
||||
name: string,
|
||||
jobContainer = false
|
||||
): k8s.V1Container {
|
||||
core.info(JSON.stringify(container))
|
||||
if (!container.entryPointArgs) {
|
||||
container.entryPointArgs = DEFAULT_CONTAINER_ENTRY_POINT_ARGS
|
||||
}
|
||||
container.entryPointArgs = DEFAULT_CONTAINER_ENTRY_POINT_ARGS
|
||||
if (!container.entryPoint) {
|
||||
container.entryPoint = DEFAULT_CONTAINER_ENTRY_POINT
|
||||
}
|
||||
const podContainer = {
|
||||
name,
|
||||
image: container.image,
|
||||
command: [container.entryPoint],
|
||||
args: container.entryPointArgs,
|
||||
ports: containerPorts(container)
|
||||
} as k8s.V1Container
|
||||
if (container.workingDirectory) {
|
||||
podContainer.workingDir = container.workingDirectory
|
||||
}
|
||||
|
||||
podContainer.env = []
|
||||
for (const [key, value] of Object.entries(
|
||||
container['environmentVariables']
|
||||
)) {
|
||||
if (value && key !== 'HOME') {
|
||||
podContainer.env.push({ name: key, value: value as string })
|
||||
}
|
||||
}
|
||||
|
||||
podContainer.volumeMounts = containerVolumes(
|
||||
container.userMountVolumes,
|
||||
jobContainer
|
||||
)
|
||||
|
||||
return podContainer
|
||||
}
|
||||
69
packages/k8s/src/hooks/run-container-step.ts
Normal file
69
packages/k8s/src/hooks/run-container-step.ts
Normal file
@@ -0,0 +1,69 @@
|
||||
import * as k8s from '@kubernetes/client-node'
|
||||
import * as core from '@actions/core'
|
||||
import { PodPhase } from 'hooklib'
|
||||
import {
|
||||
createJob,
|
||||
getContainerJobPodName,
|
||||
getPodLogs,
|
||||
getPodStatus,
|
||||
waitForJobToComplete,
|
||||
waitForPodPhases
|
||||
} from '../k8s'
|
||||
import { JOB_CONTAINER_NAME } from './constants'
|
||||
import { containerVolumes } from '../k8s/utils'
|
||||
|
||||
export async function runContainerStep(stepContainer): Promise<number> {
|
||||
if (stepContainer.dockerfile) {
|
||||
throw new Error('Building container actions is not currently supported')
|
||||
}
|
||||
const container = createPodSpec(stepContainer)
|
||||
const job = await createJob(container)
|
||||
if (!job.metadata?.name) {
|
||||
throw new Error(
|
||||
`Expected job ${JSON.stringify(
|
||||
job
|
||||
)} to have correctly set the metadata.name`
|
||||
)
|
||||
}
|
||||
|
||||
const podName = await getContainerJobPodName(job.metadata.name)
|
||||
await waitForPodPhases(
|
||||
podName,
|
||||
new Set([PodPhase.COMPLETED, PodPhase.RUNNING]),
|
||||
new Set([PodPhase.PENDING])
|
||||
)
|
||||
await getPodLogs(podName, JOB_CONTAINER_NAME)
|
||||
await waitForJobToComplete(job.metadata.name)
|
||||
// pod has failed so pull the status code from the container
|
||||
const status = await getPodStatus(podName)
|
||||
if (!status?.containerStatuses?.length) {
|
||||
core.warning(`Can't determine container status`)
|
||||
return 0
|
||||
}
|
||||
|
||||
const exitCode =
|
||||
status.containerStatuses[status.containerStatuses.length - 1].state
|
||||
?.terminated?.exitCode
|
||||
return Number(exitCode) || 0
|
||||
}
|
||||
|
||||
function createPodSpec(container): k8s.V1Container {
|
||||
const podContainer = new k8s.V1Container()
|
||||
podContainer.name = JOB_CONTAINER_NAME
|
||||
podContainer.image = container.image
|
||||
if (container.entryPoint) {
|
||||
podContainer.command = [container.entryPoint, ...container.entryPointArgs]
|
||||
}
|
||||
|
||||
podContainer.env = []
|
||||
for (const [key, value] of Object.entries(
|
||||
container['environmentVariables']
|
||||
)) {
|
||||
if (value && key !== 'HOME') {
|
||||
podContainer.env.push({ name: key, value: value as string })
|
||||
}
|
||||
}
|
||||
podContainer.volumeMounts = containerVolumes()
|
||||
|
||||
return podContainer
|
||||
}
|
||||
38
packages/k8s/src/hooks/run-script-step.ts
Normal file
38
packages/k8s/src/hooks/run-script-step.ts
Normal file
@@ -0,0 +1,38 @@
|
||||
/* eslint-disable @typescript-eslint/no-unused-vars */
|
||||
import { RunScriptStepArgs } from 'hooklib'
|
||||
import { execPodStep } from '../k8s'
|
||||
import { JOB_CONTAINER_NAME } from './constants'
|
||||
|
||||
/**
 * run_script_step hook: execs the step's script inside the already
 * running job pod.
 *
 * @param args entrypoint, args and env vars for the step
 * @param state hook state from prepare_job; assumes state.jobPod holds
 *   the job pod name — TODO confirm against the prepare_job response
 * @param responseFile unused (script steps produce no response; see
 *   the file-level no-unused-vars suppression)
 */
export async function runScriptStep(
  args: RunScriptStepArgs,
  state,
  responseFile
): Promise<void> {
  const cb = new CommandsBuilder(
    args.entryPoint,
    args.entryPointArgs,
    args.environmentVariables
  )
  await execPodStep(cb.command, state.jobPod, JOB_CONTAINER_NAME)
}
|
||||
|
||||
class CommandsBuilder {
|
||||
constructor(
|
||||
private entryPoint: string,
|
||||
private entryPointArgs: string[],
|
||||
private environmentVariables: { [key: string]: string }
|
||||
) {}
|
||||
|
||||
get command(): string[] {
|
||||
const envCommands: string[] = []
|
||||
if (
|
||||
this.environmentVariables &&
|
||||
Object.entries(this.environmentVariables).length
|
||||
) {
|
||||
for (const [key, value] of Object.entries(this.environmentVariables)) {
|
||||
envCommands.push(`${key}=${value}`)
|
||||
}
|
||||
}
|
||||
return ['env', ...envCommands, this.entryPoint, ...this.entryPointArgs]
|
||||
}
|
||||
}
|
||||
44
packages/k8s/src/index.ts
Normal file
44
packages/k8s/src/index.ts
Normal file
@@ -0,0 +1,44 @@
|
||||
import { Command, getInputFromStdin, prepareJobArgs } from 'hooklib'
|
||||
import {
|
||||
cleanupJob,
|
||||
prepareJob,
|
||||
runContainerStep,
|
||||
runScriptStep
|
||||
} from './hooks'
|
||||
|
||||
async function run(): Promise<void> {
|
||||
const input = await getInputFromStdin()
|
||||
|
||||
const args = input['args']
|
||||
const command = input['command']
|
||||
const responseFile = input['responseFile']
|
||||
const state = input['state']
|
||||
|
||||
let exitCode = 0
|
||||
try {
|
||||
switch (command) {
|
||||
case Command.PrepareJob:
|
||||
await prepareJob(args as prepareJobArgs, responseFile)
|
||||
break
|
||||
case Command.CleanupJob:
|
||||
await cleanupJob()
|
||||
break
|
||||
case Command.RunScriptStep:
|
||||
await runScriptStep(args, state, null)
|
||||
break
|
||||
case Command.RunContainerStep:
|
||||
exitCode = await runContainerStep(args)
|
||||
break
|
||||
case Command.runContainerStep:
|
||||
default:
|
||||
throw new Error(`Command not recognized: ${command}`)
|
||||
}
|
||||
} catch (error) {
|
||||
// eslint-disable-next-line no-console
|
||||
console.log(error)
|
||||
exitCode = 1
|
||||
}
|
||||
process.exitCode = exitCode
|
||||
}
|
||||
|
||||
void run()
|
||||
524
packages/k8s/src/k8s/index.ts
Normal file
524
packages/k8s/src/k8s/index.ts
Normal file
@@ -0,0 +1,524 @@
|
||||
import * as k8s from '@kubernetes/client-node'
|
||||
import { ContainerInfo, PodPhase, Registry } from 'hooklib'
|
||||
import * as stream from 'stream'
|
||||
import { v4 as uuidv4 } from 'uuid'
|
||||
import {
|
||||
getJobPodName,
|
||||
getRunnerPodName,
|
||||
getVolumeClaimName,
|
||||
RunnerInstanceLabel
|
||||
} from '../hooks/constants'
|
||||
|
||||
const kc = new k8s.KubeConfig()
|
||||
|
||||
kc.loadFromDefault()
|
||||
|
||||
const k8sApi = kc.makeApiClient(k8s.CoreV1Api)
|
||||
const k8sBatchV1Api = kc.makeApiClient(k8s.BatchV1Api)
|
||||
const k8sAuthorizationV1Api = kc.makeApiClient(k8s.AuthorizationV1Api)
|
||||
|
||||
export const POD_VOLUME_NAME = 'work'
|
||||
|
||||
export const requiredPermissions = [
|
||||
{
|
||||
group: '',
|
||||
verbs: ['get', 'list', 'create', 'delete'],
|
||||
resource: 'pods',
|
||||
subresource: ''
|
||||
},
|
||||
{
|
||||
group: '',
|
||||
verbs: ['get', 'create'],
|
||||
resource: 'pods',
|
||||
subresource: 'exec'
|
||||
},
|
||||
{
|
||||
group: '',
|
||||
verbs: ['get', 'list', 'watch'],
|
||||
resource: 'pods',
|
||||
subresource: 'log'
|
||||
},
|
||||
{
|
||||
group: 'batch',
|
||||
verbs: ['get', 'list', 'create', 'delete'],
|
||||
resource: 'jobs',
|
||||
subresource: ''
|
||||
}
|
||||
]
|
||||
|
||||
const secretPermission = {
|
||||
group: '',
|
||||
verbs: ['get', 'list', 'create', 'delete'],
|
||||
resource: 'secrets',
|
||||
subresource: ''
|
||||
}
|
||||
|
||||
/**
 * Creates the job pod: job container plus service containers, pinned
 * to the runner's node, mounting the runner's work volume claim, and
 * labelled so cleanup can find it.
 *
 * @param jobContainer main job container spec (may be absent when only
 *   services are requested)
 * @param services additional service container specs
 * @param registry private-registry credentials; when given, a docker
 *   pull secret is created and attached (requires secret permissions)
 * @returns the created V1Pod as returned by the API server
 */
export async function createPod(
  jobContainer?: k8s.V1Container,
  services?: k8s.V1Container[],
  registry?: Registry
): Promise<k8s.V1Pod> {
  const containers: k8s.V1Container[] = []
  if (jobContainer) {
    containers.push(jobContainer)
  }
  if (services?.length) {
    containers.push(...services)
  }

  const appPod = new k8s.V1Pod()

  appPod.apiVersion = 'v1'
  appPod.kind = 'Pod'

  appPod.metadata = new k8s.V1ObjectMeta()
  appPod.metadata.name = getJobPodName()

  // Label with this runner's identity so podPrune can find it later.
  const instanceLabel = new RunnerInstanceLabel()
  appPod.metadata.labels = {
    [instanceLabel.key]: instanceLabel.value
  }

  appPod.spec = new k8s.V1PodSpec()
  appPod.spec.containers = containers
  appPod.spec.restartPolicy = 'Never'
  // Pin to the runner's node so the shared work volume is reachable.
  appPod.spec.nodeName = await getCurrentNodeName()
  const claimName = getVolumeClaimName()
  appPod.spec.volumes = [
    {
      name: 'work',
      persistentVolumeClaim: { claimName }
    }
  ]

  if (registry) {
    if (await isSecretsAuthOK()) {
      const secret = await createDockerSecret(registry)
      if (!secret?.metadata?.name) {
        throw new Error(`created secret does not have secret.metadata.name`)
      }
      const secretReference = new k8s.V1LocalObjectReference()
      secretReference.name = secret.metadata.name
      appPod.spec.imagePullSecrets = [secretReference]
    } else {
      throw new Error(
        `Pulls from private registry is not allowed. Please contact your self hosted runner administrator. Service account needs permissions for ${secretPermission.verbs} in resource ${secretPermission.resource}`
      )
    }
  }

  const { body } = await k8sApi.createNamespacedPod(namespace(), appPod)
  return body
}
|
||||
|
||||
/**
 * Creates a batch Job (used for container steps) running the given container
 * on the runner's node with the work volume attached. The job is
 * garbage-collected 300s after finishing and never retried.
 *
 * @param container - fully-configured container spec for the step
 * @returns the created V1Job
 */
export async function createJob(
  container: k8s.V1Container
): Promise<k8s.V1Job> {
  const job = new k8s.V1Job()

  job.apiVersion = 'batch/v1'
  job.kind = 'Job'
  job.metadata = new k8s.V1ObjectMeta()
  job.metadata.name = getJobPodName()
  job.metadata.labels = { 'runner-pod': getRunnerPodName() }

  job.spec = new k8s.V1JobSpec()
  job.spec.ttlSecondsAfterFinished = 300 // auto-delete finished jobs after 5 min
  job.spec.backoffLimit = 0 // never retry a failed step
  job.spec.template = new k8s.V1PodTemplateSpec()

  job.spec.template.spec = new k8s.V1PodSpec()
  job.spec.template.spec.containers = [container]
  job.spec.template.spec.restartPolicy = 'Never'
  // Same node as the runner so the work volume can be mounted.
  job.spec.template.spec.nodeName = await getCurrentNodeName()

  // NOTE(review): the claim name is built here as `${runnerName()}-work`,
  // while createPod uses getVolumeClaimName() — confirm these agree.
  const claimName = `${runnerName()}-work`
  job.spec.template.spec.volumes = [
    {
      name: 'work',
      persistentVolumeClaim: { claimName }
    }
  ]

  const { body } = await k8sBatchV1Api.createNamespacedJob(namespace(), job)
  return body
}
|
||||
|
||||
/**
 * Resolves the name of the pod spawned for a batch job, polling with
 * backoff (60s budget) until the job controller has created it.
 *
 * @param jobName - name of the job whose pod we are waiting for
 * @returns the pod's metadata.name
 * @throws if the pod exists without a name, or 'backoff timeout' after 60s
 */
export async function getContainerJobPodName(jobName: string): Promise<string> {
  // The job controller labels its pods with `job-name=<name>`.
  const selector = `job-name=${jobName}`
  const backOffManager = new BackOffManager(60)
  while (true) {
    const podList = await k8sApi.listNamespacedPod(
      namespace(),
      undefined,
      undefined,
      undefined,
      undefined,
      selector,
      1 // only the first matching pod is needed
    )

    if (!podList.body.items?.length) {
      // Pod not created yet; wait and retry (throws after the 60s budget).
      await backOffManager.backOff()
      continue
    }

    if (!podList.body.items[0].metadata?.name) {
      throw new Error(
        `Failed to determine the name of the pod for job ${jobName}`
      )
    }
    return podList.body.items[0].metadata.name
  }
}
|
||||
|
||||
/**
 * Deletes the named pod in the hook's namespace. Resolves once the API
 * server accepts the deletion request.
 */
export async function deletePod(podName: string): Promise<void> {
  await k8sApi.deleteNamespacedPod(podName, namespace())
}
|
||||
|
||||
export async function execPodStep(
|
||||
command: string[],
|
||||
podName: string,
|
||||
containerName: string,
|
||||
stdin?: stream.Readable
|
||||
): Promise<void> {
|
||||
// TODO, we need to add the path from `prependPath` to the PATH variable. How can we do that? Maybe another exec before running this one?
|
||||
// Maybe something like, get the current path, if these entries aren't in it, add them, then set the current path to that?
|
||||
|
||||
// TODO: how do we set working directory? There doesn't seem to be an easy way to do it. Should we cd then execute our bash script?
|
||||
const exec = new k8s.Exec(kc)
|
||||
return new Promise(async function (resolve, reject) {
|
||||
try {
|
||||
await exec.exec(
|
||||
namespace(),
|
||||
podName,
|
||||
containerName,
|
||||
command,
|
||||
process.stdout,
|
||||
process.stderr,
|
||||
stdin ?? null,
|
||||
false /* tty */,
|
||||
resp => {
|
||||
// kube.exec returns an error if exit code is not 0, but we can't actually get the exit code
|
||||
if (resp.status === 'Success') {
|
||||
resolve()
|
||||
} else {
|
||||
reject(
|
||||
JSON.stringify({ message: resp?.message, details: resp?.details })
|
||||
)
|
||||
}
|
||||
}
|
||||
)
|
||||
} catch (error) {
|
||||
reject(error)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/**
 * Polls the job's status until it reports success.
 *
 * No time limit is passed to BackOffManager, so this polls indefinitely
 * (backoff capped at 20s per iteration).
 *
 * @throws `job <name> has failed` when the job reports failure —
 *   NOTE(review): ANY rejection from isJobSucceeded (including transient
 *   API errors) is mapped to this same message, hiding the original cause.
 */
export async function waitForJobToComplete(jobName: string): Promise<void> {
  const backOffManager = new BackOffManager()
  while (true) {
    try {
      if (await isJobSucceeded(jobName)) {
        return
      }
    } catch (error) {
      throw new Error(`job ${jobName} has failed`)
    }
    await backOffManager.backOff()
  }
}
|
||||
|
||||
export async function createDockerSecret(
|
||||
registry: Registry
|
||||
): Promise<k8s.V1Secret> {
|
||||
const authContent = {
|
||||
auths: {
|
||||
[registry.serverUrl]: {
|
||||
username: registry.username,
|
||||
password: registry.password,
|
||||
auth: Buffer.from(
|
||||
`${registry.username}:${registry.password}`,
|
||||
'base64'
|
||||
).toString()
|
||||
}
|
||||
}
|
||||
}
|
||||
const secretName = generateSecretName()
|
||||
const secret = new k8s.V1Secret()
|
||||
secret.immutable = true
|
||||
secret.apiVersion = 'v1'
|
||||
secret.metadata = new k8s.V1ObjectMeta()
|
||||
secret.metadata.name = secretName
|
||||
secret.kind = 'Secret'
|
||||
secret.data = {
|
||||
'.dockerconfigjson': Buffer.from(
|
||||
JSON.stringify(authContent),
|
||||
'base64'
|
||||
).toString()
|
||||
}
|
||||
|
||||
const { body } = await k8sApi.createNamespacedSecret(namespace(), secret)
|
||||
return body
|
||||
}
|
||||
|
||||
/**
 * Polls the pod's phase until it reaches one of `awaitingPhases`.
 * Phases in `backOffPhases` are treated as transient and re-polled; any
 * other phase is considered unhealthy and throws.
 *
 * @param podName - pod to watch
 * @param awaitingPhases - phases that resolve the wait
 * @param backOffPhases - phases to keep polling through
 * @param maxTimeSeconds - overall polling budget (default 45 min)
 * @throws `Pod <name> is unhealthy with phase status <phase>` —
 *   NOTE(review): the outer catch maps every failure (API read errors, the
 *   backoff timeout) to this same message, so the reported phase may only
 *   be the last value observed before the real error.
 */
export async function waitForPodPhases(
  podName: string,
  awaitingPhases: Set<PodPhase>,
  backOffPhases: Set<PodPhase>,
  maxTimeSeconds = 45 * 60 // 45 min
): Promise<void> {
  const backOffManager = new BackOffManager(maxTimeSeconds)
  let phase: PodPhase = PodPhase.UNKNOWN
  try {
    while (true) {
      phase = await getPodPhase(podName)

      if (awaitingPhases.has(phase)) {
        return
      }

      if (!backOffPhases.has(phase)) {
        throw new Error(
          `Pod ${podName} is unhealthy with phase status ${phase}`
        )
      }
      await backOffManager.backOff()
    }
  } catch (error) {
    throw new Error(`Pod ${podName} is unhealthy with phase status ${phase}`)
  }
}
|
||||
|
||||
async function getPodPhase(podName: string): Promise<PodPhase> {
|
||||
const podPhaseLookup = new Set<string>([
|
||||
PodPhase.PENDING,
|
||||
PodPhase.RUNNING,
|
||||
PodPhase.SUCCEEDED,
|
||||
PodPhase.FAILED,
|
||||
PodPhase.UNKNOWN
|
||||
])
|
||||
const { body } = await k8sApi.readNamespacedPod(podName, namespace())
|
||||
const pod = body
|
||||
|
||||
if (!pod.status?.phase || !podPhaseLookup.has(pod.status.phase)) {
|
||||
return PodPhase.UNKNOWN
|
||||
}
|
||||
return pod.status?.phase
|
||||
}
|
||||
|
||||
async function isJobSucceeded(jobName: string): Promise<boolean> {
|
||||
const { body } = await k8sBatchV1Api.readNamespacedJob(jobName, namespace())
|
||||
const job = body
|
||||
if (job.status?.failed) {
|
||||
throw new Error(`job ${jobName} has failed`)
|
||||
}
|
||||
return !!job.status?.succeeded
|
||||
}
|
||||
|
||||
/**
 * Follows the container's logs, piping them to this process's stdout, and
 * resolves once the log stream closes. At most the last 50 pre-existing
 * lines are replayed before following.
 */
export async function getPodLogs(
  podName: string,
  containerName: string
): Promise<void> {
  const log = new k8s.Log(kc)
  const logStream = new stream.PassThrough()
  logStream.on('data', chunk => {
    // use write rather than console.log to prevent double line feed
    process.stdout.write(chunk)
  })

  logStream.on('error', err => {
    process.stderr.write(JSON.stringify(err))
  })

  const r = await log.log(namespace(), podName, containerName, logStream, {
    follow: true,
    tailLines: 50,
    pretty: false,
    timestamps: false
  })
  // Resolve only when the underlying log request closes (container stopped
  // producing logs or the connection ended).
  await new Promise(resolve => r.on('close', () => resolve(null)))
}
|
||||
|
||||
/**
 * Deletes every pod labelled with this runner's instance label — i.e. pods
 * previously created by this hook — issuing the deletions in parallel.
 */
export async function podPrune(): Promise<void> {
  const podList = await k8sApi.listNamespacedPod(
    namespace(),
    undefined,
    undefined,
    undefined,
    undefined,
    new RunnerInstanceLabel().toString() // label selector
  )
  if (!podList.body.items.length) {
    return
  }

  await Promise.all(
    podList.body.items.map(
      // Pods without a name are skipped (the map yields a falsy entry,
      // which Promise.all tolerates).
      pod => pod.metadata?.name && deletePod(pod.metadata.name)
    )
  )
}
|
||||
|
||||
export async function getPodStatus(
|
||||
name: string
|
||||
): Promise<k8s.V1PodStatus | undefined> {
|
||||
const { body } = await k8sApi.readNamespacedPod(name, namespace())
|
||||
return body.status
|
||||
}
|
||||
|
||||
export async function isAuthPermissionsOK(): Promise<boolean> {
|
||||
const sar = new k8s.V1SelfSubjectAccessReview()
|
||||
const asyncs: Promise<{
|
||||
response: unknown
|
||||
body: k8s.V1SelfSubjectAccessReview
|
||||
}>[] = []
|
||||
for (const resource of requiredPermissions) {
|
||||
for (const verb of resource.verbs) {
|
||||
sar.spec = new k8s.V1SelfSubjectAccessReviewSpec()
|
||||
sar.spec.resourceAttributes = new k8s.V1ResourceAttributes()
|
||||
sar.spec.resourceAttributes.verb = verb
|
||||
sar.spec.resourceAttributes.namespace = namespace()
|
||||
sar.spec.resourceAttributes.group = resource.group
|
||||
sar.spec.resourceAttributes.resource = resource.resource
|
||||
sar.spec.resourceAttributes.subresource = resource.subresource
|
||||
asyncs.push(k8sAuthorizationV1Api.createSelfSubjectAccessReview(sar))
|
||||
}
|
||||
}
|
||||
const responses = await Promise.all(asyncs)
|
||||
return responses.every(resp => resp.body.status?.allowed)
|
||||
}
|
||||
|
||||
export async function isSecretsAuthOK(): Promise<boolean> {
|
||||
const sar = new k8s.V1SelfSubjectAccessReview()
|
||||
const asyncs: Promise<{
|
||||
response: unknown
|
||||
body: k8s.V1SelfSubjectAccessReview
|
||||
}>[] = []
|
||||
for (const verb of secretPermission.verbs) {
|
||||
sar.spec = new k8s.V1SelfSubjectAccessReviewSpec()
|
||||
sar.spec.resourceAttributes = new k8s.V1ResourceAttributes()
|
||||
sar.spec.resourceAttributes.verb = verb
|
||||
sar.spec.resourceAttributes.namespace = namespace()
|
||||
sar.spec.resourceAttributes.group = secretPermission.group
|
||||
sar.spec.resourceAttributes.resource = secretPermission.resource
|
||||
sar.spec.resourceAttributes.subresource = secretPermission.subresource
|
||||
asyncs.push(k8sAuthorizationV1Api.createSelfSubjectAccessReview(sar))
|
||||
}
|
||||
const responses = await Promise.all(asyncs)
|
||||
return responses.every(resp => resp.body.status?.allowed)
|
||||
}
|
||||
|
||||
/**
 * Heuristically detects whether the container's OS is Alpine by grepping
 * /etc/*release* files for an ID line mentioning alpine. Any exec failure —
 * including a non-match, which exits 1 — yields false.
 *
 * NOTE(review): the grep pattern '^ID=*alpine*' is a regex, not a shell
 * glob — '=*' means "zero or more '='" and the trailing '*' repeats 'e'.
 * It happens to match `ID=alpine` but also looser strings; verify intent.
 */
export async function isPodContainerAlpine(
  podName: string,
  containerName: string
): Promise<boolean> {
  let isAlpine = true
  try {
    await execPodStep(
      [
        'sh',
        '-c',
        "[ $(cat /etc/*release* | grep -i -e '^ID=*alpine*' -c) != 0 ] || exit 1"
      ],
      podName,
      containerName
    )
  } catch (err) {
    // Treat any failure (no match, exec error) as "not alpine".
    isAlpine = false
  }

  return isAlpine
}
|
||||
|
||||
async function getCurrentNodeName(): Promise<string> {
|
||||
const resp = await k8sApi.readNamespacedPod(getRunnerPodName(), namespace())
|
||||
|
||||
const nodeName = resp.body.spec?.nodeName
|
||||
if (!nodeName) {
|
||||
throw new Error('Failed to determine node name')
|
||||
}
|
||||
return nodeName
|
||||
}
|
||||
export function namespace(): string {
|
||||
if (process.env['ACTIONS_RUNNER_KUBERNETES_NAMESPACE']) {
|
||||
return process.env['ACTIONS_RUNNER_KUBERNETES_NAMESPACE']
|
||||
}
|
||||
|
||||
const context = kc.getContexts().find(ctx => ctx.namespace)
|
||||
if (!context?.namespace) {
|
||||
throw new Error(
|
||||
'Failed to determine namespace, falling back to `default`. Namespace should be set in context, or in env variable "ACTIONS_RUNNER_KUBERNETES_NAMESPACE"'
|
||||
)
|
||||
}
|
||||
return context.namespace
|
||||
}
|
||||
|
||||
function generateSecretName(): string {
|
||||
return `github-secret-${uuidv4()}`
|
||||
}
|
||||
|
||||
function runnerName(): string {
|
||||
const name = process.env.ACTIONS_RUNNER_POD_NAME
|
||||
if (!name) {
|
||||
throw new Error(
|
||||
'Failed to determine runner name. "ACTIONS_RUNNER_POD_NAME" env variables should be set.'
|
||||
)
|
||||
}
|
||||
return name
|
||||
}
|
||||
|
||||
class BackOffManager {
|
||||
private backOffSeconds = 1
|
||||
totalTime = 0
|
||||
constructor(private throwAfterSeconds?: number) {
|
||||
if (!throwAfterSeconds || throwAfterSeconds < 0) {
|
||||
this.throwAfterSeconds = undefined
|
||||
}
|
||||
}
|
||||
|
||||
async backOff(): Promise<void> {
|
||||
await new Promise(resolve =>
|
||||
setTimeout(resolve, this.backOffSeconds * 1000)
|
||||
)
|
||||
this.totalTime += this.backOffSeconds
|
||||
if (this.throwAfterSeconds && this.throwAfterSeconds < this.totalTime) {
|
||||
throw new Error('backoff timeout')
|
||||
}
|
||||
if (this.backOffSeconds < 20) {
|
||||
this.backOffSeconds *= 2
|
||||
}
|
||||
if (this.backOffSeconds > 20) {
|
||||
this.backOffSeconds = 20
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export function containerPorts(
|
||||
container: ContainerInfo
|
||||
): k8s.V1ContainerPort[] {
|
||||
// 8080:8080/tcp
|
||||
const portFormat = /(\d{1,5})(:(\d{1,5}))?(\/(tcp|udp))?/
|
||||
|
||||
const ports: k8s.V1ContainerPort[] = []
|
||||
for (const portDefinition of container.portMappings) {
|
||||
const submatches = portFormat.exec(portDefinition)
|
||||
if (!submatches) {
|
||||
throw new Error(
|
||||
`Port definition "${portDefinition}" is in incorrect format`
|
||||
)
|
||||
}
|
||||
const port = new k8s.V1ContainerPort()
|
||||
port.hostPort = Number(submatches[1])
|
||||
if (submatches[3]) {
|
||||
port.containerPort = Number(submatches[3])
|
||||
}
|
||||
if (submatches[5]) {
|
||||
port.protocol = submatches[5].toUpperCase()
|
||||
} else {
|
||||
port.protocol = 'TCP'
|
||||
}
|
||||
ports.push(port)
|
||||
}
|
||||
return ports
|
||||
}
|
||||
65
packages/k8s/src/k8s/utils.ts
Normal file
65
packages/k8s/src/k8s/utils.ts
Normal file
@@ -0,0 +1,65 @@
|
||||
import * as k8s from '@kubernetes/client-node'
|
||||
import { Mount } from 'hooklib'
|
||||
import * as path from 'path'
|
||||
import { POD_VOLUME_NAME } from './index'
|
||||
|
||||
// Default command used to keep a job container alive until steps are
// exec'd into it: `tail -f /dev/null` blocks forever without consuming CPU.
export const DEFAULT_CONTAINER_ENTRY_POINT_ARGS = [`-f`, `/dev/null`]
export const DEFAULT_CONTAINER_ENTRY_POINT = 'tail'
|
||||
|
||||
export function containerVolumes(
|
||||
userMountVolumes: Mount[] = [],
|
||||
jobContainer = true
|
||||
): k8s.V1VolumeMount[] {
|
||||
const mounts: k8s.V1VolumeMount[] = [
|
||||
{
|
||||
name: POD_VOLUME_NAME,
|
||||
mountPath: '/__w'
|
||||
}
|
||||
]
|
||||
|
||||
if (!jobContainer) {
|
||||
return mounts
|
||||
}
|
||||
|
||||
mounts.push(
|
||||
{
|
||||
name: POD_VOLUME_NAME,
|
||||
mountPath: '/__e',
|
||||
subPath: 'externals'
|
||||
},
|
||||
{
|
||||
name: POD_VOLUME_NAME,
|
||||
mountPath: '/github/home',
|
||||
subPath: '_temp/_github_home'
|
||||
},
|
||||
{
|
||||
name: POD_VOLUME_NAME,
|
||||
mountPath: '/github/workflow',
|
||||
subPath: '_temp/_github_workflow'
|
||||
}
|
||||
)
|
||||
|
||||
if (!userMountVolumes?.length) {
|
||||
return mounts
|
||||
}
|
||||
|
||||
for (const userVolume of userMountVolumes) {
|
||||
const sourceVolumePath = `${
|
||||
path.isAbsolute(userVolume.sourceVolumePath)
|
||||
? userVolume.sourceVolumePath
|
||||
: path.join(
|
||||
process.env.GITHUB_WORKSPACE as string,
|
||||
userVolume.sourceVolumePath
|
||||
)
|
||||
}`
|
||||
|
||||
mounts.push({
|
||||
name: POD_VOLUME_NAME,
|
||||
mountPath: userVolume.targetVolumePath,
|
||||
subPath: sourceVolumePath,
|
||||
readOnly: userVolume.readOnly
|
||||
})
|
||||
}
|
||||
|
||||
return mounts
|
||||
}
|
||||
31
packages/k8s/tests/cleanup-job-test.ts
Normal file
31
packages/k8s/tests/cleanup-job-test.ts
Normal file
@@ -0,0 +1,31 @@
|
||||
import * as path from 'path'
|
||||
import * as fs from 'fs'
|
||||
import { prepareJob, cleanupJob } from '../src/hooks'
|
||||
import { TestTempOutput } from './test-setup'
|
||||
|
||||
let testTempOutput: TestTempOutput
|
||||
|
||||
const prepareJobJsonPath = path.resolve(
|
||||
`${__dirname}/../../../examples/prepare-job.json`
|
||||
)
|
||||
|
||||
let prepareJobOutputFilePath: string
|
||||
|
||||
describe('Cleanup Job', () => {
|
||||
beforeEach(async () => {
|
||||
const prepareJobJson = fs.readFileSync(prepareJobJsonPath)
|
||||
let prepareJobData = JSON.parse(prepareJobJson.toString())
|
||||
|
||||
testTempOutput = new TestTempOutput()
|
||||
testTempOutput.initialize()
|
||||
prepareJobOutputFilePath = testTempOutput.createFile(
|
||||
'prepare-job-output.json'
|
||||
)
|
||||
await prepareJob(prepareJobData.args, prepareJobOutputFilePath)
|
||||
})
|
||||
it('should not throw', async () => {
|
||||
const outputJson = fs.readFileSync(prepareJobOutputFilePath)
|
||||
const outputData = JSON.parse(outputJson.toString())
|
||||
await expect(cleanupJob()).resolves.not.toThrow()
|
||||
})
|
||||
})
|
||||
66
packages/k8s/tests/e2e-test.ts
Normal file
66
packages/k8s/tests/e2e-test.ts
Normal file
@@ -0,0 +1,66 @@
|
||||
import * as fs from 'fs'
import * as path from 'path'
import {
  cleanupJob,
  prepareJob,
  runContainerStep,
  runScriptStep
} from '../src/hooks'
import { TestTempOutput } from './test-setup'

// The hooks poll real cluster state; fake timers would stall the backoffs.
jest.useRealTimers()

let testTempOutput: TestTempOutput

// NOTE(review): these resolve FOUR levels up, while the sibling suites
// (prepare-job-test, run-script-step-test) use three (`../../../examples`);
// confirm which depth actually reaches the repo's examples directory.
const prepareJobJsonPath = path.resolve(
  `${__dirname}/../../../../examples/prepare-job.json`
)
const runScriptStepJsonPath = path.resolve(
  `${__dirname}/../../../../examples/run-script-step.json`
)
let runContainerStepJsonPath = path.resolve(
  `${__dirname}/../../../../examples/run-container-step.json`
)

let prepareJobData: any

let prepareJobOutputFilePath: string

describe('e2e', () => {
  beforeEach(() => {
    const prepareJobJson = fs.readFileSync(prepareJobJsonPath)
    prepareJobData = JSON.parse(prepareJobJson.toString())

    testTempOutput = new TestTempOutput()
    testTempOutput.initialize()
    prepareJobOutputFilePath = testTempOutput.createFile(
      'prepare-job-output.json'
    )
  })
  afterEach(async () => {
    testTempOutput.cleanup()
  })
  // Exercises the full hook lifecycle against a real cluster:
  // prepare -> script step -> container step -> cleanup.
  it('should prepare job, run script step, run container step then cleanup without errors', async () => {
    await expect(
      prepareJob(prepareJobData.args, prepareJobOutputFilePath)
    ).resolves.not.toThrow()

    const scriptStepContent = fs.readFileSync(runScriptStepJsonPath)
    const scriptStepData = JSON.parse(scriptStepContent.toString())

    // prepareJob wrote the pod state here; the later steps need it.
    const prepareJobOutputJson = fs.readFileSync(prepareJobOutputFilePath)
    const prepareJobOutputData = JSON.parse(prepareJobOutputJson.toString())

    await expect(
      runScriptStep(scriptStepData.args, prepareJobOutputData.state, null)
    ).resolves.not.toThrow()

    const runContainerStepContent = fs.readFileSync(runContainerStepJsonPath)
    const runContainerStepData = JSON.parse(runContainerStepContent.toString())

    await expect(
      runContainerStep(runContainerStepData.args)
    ).resolves.not.toThrow()

    await expect(cleanupJob()).resolves.not.toThrow()
  })
})
|
||||
47
packages/k8s/tests/prepare-job-test.ts
Normal file
47
packages/k8s/tests/prepare-job-test.ts
Normal file
@@ -0,0 +1,47 @@
|
||||
import * as fs from 'fs'
|
||||
import * as path from 'path'
|
||||
import { cleanupJob } from '../src/hooks'
|
||||
import { prepareJob } from '../src/hooks/prepare-job'
|
||||
import { TestTempOutput } from './test-setup'
|
||||
|
||||
jest.useRealTimers()
|
||||
|
||||
let testTempOutput: TestTempOutput
|
||||
|
||||
const prepareJobJsonPath = path.resolve(
|
||||
`${__dirname}/../../../examples/prepare-job.json`
|
||||
)
|
||||
let prepareJobData: any
|
||||
|
||||
let prepareJobOutputFilePath: string
|
||||
|
||||
describe('Prepare job', () => {
|
||||
beforeEach(() => {
|
||||
const prepareJobJson = fs.readFileSync(prepareJobJsonPath)
|
||||
prepareJobData = JSON.parse(prepareJobJson.toString())
|
||||
|
||||
testTempOutput = new TestTempOutput()
|
||||
testTempOutput.initialize()
|
||||
prepareJobOutputFilePath = testTempOutput.createFile(
|
||||
'prepare-job-output.json'
|
||||
)
|
||||
})
|
||||
afterEach(async () => {
|
||||
const outputJson = fs.readFileSync(prepareJobOutputFilePath)
|
||||
const outputData = JSON.parse(outputJson.toString())
|
||||
await cleanupJob()
|
||||
testTempOutput.cleanup()
|
||||
})
|
||||
|
||||
it('should not throw exception', async () => {
|
||||
await expect(
|
||||
prepareJob(prepareJobData.args, prepareJobOutputFilePath)
|
||||
).resolves.not.toThrow()
|
||||
})
|
||||
|
||||
it('should generate output file in JSON format', async () => {
|
||||
await prepareJob(prepareJobData.args, prepareJobOutputFilePath)
|
||||
const content = fs.readFileSync(prepareJobOutputFilePath)
|
||||
expect(() => JSON.parse(content.toString())).not.toThrow()
|
||||
})
|
||||
})
|
||||
27
packages/k8s/tests/run-container-step-test.ts
Normal file
27
packages/k8s/tests/run-container-step-test.ts
Normal file
@@ -0,0 +1,27 @@
|
||||
import { TestTempOutput } from './test-setup'
|
||||
import * as path from 'path'
|
||||
import { runContainerStep } from '../src/hooks'
|
||||
import * as fs from 'fs'
|
||||
|
||||
jest.useRealTimers()
|
||||
|
||||
let testTempOutput: TestTempOutput
|
||||
|
||||
let runContainerStepJsonPath = path.resolve(
|
||||
`${__dirname}/../../../examples/run-container-step.json`
|
||||
)
|
||||
|
||||
let runContainerStepData: any
|
||||
|
||||
describe('Run container step', () => {
|
||||
beforeAll(() => {
|
||||
const content = fs.readFileSync(runContainerStepJsonPath)
|
||||
runContainerStepData = JSON.parse(content.toString())
|
||||
process.env.RUNNER_NAME = 'testjob'
|
||||
})
|
||||
it('should not throw', async () => {
|
||||
await expect(
|
||||
runContainerStep(runContainerStepData.args)
|
||||
).resolves.not.toThrow()
|
||||
})
|
||||
})
|
||||
61
packages/k8s/tests/run-script-step-test.ts
Normal file
61
packages/k8s/tests/run-script-step-test.ts
Normal file
@@ -0,0 +1,61 @@
|
||||
import { prepareJob, cleanupJob, runScriptStep } from '../src/hooks'
|
||||
import { TestTempOutput } from './test-setup'
|
||||
import * as path from 'path'
|
||||
import * as fs from 'fs'
|
||||
|
||||
jest.useRealTimers()
|
||||
|
||||
let testTempOutput: TestTempOutput
|
||||
|
||||
const prepareJobJsonPath = path.resolve(
|
||||
`${__dirname}/../../../examples/prepare-job.json`
|
||||
)
|
||||
let prepareJobData: any
|
||||
|
||||
let prepareJobOutputFilePath: string
|
||||
let prepareJobOutputData: any
|
||||
|
||||
describe('Run script step', () => {
|
||||
beforeEach(async () => {
|
||||
const prepareJobJson = fs.readFileSync(prepareJobJsonPath)
|
||||
prepareJobData = JSON.parse(prepareJobJson.toString())
|
||||
console.log(prepareJobData)
|
||||
|
||||
testTempOutput = new TestTempOutput()
|
||||
testTempOutput.initialize()
|
||||
prepareJobOutputFilePath = testTempOutput.createFile(
|
||||
'prepare-job-output.json'
|
||||
)
|
||||
await prepareJob(prepareJobData.args, prepareJobOutputFilePath)
|
||||
const outputContent = fs.readFileSync(prepareJobOutputFilePath)
|
||||
prepareJobOutputData = JSON.parse(outputContent.toString())
|
||||
})
|
||||
|
||||
afterEach(async () => {
|
||||
await cleanupJob()
|
||||
testTempOutput.cleanup()
|
||||
})
|
||||
|
||||
// NOTE: To use this test, do kubectl apply -f podspec.yaml (from podspec examples)
|
||||
// then change the name of the file to 'run-script-step-test.ts' and do
|
||||
// npm run test run-script-step
|
||||
|
||||
it('should not throw an exception', async () => {
|
||||
const args = {
|
||||
entryPointArgs: ['echo "test"'],
|
||||
entryPoint: '/bin/bash',
|
||||
environmentVariables: {
|
||||
NODE_ENV: 'development'
|
||||
},
|
||||
prependPath: ['/foo/bar', 'bar/foo'],
|
||||
workingDirectory: '/__w/thboop-test2/thboop-test2'
|
||||
}
|
||||
const state = {
|
||||
jobPod: prepareJobOutputData.state.jobPod
|
||||
}
|
||||
const responseFile = null
|
||||
await expect(
|
||||
runScriptStep(args, state, responseFile)
|
||||
).resolves.not.toThrow()
|
||||
})
|
||||
})
|
||||
28
packages/k8s/tests/test-setup.ts
Normal file
28
packages/k8s/tests/test-setup.ts
Normal file
@@ -0,0 +1,28 @@
|
||||
import * as fs from 'fs'
|
||||
import { v4 as uuidv4 } from 'uuid'
|
||||
|
||||
export class TestTempOutput {
|
||||
private tempDirPath: string
|
||||
constructor() {
|
||||
this.tempDirPath = `${__dirname}/_temp/${uuidv4()}`
|
||||
}
|
||||
|
||||
public initialize(): void {
|
||||
fs.mkdirSync(this.tempDirPath, { recursive: true })
|
||||
}
|
||||
|
||||
public cleanup(): void {
|
||||
fs.rmSync(this.tempDirPath, { recursive: true })
|
||||
}
|
||||
|
||||
public createFile(fileName?: string): string {
|
||||
const filePath = `${this.tempDirPath}/${fileName || uuidv4()}`
|
||||
fs.writeFileSync(filePath, '')
|
||||
return filePath
|
||||
}
|
||||
|
||||
public removeFile(fileName: string): void {
|
||||
const filePath = `${this.tempDirPath}/${fileName}`
|
||||
fs.rmSync(filePath)
|
||||
}
|
||||
}
|
||||
11
packages/k8s/tsconfig.json
Normal file
11
packages/k8s/tsconfig.json
Normal file
@@ -0,0 +1,11 @@
|
||||
{
|
||||
"extends": "../../tsconfig.json",
|
||||
"compilerOptions": {
|
||||
"baseUrl": "./",
|
||||
"outDir": "./lib",
|
||||
"rootDir": "./src"
|
||||
},
|
||||
"include": [
|
||||
"./src"
|
||||
]
|
||||
}
|
||||
7
releaseNotes.md
Normal file
7
releaseNotes.md
Normal file
@@ -0,0 +1,7 @@
|
||||
## Features
|
||||
- Initial Release
|
||||
|
||||
## Bugs
|
||||
|
||||
|
||||
## Misc
|
||||
70
tsconfig.json
Normal file
70
tsconfig.json
Normal file
@@ -0,0 +1,70 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
/* Visit https://aka.ms/tsconfig.json to read more about this file */
|
||||
"outDir": "./lib",
|
||||
"rootDir": "./packages",
|
||||
/* Basic Options */
|
||||
// "incremental": true, /* Enable incremental compilation */
|
||||
"target": "es5", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019', 'ES2020', or 'ESNEXT'. */
|
||||
"module": "commonjs", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', 'es2020', or 'ESNext'. */
|
||||
// "lib": [], /* Specify library files to be included in the compilation. */
|
||||
// "allowJs": true, /* Allow javascript files to be compiled. */
|
||||
// "checkJs": true, /* Report errors in .js files. */
|
||||
// "jsx": "preserve", /* Specify JSX code generation: 'preserve', 'react-native', or 'react'. */
|
||||
"declaration": true, /* Generates corresponding '.d.ts' file. */
|
||||
// "declarationMap": true, /* Generates a sourcemap for each corresponding '.d.ts' file. */
|
||||
// "sourceMap": true, /* Generates corresponding '.map' file. */
|
||||
// "outFile": "./", /* Concatenate and emit output to single file. */
|
||||
// "outDir": "./", /* Redirect output structure to the directory. */
|
||||
// "rootDir": "./", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */
|
||||
// "composite": true, /* Enable project compilation */
|
||||
// "tsBuildInfoFile": "./", /* Specify file to store incremental compilation information */
|
||||
// "removeComments": true, /* Do not emit comments to output. */
|
||||
// "noEmit": true, /* Do not emit outputs. */
|
||||
// "importHelpers": true, /* Import emit helpers from 'tslib'. */
|
||||
// "downlevelIteration": true, /* Provide full support for iterables in 'for-of', spread, and destructuring when targeting 'ES5' or 'ES3'. */
|
||||
// "isolatedModules": true, /* Transpile each file as a separate module (similar to 'ts.transpileModule'). */
|
||||
|
||||
/* Strict Type-Checking Options */
|
||||
"strict": true, /* Enable all strict type-checking options. */
|
||||
"noImplicitAny": false, /* Raise error on expressions and declarations with an implied 'any' type. */
|
||||
// "strictNullChecks": true, /* Enable strict null checks. */
|
||||
// "strictFunctionTypes": true, /* Enable strict checking of function types. */
|
||||
// "strictBindCallApply": true, /* Enable strict 'bind', 'call', and 'apply' methods on functions. */
|
||||
// "strictPropertyInitialization": true, /* Enable strict checking of property initialization in classes. */
|
||||
// "noImplicitThis": true, /* Raise error on 'this' expressions with an implied 'any' type. */
|
||||
// "alwaysStrict": true, /* Parse in strict mode and emit "use strict" for each source file. */
|
||||
|
||||
/* Additional Checks */
|
||||
// "noUnusedLocals": true, /* Report errors on unused locals. */
|
||||
// "noUnusedParameters": true, /* Report errors on unused parameters. */
|
||||
// "noImplicitReturns": true, /* Report error when not all code paths in function return a value. */
|
||||
// "noFallthroughCasesInSwitch": true, /* Report errors for fallthrough cases in switch statement. */
|
||||
|
||||
/* Module Resolution Options */
|
||||
// "moduleResolution": "node", /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */
|
||||
// "baseUrl": "./", /* Base directory to resolve non-absolute module names. */
|
||||
// "paths": {}, /* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */
|
||||
// "rootDirs": [], /* List of root folders whose combined content represents the structure of the project at runtime. */
|
||||
// "typeRoots": [], /* List of folders to include type definitions from. */
|
||||
// "types": [], /* Type declaration files to be included in compilation. */
|
||||
// "allowSyntheticDefaultImports": true, /* Allow default imports from modules with no default export. This does not affect code emit, just typechecking. */
|
||||
"esModuleInterop": true, /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */
|
||||
// "preserveSymlinks": true, /* Do not resolve the real path of symlinks. */
|
||||
// "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */
|
||||
|
||||
/* Source Map Options */
|
||||
// "sourceRoot": "", /* Specify the location where debugger should locate TypeScript files instead of source locations. */
|
||||
// "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */
|
||||
// "inlineSourceMap": true, /* Emit a single file with source maps instead of having a separate file. */
|
||||
// "inlineSources": true, /* Emit the source alongside the sourcemaps within a single file; requires '--inlineSourceMap' or '--sourceMap' to be set. */
|
||||
|
||||
/* Experimental Options */
|
||||
// "experimentalDecorators": true, /* Enables experimental support for ES7 decorators. */
|
||||
// "emitDecoratorMetadata": true, /* Enables experimental support for emitting type metadata for decorators. */
|
||||
|
||||
/* Advanced Options */
|
||||
"skipLibCheck": true, /* Skip type checking of declaration files. */
|
||||
"forceConsistentCasingInFileNames": true /* Disallow inconsistently-cased references to the same file. */
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user