mirror of https://github.com/actions/runner-container-hooks.git
synced 2025-12-16 17:56:44 +00:00
Compare commits: 28 commits, v0.7.0 ... copilot/su
Commits (SHA1):
5f503f27d3
287a0458a1
b8af7ebe0e
f8e1cae677
996cc75daf
adf5e34937
4041f8648c
1f60eaf940
c3d8e2ab20
3f829eef9e
011ffb284e
0951cc73e4
15e808935c
ad9cb43c31
2934de33f8
ea25fd1b3e
c03a5fb3c1
96c35e7cc6
c67938c536
464be47642
74ce64c1d0
9a71a3a7e9
9a858922c8
605551ff1c
878781f9c4
1e051b849b
589414ea69
dd4f7dae2c
@@ -1,4 +0,0 @@
-dist/
-lib/
-node_modules/
-**/tests/**
@@ -1,56 +0,0 @@
-{
-  "plugins": ["@typescript-eslint"],
-  "extends": ["plugin:github/recommended"],
-  "parser": "@typescript-eslint/parser",
-  "parserOptions": {
-    "ecmaVersion": 9,
-    "sourceType": "module",
-    "project": "./tsconfig.json"
-  },
-  "rules": {
-    "eslint-comments/no-use": "off",
-    "import/no-namespace": "off",
-    "no-constant-condition": "off",
-    "no-unused-vars": "off",
-    "i18n-text/no-en": "off",
-    "@typescript-eslint/no-unused-vars": "error",
-    "@typescript-eslint/explicit-member-accessibility": ["error", {"accessibility": "no-public"}],
-    "@typescript-eslint/no-require-imports": "error",
-    "@typescript-eslint/array-type": "error",
-    "@typescript-eslint/await-thenable": "error",
-    "camelcase": "off",
-    "@typescript-eslint/explicit-function-return-type": ["error", {"allowExpressions": true}],
-    "@typescript-eslint/func-call-spacing": ["error", "never"],
-    "@typescript-eslint/no-array-constructor": "error",
-    "@typescript-eslint/no-empty-interface": "error",
-    "@typescript-eslint/no-explicit-any": "warn",
-    "@typescript-eslint/no-extraneous-class": "error",
-    "@typescript-eslint/no-floating-promises": "error",
-    "@typescript-eslint/no-for-in-array": "error",
-    "@typescript-eslint/no-inferrable-types": "error",
-    "@typescript-eslint/no-misused-new": "error",
-    "@typescript-eslint/no-namespace": "error",
-    "@typescript-eslint/no-non-null-assertion": "warn",
-    "@typescript-eslint/no-unnecessary-qualifier": "error",
-    "@typescript-eslint/no-unnecessary-type-assertion": "error",
-    "@typescript-eslint/no-useless-constructor": "error",
-    "@typescript-eslint/no-var-requires": "error",
-    "@typescript-eslint/prefer-for-of": "warn",
-    "@typescript-eslint/prefer-function-type": "warn",
-    "@typescript-eslint/prefer-includes": "error",
-    "@typescript-eslint/prefer-string-starts-ends-with": "error",
-    "@typescript-eslint/promise-function-async": "error",
-    "@typescript-eslint/require-array-sort-compare": "error",
-    "@typescript-eslint/restrict-plus-operands": "error",
-    "semi": "off",
-    "@typescript-eslint/semi": ["error", "never"],
-    "@typescript-eslint/type-annotation-spacing": "error",
-    "@typescript-eslint/unbound-method": "error",
-    "no-shadow": "off",
-    "@typescript-eslint/no-shadow": ["error"]
-  },
-  "env": {
-    "node": true,
-    "es6": true
-  }
-}
28 .github/dependabot.yml vendored Normal file
@@ -0,0 +1,28 @@
+version: 2
+updates:
+  # Group updates into a single PR per workspace package
+  - package-ecosystem: npm
+    directory: "/packages/docker"
+    schedule:
+      interval: weekly
+    groups:
+      all-dependencies:
+        patterns:
+          - "*"
+  - package-ecosystem: npm
+    directory: "/packages/hooklib"
+    schedule:
+      interval: weekly
+    groups:
+      all-dependencies:
+        patterns:
+          - "*"
+  - package-ecosystem: npm
+    directory: "/packages/k8s"
+    schedule:
+      interval: weekly
+    groups:
+      all-dependencies:
+        patterns:
+          - "*"
51 .github/workflows/build.yaml vendored
@@ -6,14 +6,50 @@ on:
     paths-ignore:
       - '**.md'
   workflow_dispatch:

 jobs:
-  build:
+  format-and-lint:
+    name: Format & Lint Checks
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5
+      - run: npm install
+        name: Install dependencies
+      - run: npm run bootstrap
+        name: Bootstrap the packages
+      - run: npm run build-all
+        name: Build packages
+      - run: npm run format-check
+        name: Check formatting
+      - name: Check linter
+        run: |
+          npm run lint
+          git diff --exit-code -- . ':!packages/k8s/tests/test-kind.yaml'
+
+  docker-tests:
+    name: Docker Hook Tests
+    runs-on: ubuntu-latest
+    needs: format-and-lint
+    steps:
+      - uses: actions/checkout@v5
+      - run: npm install
+        name: Install dependencies
+      - run: npm run bootstrap
+        name: Bootstrap the packages
+      - run: npm run build-all
+        name: Build packages
+      - name: Run Docker tests
+        run: npm run test --prefix packages/docker
+
+  k8s-tests:
+    name: Kubernetes Hook Tests
+    runs-on: ubuntu-latest
+    needs: format-and-lint
+    steps:
+      - uses: actions/checkout@v5
       - run: sed -i "s|{{PATHTOREPO}}|$(pwd)|" packages/k8s/tests/test-kind.yaml
         name: Setup kind cluster yaml config
-      - uses: helm/kind-action@v1.2.0
+      - uses: helm/kind-action@v1.12.0
         with:
           config: packages/k8s/tests/test-kind.yaml
       - run: npm install
@@ -22,10 +58,5 @@ jobs:
         name: Bootstrap the packages
       - run: npm run build-all
         name: Build packages
-      - run: npm run format-check
-      - name: Check linter
-        run: |
-          npm run lint
-          git diff --exit-code -- ':!packages/k8s/tests/test-kind.yaml'
-      - name: Run tests
-        run: npm run test
+      - name: Run Kubernetes tests
+        run: npm run test --prefix packages/k8s

2 .github/workflows/codeql-analysis.yml vendored
@@ -38,7 +38,7 @@ jobs:

     steps:
       - name: Checkout repository
-        uses: actions/checkout@v3
+        uses: actions/checkout@v5

       # Initializes the CodeQL tools for scanning.
       - name: Initialize CodeQL

6 .github/workflows/release.yaml vendored
@@ -10,7 +10,7 @@ jobs:
   build:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v5

       - name: Install dependencies
         run: npm install
@@ -21,7 +21,7 @@ jobs:
       - name: Build packages
         run: npm run build-all

-      - uses: actions/github-script@v7
+      - uses: actions/github-script@v8
         id: releaseVersion
         with:
           result-encoding: string
@@ -47,7 +47,7 @@ jobs:

       - name: Create release notes
         id: releaseNotes
-        uses: actions/github-script@v7
+        uses: actions/github-script@v8
         with:
           script: |
             const fs = require('fs');
@@ -1 +1 @@
-* @actions/actions-launch
+* @actions/actions-compute @nikola-jokic

24 README.md
@@ -3,6 +3,24 @@ The Runner Container Hooks repo provides a set of packages that implement the co

 More information on how to implement your own hooks can be found in the [adr](https://github.com/actions/runner/pull/1891). The `examples` folder provides example inputs for each hook.

+### Note
+
+Thank you for your interest in this GitHub action, however, right now we are not taking contributions.
+
+We continue to focus our resources on strategic areas that help our customers be successful while making developers' lives easier. While GitHub Actions remains a key part of this vision, we are allocating resources towards other areas of Actions and are not taking contributions to this repository at this time. The GitHub public roadmap is the best place to follow along for any updates on features we're working on and what stage they're in.
+
+We are taking the following steps to better direct requests related to GitHub Actions, including:
+
+1. We will be directing questions and support requests to our [Community Discussions area](https://github.com/orgs/community/discussions/categories/actions)
+
+2. High Priority bugs can be reported through Community Discussions or you can report these to our support team https://support.github.com/contact/bug-report.
+
+3. Security Issues should be handled as per our [security.md](security.md)
+
+We will still provide security updates for this project and fix major breaking changes during this time.
+
+You are welcome to still raise bugs in this repo.
+
 ## Background

 Three projects are included in the `packages` folder
@@ -10,10 +28,6 @@ Three projects are included in the `packages` folder
 - docker: A hook implementation of the runner's docker implementation. More details can be found in the [readme](./packages/docker/README.md)
 - hooklib: a shared library which contains typescript definitions and utilities that the other projects consume

-### Requirements
-
-We welcome contributions. See [how to contribute to get started](./CONTRIBUTING.md).
-
 ## License

 This project is licensed under the terms of the MIT open source license. Please refer to [MIT](./LICENSE.md) for the full terms.
@@ -28,4 +42,4 @@ Find a bug? Please file an issue in this repository using the issue templates.

 ## Code of Conduct

 See our [Code of Conduct](./CODE_OF_CONDUCT.MD)
122 eslint.config.js Normal file
@@ -0,0 +1,122 @@
+const eslint = require('@eslint/js');
+const tseslint = require('@typescript-eslint/eslint-plugin');
+const tsparser = require('@typescript-eslint/parser');
+const globals = require('globals');
+const pluginJest = require('eslint-plugin-jest');
+
+module.exports = [
+  eslint.configs.recommended,
+  {
+    files: ['**/*.ts'],
+    languageOptions: {
+      parser: tsparser,
+      parserOptions: {
+        ecmaVersion: 2018,
+        sourceType: 'module',
+        project: ['./tsconfig.json', './packages/*/tsconfig.json']
+      },
+      globals: {
+        ...globals.node,
+        ...globals.es6
+      }
+    },
+    plugins: {
+      '@typescript-eslint': tseslint,
+    },
+    rules: {
+      // Disabled rules from original config
+      'eslint-comments/no-use': 'off',
+      'import/no-namespace': 'off',
+      'no-constant-condition': 'off',
+      'no-unused-vars': 'off',
+      'i18n-text/no-en': 'off',
+      'camelcase': 'off',
+      'semi': 'off',
+      'no-shadow': 'off',
+
+      // TypeScript ESLint rules
+      '@typescript-eslint/no-unused-vars': 'error',
+      '@typescript-eslint/explicit-member-accessibility': ['error', { accessibility: 'no-public' }],
+      '@typescript-eslint/no-require-imports': 'error',
+      '@typescript-eslint/array-type': 'error',
+      '@typescript-eslint/await-thenable': 'error',
+      '@typescript-eslint/explicit-function-return-type': ['error', { allowExpressions: true }],
+      '@typescript-eslint/no-array-constructor': 'error',
+      '@typescript-eslint/no-empty-interface': 'error',
+      '@typescript-eslint/no-explicit-any': 'off', // Fixed: removed duplicate and kept only this one
+      '@typescript-eslint/no-extraneous-class': 'error',
+      '@typescript-eslint/no-floating-promises': 'error',
+      '@typescript-eslint/no-for-in-array': 'error',
+      '@typescript-eslint/no-inferrable-types': 'error',
+      '@typescript-eslint/no-misused-new': 'error',
+      '@typescript-eslint/no-namespace': 'error',
+      '@typescript-eslint/no-non-null-assertion': 'warn',
+      '@typescript-eslint/no-unnecessary-qualifier': 'error',
+      '@typescript-eslint/no-unnecessary-type-assertion': 'error',
+      '@typescript-eslint/no-useless-constructor': 'error',
+      '@typescript-eslint/no-var-requires': 'error',
+      '@typescript-eslint/prefer-for-of': 'warn',
+      '@typescript-eslint/prefer-function-type': 'warn',
+      '@typescript-eslint/prefer-includes': 'error',
+      '@typescript-eslint/prefer-string-starts-ends-with': 'error',
+      '@typescript-eslint/promise-function-async': 'error',
+      '@typescript-eslint/require-array-sort-compare': 'error',
+      '@typescript-eslint/restrict-plus-operands': 'error',
+      '@typescript-eslint/unbound-method': 'error',
+      '@typescript-eslint/no-shadow': ['error']
+    }
+  },
+  {
+    // Test files configuration - Fixed file pattern to match .ts files
+    files: ['**/*test*.ts', '**/*spec*.ts', '**/tests/**/*.ts'],
+    languageOptions: {
+      parser: tsparser,
+      parserOptions: {
+        ecmaVersion: 2018,
+        sourceType: 'module',
+        project: ['./tsconfig.json', './packages/*/tsconfig.json']
+      },
+      globals: {
+        ...globals.node,
+        ...globals.es6,
+        // Fixed Jest globals
+        describe: 'readonly',
+        it: 'readonly',
+        test: 'readonly',
+        expect: 'readonly',
+        beforeEach: 'readonly',
+        afterEach: 'readonly',
+        beforeAll: 'readonly',
+        afterAll: 'readonly',
+        jest: 'readonly'
+      }
+    },
+    plugins: {
+      '@typescript-eslint': tseslint,
+      jest: pluginJest
+    },
+    rules: {
+      // Disable no-undef for test files since Jest globals are handled above
+      'no-undef': 'off',
+      // Relax some rules for test files
+      '@typescript-eslint/no-explicit-any': 'off',
+      '@typescript-eslint/no-non-null-assertion': 'off',
+      '@typescript-eslint/explicit-function-return-type': 'off'
+    }
+  },
+  {
+    files: ['**/jest.config.js', '**/jest.setup.js'],
+    languageOptions: {
+      globals: {
+        ...globals.node,
+        jest: 'readonly',
+        module: 'writable'
+      }
+    },
+    rules: {
+      '@typescript-eslint/no-require-imports': 'off',
+      '@typescript-eslint/no-var-requires': 'off',
+      'import/no-commonjs': 'off'
+    }
+  }
+];
@@ -4,9 +4,6 @@ metadata:
   labels:
     labeled-by: "extension"
 spec:
-  securityContext:
-    runAsUser: 1000
-    runAsGroup: 3000
   restartPolicy: Never
   containers:
     - name: $job # overwrites job container
6191 package-lock.json generated (file diff suppressed because it is too large)

23 package.json
@@ -1,6 +1,6 @@
 {
   "name": "hooks",
-  "version": "0.7.0",
+  "version": "0.8.0",
   "description": "Three projects are included - k8s: a kubernetes hook implementation that spins up pods dynamically to run a job - docker: A hook implementation of the runner's docker implementation - A hook lib, which contains shared typescript definitions and utilities that the other packages consume",
   "main": "",
   "directories": {
@@ -12,6 +12,7 @@
     "format": "prettier --write '**/*.ts'",
     "format-check": "prettier --check '**/*.ts'",
     "lint": "eslint packages/**/*.ts",
+    "lint:fix": "eslint packages/**/*.ts --fix",
     "build-all": "npm run build --prefix packages/hooklib && npm run build --prefix packages/k8s && npm run build --prefix packages/docker"
   },
   "repository": {
@@ -25,12 +26,18 @@
   },
   "homepage": "https://github.com/actions/runner-container-hooks#readme",
   "devDependencies": {
-    "@types/jest": "^27.5.1",
-    "@types/node": "^17.0.23",
-    "@typescript-eslint/parser": "^5.18.0",
-    "eslint": "^8.12.0",
-    "eslint-plugin-github": "^4.3.6",
-    "prettier": "^2.6.2",
-    "typescript": "^4.6.3"
+    "@eslint/js": "^9.31.0",
+    "@types/jest": "^30.0.0",
+    "@types/node": "^24.0.14",
+    "@typescript-eslint/eslint-plugin": "^8.37.0",
+    "@typescript-eslint/parser": "^8.37.0",
+    "eslint": "^9.31.0",
+    "eslint-plugin-github": "^6.0.0",
+    "globals": "^15.12.0",
+    "prettier": "^3.6.2",
+    "typescript": "^5.8.3"
+  },
+  "dependencies": {
+    "eslint-plugin-jest": "^29.0.1"
   }
 }
@@ -1,13 +1,26 @@
-// eslint-disable-next-line import/no-commonjs
 module.exports = {
   clearMocks: true,
+  preset: 'ts-jest',
   moduleFileExtensions: ['js', 'ts'],
   testEnvironment: 'node',
   testMatch: ['**/*-test.ts'],
   testRunner: 'jest-circus/runner',
+  verbose: true,
   transform: {
-    '^.+\\.ts$': 'ts-jest'
+    '^.+\\.ts$': [
+      'ts-jest',
+      {
+        tsconfig: 'tsconfig.test.json'
+      }
+    ],
+    // Transform ESM modules to CommonJS
+    '^.+\\.(js|mjs)$': ['babel-jest', {
+      presets: [['@babel/preset-env', { targets: { node: 'current' } }]]
+    }]
   },
-  setupFilesAfterEnv: ['./jest.setup.js'],
-  verbose: true
+  transformIgnorePatterns: [
+    // Transform these ESM packages
+    'node_modules/(?!(shlex|@kubernetes/client-node|openid-client|oauth4webapi|jose|uuid)/)'
+  ],
+  setupFilesAfterEnv: ['./jest.setup.js']
 }
11296 packages/docker/package-lock.json generated (file diff suppressed because it is too large)
@@ -13,21 +13,23 @@
   "author": "",
   "license": "MIT",
   "dependencies": {
-    "@actions/core": "^1.9.1",
-    "@actions/exec": "^1.1.1",
+    "@actions/core": "^1.11.1",
+    "@actions/exec": "^2.0.0",
     "hooklib": "file:../hooklib",
-    "shlex": "^2.1.2",
-    "uuid": "^8.3.2"
+    "shlex": "^3.0.0",
+    "uuid": "^13.0.0"
   },
   "devDependencies": {
-    "@types/jest": "^27.4.1",
-    "@types/node": "^17.0.23",
-    "@typescript-eslint/parser": "^5.18.0",
-    "@vercel/ncc": "^0.33.4",
-    "jest": "^27.5.1",
-    "ts-jest": "^27.1.4",
-    "ts-node": "^10.7.0",
-    "tsconfig-paths": "^3.14.1",
-    "typescript": "^4.6.3"
+    "@babel/core": "^7.28.5",
+    "@babel/preset-env": "^7.28.5",
+    "@types/jest": "^30.0.0",
+    "@types/node": "^24.0.14",
+    "@typescript-eslint/parser": "^8.49.0",
+    "@vercel/ncc": "^0.38.3",
+    "jest": "^30.0.4",
+    "ts-jest": "^29.4.6",
+    "ts-node": "^10.9.2",
+    "tsconfig-paths": "^4.2.0",
+    "typescript": "^5.8.3"
   }
 }
@@ -59,7 +59,9 @@ export async function createContainer(
   ]
   for (const mountVolume of mountVolumes) {
     dockerArgs.push(
-      `-v=${mountVolume.sourceVolumePath}:${mountVolume.targetVolumePath}`
+      `-v=${mountVolume.sourceVolumePath}:${mountVolume.targetVolumePath}${
+        mountVolume.readOnly ? ':ro' : ''
+      }`
     )
   }
   if (args.entryPoint) {
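For illustration (not part of the diff): a volume marked `readOnly` now yields a Docker flag of the form `-v=<source>:<target>:ro`, giving the container a read-only bind mount. A minimal sketch with made-up paths:

```ts
// Hypothetical paths; only the ':ro' suffix behaviour comes from the change above.
const mountVolume = {
  sourceVolumePath: '/home/runner/_work',
  targetVolumePath: '/__w',
  readOnly: true
}
const flag = `-v=${mountVolume.sourceVolumePath}:${mountVolume.targetVolumePath}${
  mountVolume.readOnly ? ':ro' : ''
}`
// flag === '-v=/home/runner/_work:/__w:ro'
```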
@@ -1,6 +1,5 @@
 /* eslint-disable @typescript-eslint/no-var-requires */
 /* eslint-disable @typescript-eslint/no-require-imports */
-/* eslint-disable import/no-commonjs */
 import * as core from '@actions/core'
 import { env } from 'process'
 // Import this way otherwise typescript has errors
@@ -31,7 +31,7 @@ export default class TestSetup {
   private get allTestDirectories() {
     const resp = [this.testdir, this.runnerMockDir, this.runnerOutputDir]

-    for (const [key, value] of Object.entries(this.runnerMockSubdirs)) {
+    for (const [, value] of Object.entries(this.runnerMockSubdirs)) {
       resp.push(`${this.runnerMockDir}/${value}`)
     }

@@ -42,12 +42,11 @@ export default class TestSetup {
     return resp
   }

-  public initialize(): void {
+  initialize(): void {
     env['GITHUB_WORKSPACE'] = this.workingDirectory
     env['RUNNER_NAME'] = 'test'
-    env[
-      'RUNNER_TEMP'
-    ] = `${this.runnerMockDir}/${this.runnerMockSubdirs.workTemp}`
+    env['RUNNER_TEMP'] =
+      `${this.runnerMockDir}/${this.runnerMockSubdirs.workTemp}`

     for (const dir of this.allTestDirectories) {
       fs.mkdirSync(dir, { recursive: true })
@@ -59,7 +58,7 @@ export default class TestSetup {
     )
   }

-  public teardown(): void {
+  teardown(): void {
     fs.rmdirSync(this.testdir, { recursive: true })
   }

@@ -108,21 +107,21 @@ export default class TestSetup {
     ]
   }

-  public createOutputFile(name: string): string {
+  createOutputFile(name: string): string {
     let filePath = path.join(this.runnerOutputDir, name || `${uuidv4()}.json`)
     fs.writeFileSync(filePath, '')
     return filePath
   }

-  public get workingDirectory(): string {
+  get workingDirectory(): string {
     return `${this.runnerMockDir}/_work/${this.projectName}/${this.projectName}`
   }

-  public get containerWorkingDirectory(): string {
+  get containerWorkingDirectory(): string {
     return `/__w/${this.projectName}/${this.projectName}`
   }

-  public initializeDockerAction(): string {
+  initializeDockerAction(): string {
     const actionPath = `${this.testdir}/_actions/example-handle/example-repo/example-branch/mock-directory`
     fs.mkdirSync(actionPath, { recursive: true })
     this.writeDockerfile(actionPath)
@@ -147,7 +146,7 @@ echo "::set-output name=time::$time"`
     fs.chmodSync(entryPointPath, 0o755)
   }

-  public getPrepareJobDefinition(): HookData {
+  getPrepareJobDefinition(): HookData {
     const prepareJob = JSON.parse(
       fs.readFileSync(
         path.resolve(__dirname + '/../../../examples/prepare-job.json'),
@@ -166,7 +165,7 @@ echo "::set-output name=time::$time"`
     return prepareJob
   }

-  public getRunScriptStepDefinition(): HookData {
+  getRunScriptStepDefinition(): HookData {
     const runScriptStep = JSON.parse(
       fs.readFileSync(
         path.resolve(__dirname + '/../../../examples/run-script-step.json'),
@@ -178,7 +177,7 @@ echo "::set-output name=time::$time"`
     return runScriptStep
   }

-  public getRunContainerStepDefinition(): HookData {
+  getRunContainerStepDefinition(): HookData {
     const runContainerStep = JSON.parse(
       fs.readFileSync(
         path.resolve(__dirname + '/../../../examples/run-container-step.json'),
6 packages/docker/tsconfig.test.json Normal file
@@ -0,0 +1,6 @@
+{
+  "compilerOptions": {
+    "allowJs": true
+  },
+  "extends": "./tsconfig.json"
+}
5420 packages/hooklib/package-lock.json generated (file diff suppressed because it is too large)
@@ -3,7 +3,7 @@
   "version": "0.1.0",
   "description": "",
   "main": "lib/index.js",
-  "types": "index.d.ts",
+  "types": "lib/index.d.ts",
   "scripts": {
     "test": "echo \"Error: no test specified\" && exit 1",
     "build": "tsc",
@@ -14,15 +14,14 @@
   "author": "",
   "license": "MIT",
   "devDependencies": {
-    "@types/node": "^17.0.23",
-    "@typescript-eslint/parser": "^5.18.0",
+    "@types/node": "^24.0.14",
     "@zeit/ncc": "^0.22.3",
-    "eslint": "^8.12.0",
-    "eslint-plugin-github": "^4.3.6",
-    "prettier": "^2.6.2",
-    "typescript": "^4.6.3"
+    "eslint": "^9.31.0",
+    "eslint-plugin-github": "^6.0.0",
+    "prettier": "^3.6.2",
+    "typescript": "^5.8.3"
   },
   "dependencies": {
-    "@actions/core": "^1.9.1"
+    "@actions/core": "^1.11.1"
   }
 }
@@ -22,9 +22,6 @@ rules:
   - apiGroups: [""]
     resources: ["pods/log"]
     verbs: ["get", "list", "watch",]
-  - apiGroups: ["batch"]
-    resources: ["jobs"]
-    verbs: ["get", "list", "create", "delete"]
   - apiGroups: [""]
     resources: ["secrets"]
     verbs: ["get", "list", "create", "delete"]
@@ -43,3 +40,5 @@
 - Building container actions from a dockerfile is not supported at this time
 - Container actions will not have access to the services network or job container network
 - Docker [create options](https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idcontaineroptions) are not supported
+- Container actions will have to specify the entrypoint, since the default entrypoint will be overridden to run the commands from the workflow.
+- Container actions need to have the following binaries in their container image: `sh`, `env`, `tail`.
@@ -1,13 +1,26 @@
-// eslint-disable-next-line import/no-commonjs
 module.exports = {
   clearMocks: true,
+  preset: 'ts-jest',
   moduleFileExtensions: ['js', 'ts'],
   testEnvironment: 'node',
   testMatch: ['**/*-test.ts'],
   testRunner: 'jest-circus/runner',
+  verbose: true,
   transform: {
-    '^.+\\.ts$': 'ts-jest'
+    '^.+\\.ts$': [
+      'ts-jest',
+      {
+        tsconfig: 'tsconfig.test.json'
+      }
+    ],
+    // Transform ESM modules to CommonJS
+    '^.+\\.(js|mjs)$': ['babel-jest', {
+      presets: [['@babel/preset-env', { targets: { node: 'current' } }]]
+    }]
   },
-  setupFilesAfterEnv: ['./jest.setup.js'],
-  verbose: true
+  transformIgnorePatterns: [
+    // Transform these ESM packages
+    'node_modules/(?!(shlex|@kubernetes/client-node|openid-client|oauth4webapi|jose|uuid)/)'
+  ],
+  setupFilesAfterEnv: ['./jest.setup.js']
 }
@@ -1 +1,2 @@
+// eslint-disable-next-line filenames/match-regex, no-undef
 jest.setTimeout(500000)
10682 packages/k8s/package-lock.json generated (file diff suppressed because it is too large)
@@ -13,20 +13,25 @@
   "author": "",
   "license": "MIT",
   "dependencies": {
-    "@actions/core": "^1.9.1",
+    "@actions/core": "^1.11.1",
     "@actions/exec": "^1.1.1",
-    "@actions/io": "^1.1.2",
-    "@kubernetes/client-node": "^0.22.2",
+    "@actions/io": "^1.1.3",
+    "@kubernetes/client-node": "^1.3.0",
     "hooklib": "file:../hooklib",
     "js-yaml": "^4.1.0",
-    "shlex": "^2.1.2"
+    "shlex": "^3.0.0",
+    "tar-fs": "^3.1.0",
+    "uuid": "^11.1.0"
   },
   "devDependencies": {
-    "@types/jest": "^27.4.1",
-    "@types/node": "^17.0.23",
-    "@vercel/ncc": "^0.33.4",
-    "jest": "^27.5.1",
-    "ts-jest": "^27.1.4",
-    "typescript": "^4.6.3"
+    "@babel/core": "^7.28.3",
+    "@babel/preset-env": "^7.28.3",
+    "@types/jest": "^30.0.0",
+    "@types/node": "^24.3.0",
+    "@vercel/ncc": "^0.38.3",
+    "babel-jest": "^30.1.1",
+    "jest": "^30.1.1",
+    "ts-jest": "^29.4.1",
+    "typescript": "^5.9.2"
   }
 }
|||||||
@@ -1,32 +1,39 @@
|
|||||||
import * as core from '@actions/core'
|
import * as core from '@actions/core'
|
||||||
import * as io from '@actions/io'
|
|
||||||
import * as k8s from '@kubernetes/client-node'
|
import * as k8s from '@kubernetes/client-node'
|
||||||
import {
|
import {
|
||||||
JobContainerInfo,
|
JobContainerInfo,
|
||||||
ContextPorts,
|
ContextPorts,
|
||||||
PrepareJobArgs,
|
PrepareJobArgs,
|
||||||
writeToResponseFile
|
writeToResponseFile,
|
||||||
|
ServiceContainerInfo
|
||||||
} from 'hooklib'
|
} from 'hooklib'
|
||||||
import path from 'path'
|
|
||||||
import {
|
import {
|
||||||
containerPorts,
|
containerPorts,
|
||||||
createPod,
|
createJobPod,
|
||||||
isPodContainerAlpine,
|
isPodContainerAlpine,
|
||||||
prunePods,
|
prunePods,
|
||||||
waitForPodPhases,
|
waitForPodPhases,
|
||||||
getPrepareJobTimeoutSeconds
|
getPrepareJobTimeoutSeconds,
|
||||||
|
execCpToPod,
|
||||||
|
execPodStep
|
||||||
} from '../k8s'
|
} from '../k8s'
|
||||||
import {
|
import {
|
||||||
containerVolumes,
|
CONTAINER_VOLUMES,
|
||||||
DEFAULT_CONTAINER_ENTRY_POINT,
|
DEFAULT_CONTAINER_ENTRY_POINT,
|
||||||
DEFAULT_CONTAINER_ENTRY_POINT_ARGS,
|
DEFAULT_CONTAINER_ENTRY_POINT_ARGS,
|
||||||
generateContainerName,
|
generateContainerName,
|
||||||
mergeContainerWithOptions,
|
mergeContainerWithOptions,
|
||||||
readExtensionFromFile,
|
readExtensionFromFile,
|
||||||
PodPhase,
|
PodPhase,
|
||||||
fixArgs
|
fixArgs,
|
||||||
|
prepareJobScript
|
||||||
} from '../k8s/utils'
|
} from '../k8s/utils'
|
||||||
import { CONTAINER_EXTENSION_PREFIX, JOB_CONTAINER_NAME } from './constants'
|
import {
|
||||||
|
CONTAINER_EXTENSION_PREFIX,
|
||||||
|
getJobPodName,
|
||||||
|
JOB_CONTAINER_NAME
|
||||||
|
} from './constants'
|
||||||
|
import { dirname } from 'path'
|
||||||
|
|
||||||
export async function prepareJob(
|
export async function prepareJob(
|
||||||
args: PrepareJobArgs,
|
args: PrepareJobArgs,
|
||||||
@@ -39,11 +46,9 @@ export async function prepareJob(
|
|||||||
await prunePods()
|
await prunePods()
|
||||||
|
|
||||||
const extension = readExtensionFromFile()
|
const extension = readExtensionFromFile()
|
||||||
await copyExternalsToRoot()
|
|
||||||
|
|
||||||
let container: k8s.V1Container | undefined = undefined
|
let container: k8s.V1Container | undefined = undefined
|
||||||
if (args.container?.image) {
|
if (args.container?.image) {
|
||||||
core.debug(`Using image '${args.container.image}' for job image`)
|
|
||||||
container = createContainerSpec(
|
container = createContainerSpec(
|
||||||
args.container,
|
args.container,
|
||||||
JOB_CONTAINER_NAME,
|
JOB_CONTAINER_NAME,
|
||||||
@@ -55,7 +60,6 @@ export async function prepareJob(
|
|||||||
let services: k8s.V1Container[] = []
|
let services: k8s.V1Container[] = []
|
||||||
if (args.services?.length) {
|
if (args.services?.length) {
|
||||||
services = args.services.map(service => {
|
services = args.services.map(service => {
|
||||||
core.debug(`Adding service '${service.image}' to pod definition`)
|
|
||||||
return createContainerSpec(
|
return createContainerSpec(
|
||||||
service,
|
service,
|
||||||
generateContainerName(service.image),
|
generateContainerName(service.image),
|
||||||
@@ -71,7 +75,8 @@ export async function prepareJob(
|
|||||||
|
|
||||||
let createdPod: k8s.V1Pod | undefined = undefined
|
let createdPod: k8s.V1Pod | undefined = undefined
|
||||||
try {
|
try {
|
||||||
createdPod = await createPod(
|
createdPod = await createJobPod(
|
||||||
|
getJobPodName(),
|
||||||
container,
|
container,
|
||||||
services,
|
services,
|
||||||
args.container.registry,
|
args.container.registry,
|
||||||
@@ -91,6 +96,13 @@ export async function prepareJob(
|
|||||||
`Job pod created, waiting for it to come online ${createdPod?.metadata?.name}`
|
`Job pod created, waiting for it to come online ${createdPod?.metadata?.name}`
|
||||||
)
|
)
|
||||||
|
|
||||||
|
const runnerWorkspace = dirname(process.env.RUNNER_WORKSPACE as string)
|
||||||
|
|
||||||
|
let prepareScript: { containerPath: string; runnerPath: string } | undefined
|
||||||
|
if (args.container?.userMountVolumes?.length) {
|
||||||
|
prepareScript = prepareJobScript(args.container.userMountVolumes || [])
|
||||||
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
await waitForPodPhases(
|
await waitForPodPhases(
|
||||||
createdPod.metadata.name,
|
createdPod.metadata.name,
|
||||||
@@ -103,6 +115,28 @@ export async function prepareJob(
|
|||||||
throw new Error(`pod failed to come online with error: ${err}`)
|
throw new Error(`pod failed to come online with error: ${err}`)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
await execCpToPod(createdPod.metadata.name, runnerWorkspace, '/__w')
|
||||||
|
|
||||||
|
if (prepareScript) {
|
||||||
|
await execPodStep(
|
||||||
|
['sh', '-e', prepareScript.containerPath],
|
||||||
|
createdPod.metadata.name,
|
||||||
|
JOB_CONTAINER_NAME
|
||||||
|
)
|
||||||
|
|
||||||
|
const promises: Promise<void>[] = []
|
||||||
|
for (const vol of args?.container?.userMountVolumes || []) {
|
||||||
|
promises.push(
|
||||||
|
execCpToPod(
|
||||||
|
createdPod.metadata.name,
|
||||||
|
vol.sourceVolumePath,
|
||||||
|
vol.targetVolumePath
|
||||||
|
)
|
||||||
|
)
|
||||||
|
}
|
||||||
|
await Promise.all(promises)
|
||||||
|
}
|
||||||
|
|
||||||
core.debug('Job pod is ready for traffic')
|
core.debug('Job pod is ready for traffic')
|
||||||
|
|
||||||
let isAlpine = false
|
let isAlpine = false
|
||||||
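The hunks above lean on new `execCpToPod`/`execPodStep` helpers whose implementation is not included in this compare. As a rough sketch of what a tar-based pod copy could look like, assuming the `Exec` API from `@kubernetes/client-node` and `tar-fs` streaming (both dependencies added in this compare), a fixed `default` namespace, a container named `job`, and a `tar` binary inside the container; the branch's real helper may differ:

```ts
import * as k8s from '@kubernetes/client-node'
import * as tar from 'tar-fs'

// Sketch only: stream a local directory into a pod, similar to `kubectl cp`.
async function execCpToPodSketch(
  podName: string,
  localPath: string,
  containerPath: string
): Promise<void> {
  const kc = new k8s.KubeConfig()
  kc.loadFromDefault()
  const exec = new k8s.Exec(kc)
  // Pack the local directory as a tar stream and untar it inside the container.
  const tarStream = tar.pack(localPath)
  await new Promise<void>((resolve, reject) => {
    exec
      .exec(
        'default', // assumed namespace
        podName,
        'job', // assumed container name
        ['tar', 'xf', '-', '-C', containerPath],
        process.stdout,
        process.stderr,
        tarStream,
        false,
        status => (status.status === 'Success' ? resolve() : reject(status))
      )
      .catch(reject)
  })
}
```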
@@ -126,7 +160,7 @@ function generateResponseFile(
   responseFile: string,
   args: PrepareJobArgs,
   appPod: k8s.V1Pod,
-  isAlpine
+  isAlpine: boolean
 ): void {
   if (!appPod.metadata?.name) {
     throw new Error('app pod must have metadata.name specified')
@@ -167,7 +201,9 @@ function generateResponseFile(
     const ctxPorts: ContextPorts = {}
     if (c.ports?.length) {
       for (const port of c.ports) {
-        ctxPorts[port.containerPort] = port.hostPort
+        if (port.containerPort && port.hostPort) {
+          ctxPorts[port.containerPort.toString()] = port.hostPort.toString()
+        }
       }
     }

@@ -181,19 +217,8 @@ function generateResponseFile(
   writeToResponseFile(responseFile, JSON.stringify(response))
 }

-async function copyExternalsToRoot(): Promise<void> {
-  const workspace = process.env['RUNNER_WORKSPACE']
-  if (workspace) {
-    await io.cp(
-      path.join(workspace, '../../externals'),
-      path.join(workspace, '../externals'),
-      { force: true, recursive: true, copySourceDirectory: false }
-    )
-  }
-}
-
 export function createContainerSpec(
-  container: JobContainerInfo,
+  container: JobContainerInfo | ServiceContainerInfo,
   name: string,
   jobContainer = false,
   extension?: k8s.V1PodTemplateSpec
@@ -208,24 +233,24 @@ export function createContainerSpec(
     image: container.image,
     ports: containerPorts(container)
   } as k8s.V1Container
-  if (container.workingDirectory) {
-    podContainer.workingDir = container.workingDirectory
+  if (container['workingDirectory']) {
+    podContainer.workingDir = container['workingDirectory']
   }

   if (container.entryPoint) {
     podContainer.command = [container.entryPoint]
   }

-  if (container.entryPointArgs?.length > 0) {
+  if (container.entryPointArgs && container.entryPointArgs.length > 0) {
     podContainer.args = fixArgs(container.entryPointArgs)
   }

   podContainer.env = []
   for (const [key, value] of Object.entries(
-    container['environmentVariables']
+    container['environmentVariables'] || {}
   )) {
     if (value && key !== 'HOME') {
-      podContainer.env.push({ name: key, value: value as string })
+      podContainer.env.push({ name: key, value })
     }
   }

@@ -234,17 +259,14 @@ export function createContainerSpec(
       value: 'true'
     })

-  if (!('CI' in container['environmentVariables'])) {
+  if (!('CI' in (container['environmentVariables'] || {}))) {
     podContainer.env.push({
       name: 'CI',
       value: 'true'
     })
   }

-  podContainer.volumeMounts = containerVolumes(
-    container.userMountVolumes,
-    jobContainer
-  )
+  podContainer.volumeMounts = CONTAINER_VOLUMES

   if (!extension) {
     return podContainer
@@ -1,23 +1,31 @@
 import * as core from '@actions/core'
+import * as fs from 'fs'
 import * as k8s from '@kubernetes/client-node'
 import { RunContainerStepArgs } from 'hooklib'
+import { dirname } from 'path'
 import {
-  createJob,
-  createSecretForEnvs,
-  getContainerJobPodName,
-  getPodLogs,
-  getPodStatus,
-  waitForJobToComplete,
+  createContainerStepPod,
+  deletePod,
+  execCpFromPod,
+  execCpToPod,
+  execPodStep,
+  getPrepareJobTimeoutSeconds,
   waitForPodPhases
 } from '../k8s'
 import {
-  containerVolumes,
-  fixArgs,
+  CONTAINER_VOLUMES,
   mergeContainerWithOptions,
   PodPhase,
-  readExtensionFromFile
+  readExtensionFromFile,
+  DEFAULT_CONTAINER_ENTRY_POINT_ARGS,
+  writeContainerStepScript
 } from '../k8s/utils'
-import { JOB_CONTAINER_EXTENSION_NAME, JOB_CONTAINER_NAME } from './constants'
+import {
+  getJobPodName,
+  getStepPodName,
+  JOB_CONTAINER_EXTENSION_NAME,
+  JOB_CONTAINER_NAME
+} from './constants'

 export async function runContainerStep(
   stepContainer: RunContainerStepArgs
@@ -26,119 +34,109 @@ export async function runContainerStep(
     throw new Error('Building container actions is not currently supported')
   }

-  let secretName: string | undefined = undefined
-  if (stepContainer.environmentVariables) {
-    try {
-      const envs = JSON.parse(
-        JSON.stringify(stepContainer.environmentVariables)
-      )
-      envs['GITHUB_ACTIONS'] = 'true'
-      if (!('CI' in envs)) {
-        envs.CI = 'true'
-      }
-      secretName = await createSecretForEnvs(envs)
-    } catch (err) {
-      core.debug(`createSecretForEnvs failed: ${JSON.stringify(err)}`)
-      const message = (err as any)?.response?.body?.message || err
-      throw new Error(`failed to create script environment: ${message}`)
-    }
+  if (!stepContainer.entryPoint) {
+    throw new Error(
+      'failed to start the container since the entrypoint is overwritten'
+    )
+  }
+
+  const envs = stepContainer.environmentVariables || {}
+  envs['GITHUB_ACTIONS'] = 'true'
+  if (!('CI' in envs)) {
+    envs.CI = 'true'
   }

   const extension = readExtensionFromFile()

-  core.debug(`Created secret ${secretName} for container job envs`)
-  const container = createContainerSpec(stepContainer, secretName, extension)
+  const container = createContainerSpec(stepContainer, extension)

-  let job: k8s.V1Job
+  let pod: k8s.V1Pod
   try {
-    job = await createJob(container, extension)
+    pod = await createContainerStepPod(getStepPodName(), container, extension)
   } catch (err) {
     core.debug(`createJob failed: ${JSON.stringify(err)}`)
     const message = (err as any)?.response?.body?.message || err
     throw new Error(`failed to run script step: ${message}`)
   }

-  if (!job.metadata?.name) {
+  if (!pod.metadata?.name) {
     throw new Error(
       `Expected job ${JSON.stringify(
-        job
+        pod
       )} to have correctly set the metadata.name`
     )
   }
-  core.debug(`Job created, waiting for pod to start: ${job.metadata?.name}`)
+  const podName = pod.metadata.name

-  let podName: string
   try {
-    podName = await getContainerJobPodName(job.metadata.name)
-  } catch (err) {
-    core.debug(`getContainerJobPodName failed: ${JSON.stringify(err)}`)
-    const message = (err as any)?.response?.body?.message || err
-    throw new Error(`failed to get container job pod name: ${message}`)
-  }
-
-  await waitForPodPhases(
-    podName,
-    new Set([
-      PodPhase.COMPLETED,
-      PodPhase.RUNNING,
-      PodPhase.SUCCEEDED,
-      PodPhase.FAILED
-    ]),
-    new Set([PodPhase.PENDING, PodPhase.UNKNOWN])
-  )
-  core.debug('Container step is running or complete, pulling logs')
-
-  await getPodLogs(podName, JOB_CONTAINER_NAME)
-
-  core.debug('Waiting for container job to complete')
-  await waitForJobToComplete(job.metadata.name)
-
-  // pod has failed so pull the status code from the container
-  const status = await getPodStatus(podName)
-  if (status?.phase === 'Succeeded') {
-    return 0
-  }
-  if (!status?.containerStatuses?.length) {
-    core.error(
-      `Can't determine container status from response: ${JSON.stringify(
-        status
-      )}`
+    await waitForPodPhases(
+      podName,
+      new Set([PodPhase.RUNNING]),
+      new Set([PodPhase.PENDING, PodPhase.UNKNOWN]),
+      getPrepareJobTimeoutSeconds()
     )
-    return 1
+
+    const runnerWorkspace = dirname(process.env.RUNNER_WORKSPACE as string)
+    const githubWorkspace = process.env.GITHUB_WORKSPACE as string
+    const parts = githubWorkspace.split('/').slice(-2)
+    if (parts.length !== 2) {
+      throw new Error(`Invalid github workspace directory: ${githubWorkspace}`)
+    }
+    const relativeWorkspace = parts.join('/')
+
+    core.debug(
+      `Copying files from pod ${getJobPodName()} to ${runnerWorkspace}/${relativeWorkspace}`
+    )
+    await execCpFromPod(getJobPodName(), `/__w`, `${runnerWorkspace}`)
+
+    const { containerPath, runnerPath } = writeContainerStepScript(
+      `${runnerWorkspace}/__w/_temp`,
+      githubWorkspace,
+      stepContainer.entryPoint,
+      stepContainer.entryPointArgs,
+      envs
+    )
+
+    await execCpToPod(podName, `${runnerWorkspace}/__w`, '/__w')
+
+    fs.rmSync(`${runnerWorkspace}/__w`, { recursive: true, force: true })
+
+    try {
+      core.debug(`Executing container step script in pod ${podName}`)
+      return await execPodStep(
+        ['sh', '-e', containerPath],
+        pod.metadata.name,
+        JOB_CONTAINER_NAME
+      )
+    } catch (err) {
+      core.debug(`execPodStep failed: ${JSON.stringify(err)}`)
+      const message = (err as any)?.response?.body?.message || err
+      throw new Error(`failed to run script step: ${message}`)
+    } finally {
+      fs.rmSync(runnerPath, { force: true })
+    }
+  } catch (error) {
+    core.error(`Failed to run container step: ${error}`)
+    throw error
+  } finally {
+    await deletePod(podName).catch(err => {
+      core.error(`Failed to delete step pod ${podName}: ${err}`)
+    })
   }
-  const exitCode =
-    status.containerStatuses[status.containerStatuses.length - 1].state
-      ?.terminated?.exitCode
-  return Number(exitCode) || 1
 }

 function createContainerSpec(
   container: RunContainerStepArgs,
-  secretName?: string,
   extension?: k8s.V1PodTemplateSpec
 ): k8s.V1Container {
   const podContainer = new k8s.V1Container()
   podContainer.name = JOB_CONTAINER_NAME
   podContainer.image = container.image
-  podContainer.workingDir = container.workingDirectory
-  podContainer.command = container.entryPoint
-    ? [container.entryPoint]
-    : undefined
-  podContainer.args = container.entryPointArgs?.length
-    ? fixArgs(container.entryPointArgs)
-    : undefined
-
-  if (secretName) {
-    podContainer.envFrom = [
-      {
-        secretRef: {
-          name: secretName,
-          optional: false
-        }
-      }
-    ]
-  }
-  podContainer.volumeMounts = containerVolumes(undefined, false, true)
+  podContainer.workingDir = '/__w'
+  podContainer.command = ['tail']
+  podContainer.args = DEFAULT_CONTAINER_ENTRY_POINT_ARGS
+
+  podContainer.volumeMounts = CONTAINER_VOLUMES

   if (!extension) {
     return podContainer
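The reworked flow above hands the actual step command to a generated shell script via `writeContainerStepScript`, whose body is not shown in this compare. A hedged sketch of what such a generator might produce, assuming it writes the script into the runner temp directory (which the code above copies into the pod as `/__w/_temp`) and that arguments and environment values are shell-quoted; the function name, quoting, and layout here are illustrative only:

```ts
import * as fs from 'fs'
import * as path from 'path'
import { randomUUID } from 'crypto'
import * as shlex from 'shlex'

// Sketch only: write a small sh script that exports the step's environment,
// cds into the workspace and runs the entrypoint with its arguments.
function writeContainerStepScriptSketch(
  tempDir: string,
  workingDirectory: string,
  entryPoint: string,
  entryPointArgs: string[] = [],
  environmentVariables: Record<string, string> = {}
): { containerPath: string; runnerPath: string } {
  const exports = Object.entries(environmentVariables)
    .map(([key, value]) => `export ${key}=${shlex.quote(String(value))}`)
    .join('\n')
  const command = [entryPoint, ...entryPointArgs.map(a => shlex.quote(a))].join(' ')
  const script = `#!/bin/sh -e\n${exports}\ncd ${shlex.quote(workingDirectory)}\n${command}\n`
  const fileName = `${randomUUID()}.sh`
  const runnerPath = path.join(tempDir, fileName)
  fs.mkdirSync(tempDir, { recursive: true })
  fs.writeFileSync(runnerPath, script, { mode: 0o755 })
  // Assumes the runner temp dir ends up mounted at /__w/_temp inside the pod.
  return { containerPath: `/__w/_temp/${fileName}`, runnerPath }
}
```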
@@ -2,17 +2,19 @@
|
|||||||
import * as fs from 'fs'
|
import * as fs from 'fs'
|
||||||
import * as core from '@actions/core'
|
import * as core from '@actions/core'
|
||||||
import { RunScriptStepArgs } from 'hooklib'
|
import { RunScriptStepArgs } from 'hooklib'
|
||||||
import { execPodStep } from '../k8s'
|
import { execCpFromPod, execCpToPod, execPodStep } from '../k8s'
|
||||||
import { writeEntryPointScript } from '../k8s/utils'
|
import { writeRunScript, sleep, listDirAllCommand } from '../k8s/utils'
|
||||||
import { JOB_CONTAINER_NAME } from './constants'
|
import { JOB_CONTAINER_NAME } from './constants'
|
||||||
|
import { dirname } from 'path'
|
||||||
|
import * as shlex from 'shlex'
|
||||||
|
|
||||||
export async function runScriptStep(
|
export async function runScriptStep(
|
||||||
args: RunScriptStepArgs,
|
args: RunScriptStepArgs,
|
||||||
state,
|
state
|
||||||
responseFile
|
|
||||||
): Promise<void> {
|
): Promise<void> {
|
||||||
|
// Write the entrypoint first. This will be later coppied to the workflow pod
|
||||||
const { entryPoint, entryPointArgs, environmentVariables } = args
|
const { entryPoint, entryPointArgs, environmentVariables } = args
|
||||||
const { containerPath, runnerPath } = writeEntryPointScript(
|
const { containerPath, runnerPath } = writeRunScript(
|
||||||
args.workingDirectory,
|
args.workingDirectory,
|
||||||
entryPoint,
|
entryPoint,
|
||||||
entryPointArgs,
|
entryPointArgs,
|
||||||
@@ -20,6 +22,55 @@ export async function runScriptStep(
|
|||||||
environmentVariables
|
environmentVariables
|
||||||
)
|
)
|
||||||
|
|
||||||
|
const workdir = dirname(process.env.RUNNER_WORKSPACE as string)
|
||||||
|
const runnerTemp = `${workdir}/_temp`
|
||||||
|
const containerTemp = '/__w/_temp'
|
||||||
|
const containerTempSrc = '/__w/_temp_pre'
|
||||||
|
// Ensure base and staging dirs exist before copying
|
||||||
|
await execPodStep(
|
||||||
|
[
|
||||||
|
'sh',
|
||||||
|
'-c',
|
||||||
|
'mkdir -p /__w && mkdir -p /__w/_temp && mkdir -p /__w/_temp_pre'
|
||||||
|
],
|
||||||
|
state.jobPod,
|
||||||
|
JOB_CONTAINER_NAME
|
||||||
|
)
|
||||||
|
await execCpToPod(state.jobPod, runnerTemp, containerTempSrc)
|
||||||
|
|
||||||
|
// Copy GitHub directories from temp to /github
|
||||||
|
// Merge strategy:
|
||||||
|
// - Overwrite files in _runner_file_commands
|
||||||
|
// - Append files not already present elsewhere
|
||||||
|
const mergeCommands = [
|
||||||
|
'set -e',
|
||||||
|
'mkdir -p /__w/_temp /__w/_temp_pre',
|
||||||
|
'SRC=/__w/_temp_pre',
|
||||||
|
'DST=/__w/_temp',
|
||||||
|
// Overwrite _runner_file_commands
|
||||||
|
`find "$SRC" -type f ! -path "*/_runner_file_commands/*" -exec sh -c '
|
||||||
|
rel="\${1#$2/}"
|
||||||
|
target="$3/$rel"
|
||||||
|
mkdir -p "$(dirname "$target")"
|
||||||
|
cp -a "$1" "$target"
|
||||||
|
' _ {} "$SRC" "$DST" \\;`,
|
||||||
|
// Remove _temp_pre after merging
|
||||||
|
'rm -rf /__w/_temp_pre'
|
||||||
|
]
|
||||||
|
|
||||||
|
try {
|
||||||
|
await execPodStep(
|
||||||
|
['sh', '-c', mergeCommands.join(' && ')],
|
||||||
|
state.jobPod,
|
||||||
|
JOB_CONTAINER_NAME
|
||||||
|
)
|
||||||
|
} catch (err) {
|
||||||
|
core.debug(`Failed to merge temp directories: ${JSON.stringify(err)}`)
|
||||||
|
const message = (err as any)?.response?.body?.message || err
|
||||||
|
throw new Error(`failed to merge temp dirs: ${message}`)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Execute the entrypoint script
|
||||||
args.entryPoint = 'sh'
|
args.entryPoint = 'sh'
|
||||||
args.entryPointArgs = ['-e', containerPath]
|
args.entryPointArgs = ['-e', containerPath]
|
||||||
try {
|
try {
|
||||||
@@ -33,6 +84,23 @@ export async function runScriptStep(
|
|||||||
const message = (err as any)?.response?.body?.message || err
|
const message = (err as any)?.response?.body?.message || err
|
||||||
throw new Error(`failed to run script step: ${message}`)
|
throw new Error(`failed to run script step: ${message}`)
|
||||||
} finally {
|
} finally {
|
||||||
fs.rmSync(runnerPath)
|
try {
|
||||||
|
fs.rmSync(runnerPath, { force: true })
|
||||||
|
} catch (removeErr) {
|
||||||
|
core.debug(`Failed to remove file ${runnerPath}: ${removeErr}`)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
core.debug(
|
||||||
|
`Copying from job pod '${state.jobPod}' ${containerTemp} to ${runnerTemp}`
|
||||||
|
)
|
||||||
|
await execCpFromPod(
|
||||||
|
state.jobPod,
|
||||||
|
`${containerTemp}/_runner_file_commands`,
|
||||||
|
`${workdir}/_temp`
|
||||||
|
)
|
||||||
|
} catch (error) {
|
||||||
|
core.warning('Failed to copy _temp from pod')
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -1,5 +1,11 @@
 import * as core from '@actions/core'
-import { Command, getInputFromStdin, prepareJobArgs } from 'hooklib'
+import {
+  Command,
+  getInputFromStdin,
+  PrepareJobArgs,
+  RunContainerStepArgs,
+  RunScriptStepArgs
+} from 'hooklib'
 import {
   cleanupJob,
   prepareJob,
@@ -27,16 +33,16 @@ async function run(): Promise<void> {
     let exitCode = 0
     switch (command) {
       case Command.PrepareJob:
-        await prepareJob(args as prepareJobArgs, responseFile)
+        await prepareJob(args as PrepareJobArgs, responseFile)
         return process.exit(0)
       case Command.CleanupJob:
         await cleanupJob()
         return process.exit(0)
       case Command.RunScriptStep:
-        await runScriptStep(args, state, null)
+        await runScriptStep(args as RunScriptStepArgs, state)
         return process.exit(0)
       case Command.RunContainerStep:
-        exitCode = await runContainerStep(args)
+        exitCode = await runContainerStep(args as RunContainerStepArgs)
         return process.exit(exitCode)
       default:
         throw new Error(`Command not recognized: ${command}`)
@@ -1,22 +1,29 @@
 import * as core from '@actions/core'
+import * as path from 'path'
+import { spawn } from 'child_process'
 import * as k8s from '@kubernetes/client-node'
-import { ContainerInfo, Registry } from 'hooklib'
+import tar from 'tar-fs'
 import * as stream from 'stream'
+import { WritableStreamBuffer } from 'stream-buffers'
+import { createHash } from 'crypto'
+import type { ContainerInfo, Registry } from 'hooklib'
 import {
-  getJobPodName,
-  getRunnerPodName,
   getSecretName,
-  getStepPodName,
-  getVolumeClaimName,
+  JOB_CONTAINER_NAME,
   RunnerInstanceLabel
 } from '../hooks/constants'
 import {
   PodPhase,
   mergePodSpecWithOptions,
   mergeObjectMeta,
-  useKubeScheduler,
-  fixArgs
+  fixArgs,
+  listDirAllCommand,
+  sleep,
+  EXTERNALS_VOLUME_NAME,
+  GITHUB_VOLUME_NAME,
+  WORK_VOLUME
 } from './utils'
+import * as shlex from 'shlex'
 
 const kc = new k8s.KubeConfig()
 
@@ -28,8 +35,6 @@ const k8sAuthorizationV1Api = kc.makeApiClient(k8s.AuthorizationV1Api)
 
 const DEFAULT_WAIT_FOR_POD_TIME_SECONDS = 10 * 60 // 10 min
 
-export const POD_VOLUME_NAME = 'work'
-
 export const requiredPermissions = [
   {
     group: '',
@@ -49,12 +54,6 @@ export const requiredPermissions = [
     resource: 'pods',
     subresource: 'log'
   },
-  {
-    group: 'batch',
-    verbs: ['get', 'list', 'create', 'delete'],
-    resource: 'jobs',
-    subresource: ''
-  },
   {
     group: '',
     verbs: ['create', 'delete', 'get', 'list'],
@@ -63,7 +62,8 @@ export const requiredPermissions = [
   }
 ]
 
-export async function createPod(
+export async function createJobPod(
+  name: string,
   jobContainer?: k8s.V1Container,
   services?: k8s.V1Container[],
   registry?: Registry,
@@ -83,7 +83,7 @@ export async function createPod(
   appPod.kind = 'Pod'
 
   appPod.metadata = new k8s.V1ObjectMeta()
-  appPod.metadata.name = getJobPodName()
+  appPod.metadata.name = name
 
   const instanceLabel = new RunnerInstanceLabel()
   appPod.metadata.labels = {
@@ -93,19 +93,68 @@ export async function createPod(
 
   appPod.spec = new k8s.V1PodSpec()
   appPod.spec.containers = containers
+  appPod.spec.securityContext = {
+    fsGroup: 1001
+  }
+
+  // Extract working directory from GITHUB_WORKSPACE
+  // GITHUB_WORKSPACE is like /__w/repo-name/repo-name
+  const githubWorkspace = process.env.GITHUB_WORKSPACE
+  const workingDirPath = githubWorkspace?.split('/').slice(-2).join('/') ?? ''
+
+  const initCommands = [
+    'mkdir -p /mnt/externals',
+    'mkdir -p /mnt/work',
+    'mkdir -p /mnt/github',
+    'mv /home/runner/externals/* /mnt/externals/'
+  ]
+
+  if (workingDirPath) {
+    initCommands.push(`mkdir -p /mnt/work/${workingDirPath}`)
+  }
+
+  appPod.spec.initContainers = [
+    {
+      name: 'fs-init',
+      image:
+        process.env.ACTIONS_RUNNER_IMAGE ||
+        'ghcr.io/actions/actions-runner:latest',
+      command: ['sh', '-c', initCommands.join(' && ')],
+      securityContext: {
+        runAsGroup: 1001,
+        runAsUser: 1001
+      },
+      volumeMounts: [
+        {
+          name: EXTERNALS_VOLUME_NAME,
+          mountPath: '/mnt/externals'
+        },
+        {
+          name: WORK_VOLUME,
+          mountPath: '/mnt/work'
+        },
+        {
+          name: GITHUB_VOLUME_NAME,
+          mountPath: '/mnt/github'
+        }
+      ]
+    }
+  ]
+
   appPod.spec.restartPolicy = 'Never'
 
-  const nodeName = await getCurrentNodeName()
-  if (useKubeScheduler()) {
-    appPod.spec.affinity = await getPodAffinity(nodeName)
-  } else {
-    appPod.spec.nodeName = nodeName
-  }
-  const claimName = getVolumeClaimName()
   appPod.spec.volumes = [
     {
-      name: 'work',
-      persistentVolumeClaim: { claimName }
+      name: EXTERNALS_VOLUME_NAME,
+      emptyDir: {}
+    },
+    {
+      name: GITHUB_VOLUME_NAME,
+      emptyDir: {}
+    },
+    {
+      name: WORK_VOLUME,
+      emptyDir: {}
     }
   ]
 
@@ -127,102 +176,71 @@ export async function createPod(
     mergePodSpecWithOptions(appPod.spec, extension.spec)
   }
 
-  const { body } = await k8sApi.createNamespacedPod(namespace(), appPod)
-  return body
+  return await k8sApi.createNamespacedPod({
+    namespace: namespace(),
+    body: appPod
+  })
 }
 
-export async function createJob(
+export async function createContainerStepPod(
+  name: string,
   container: k8s.V1Container,
   extension?: k8s.V1PodTemplateSpec
-): Promise<k8s.V1Job> {
-  const runnerInstanceLabel = new RunnerInstanceLabel()
-
-  const job = new k8s.V1Job()
-  job.apiVersion = 'batch/v1'
-  job.kind = 'Job'
-  job.metadata = new k8s.V1ObjectMeta()
-  job.metadata.name = getStepPodName()
-  job.metadata.labels = { [runnerInstanceLabel.key]: runnerInstanceLabel.value }
-  job.metadata.annotations = {}
-
-  job.spec = new k8s.V1JobSpec()
-  job.spec.ttlSecondsAfterFinished = 300
-  job.spec.backoffLimit = 0
-  job.spec.template = new k8s.V1PodTemplateSpec()
-
-  job.spec.template.spec = new k8s.V1PodSpec()
-  job.spec.template.metadata = new k8s.V1ObjectMeta()
-  job.spec.template.metadata.labels = {}
-  job.spec.template.metadata.annotations = {}
-  job.spec.template.spec.containers = [container]
-  job.spec.template.spec.restartPolicy = 'Never'
-
-  const nodeName = await getCurrentNodeName()
-  if (useKubeScheduler()) {
-    job.spec.template.spec.affinity = await getPodAffinity(nodeName)
-  } else {
-    job.spec.template.spec.nodeName = nodeName
-  }
-
-  const claimName = getVolumeClaimName()
-  job.spec.template.spec.volumes = [
-    {
-      name: 'work',
-      persistentVolumeClaim: { claimName }
-    }
-  ]
-
-  if (extension) {
-    if (extension.metadata) {
-      // apply metadata both to the job and the pod created by the job
-      mergeObjectMeta(job, extension.metadata)
-      mergeObjectMeta(job.spec.template, extension.metadata)
-    }
-    if (extension.spec) {
-      mergePodSpecWithOptions(job.spec.template.spec, extension.spec)
-    }
-  }
-
-  const { body } = await k8sBatchV1Api.createNamespacedJob(namespace(), job)
-  return body
-}
-
-export async function getContainerJobPodName(jobName: string): Promise<string> {
-  const selector = `job-name=${jobName}`
-  const backOffManager = new BackOffManager(60)
-  while (true) {
-    const podList = await k8sApi.listNamespacedPod(
-      namespace(),
-      undefined,
-      undefined,
-      undefined,
-      undefined,
-      selector,
-      1
-    )
-
-    if (!podList.body.items?.length) {
-      await backOffManager.backOff()
-      continue
-    }
-
-    if (!podList.body.items[0].metadata?.name) {
-      throw new Error(
-        `Failed to determine the name of the pod for job ${jobName}`
-      )
-    }
-    return podList.body.items[0].metadata.name
-  }
-}
+): Promise<k8s.V1Pod> {
+  const appPod = new k8s.V1Pod()
+
+  appPod.apiVersion = 'v1'
+  appPod.kind = 'Pod'
+
+  appPod.metadata = new k8s.V1ObjectMeta()
+  appPod.metadata.name = name
+
+  const instanceLabel = new RunnerInstanceLabel()
+  appPod.metadata.labels = {
+    [instanceLabel.key]: instanceLabel.value
+  }
+  appPod.metadata.annotations = {}
+
+  appPod.spec = new k8s.V1PodSpec()
+  appPod.spec.containers = [container]
+
+  appPod.spec.restartPolicy = 'Never'
+
+  appPod.spec.volumes = [
+    {
+      name: EXTERNALS_VOLUME_NAME,
+      emptyDir: {}
+    },
+    {
+      name: GITHUB_VOLUME_NAME,
+      emptyDir: {}
+    },
+    {
+      name: WORK_VOLUME,
+      emptyDir: {}
+    }
+  ]
+
+  if (extension?.metadata) {
+    mergeObjectMeta(appPod, extension.metadata)
+  }
+
+  if (extension?.spec) {
+    mergePodSpecWithOptions(appPod.spec, extension.spec)
+  }
+
+  return await k8sApi.createNamespacedPod({
+    namespace: namespace(),
+    body: appPod
+  })
 }
 
-export async function deletePod(podName: string): Promise<void> {
-  await k8sApi.deleteNamespacedPod(
-    podName,
-    namespace(),
-    undefined,
-    undefined,
-    0
-  )
+export async function deletePod(name: string): Promise<void> {
+  await k8sApi.deleteNamespacedPod({
+    name,
+    namespace: namespace(),
+    gracePeriodSeconds: 0
+  })
 }
 
 export async function execPodStep(
@@ -230,11 +248,11 @@ export async function execPodStep(
   podName: string,
   containerName: string,
   stdin?: stream.Readable
-): Promise<void> {
+): Promise<number> {
   const exec = new k8s.Exec(kc)
 
   command = fixArgs(command)
-  // Exec returns a websocket. If websocket fails, we should reject the promise. Otherwise, websocket will call a callback. Since at that point, websocket is not failing, we can safely resolve or reject the promise.
-  await new Promise(function (resolve, reject) {
+  return await new Promise(function (resolve, reject) {
     exec
       .exec(
         namespace(),
@@ -246,9 +264,9 @@ export async function execPodStep(
         stdin ?? null,
         false /* tty */,
         resp => {
-          // kube.exec returns an error if exit code is not 0, but we can't actually get the exit code
+          core.debug(`execPodStep response: ${JSON.stringify(resp)}`)
           if (resp.status === 'Success') {
-            resolve(resp.code)
+            resolve(resp.code || 0)
           } else {
             core.debug(
               JSON.stringify({
@@ -256,16 +274,298 @@ export async function execPodStep(
                 details: resp?.details
               })
             )
-            reject(resp?.message)
+            reject(new Error(resp?.message || 'execPodStep failed'))
           }
         }
       )
-      // If exec.exec fails, explicitly reject the outer promise
-      // eslint-disable-next-line github/no-then
       .catch(e => reject(e))
   })
 }
 
+export async function execCalculateOutputHashSorted(
+  podName: string,
+  containerName: string,
+  command: string[]
+): Promise<string> {
+  const exec = new k8s.Exec(kc)
+
+  let output = ''
+  const outputWriter = new stream.Writable({
+    write(chunk, _enc, cb) {
+      try {
+        output += chunk.toString('utf8')
+        cb()
+      } catch (e) {
+        cb(e as Error)
+      }
+    }
+  })
+
+  await new Promise<void>((resolve, reject) => {
+    exec
+      .exec(
+        namespace(),
+        podName,
+        containerName,
+        command,
+        outputWriter, // capture stdout
+        process.stderr,
+        null,
+        false /* tty */,
+        resp => {
+          core.debug(`internalExecOutput response: ${JSON.stringify(resp)}`)
+          if (resp.status === 'Success') {
+            resolve()
+          } else {
+            core.debug(
+              JSON.stringify({
+                message: resp?.message,
+                details: resp?.details
+              })
+            )
+            reject(new Error(resp?.message || 'internalExecOutput failed'))
+          }
+        }
+      )
+      .catch(e => reject(e))
+  })
+
+  outputWriter.end()
+
+  // Sort lines for consistent ordering across platforms
+  const sortedOutput =
+    output
+      .split('\n')
+      .filter(line => line.length > 0)
+      .sort()
+      .join('\n') + '\n'
+
+  const hash = createHash('sha256')
+  hash.update(sortedOutput)
+  return hash.digest('hex')
+}
+
+export async function localCalculateOutputHashSorted(
+  commands: string[]
+): Promise<string> {
+  return await new Promise<string>((resolve, reject) => {
+    const child = spawn(commands[0], commands.slice(1), {
+      stdio: ['ignore', 'pipe', 'ignore']
+    })
+
+    let output = ''
+    child.stdout.on('data', chunk => {
+      output += chunk.toString('utf8')
+    })
+    child.on('error', reject)
+    child.on('close', (code: number) => {
+      if (code === 0) {
+        // Sort lines for consistent ordering across distributions/platforms
+        const sortedOutput =
+          output
+            .split('\n')
+            .filter(line => line.length > 0)
+            .sort()
+            .join('\n') + '\n'
+
+        const hash = createHash('sha256')
+        hash.update(sortedOutput)
+        resolve(hash.digest('hex'))
+      } else {
+        reject(new Error(`child process exited with code ${code}`))
+      }
+    })
+  })
+}
+
+export async function execCpToPod(
+  podName: string,
+  runnerPath: string,
+  containerPath: string
+): Promise<void> {
+  core.debug(`Copying ${runnerPath} to pod ${podName} at ${containerPath}`)
+
+  let attempt = 0
+  while (true) {
+    try {
+      const exec = new k8s.Exec(kc)
+      // Use tar to extract with --no-same-owner to avoid ownership issues.
+      // Then use find to fix permissions. The -m flag helps but we also need to fix permissions after.
+      const command = [
+        'sh',
+        '-c',
+        `tar xf - --no-same-owner -C ${shlex.quote(containerPath)} 2>/dev/null; ` +
+          `find ${shlex.quote(containerPath)} -type f -exec chmod u+rw {} \\; 2>/dev/null; ` +
+          `find ${shlex.quote(containerPath)} -type d -exec chmod u+rwx {} \\; 2>/dev/null`
+      ]
+      const readStream = tar.pack(runnerPath)
+      const errStream = new WritableStreamBuffer()
+      await new Promise((resolve, reject) => {
+        exec
+          .exec(
+            namespace(),
+            podName,
+            JOB_CONTAINER_NAME,
+            command,
+            null,
+            errStream,
+            readStream,
+            false,
+            async status => {
+              if (errStream.size()) {
+                reject(
+                  new Error(
+                    `Error from execCpToPod - status: ${status.status}, details: \n ${errStream.getContentsAsString()}`
+                  )
+                )
+              }
+              resolve(status)
+            }
+          )
+          .catch(e => reject(e))
+      })
+      break
+    } catch (error) {
+      core.debug(`cpToPod: Attempt ${attempt + 1} failed: ${error}`)
+      attempt++
+      if (attempt >= 30) {
+        throw new Error(
+          `cpToPod failed after ${attempt} attempts: ${JSON.stringify(error)}`
+        )
+      }
+      await sleep(1000)
+    }
+  }
+
+  let attempts = 15
+  const delay = 1000
+  for (let i = 0; i < attempts; i++) {
+    try {
+      const want = await localCalculateOutputHashSorted([
+        'sh',
+        '-c',
+        listDirAllCommand(runnerPath)
+      ])
+
+      const got = await execCalculateOutputHashSorted(
+        podName,
+        JOB_CONTAINER_NAME,
+        ['sh', '-c', listDirAllCommand(containerPath)]
+      )
+
+      if (got !== want) {
+        core.debug(
+          `The hash of the directory does not match the expected value; want='${want}' got='${got}'`
+        )
+        await sleep(delay)
+        continue
+      }
+
+      break
+    } catch (error) {
+      core.debug(`Attempt ${i + 1} failed: ${error}`)
+      await sleep(delay)
+    }
+  }
+}
+
+export async function execCpFromPod(
+  podName: string,
+  containerPath: string,
+  parentRunnerPath: string
+): Promise<void> {
+  const targetRunnerPath = `${parentRunnerPath}/${path.basename(containerPath)}`
+  core.debug(
+    `Copying from pod ${podName} ${containerPath} to ${targetRunnerPath}`
+  )
+
+  let attempt = 0
+  while (true) {
+    try {
+      // make temporary directory
+      const exec = new k8s.Exec(kc)
+      const containerPaths = containerPath.split('/')
+      const dirname = containerPaths.pop() as string
+      const command = [
+        'tar',
+        'cf',
+        '-',
+        '-C',
+        containerPaths.join('/') || '/',
+        dirname
+      ]
+      const writerStream = tar.extract(parentRunnerPath)
+      const errStream = new WritableStreamBuffer()
+
+      await new Promise((resolve, reject) => {
+        exec
+          .exec(
+            namespace(),
+            podName,
+            JOB_CONTAINER_NAME,
+            command,
+            writerStream,
+            errStream,
+            null,
+            false,
+            async status => {
+              if (errStream.size()) {
+                reject(
+                  new Error(
+                    `Error from cpFromPod - details: \n ${errStream.getContentsAsString()}`
+                  )
+                )
+              }
+              resolve(status)
+            }
+          )
+          .catch(e => reject(e))
+      })
+      break
+    } catch (error) {
+      core.debug(`Attempt ${attempt + 1} failed: ${error}`)
+      attempt++
+      if (attempt >= 30) {
+        throw new Error(
+          `execCpFromPod failed after ${attempt} attempts: ${JSON.stringify(error)}`
+        )
+      }
+      await sleep(1000)
+    }
+  }
+
+  let attempts = 15
+  const delay = 1000
+  for (let i = 0; i < attempts; i++) {
+    try {
+      const want = await execCalculateOutputHashSorted(
+        podName,
+        JOB_CONTAINER_NAME,
+        ['sh', '-c', listDirAllCommand(containerPath)]
+      )
+
+      const got = await localCalculateOutputHashSorted([
+        'sh',
+        '-c',
+        listDirAllCommand(targetRunnerPath)
+      ])
+
+      if (got !== want) {
+        core.debug(
+          `The hash of the directory does not match the expected value; want='${want}' got='${got}'`
+        )
+        await sleep(delay)
+        continue
+      }
+
+      break
+    } catch (error) {
+      core.debug(`Attempt ${i + 1} failed: ${error}`)
+      await sleep(delay)
+    }
+  }
+}
+
 export async function waitForJobToComplete(jobName: string): Promise<void> {
   const backOffManager = new BackOffManager()
   while (true) {
@@ -274,7 +574,7 @@ export async function waitForJobToComplete(jobName: string): Promise<void> {
       return
     }
   } catch (error) {
-    throw new Error(`job ${jobName} has failed`)
+    throw new Error(`job ${jobName} has failed: ${JSON.stringify(error)}`)
   }
   await backOffManager.backOff()
 }
@@ -315,8 +615,10 @@ export async function createDockerSecret(
     )
   }
 
-  const { body } = await k8sApi.createNamespacedSecret(namespace(), secret)
-  return body
+  return await k8sApi.createNamespacedSecret({
+    namespace: namespace(),
+    body: secret
+  })
 }
 
 export async function createSecretForEnvs(envs: {
@@ -340,30 +642,33 @@ export async function createSecretForEnvs(envs: {
     secret.data[key] = Buffer.from(value).toString('base64')
   }
 
-  await k8sApi.createNamespacedSecret(namespace(), secret)
+  await k8sApi.createNamespacedSecret({
+    namespace: namespace(),
+    body: secret
+  })
   return secretName
 }
 
-export async function deleteSecret(secretName: string): Promise<void> {
-  await k8sApi.deleteNamespacedSecret(secretName, namespace())
+export async function deleteSecret(name: string): Promise<void> {
+  await k8sApi.deleteNamespacedSecret({
+    name,
+    namespace: namespace()
+  })
 }
 
 export async function pruneSecrets(): Promise<void> {
-  const secretList = await k8sApi.listNamespacedSecret(
-    namespace(),
-    undefined,
-    undefined,
-    undefined,
-    undefined,
-    new RunnerInstanceLabel().toString()
-  )
-  if (!secretList.body.items.length) {
+  const secretList = await k8sApi.listNamespacedSecret({
+    namespace: namespace(),
+    labelSelector: new RunnerInstanceLabel().toString()
+  })
+  if (!secretList.items.length) {
     return
   }
 
   await Promise.all(
-    secretList.body.items.map(
-      secret => secret.metadata?.name && deleteSecret(secret.metadata.name)
+    secretList.items.map(
+      async secret =>
+        secret.metadata?.name && (await deleteSecret(secret.metadata.name))
     )
   )
 }
@@ -391,7 +696,9 @@ export async function waitForPodPhases(
       await backOffManager.backOff()
     }
   } catch (error) {
-    throw new Error(`Pod ${podName} is unhealthy with phase status ${phase}`)
+    throw new Error(
+      `Pod ${podName} is unhealthy with phase status ${phase}: ${JSON.stringify(error)}`
+    )
   }
 }
 
@@ -414,7 +721,7 @@ export function getPrepareJobTimeoutSeconds(): number {
   return timeoutSeconds
 }
 
-async function getPodPhase(podName: string): Promise<PodPhase> {
+async function getPodPhase(name: string): Promise<PodPhase> {
   const podPhaseLookup = new Set<string>([
     PodPhase.PENDING,
     PodPhase.RUNNING,
@@ -422,8 +729,10 @@ async function getPodPhase(podName: string): Promise<PodPhase> {
     PodPhase.FAILED,
     PodPhase.UNKNOWN
   ])
-  const { body } = await k8sApi.readNamespacedPod(podName, namespace())
-  const pod = body
+  const pod = await k8sApi.readNamespacedPod({
+    name,
+    namespace: namespace()
+  })
 
   if (!pod.status?.phase || !podPhaseLookup.has(pod.status.phase)) {
     return PodPhase.UNKNOWN
@@ -431,11 +740,13 @@ async function getPodPhase(podName: string): Promise<PodPhase> {
   return pod.status?.phase as PodPhase
 }
 
-async function isJobSucceeded(jobName: string): Promise<boolean> {
-  const { body } = await k8sBatchV1Api.readNamespacedJob(jobName, namespace())
-  const job = body
+async function isJobSucceeded(name: string): Promise<boolean> {
+  const job = await k8sBatchV1Api.readNamespacedJob({
+    name,
+    namespace: namespace()
+  })
   if (job.status?.failed) {
-    throw new Error(`job ${jobName} has failed`)
+    throw new Error(`job ${name} has failed`)
   }
   return !!job.status?.succeeded
 }
@@ -455,30 +766,26 @@ export async function getPodLogs(
     process.stderr.write(err.message)
   })
 
-  const r = await log.log(namespace(), podName, containerName, logStream, {
+  await log.log(namespace(), podName, containerName, logStream, {
    follow: true,
    pretty: false,
    timestamps: false
  })
-  await new Promise(resolve => r.on('close', () => resolve(null)))
+  await new Promise(resolve => logStream.on('end', () => resolve(null)))
 }
 
 export async function prunePods(): Promise<void> {
-  const podList = await k8sApi.listNamespacedPod(
-    namespace(),
-    undefined,
-    undefined,
-    undefined,
-    undefined,
-    new RunnerInstanceLabel().toString()
-  )
-  if (!podList.body.items.length) {
+  const podList = await k8sApi.listNamespacedPod({
+    namespace: namespace(),
+    labelSelector: new RunnerInstanceLabel().toString()
+  })
+  if (!podList.items.length) {
    return
  }
 
  await Promise.all(
-    podList.body.items.map(
-      pod => pod.metadata?.name && deletePod(pod.metadata.name)
+    podList.items.map(
+      async pod => pod.metadata?.name && (await deletePod(pod.metadata.name))
    )
  )
 }
@@ -486,16 +793,16 @@ export async function prunePods(): Promise<void> {
 export async function getPodStatus(
   name: string
 ): Promise<k8s.V1PodStatus | undefined> {
-  const { body } = await k8sApi.readNamespacedPod(name, namespace())
-  return body.status
+  const pod = await k8sApi.readNamespacedPod({
+    name,
+    namespace: namespace()
+  })
+  return pod.status
 }
 
 export async function isAuthPermissionsOK(): Promise<boolean> {
   const sar = new k8s.V1SelfSubjectAccessReview()
-  const asyncs: Promise<{
-    response: unknown
-    body: k8s.V1SelfSubjectAccessReview
-  }>[] = []
+  const asyncs: Promise<k8s.V1SelfSubjectAccessReview>[] = []
   for (const resource of requiredPermissions) {
     for (const verb of resource.verbs) {
       sar.spec = new k8s.V1SelfSubjectAccessReviewSpec()
@@ -505,11 +812,13 @@ export async function isAuthPermissionsOK(): Promise<boolean> {
       sar.spec.resourceAttributes.group = resource.group
       sar.spec.resourceAttributes.resource = resource.resource
       sar.spec.resourceAttributes.subresource = resource.subresource
-      asyncs.push(k8sAuthorizationV1Api.createSelfSubjectAccessReview(sar))
+      asyncs.push(
+        k8sAuthorizationV1Api.createSelfSubjectAccessReview({ body: sar })
+      )
     }
   }
   const responses = await Promise.all(asyncs)
-  return responses.every(resp => resp.body.status?.allowed)
+  return responses.every(resp => resp.status?.allowed)
 }
 
 export async function isPodContainerAlpine(
@@ -522,48 +831,18 @@ export async function isPodContainerAlpine(
      [
        'sh',
        '-c',
-        `'[ $(cat /etc/*release* | grep -i -e "^ID=*alpine*" -c) != 0 ] || exit 1'`
+        `[ $(cat /etc/*release* | grep -i -e "^ID=*alpine*" -c) != 0 ] || exit 1`
      ],
      podName,
      containerName
    )
-  } catch (err) {
+  } catch {
    isAlpine = false
  }
 
  return isAlpine
 }
 
-async function getCurrentNodeName(): Promise<string> {
-  const resp = await k8sApi.readNamespacedPod(getRunnerPodName(), namespace())
-
-  const nodeName = resp.body.spec?.nodeName
-  if (!nodeName) {
-    throw new Error('Failed to determine node name')
-  }
-  return nodeName
-}
-
-async function getPodAffinity(nodeName: string): Promise<k8s.V1Affinity> {
-  const affinity = new k8s.V1Affinity()
-  affinity.nodeAffinity = new k8s.V1NodeAffinity()
-  affinity.nodeAffinity.requiredDuringSchedulingIgnoredDuringExecution =
-    new k8s.V1NodeSelector()
-  affinity.nodeAffinity.requiredDuringSchedulingIgnoredDuringExecution.nodeSelectorTerms =
-    [
-      {
-        matchExpressions: [
-          {
-            key: 'kubernetes.io/hostname',
-            operator: 'In',
-            values: [nodeName]
-          }
-        ]
-      }
-    ]
-  return affinity
-}
-
 export function namespace(): string {
   if (process.env['ACTIONS_RUNNER_KUBERNETES_NAMESPACE']) {
     return process.env['ACTIONS_RUNNER_KUBERNETES_NAMESPACE']
@@ -647,6 +926,8 @@ export function containerPorts(
 }
 
 export async function getPodByName(name): Promise<k8s.V1Pod> {
-  const { body } = await k8sApi.readNamespacedPod(name, namespace())
-  return body
+  return await k8sApi.readNamespacedPod({
+    name,
+    namespace: namespace()
+  })
 }
@@ -2,12 +2,10 @@ import * as k8s from '@kubernetes/client-node'
 import * as fs from 'fs'
 import * as yaml from 'js-yaml'
 import * as core from '@actions/core'
-import { Mount } from 'hooklib'
-import * as path from 'path'
 import { v1 as uuidv4 } from 'uuid'
-import { POD_VOLUME_NAME } from './index'
 import { CONTAINER_EXTENSION_PREFIX } from '../hooks/constants'
 import * as shlex from 'shlex'
+import { Mount } from 'hooklib'
 
 export const DEFAULT_CONTAINER_ENTRY_POINT_ARGS = [`-f`, `/dev/null`]
 export const DEFAULT_CONTAINER_ENTRY_POINT = 'tail'
@@ -15,101 +13,48 @@ export const DEFAULT_CONTAINER_ENTRY_POINT = 'tail'
 export const ENV_HOOK_TEMPLATE_PATH = 'ACTIONS_RUNNER_CONTAINER_HOOK_TEMPLATE'
 export const ENV_USE_KUBE_SCHEDULER = 'ACTIONS_RUNNER_USE_KUBE_SCHEDULER'
 
-export function containerVolumes(
-  userMountVolumes: Mount[] = [],
-  jobContainer = true,
-  containerAction = false
-): k8s.V1VolumeMount[] {
-  const mounts: k8s.V1VolumeMount[] = [
-    {
-      name: POD_VOLUME_NAME,
-      mountPath: '/__w'
-    }
-  ]
-
-  const workspacePath = process.env.GITHUB_WORKSPACE as string
-  if (containerAction) {
-    const i = workspacePath.lastIndexOf('_work/')
-    const workspaceRelativePath = workspacePath.slice(i + '_work/'.length)
-    mounts.push(
-      {
-        name: POD_VOLUME_NAME,
-        mountPath: '/github/workspace',
-        subPath: workspaceRelativePath
-      },
-      {
-        name: POD_VOLUME_NAME,
-        mountPath: '/github/file_commands',
-        subPath: '_temp/_runner_file_commands'
-      },
-      {
-        name: POD_VOLUME_NAME,
-        mountPath: '/github/home',
-        subPath: '_temp/_github_home'
-      },
-      {
-        name: POD_VOLUME_NAME,
-        mountPath: '/github/workflow',
-        subPath: '_temp/_github_workflow'
-      }
-    )
-    return mounts
-  }
-
-  if (!jobContainer) {
-    return mounts
-  }
-
-  mounts.push(
-    {
-      name: POD_VOLUME_NAME,
-      mountPath: '/__e',
-      subPath: 'externals'
-    },
-    {
-      name: POD_VOLUME_NAME,
-      mountPath: '/github/home',
-      subPath: '_temp/_github_home'
-    },
-    {
-      name: POD_VOLUME_NAME,
-      mountPath: '/github/workflow',
-      subPath: '_temp/_github_workflow'
-    }
-  )
-
-  if (!userMountVolumes?.length) {
-    return mounts
-  }
-
-  for (const userVolume of userMountVolumes) {
-    let sourceVolumePath = ''
-    if (path.isAbsolute(userVolume.sourceVolumePath)) {
-      if (!userVolume.sourceVolumePath.startsWith(workspacePath)) {
-        throw new Error(
-          'Volume mounts outside of the work folder are not supported'
-        )
-      }
-      // source volume path should be relative path
-      sourceVolumePath = userVolume.sourceVolumePath.slice(
-        workspacePath.length + 1
-      )
-    } else {
-      sourceVolumePath = userVolume.sourceVolumePath
-    }
-
-    mounts.push({
-      name: POD_VOLUME_NAME,
-      mountPath: userVolume.targetVolumePath,
-      subPath: sourceVolumePath,
-      readOnly: userVolume.readOnly
-    })
-  }
-
-  return mounts
-}
+export const EXTERNALS_VOLUME_NAME = 'externals'
+export const GITHUB_VOLUME_NAME = 'github'
+export const WORK_VOLUME = 'work'
+
+export const CONTAINER_VOLUMES: k8s.V1VolumeMount[] = [
+  {
+    name: EXTERNALS_VOLUME_NAME,
+    mountPath: '/__e'
+  },
+  {
+    name: WORK_VOLUME,
+    mountPath: '/__w'
+  },
+  {
+    name: GITHUB_VOLUME_NAME,
+    mountPath: '/github'
+  }
+]
+
+export function prepareJobScript(userVolumeMounts: Mount[]): {
+  containerPath: string
+  runnerPath: string
+} {
+  let mountDirs = userVolumeMounts.map(m => m.targetVolumePath).join(' ')
+
+  const content = `#!/bin/sh -l
+set -e
+cp -R /__w/_temp/_github_home /github/home
+cp -R /__w/_temp/_github_workflow /github/workflow
+mkdir -p ${mountDirs}
+`
+
+  const filename = `${uuidv4()}.sh`
+  const entryPointPath = `${process.env.RUNNER_TEMP}/${filename}`
+  fs.writeFileSync(entryPointPath, content)
+  return {
+    containerPath: `/__w/_temp/${filename}`,
+    runnerPath: entryPointPath
+  }
+}
 
-export function writeEntryPointScript(
+export function writeRunScript(
   workingDirectory: string,
   entryPoint: string,
   entryPointArgs?: string[],
@@ -123,33 +68,12 @@ export function writeEntryPointScript(
       typeof prependPath === 'string' ? prependPath : prependPath.join(':')
     exportPath = `export PATH=${prepend}:$PATH`
   }
-  let environmentPrefix = ''
-
-  if (environmentVariables && Object.entries(environmentVariables).length) {
-    const envBuffer: string[] = []
-    for (const [key, value] of Object.entries(environmentVariables)) {
-      if (
-        key.includes(`=`) ||
-        key.includes(`'`) ||
-        key.includes(`"`) ||
-        key.includes(`$`)
-      ) {
-        throw new Error(
-          `environment key ${key} is invalid - the key must not contain =, $, ', or "`
-        )
-      }
-      envBuffer.push(
-        `"${key}=${value
-          .replace(/\\/g, '\\\\')
-          .replace(/"/g, '\\"')
-          .replace(/\$/g, '\\$')
-          .replace(/`/g, '\\`')}"`
-      )
-    }
-    environmentPrefix = `env ${envBuffer.join(' ')} `
-  }
+  let environmentPrefix = scriptEnv(environmentVariables)
 
   const content = `#!/bin/sh -l
+set -e
+rm "$0" # remove script after running
 ${exportPath}
 cd ${workingDirectory} && \
 exec ${environmentPrefix} ${entryPoint} ${
@@ -165,9 +89,76 @@ exec ${environmentPrefix} ${entryPoint} ${
   }
 }
 
+export function writeContainerStepScript(
+  dst: string,
+  workingDirectory: string,
+  entryPoint: string,
+  entryPointArgs?: string[],
+  environmentVariables?: { [key: string]: string }
+): { containerPath: string; runnerPath: string } {
+  let environmentPrefix = scriptEnv(environmentVariables)
+
+  const parts = workingDirectory.split('/').slice(-2)
+  if (parts.length !== 2) {
+    throw new Error(`Invalid working directory: ${workingDirectory}`)
+  }
+
+  const content = `#!/bin/sh -l
+rm "$0" # remove script after running
+mv /__w/_temp/_github_home /github/home && \
+mv /__w/_temp/_github_workflow /github/workflow && \
+mv /__w/_temp/_runner_file_commands /github/file_commands || true && \
+mv /__w/${parts.join('/')}/ /github/workspace && \
+cd /github/workspace && \
+exec ${environmentPrefix} ${entryPoint} ${
+    entryPointArgs?.length ? entryPointArgs.join(' ') : ''
+  }
+`
+  const filename = `${uuidv4()}.sh`
+  const entryPointPath = `${dst}/${filename}`
+  core.debug(`Writing container step script to ${entryPointPath}`)
+  fs.writeFileSync(entryPointPath, content)
+  return {
+    containerPath: `/__w/_temp/${filename}`,
+    runnerPath: entryPointPath
+  }
+}
+
+function scriptEnv(envs?: { [key: string]: string }): string {
+  if (!envs || !Object.entries(envs).length) {
+    return ''
+  }
+  const envBuffer: string[] = []
+  for (const [key, value] of Object.entries(envs)) {
+    if (
+      key.includes(`=`) ||
+      key.includes(`'`) ||
+      key.includes(`"`) ||
+      key.includes(`$`)
+    ) {
+      throw new Error(
+        `environment key ${key} is invalid - the key must not contain =, $, ', or "`
+      )
+    }
+    envBuffer.push(
+      `"${key}=${value
+        .replace(/\\/g, '\\\\')
+        .replace(/"/g, '\\"')
+        .replace(/\$/g, '\\$')
+        .replace(/`/g, '\\`')}"`
+    )
+  }
+
+  if (!envBuffer?.length) {
+    return ''
+  }
+
+  return `env ${envBuffer.join(' ')} `
+}
+
 export function generateContainerName(image: string): string {
   const nameWithTag = image.split('/').pop()
-  const name = nameWithTag?.split(':').at(0)
+  const name = nameWithTag?.split(':')[0]
 
   if (!name) {
     throw new Error(`Image definition '${image}' is invalid`)
@@ -297,5 +288,18 @@ function mergeLists<T>(base?: T[], from?: T[]): T[] {
 }
 
 export function fixArgs(args: string[]): string[] {
+  // Preserve shell command strings passed via `sh -c` without re-tokenizing.
+  // Retokenizing would split the script into multiple args, breaking `sh -c`.
+  if (args.length >= 2 && args[0] === 'sh' && args[1] === '-c') {
+    return args
+  }
   return shlex.split(args.join(' '))
 }
+
+export async function sleep(ms: number): Promise<void> {
+  return new Promise(resolve => setTimeout(resolve, ms))
+}
+
+export function listDirAllCommand(dir: string): string {
+  return `cd ${shlex.quote(dir)} && find . -not -path '*/_runner_hook_responses*' -exec stat -c '%s %n' {} \\;`
+}
@@ -3,6 +3,7 @@ import { cleanupJob, prepareJob } from '../src/hooks'
 import { RunnerInstanceLabel } from '../src/hooks/constants'
 import { namespace } from '../src/k8s'
 import { TestHelper } from './test-setup'
+import { PrepareJobArgs } from 'hooklib'
 
 let testHelper: TestHelper
 
@@ -14,7 +15,10 @@ describe('Cleanup Job', () => {
     const prepareJobOutputFilePath = testHelper.createFile(
       'prepare-job-output.json'
     )
-    await prepareJob(prepareJobData.args, prepareJobOutputFilePath)
+    await prepareJob(
+      prepareJobData.args as PrepareJobArgs,
+      prepareJobOutputFilePath
+    )
   })
 
   afterEach(async () => {
@@ -32,16 +36,12 @@ describe('Cleanup Job', () => {
     kc.loadFromDefault()
     const k8sApi = kc.makeApiClient(k8s.CoreV1Api)
 
-    const podList = await k8sApi.listNamespacedPod(
-      namespace(),
-      undefined,
-      undefined,
-      undefined,
-      undefined,
-      new RunnerInstanceLabel().toString()
-    )
-
-    expect(podList.body.items.length).toBe(0)
+    const podList = await k8sApi.listNamespacedPod({
+      namespace: namespace(),
+      labelSelector: new RunnerInstanceLabel().toString()
+    })
+    expect(podList.items.length).toBe(0)
   })
 
   it('should have no runner linked secrets', async () => {
@@ -51,15 +51,11 @@ describe('Cleanup Job', () => {
     kc.loadFromDefault()
     const k8sApi = kc.makeApiClient(k8s.CoreV1Api)
 
-    const secretList = await k8sApi.listNamespacedSecret(
-      namespace(),
-      undefined,
-      undefined,
-      undefined,
-      undefined,
-      new RunnerInstanceLabel().toString()
-    )
-
-    expect(secretList.body.items.length).toBe(0)
+    const secretList = await k8sApi.listNamespacedSecret({
+      namespace: namespace(),
+      labelSelector: new RunnerInstanceLabel().toString()
+    })
+    expect(secretList.items.length).toBe(0)
   })
 })
@@ -6,6 +6,7 @@ import {
   runScriptStep
 } from '../src/hooks'
 import { TestHelper } from './test-setup'
+import { RunContainerStepArgs, RunScriptStepArgs } from 'hooklib'
 
 jest.useRealTimers()
 
@@ -25,6 +26,7 @@ describe('e2e', () => {
   afterEach(async () => {
     await testHelper.cleanup()
   })
 
   it('should prepare job, run script step, run container step then cleanup without errors', async () => {
     await expect(
       prepareJob(prepareJobData.args, prepareJobOutputFilePath)
@@ -36,13 +38,16 @@ describe('e2e', () => {
     const prepareJobOutputData = JSON.parse(prepareJobOutputJson.toString())
 
     await expect(
-      runScriptStep(scriptStepData.args, prepareJobOutputData.state, null)
+      runScriptStep(
+        scriptStepData.args as RunScriptStepArgs,
+        prepareJobOutputData.state
+      )
     ).resolves.not.toThrow()
 
     const runContainerStepData = testHelper.getRunContainerStepDefinition()
 
     await expect(
-      runContainerStep(runContainerStepData.args)
+      runContainerStep(runContainerStepData.args as RunContainerStepArgs)
     ).resolves.not.toThrow()
 
     await expect(cleanupJob()).resolves.not.toThrow()
@@ -1,9 +1,8 @@
 import * as fs from 'fs'
-import { containerPorts, POD_VOLUME_NAME } from '../src/k8s'
+import { containerPorts } from '../src/k8s'
 import {
-  containerVolumes,
   generateContainerName,
-  writeEntryPointScript,
+  writeRunScript,
   mergePodSpecWithOptions,
   mergeContainerWithOptions,
   readExtensionFromFile,
@@ -27,91 +26,55 @@ describe('k8s utils', () => {
 
   it('should not throw', () => {
     expect(() =>
-      writeEntryPointScript(
-        '/test',
-        'sh',
-        ['-e', 'script.sh'],
-        ['/prepend/path'],
-        {
-          SOME_ENV: 'SOME_VALUE'
-        }
-      )
+      writeRunScript('/test', 'sh', ['-e', 'script.sh'], ['/prepend/path'], {
+        SOME_ENV: 'SOME_VALUE'
+      })
     ).not.toThrow()
   })
 
   it('should throw if RUNNER_TEMP is not set', () => {
     delete process.env.RUNNER_TEMP
     expect(() =>
-      writeEntryPointScript(
-        '/test',
-        'sh',
-        ['-e', 'script.sh'],
-        ['/prepend/path'],
-        {
-          SOME_ENV: 'SOME_VALUE'
-        }
-      )
+      writeRunScript('/test', 'sh', ['-e', 'script.sh'], ['/prepend/path'], {
+        SOME_ENV: 'SOME_VALUE'
+      })
     ).toThrow()
   })
 
   it('should throw if environment variable name contains double quote', () => {
     expect(() =>
-      writeEntryPointScript(
-        '/test',
-        'sh',
-        ['-e', 'script.sh'],
-        ['/prepend/path'],
-        {
-          'SOME"_ENV': 'SOME_VALUE'
-        }
-      )
+      writeRunScript('/test', 'sh', ['-e', 'script.sh'], ['/prepend/path'], {
+        'SOME"_ENV': 'SOME_VALUE'
+      })
    ).toThrow()
  })
 
  it('should throw if environment variable name contains =', () => {
    expect(() =>
-      writeEntryPointScript(
-        '/test',
-        'sh',
-        ['-e', 'script.sh'],
-        ['/prepend/path'],
-        {
-          'SOME=ENV': 'SOME_VALUE'
-        }
-      )
+      writeRunScript('/test', 'sh', ['-e', 'script.sh'], ['/prepend/path'], {
+        'SOME=ENV': 'SOME_VALUE'
+      })
    ).toThrow()
  })
 
  it('should throw if environment variable name contains single quote', () => {
    expect(() =>
-      writeEntryPointScript(
-        '/test',
-        'sh',
-        ['-e', 'script.sh'],
-        ['/prepend/path'],
-        {
-          "SOME'_ENV": 'SOME_VALUE'
-        }
-      )
+      writeRunScript('/test', 'sh', ['-e', 'script.sh'], ['/prepend/path'], {
+        "SOME'_ENV": 'SOME_VALUE'
+      })
    ).toThrow()
  })
 
  it('should throw if environment variable name contains dollar', () => {
    expect(() =>
-      writeEntryPointScript(
-        '/test',
-        'sh',
-        ['-e', 'script.sh'],
-        ['/prepend/path'],
-        {
-          SOME_$_ENV: 'SOME_VALUE'
-        }
-      )
+      writeRunScript('/test', 'sh', ['-e', 'script.sh'], ['/prepend/path'], {
+        SOME_$_ENV: 'SOME_VALUE'
+      })
    ).toThrow()
  })
 
  it('should escape double quote, dollar and backslash in environment variable values', () => {
-    const { runnerPath } = writeEntryPointScript(
+    const { runnerPath } = writeRunScript(
      '/test',
      'sh',
      ['-e', 'script.sh'],
@@ -130,7 +93,7 @@ describe('k8s utils', () => {
  })
 
  it('should return object with containerPath and runnerPath', () => {
-    const { containerPath, runnerPath } = writeEntryPointScript(
+    const { containerPath, runnerPath } = writeRunScript(
      '/test',
      'sh',
      ['-e', 'script.sh'],
@@ -145,7 +108,7 @@ describe('k8s utils', () => {
  })
 
  it('should write entrypoint path and the file should exist', () => {
-    const { runnerPath } = writeEntryPointScript(
+    const { runnerPath } = writeRunScript(
      '/test',
      'sh',
      ['-e', 'script.sh'],
@@ -168,88 +131,6 @@ describe('k8s utils', () => {
    await testHelper.cleanup()
  })
 
-  it('should throw if container action and GITHUB_WORKSPACE env is not set', () => {
-    delete process.env.GITHUB_WORKSPACE
-    expect(() => containerVolumes([], true, true)).toThrow()
-    expect(() => containerVolumes([], false, true)).toThrow()
-  })
-
-  it('should always have work mount', () => {
-    let volumes = containerVolumes([], true, true)
-    expect(volumes.find(e => e.mountPath === '/__w')).toBeTruthy()
-    volumes = containerVolumes([], true, false)
-    expect(volumes.find(e => e.mountPath === '/__w')).toBeTruthy()
-    volumes = containerVolumes([], false, true)
-    expect(volumes.find(e => e.mountPath === '/__w')).toBeTruthy()
-    volumes = containerVolumes([], false, false)
-    expect(volumes.find(e => e.mountPath === '/__w')).toBeTruthy()
-  })
-
-  it('should always have /github/workflow mount if working on container job or container action', () => {
-    let volumes = containerVolumes([], true, true)
-    expect(volumes.find(e => e.mountPath === '/github/workflow')).toBeTruthy()
-    volumes = containerVolumes([], true, false)
-    expect(volumes.find(e => e.mountPath === '/github/workflow')).toBeTruthy()
-    volumes = containerVolumes([], false, true)
-    expect(volumes.find(e => e.mountPath === '/github/workflow')).toBeTruthy()
-
-    volumes = containerVolumes([], false, false)
-    expect(
-      volumes.find(e => e.mountPath === '/github/workflow')
-    ).toBeUndefined()
-  })
-
-  it('should have container action volumes', () => {
-    let volumes = containerVolumes([], true, true)
-    let workspace = volumes.find(e => e.mountPath === '/github/workspace')
-    let fileCommands = volumes.find(
-      e => e.mountPath === '/github/file_commands'
-    )
-    expect(workspace).toBeTruthy()
-    expect(workspace?.subPath).toBe('repo/repo')
-    expect(fileCommands).toBeTruthy()
-    expect(fileCommands?.subPath).toBe('_temp/_runner_file_commands')
-
-    volumes = containerVolumes([], false, true)
-    workspace = volumes.find(e => e.mountPath === '/github/workspace')
-    fileCommands = volumes.find(e => e.mountPath === '/github/file_commands')
-    expect(workspace).toBeTruthy()
|
|
||||||
expect(workspace?.subPath).toBe('repo/repo')
|
|
||||||
expect(fileCommands).toBeTruthy()
|
|
||||||
expect(fileCommands?.subPath).toBe('_temp/_runner_file_commands')
|
|
||||||
})
|
|
||||||
|
|
||||||
it('should have externals, github home mounts if job container', () => {
|
|
||||||
const volumes = containerVolumes()
|
|
||||||
expect(volumes.find(e => e.mountPath === '/__e')).toBeTruthy()
|
|
||||||
expect(volumes.find(e => e.mountPath === '/github/home')).toBeTruthy()
|
|
||||||
})
|
|
||||||
|
|
||||||
it('should throw if user volume source volume path is not in workspace', () => {
|
|
||||||
expect(() =>
|
|
||||||
containerVolumes(
|
|
||||||
[
|
|
||||||
{
|
|
||||||
sourceVolumePath: '/outside/of/workdir'
|
|
||||||
}
|
|
||||||
],
|
|
||||||
true,
|
|
||||||
false
|
|
||||||
)
|
|
||||||
).toThrow()
|
|
||||||
})
|
|
||||||
|
|
||||||
it(`all volumes should have name ${POD_VOLUME_NAME}`, () => {
|
|
||||||
let volumes = containerVolumes([], true, true)
|
|
||||||
expect(volumes.every(e => e.name === POD_VOLUME_NAME)).toBeTruthy()
|
|
||||||
volumes = containerVolumes([], true, false)
|
|
||||||
expect(volumes.every(e => e.name === POD_VOLUME_NAME)).toBeTruthy()
|
|
||||||
volumes = containerVolumes([], false, true)
|
|
||||||
expect(volumes.every(e => e.name === POD_VOLUME_NAME)).toBeTruthy()
|
|
||||||
volumes = containerVolumes([], false, false)
|
|
||||||
expect(volumes.every(e => e.name === POD_VOLUME_NAME)).toBeTruthy()
|
|
||||||
})
|
|
||||||
|
|
||||||
it('should parse container ports', () => {
|
it('should parse container ports', () => {
|
||||||
const tt = [
|
const tt = [
|
||||||
{
|
{
|
||||||
|
|||||||
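Editor's note: the hunks above condense each multi-line writeEntryPointScript(...) call into a single writeRunScript(...) call. A minimal sketch of the signature those test calls imply follows; the parameter names are assumptions inferred from the argument values ('/test', 'sh', ['-e', 'script.sh'], ['/prepend/path'], { SOME_ENV: ... }) rather than taken from src/k8s/utils, and the return shape follows from the destructuring in the tests.

// Hypothetical signature inferred from the test calls above; parameter names are assumptions.
declare function writeRunScript(
  workingDirectory: string,
  entryPoint: string,
  entryPointArgs?: string[],
  prependPath?: string[],
  environmentVariables?: { [key: string]: string }
): { containerPath: string; runnerPath: string }

// Example mirroring the tests:
// const { containerPath, runnerPath } = writeRunScript('/test', 'sh', ['-e', 'script.sh'])
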
@@ -3,15 +3,9 @@ import * as path from 'path'
 import { cleanupJob } from '../src/hooks'
 import { createContainerSpec, prepareJob } from '../src/hooks/prepare-job'
 import { TestHelper } from './test-setup'
-import {
-  ENV_HOOK_TEMPLATE_PATH,
-  ENV_USE_KUBE_SCHEDULER,
-  generateContainerName,
-  readExtensionFromFile
-} from '../src/k8s/utils'
-import { getPodByName } from '../src/k8s'
+import { ENV_HOOK_TEMPLATE_PATH, generateContainerName } from '../src/k8s/utils'
+import { execPodStep, getPodByName } from '../src/k8s'
 import { V1Container } from '@kubernetes/client-node'
-import * as yaml from 'js-yaml'
 import { JOB_CONTAINER_NAME } from '../src/hooks/constants'

 jest.useRealTimers()
@@ -47,19 +41,34 @@ describe('Prepare job', () => {
   })

   it('should prepare job with absolute path for userVolumeMount', async () => {
+    const userVolumeMount = path.join(
+      process.env.GITHUB_WORKSPACE as string,
+      'myvolume'
+    )
+    fs.mkdirSync(userVolumeMount, { recursive: true })
+    fs.writeFileSync(path.join(userVolumeMount, 'file.txt'), 'hello')
     prepareJobData.args.container.userMountVolumes = [
       {
-        sourceVolumePath: path.join(
-          process.env.GITHUB_WORKSPACE as string,
-          '/myvolume'
-        ),
-        targetVolumePath: '/volume_mount',
+        sourceVolumePath: userVolumeMount,
+        targetVolumePath: '/__w/myvolume',
         readOnly: false
       }
     ]
     await expect(
       prepareJob(prepareJobData.args, prepareJobOutputFilePath)
     ).resolves.not.toThrow()
+
+    const content = JSON.parse(
+      fs.readFileSync(prepareJobOutputFilePath).toString()
+    )
+
+    await execPodStep(
+      ['sh', '-c', '[ "$(cat /__w/myvolume/file.txt)" = "hello" ] || exit 5'],
+      content!.state!.jobPod,
+      JOB_CONTAINER_NAME
+    ).then(output => {
+      expect(output).toBe(0)
+    })
   })

   it('should prepare job with envs CI and GITHUB_ACTIONS', async () => {
@@ -110,19 +119,6 @@ describe('Prepare job', () => {
     )
   })

-  it('should throw an exception if the user volume mount is absolute path outside of GITHUB_WORKSPACE', async () => {
-    prepareJobData.args.container.userMountVolumes = [
-      {
-        sourceVolumePath: '/somewhere/not/in/gh-workspace',
-        targetVolumePath: '/containermount',
-        readOnly: false
-      }
-    ]
-    await expect(
-      prepareJob(prepareJobData.args, prepareJobOutputFilePath)
-    ).rejects.toThrow()
-  })
-
   it('should not run prepare job without the job container', async () => {
     prepareJobData.args.container = undefined
     await expect(
@@ -168,8 +164,7 @@ describe('Prepare job', () => {

     expect(got.metadata?.annotations?.['annotated-by']).toBe('extension')
     expect(got.metadata?.labels?.['labeled-by']).toBe('extension')
-    expect(got.spec?.securityContext?.runAsUser).toBe(1000)
-    expect(got.spec?.securityContext?.runAsGroup).toBe(3000)
+    expect(got.spec?.restartPolicy).toBe('Never')

     // job container
     expect(got.spec?.containers[0].name).toBe(JOB_CONTAINER_NAME)
@@ -219,17 +214,6 @@ describe('Prepare job', () => {
     expect(content.context.services.length).toBe(1)
   })

-  it('should not throw exception using kube scheduler', async () => {
-    // only for ReadWriteMany volumes or single node cluster
-    process.env[ENV_USE_KUBE_SCHEDULER] = 'true'
-
-    await expect(
-      prepareJob(prepareJobData.args, prepareJobOutputFilePath)
-    ).resolves.not.toThrow()
-
-    delete process.env[ENV_USE_KUBE_SCHEDULER]
-  })
-
   test.each([undefined, null, []])(
     'should not throw exception when portMapping=%p',
     async pm => {
@@ -243,4 +227,20 @@ describe('Prepare job', () => {
       expect(() => content.context.services[0].image).not.toThrow()
     }
   )
+
+  it('should prepare job with container with non-root user', async () => {
+    prepareJobData.args!.container!.image =
+      'ghcr.io/actions/actions-runner:latest' // known to use user 1001
+    await expect(
+      prepareJob(prepareJobData.args, prepareJobOutputFilePath)
+    ).resolves.not.toThrow()
+
+    const content = JSON.parse(
+      fs.readFileSync(prepareJobOutputFilePath).toString()
+    )
+    expect(content.state.jobPod).toBeTruthy()
+    expect(content.context.container.image).toBe(
+      'ghcr.io/actions/actions-runner:latest'
+    )
+  })
 })

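Editor's note: the new userVolumeMount test above verifies the mounted file from inside the job pod via execPodStep. Judging only from that call site (a shell command array, the pod name from the prepare-job output state, and JOB_CONTAINER_NAME, with the awaited result compared to 0), its signature is presumably along these lines; treat the parameter names and the numeric return type as assumptions.

// Assumed shape of execPodStep from ../src/k8s, inferred from the call above.
declare function execPodStep(
  command: string[],
  podName: string,
  containerName: string
): Promise<number>
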
@@ -1,4 +1,4 @@
-import { runContainerStep } from '../src/hooks'
+import { prepareJob, runContainerStep } from '../src/hooks'
 import { TestHelper } from './test-setup'
 import { ENV_HOOK_TEMPLATE_PATH } from '../src/k8s/utils'
 import * as fs from 'fs'
@@ -10,11 +10,16 @@ jest.useRealTimers()
 let testHelper: TestHelper

 let runContainerStepData: any
+let prepareJobData: any
+let prepareJobOutputFilePath: string

 describe('Run container step', () => {
   beforeEach(async () => {
     testHelper = new TestHelper()
     await testHelper.initialize()
+    prepareJobData = testHelper.getPrepareJobDefinition()
+    prepareJobOutputFilePath = testHelper.createFile('prepare-job-output.json')
+    await prepareJob(prepareJobData.args, prepareJobOutputFilePath)
     runContainerStepData = testHelper.getRunContainerStepDefinition()
   })

@@ -22,11 +27,6 @@ describe('Run container step', () => {
     await testHelper.cleanup()
   })

-  it('should not throw', async () => {
-    const exitCode = await runContainerStep(runContainerStepData.args)
-    expect(exitCode).toBe(0)
-  })
-
   it('should run pod with extensions applied', async () => {
     const extension = {
       metadata: {
@@ -42,7 +42,7 @@ describe('Run container step', () => {
         {
           name: JOB_CONTAINER_EXTENSION_NAME,
           command: ['sh'],
-          args: ['-c', 'echo test']
+          args: ['-c', 'sleep 10000']
         },
         {
           name: 'side-container',
@@ -51,11 +51,7 @@ describe('Run container step', () => {
           args: ['-c', 'echo test']
         }
       ],
-      restartPolicy: 'Never',
-      securityContext: {
-        runAsUser: 1000,
-        runAsGroup: 3000
-      }
+      restartPolicy: 'Never'
       }
     }

@@ -1,6 +1,7 @@
 import * as fs from 'fs'
 import { cleanupJob, prepareJob, runScriptStep } from '../src/hooks'
 import { TestHelper } from './test-setup'
+import { PrepareJobArgs, RunScriptStepArgs } from 'hooklib'

 jest.useRealTimers()

@@ -8,7 +9,9 @@ let testHelper: TestHelper

 let prepareJobOutputData: any

-let runScriptStepDefinition
+let runScriptStepDefinition: {
+  args: RunScriptStepArgs
+}

 describe('Run script step', () => {
   beforeEach(async () => {
@@ -19,9 +22,14 @@ describe('Run script step', () => {
     )

     const prepareJobData = testHelper.getPrepareJobDefinition()
-    runScriptStepDefinition = testHelper.getRunScriptStepDefinition()
+    runScriptStepDefinition = testHelper.getRunScriptStepDefinition() as {
+      args: RunScriptStepArgs
+    }

-    await prepareJob(prepareJobData.args, prepareJobOutputFilePath)
+    await prepareJob(
+      prepareJobData.args as PrepareJobArgs,
+      prepareJobOutputFilePath
+    )
     const outputContent = fs.readFileSync(prepareJobOutputFilePath)
     prepareJobOutputData = JSON.parse(outputContent.toString())
   })
@@ -37,22 +45,14 @@ describe('Run script step', () => {

   it('should not throw an exception', async () => {
     await expect(
-      runScriptStep(
-        runScriptStepDefinition.args,
-        prepareJobOutputData.state,
-        null
-      )
+      runScriptStep(runScriptStepDefinition.args, prepareJobOutputData.state)
     ).resolves.not.toThrow()
   })

   it('should fail if the working directory does not exist', async () => {
     runScriptStepDefinition.args.workingDirectory = '/foo/bar'
     await expect(
-      runScriptStep(
-        runScriptStepDefinition.args,
-        prepareJobOutputData.state,
-        null
-      )
+      runScriptStep(runScriptStepDefinition.args, prepareJobOutputData.state)
     ).rejects.toThrow()
   })

@@ -64,16 +64,12 @@ describe('Run script step', () => {
       "'if [[ -z $NODE_ENV ]]; then exit 1; fi'"
     ]
     await expect(
-      runScriptStep(
-        runScriptStepDefinition.args,
-        prepareJobOutputData.state,
-        null
-      )
+      runScriptStep(runScriptStepDefinition.args, prepareJobOutputData.state)
     ).resolves.not.toThrow()
   })

   it('Should have path variable changed in container with prepend path string', async () => {
-    runScriptStepDefinition.args.prependPath = '/some/path'
+    runScriptStepDefinition.args.prependPath = ['/some/path']
     runScriptStepDefinition.args.entryPoint = '/bin/bash'
     runScriptStepDefinition.args.entryPointArgs = [
       '-c',
@@ -81,11 +77,7 @@ describe('Run script step', () => {
     ]

     await expect(
-      runScriptStep(
-        runScriptStepDefinition.args,
-        prepareJobOutputData.state,
-        null
-      )
+      runScriptStep(runScriptStepDefinition.args, prepareJobOutputData.state)
     ).resolves.not.toThrow()
   })

@@ -103,11 +95,7 @@ describe('Run script step', () => {
     ]

     await expect(
-      runScriptStep(
-        runScriptStepDefinition.args,
-        prepareJobOutputData.state,
-        null
-      )
+      runScriptStep(runScriptStepDefinition.args, prepareJobOutputData.state)
     ).resolves.not.toThrow()
   })

@@ -122,11 +110,7 @@ describe('Run script step', () => {
     ]

     await expect(
-      runScriptStep(
-        runScriptStepDefinition.args,
-        prepareJobOutputData.state,
-        null
-      )
+      runScriptStep(runScriptStepDefinition.args, prepareJobOutputData.state)
     ).resolves.not.toThrow()
   })
 })

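Editor's note: the run-script-step hunks above type the fixture against RunScriptStepArgs from hooklib, drop the third null argument so runScriptStep is called as runScriptStep(args, state), and turn prependPath into an array. A rough sketch of the fields these tests rely on is shown below; any field or optionality not exercised above is an assumption rather than the hooklib definition.

// Only the fields used by the tests above; the real hooklib type may differ.
interface RunScriptStepArgsSketch {
  entryPoint: string
  entryPointArgs: string[]
  prependPath?: string[] // previously a single string such as '/some/path'
  workingDirectory: string
}

const exampleArgs: RunScriptStepArgsSketch = {
  entryPoint: '/bin/bash',
  entryPointArgs: ['-c', 'echo "$PATH"'],
  prependPath: ['/some/path'],
  workingDirectory: '/__w/repo/repo' // hypothetical path, for illustration only
}
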
@@ -9,87 +9,97 @@ const kc = new k8s.KubeConfig()
 kc.loadFromDefault()

 const k8sApi = kc.makeApiClient(k8s.CoreV1Api)
-const k8sStorageApi = kc.makeApiClient(k8s.StorageV1Api)

 export class TestHelper {
   private tempDirPath: string
   private podName: string
+  private runnerWorkdir: string
+  private runnerTemp: string

   constructor() {
     this.tempDirPath = `${__dirname}/_temp/runner`
+    this.runnerWorkdir = `${this.tempDirPath}/_work`
+    this.runnerTemp = `${this.tempDirPath}/_work/_temp`
     this.podName = uuidv4().replace(/-/g, '')
   }

-  public async initialize(): Promise<void> {
+  async initialize(): Promise<void> {
     process.env['ACTIONS_RUNNER_POD_NAME'] = `${this.podName}`
-    process.env['RUNNER_WORKSPACE'] = `${this.tempDirPath}/_work/repo`
-    process.env['RUNNER_TEMP'] = `${this.tempDirPath}/_work/_temp`
-    process.env['GITHUB_WORKSPACE'] = `${this.tempDirPath}/_work/repo/repo`
+    process.env['RUNNER_WORKSPACE'] = `${this.runnerWorkdir}/repo`
+    process.env['RUNNER_TEMP'] = `${this.runnerTemp}`
+    process.env['GITHUB_WORKSPACE'] = `${this.runnerWorkdir}/repo/repo`
     process.env['ACTIONS_RUNNER_KUBERNETES_NAMESPACE'] = 'default'

-    fs.mkdirSync(`${this.tempDirPath}/_work/repo/repo`, { recursive: true })
+    fs.mkdirSync(`${this.runnerWorkdir}/repo/repo`, { recursive: true })
     fs.mkdirSync(`${this.tempDirPath}/externals`, { recursive: true })
-    fs.mkdirSync(process.env.RUNNER_TEMP, { recursive: true })
+    fs.mkdirSync(this.runnerTemp, { recursive: true })
+    fs.mkdirSync(`${this.runnerTemp}/_github_workflow`, { recursive: true })
+    fs.mkdirSync(`${this.runnerTemp}/_github_home`, { recursive: true })
+    fs.mkdirSync(`${this.runnerTemp}/_runner_file_commands`, {
+      recursive: true
+    })

     fs.copyFileSync(
       path.resolve(`${__dirname}/../../../examples/example-script.sh`),
-      `${process.env.RUNNER_TEMP}/example-script.sh`
+      `${this.runnerTemp}/example-script.sh`
     )

     await this.cleanupK8sResources()
     try {
-      await this.createTestVolume()
       await this.createTestJobPod()
     } catch (e) {
       console.log(e)
     }
   }

-  public async cleanup(): Promise<void> {
+  async cleanup(): Promise<void> {
     try {
       await this.cleanupK8sResources()
       fs.rmSync(this.tempDirPath, { recursive: true })
-    } catch {}
+    } catch {
+      // Ignore errors during cleanup
+    }
   }
-  public async cleanupK8sResources() {
+
+  async cleanupK8sResources(): Promise<void> {
     await k8sApi
-      .deleteNamespacedPersistentVolumeClaim(
-        `${this.podName}-work`,
-        'default',
-        undefined,
-        undefined,
-        0
-      )
-      .catch(e => {})
-    await k8sApi.deletePersistentVolume(`${this.podName}-pv`).catch(e => {})
-    await k8sStorageApi.deleteStorageClass('local-storage').catch(e => {})
+      .deleteNamespacedPod({
+        name: this.podName,
+        namespace: 'default',
+        gracePeriodSeconds: 0
+      })
+      .catch((e: k8s.ApiException<any>) => {
+        if (e.code !== 404) {
+          console.error(JSON.stringify(e))
+        }
+      })
     await k8sApi
-      .deleteNamespacedPod(this.podName, 'default', undefined, undefined, 0)
-      .catch(e => {})
-    await k8sApi
-      .deleteNamespacedPod(
-        `${this.podName}-workflow`,
-        'default',
-        undefined,
-        undefined,
-        0
-      )
-      .catch(e => {})
+      .deleteNamespacedPod({
+        name: `${this.podName}-workflow`,
+        namespace: 'default',
+        gracePeriodSeconds: 0
+      })
+      .catch((e: k8s.ApiException<any>) => {
+        if (e.code !== 404) {
+          console.error(JSON.stringify(e))
+        }
+      })
   }
-  public createFile(fileName?: string): string {
+  createFile(fileName?: string): string {
     const filePath = `${this.tempDirPath}/${fileName || uuidv4()}`
     fs.writeFileSync(filePath, '')
     return filePath
   }

-  public removeFile(fileName: string): void {
+  removeFile(fileName: string): void {
     const filePath = `${this.tempDirPath}/${fileName}`
     fs.rmSync(filePath)
   }

-  public async createTestJobPod() {
+  async createTestJobPod(): Promise<void> {
     const container = {
-      name: 'nginx',
-      image: 'nginx:latest',
+      name: 'runner',
+      image: 'ghcr.io/actions/actions-runner:latest',
       imagePullPolicy: 'IfNotPresent'
     } as k8s.V1Container

@@ -99,59 +109,18 @@ export class TestHelper {
       },
       spec: {
         restartPolicy: 'Never',
-        containers: [container]
+        containers: [container],
+        securityContext: {
+          runAsUser: 1001,
+          runAsGroup: 1001,
+          fsGroup: 1001
+        }
       }
     } as k8s.V1Pod
-    await k8sApi.createNamespacedPod('default', pod)
+    await k8sApi.createNamespacedPod({ namespace: 'default', body: pod })
   }

-  public async createTestVolume() {
-    var sc: k8s.V1StorageClass = {
-      metadata: {
-        name: 'local-storage'
-      },
-      provisioner: 'kubernetes.io/no-provisioner',
-      volumeBindingMode: 'Immediate'
-    }
-    await k8sStorageApi.createStorageClass(sc)
-
-    var volume: k8s.V1PersistentVolume = {
-      metadata: {
-        name: `${this.podName}-pv`
-      },
-      spec: {
-        storageClassName: 'local-storage',
-        capacity: {
-          storage: '2Gi'
-        },
-        volumeMode: 'Filesystem',
-        accessModes: ['ReadWriteOnce'],
-        hostPath: {
-          path: `${this.tempDirPath}/_work`
-        }
-      }
-    }
-    await k8sApi.createPersistentVolume(volume)
-    var volumeClaim: k8s.V1PersistentVolumeClaim = {
-      metadata: {
-        name: `${this.podName}-work`
-      },
-      spec: {
-        accessModes: ['ReadWriteOnce'],
-        volumeMode: 'Filesystem',
-        storageClassName: 'local-storage',
-        volumeName: `${this.podName}-pv`,
-        resources: {
-          requests: {
-            storage: '1Gi'
-          }
-        }
-      }
-    }
-    await k8sApi.createNamespacedPersistentVolumeClaim('default', volumeClaim)
-  }
-
-  public getPrepareJobDefinition(): HookData {
+  getPrepareJobDefinition(): HookData {
     const prepareJob = JSON.parse(
       fs.readFileSync(
         path.resolve(__dirname + '/../../../examples/prepare-job.json'),
@@ -168,7 +137,7 @@ export class TestHelper {
     return prepareJob
   }

-  public getRunScriptStepDefinition(): HookData {
+  getRunScriptStepDefinition(): HookData {
     const runScriptStep = JSON.parse(
       fs.readFileSync(
         path.resolve(__dirname + '/../../../examples/run-script-step.json'),
@@ -180,7 +149,7 @@ export class TestHelper {
     return runScriptStep
   }

-  public getRunContainerStepDefinition(): HookData {
+  getRunContainerStepDefinition(): HookData {
     const runContainerStep = JSON.parse(
       fs.readFileSync(
         path.resolve(__dirname + '/../../../examples/run-container-step.json'),

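Editor's note: the rewritten TestHelper calls the CoreV1Api with single request objects (deleteNamespacedPod({ name, namespace, gracePeriodSeconds }), createNamespacedPod({ namespace, body })) and typed ApiException catch handlers, which appears to match the object-argument call style of newer @kubernetes/client-node releases; the exact client version is not visible in this diff. A minimal self-contained sketch of that pattern, assuming such a release:

import * as k8s from '@kubernetes/client-node'

// Delete a pod if it exists, ignoring 404s, mirroring cleanupK8sResources above.
async function deletePodIfPresent(name: string): Promise<void> {
  const kc = new k8s.KubeConfig()
  kc.loadFromDefault()
  const api = kc.makeApiClient(k8s.CoreV1Api)
  await api
    .deleteNamespacedPod({ name, namespace: 'default', gracePeriodSeconds: 0 })
    .catch((e: k8s.ApiException<any>) => {
      if (e.code !== 404) {
        console.error(JSON.stringify(e))
      }
    })
}
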
@@ -5,7 +5,8 @@
     "outDir": "./lib",
     "rootDir": "./src"
   },
+  "esModuleInterop": true, /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */
   "include": [
-    "./src"
+    "src/**/*",
   ]
 }

packages/k8s/tsconfig.test.json (new file, 6 lines)
@@ -0,0 +1,6 @@
+{
+  "compilerOptions": {
+    "allowJs": true
+  },
+  "extends": "./tsconfig.json"
+}

@@ -1,24 +1,15 @@
 ## Features

-- k8s: Use pod affinity when KubeScheduler is enabled [#212]
-- docker: support alternative network modes [#209]
+- k8s: remove dependency on the runner's volume [#244]

 ## Bugs

-- Expose CI=true and GITHUB_ACTIONS env variables [#215]
-- k8s: add /github/home to containerAction mounts and surface createSecretForEnvs errors [#198]
-- k8s: start logging from the beginning [#184]
+- docker: fix readOnly volumes in createContainer [#236]

 ## Misc

-- Bump node in tests to node 22 since node14 is quite old [#216]
-- Bump jsonpath-plus from 10.1.0 to 10.3.0 in /packages/k8s [#213]
-- Bump braces from 3.0.2 to 3.0.3 in /packages/hooklib [#194]
-- Bump cross-spawn from 7.0.3 to 7.0.6 in /packages/k8s [#196]
-- Bump ws from 7.5.8 to 7.5.10 in /packages/k8s [#192]
-- Remove dependency on deprecated release actions [#193]
-- Update to the latest available actions [#191]
+- bump all dependencies [#234] [#240] [#239] [#238]
+- bump actions [#254]


 ## SHA-256 Checksums
