Mirror of https://github.com/actions/runner-container-hooks.git (synced 2025-12-16 09:46:43 +00:00)

Compare commits: v0.6.0...dependabot (52 commits)
Commits in this range (SHA1):
5cbb16af07, d171f9e4bc, 87f8ef60ea, 5f5708a2b8, f8e1cae677, 996cc75daf, adf5e34937,
4041f8648c, 1f60eaf940, c3d8e2ab20, 3f829eef9e, 011ffb284e, 0951cc73e4, 15e808935c,
ad9cb43c31, 2934de33f8, ea25fd1b3e, c03a5fb3c1, 96c35e7cc6, c67938c536, 464be47642,
74ce64c1d0, 9a71a3a7e9, 9a858922c8, 605551ff1c, 878781f9c4, 1e051b849b, 589414ea69,
dd4f7dae2c, 7da5474a5d, 375992cd31, aae800a69b, e47f9b8af4, 54e14cb7f3, ef2229fc0b,
88dc98f8ef, b388518d40, 7afb8f9323, d4c5425b22, 120636d3d7, 5e805a0546, 27bae0b2b7,
8eed1ad1b6, 7b404841b2, 977d53963d, 77b40ac6df, ee10d95fd4, 73655d4639, ca4ea17d58,
ed70e2f8e0, aeabaf144a, 8388a36f44
@@ -1,4 +0,0 @@ (file removed)
dist/
lib/
node_modules/
**/tests/**
@@ -1,56 +0,0 @@ (file removed)
{
  "plugins": ["@typescript-eslint"],
  "extends": ["plugin:github/recommended"],
  "parser": "@typescript-eslint/parser",
  "parserOptions": {
    "ecmaVersion": 9,
    "sourceType": "module",
    "project": "./tsconfig.json"
  },
  "rules": {
    "eslint-comments/no-use": "off",
    "import/no-namespace": "off",
    "no-constant-condition": "off",
    "no-unused-vars": "off",
    "i18n-text/no-en": "off",
    "@typescript-eslint/no-unused-vars": "error",
    "@typescript-eslint/explicit-member-accessibility": ["error", {"accessibility": "no-public"}],
    "@typescript-eslint/no-require-imports": "error",
    "@typescript-eslint/array-type": "error",
    "@typescript-eslint/await-thenable": "error",
    "camelcase": "off",
    "@typescript-eslint/explicit-function-return-type": ["error", {"allowExpressions": true}],
    "@typescript-eslint/func-call-spacing": ["error", "never"],
    "@typescript-eslint/no-array-constructor": "error",
    "@typescript-eslint/no-empty-interface": "error",
    "@typescript-eslint/no-explicit-any": "warn",
    "@typescript-eslint/no-extraneous-class": "error",
    "@typescript-eslint/no-floating-promises": "error",
    "@typescript-eslint/no-for-in-array": "error",
    "@typescript-eslint/no-inferrable-types": "error",
    "@typescript-eslint/no-misused-new": "error",
    "@typescript-eslint/no-namespace": "error",
    "@typescript-eslint/no-non-null-assertion": "warn",
    "@typescript-eslint/no-unnecessary-qualifier": "error",
    "@typescript-eslint/no-unnecessary-type-assertion": "error",
    "@typescript-eslint/no-useless-constructor": "error",
    "@typescript-eslint/no-var-requires": "error",
    "@typescript-eslint/prefer-for-of": "warn",
    "@typescript-eslint/prefer-function-type": "warn",
    "@typescript-eslint/prefer-includes": "error",
    "@typescript-eslint/prefer-string-starts-ends-with": "error",
    "@typescript-eslint/promise-function-async": "error",
    "@typescript-eslint/require-array-sort-compare": "error",
    "@typescript-eslint/restrict-plus-operands": "error",
    "semi": "off",
    "@typescript-eslint/semi": ["error", "never"],
    "@typescript-eslint/type-annotation-spacing": "error",
    "@typescript-eslint/unbound-method": "error",
    "no-shadow": "off",
    "@typescript-eslint/no-shadow": ["error"]
  },
  "env": {
    "node": true,
    "es6": true
  }
}
.github/dependabot.yml (vendored, new file, 28 lines)
@@ -0,0 +1,28 @@
version: 2

updates:
  # Group updates into a single PR per workspace package
  - package-ecosystem: npm
    directory: "/packages/docker"
    schedule:
      interval: weekly
    groups:
      all-dependencies:
        patterns:
          - "*"
  - package-ecosystem: npm
    directory: "/packages/hooklib"
    schedule:
      interval: weekly
    groups:
      all-dependencies:
        patterns:
          - "*"
  - package-ecosystem: npm
    directory: "/packages/k8s"
    schedule:
      interval: weekly
    groups:
      all-dependencies:
        patterns:
          - "*"
.github/workflows/build.yaml (vendored, 51 changed lines)
@@ -6,14 +6,50 @@ on:
paths-ignore:
- '**.md'
workflow_dispatch:

jobs:
build:
format-and-lint:
name: Format & Lint Checks
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v5
- run: npm install
  name: Install dependencies
- run: npm run bootstrap
  name: Bootstrap the packages
- run: npm run build-all
  name: Build packages
- run: npm run format-check
  name: Check formatting
- name: Check linter
  run: |
    npm run lint
    git diff --exit-code -- . ':!packages/k8s/tests/test-kind.yaml'

docker-tests:
name: Docker Hook Tests
runs-on: ubuntu-latest
needs: format-and-lint
steps:
- uses: actions/checkout@v5
- run: npm install
  name: Install dependencies
- run: npm run bootstrap
  name: Bootstrap the packages
- run: npm run build-all
  name: Build packages
- name: Run Docker tests
  run: npm run test --prefix packages/docker

k8s-tests:
name: Kubernetes Hook Tests
runs-on: ubuntu-latest
needs: format-and-lint
steps:
- uses: actions/checkout@v5
- run: sed -i "s|{{PATHTOREPO}}|$(pwd)|" packages/k8s/tests/test-kind.yaml
  name: Setup kind cluster yaml config
- uses: helm/kind-action@v1.2.0
- uses: helm/kind-action@v1.12.0
  with:
    config: packages/k8s/tests/test-kind.yaml
- run: npm install
@@ -22,10 +58,5 @@ jobs:
  name: Bootstrap the packages
- run: npm run build-all
  name: Build packages
- run: npm run format-check
- name: Check linter
  run: |
    npm run lint
    git diff --exit-code -- ':!packages/k8s/tests/test-kind.yaml'
- name: Run tests
  run: npm run test
- name: Run Kubernetes tests
  run: npm run test --prefix packages/k8s
.github/workflows/codeql-analysis.yml (vendored, 8 changed lines)
@@ -38,11 +38,11 @@ jobs:
steps:
- name: Checkout repository
  uses: actions/checkout@v3
  uses: actions/checkout@v5

# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
  uses: github/codeql-action/init@v2
  uses: github/codeql-action/init@v3
  with:
    languages: ${{ matrix.language }}
    # If you wish to specify custom queries, you can do so here or in a config file.
@@ -56,7 +56,7 @@ jobs:
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
  uses: github/codeql-action/autobuild@v2
  uses: github/codeql-action/autobuild@v3

# ℹ️ Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
@@ -69,4 +69,4 @@ jobs:
#   ./location_of_script_within_repo/buildscript.sh

- name: Perform CodeQL Analysis
  uses: github/codeql-action/analyze@v2
  uses: github/codeql-action/analyze@v3
.github/workflows/release.yaml (vendored, 88 changed lines)
@@ -1,76 +1,70 @@
name: CD - Release new version

on:
  workflow_dispatch:

permissions:
  contents: write

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
    - uses: actions/checkout@v3
    - run: npm install
      name: Install dependencies
    - run: npm run bootstrap
      name: Bootstrap the packages
    - run: npm run build-all
      name: Build packages
    - uses: actions/github-script@v6
    - uses: actions/checkout@v5

    - name: Install dependencies
      run: npm install

    - name: Bootstrap the packages
      run: npm run bootstrap

    - name: Build packages
      run: npm run build-all

    - uses: actions/github-script@v8
      id: releaseVersion
      with:
        GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        result-encoding: string
        script: |
          const fs = require('fs');
          const hookVersion = require('./package.json').version
          core.setOutput('version', hookVersion);
          return require('./package.json').version

    - name: Zip up releases
      run: |
        zip -r -j actions-runner-hooks-docker-${{ steps.releaseVersion.outputs.version }}.zip packages/docker/dist
        zip -r -j actions-runner-hooks-k8s-${{ steps.releaseVersion.outputs.version }}.zip packages/k8s/dist
        zip -r -j actions-runner-hooks-docker-${{ steps.releaseVersion.outputs.result }}.zip packages/docker/dist
        zip -r -j actions-runner-hooks-k8s-${{ steps.releaseVersion.outputs.result }}.zip packages/k8s/dist

    - name: Calculate SHA
      id: sha
      shell: bash
      run: |
        sha_docker=$(sha256sum actions-runner-hooks-docker-${{ steps.releaseVersion.outputs.version }}.zip | awk '{print $1}')
        sha_docker=$(sha256sum actions-runner-hooks-docker-${{ steps.releaseVersion.outputs.result }}.zip | awk '{print $1}')
        echo "Docker SHA: $sha_docker"
        echo "docker-sha=$sha_docker" >> $GITHUB_OUTPUT
        sha_k8s=$(sha256sum actions-runner-hooks-k8s-${{ steps.releaseVersion.outputs.version }}.zip | awk '{print $1}')
        sha_k8s=$(sha256sum actions-runner-hooks-k8s-${{ steps.releaseVersion.outputs.result }}.zip | awk '{print $1}')
        echo "K8s SHA: $sha_k8s"
        echo "k8s-sha=$sha_k8s" >> $GITHUB_OUTPUT
    - name: replace SHA

    - name: Create release notes
      id: releaseNotes
      uses: actions/github-script@v6
      uses: actions/github-script@v8
      with:
        GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        script: |
          const fs = require('fs');
          var releaseNotes = fs.readFileSync('${{ github.workspace }}/releaseNotes.md', 'utf8').replace(/<HOOK_VERSION>/g, '${{ steps.releaseVersion.outputs.version }}')
          var releaseNotes = fs.readFileSync('${{ github.workspace }}/releaseNotes.md', 'utf8').replace(/<HOOK_VERSION>/g, '${{ steps.releaseVersion.outputs.result }}')
          releaseNotes = releaseNotes.replace(/<DOCKER_SHA>/g, '${{ steps.sha.outputs.docker-sha }}')
          releaseNotes = releaseNotes.replace(/<K8S_SHA>/g, '${{ steps.sha.outputs.k8s-sha }}')
          console.log(releaseNotes)
          core.setOutput('note', releaseNotes);
    - uses: actions/create-release@v1
      id: createRelease
      name: Create ${{ steps.releaseVersion.outputs.version }} Hook Release
          fs.writeFileSync('${{ github.workspace }}/finalReleaseNotes.md', releaseNotes);

    - name: Create ${{ steps.releaseVersion.outputs.result }} Hook Release
      env:
        GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      with:
        tag_name: "v${{ steps.releaseVersion.outputs.version }}"
        release_name: "v${{ steps.releaseVersion.outputs.version }}"
        body: |
          ${{ steps.releaseNotes.outputs.note }}
    - name: Upload K8s hooks
      uses: actions/upload-release-asset@v1
      env:
        GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      with:
        upload_url: ${{ steps.createRelease.outputs.upload_url }}
        asset_path: ${{ github.workspace }}/actions-runner-hooks-k8s-${{ steps.releaseVersion.outputs.version }}.zip
        asset_name: actions-runner-hooks-k8s-${{ steps.releaseVersion.outputs.version }}.zip
        asset_content_type: application/octet-stream
    - name: Upload docker hooks
      uses: actions/upload-release-asset@v1
      env:
        GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      with:
        upload_url: ${{ steps.createRelease.outputs.upload_url }}
        asset_path: ${{ github.workspace }}/actions-runner-hooks-docker-${{ steps.releaseVersion.outputs.version }}.zip
        asset_name: actions-runner-hooks-docker-${{ steps.releaseVersion.outputs.version }}.zip
        asset_content_type: application/octet-stream
        GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      run: |
        gh release create v${{ steps.releaseVersion.outputs.result }} \
          --title "v${{ steps.releaseVersion.outputs.result }}" \
          --repo ${{ github.repository }} \
          --notes-file ${{ github.workspace }}/finalReleaseNotes.md \
          --latest \
          ${{ github.workspace }}/actions-runner-hooks-k8s-${{ steps.releaseVersion.outputs.result }}.zip \
          ${{ github.workspace }}/actions-runner-hooks-docker-${{ steps.releaseVersion.outputs.result }}.zip
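Note on the release workflow above: the version is resolved once by the `releaseVersion` github-script step and then reused everywhere through `steps.releaseVersion.outputs.result` (the value returned by the script, rather than an explicit `core.setOutput('version', ...)` call). As a rough sketch of what that step computes, written as standalone TypeScript (the helper name and standalone form are illustrative, not part of the workflow):

```ts
// Minimal sketch: read the top-level package.json version. In the workflow,
// returning this string from actions/github-script (with result-encoding:
// string) is what surfaces it as steps.releaseVersion.outputs.result.
import * as fs from 'fs'

function readHookVersion(packageJsonPath = './package.json'): string {
  const manifest = JSON.parse(fs.readFileSync(packageJsonPath, 'utf8')) as {
    version?: string
  }
  if (!manifest.version) {
    throw new Error(`no "version" field found in ${packageJsonPath}`)
  }
  return manifest.version
}

console.log(readHookVersion()) // e.g. "0.8.0"
```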
@@ -1 +1 @@
* @actions/actions-launch @actions/runner-akvelon
* @actions/actions-compute @nikola-jokic
README.md (24 changed lines)
@@ -3,6 +3,24 @@ The Runner Container Hooks repo provides a set of packages that implement the co

More information on how to implement your own hooks can be found in the [adr](https://github.com/actions/runner/pull/1891). The `examples` folder provides example inputs for each hook.

### Note

Thank you for your interest in this GitHub action, however, right now we are not taking contributions.

We continue to focus our resources on strategic areas that help our customers be successful while making developers' lives easier. While GitHub Actions remains a key part of this vision, we are allocating resources towards other areas of Actions and are not taking contributions to this repository at this time. The GitHub public roadmap is the best place to follow along for any updates on features we’re working on and what stage they’re in.

We are taking the following steps to better direct requests related to GitHub Actions, including:

1. We will be directing questions and support requests to our [Community Discussions area](https://github.com/orgs/community/discussions/categories/actions)

2. High Priority bugs can be reported through Community Discussions or you can report these to our support team https://support.github.com/contact/bug-report.

3. Security Issues should be handled as per our [security.md](security.md)

We will still provide security updates for this project and fix major breaking changes during this time.

You are welcome to still raise bugs in this repo.

## Background

Three projects are included in the `packages` folder
@@ -10,10 +28,6 @@ Three projects are included in the `packages` folder
- docker: A hook implementation of the runner's docker implementation. More details can be found in the [readme](./packages/docker/README.md)
- hooklib: a shared library which contains typescript definitions and utilities that the other projects consume

### Requirements

We welcome contributions. See [how to contribute to get started](./CONTRIBUTING.md).

## License

This project is licensed under the terms of the MIT open source license. Please refer to [MIT](./LICENSE.md) for the full terms.
@@ -28,4 +42,4 @@ Find a bug? Please file an issue in this repository using the issue templates.

## Code of Conduct

See our [Code of Conduct](./CODE_OF_CONDUCT.MD)
See our [Code of Conduct](./CODE_OF_CONDUCT.MD)
122
eslint.config.js
Normal file
122
eslint.config.js
Normal file
@@ -0,0 +1,122 @@
|
||||
const eslint = require('@eslint/js');
|
||||
const tseslint = require('@typescript-eslint/eslint-plugin');
|
||||
const tsparser = require('@typescript-eslint/parser');
|
||||
const globals = require('globals');
|
||||
const pluginJest = require('eslint-plugin-jest');
|
||||
|
||||
module.exports = [
|
||||
eslint.configs.recommended,
|
||||
{
|
||||
files: ['**/*.ts'],
|
||||
languageOptions: {
|
||||
parser: tsparser,
|
||||
parserOptions: {
|
||||
ecmaVersion: 2018,
|
||||
sourceType: 'module',
|
||||
project: ['./tsconfig.json', './packages/*/tsconfig.json']
|
||||
},
|
||||
globals: {
|
||||
...globals.node,
|
||||
...globals.es6
|
||||
}
|
||||
},
|
||||
plugins: {
|
||||
'@typescript-eslint': tseslint,
|
||||
},
|
||||
rules: {
|
||||
// Disabled rules from original config
|
||||
'eslint-comments/no-use': 'off',
|
||||
'import/no-namespace': 'off',
|
||||
'no-constant-condition': 'off',
|
||||
'no-unused-vars': 'off',
|
||||
'i18n-text/no-en': 'off',
|
||||
'camelcase': 'off',
|
||||
'semi': 'off',
|
||||
'no-shadow': 'off',
|
||||
|
||||
// TypeScript ESLint rules
|
||||
'@typescript-eslint/no-unused-vars': 'error',
|
||||
'@typescript-eslint/explicit-member-accessibility': ['error', { accessibility: 'no-public' }],
|
||||
'@typescript-eslint/no-require-imports': 'error',
|
||||
'@typescript-eslint/array-type': 'error',
|
||||
'@typescript-eslint/await-thenable': 'error',
|
||||
'@typescript-eslint/explicit-function-return-type': ['error', { allowExpressions: true }],
|
||||
'@typescript-eslint/no-array-constructor': 'error',
|
||||
'@typescript-eslint/no-empty-interface': 'error',
|
||||
'@typescript-eslint/no-explicit-any': 'off', // Fixed: removed duplicate and kept only this one
|
||||
'@typescript-eslint/no-extraneous-class': 'error',
|
||||
'@typescript-eslint/no-floating-promises': 'error',
|
||||
'@typescript-eslint/no-for-in-array': 'error',
|
||||
'@typescript-eslint/no-inferrable-types': 'error',
|
||||
'@typescript-eslint/no-misused-new': 'error',
|
||||
'@typescript-eslint/no-namespace': 'error',
|
||||
'@typescript-eslint/no-non-null-assertion': 'warn',
|
||||
'@typescript-eslint/no-unnecessary-qualifier': 'error',
|
||||
'@typescript-eslint/no-unnecessary-type-assertion': 'error',
|
||||
'@typescript-eslint/no-useless-constructor': 'error',
|
||||
'@typescript-eslint/no-var-requires': 'error',
|
||||
'@typescript-eslint/prefer-for-of': 'warn',
|
||||
'@typescript-eslint/prefer-function-type': 'warn',
|
||||
'@typescript-eslint/prefer-includes': 'error',
|
||||
'@typescript-eslint/prefer-string-starts-ends-with': 'error',
|
||||
'@typescript-eslint/promise-function-async': 'error',
|
||||
'@typescript-eslint/require-array-sort-compare': 'error',
|
||||
'@typescript-eslint/restrict-plus-operands': 'error',
|
||||
'@typescript-eslint/unbound-method': 'error',
|
||||
'@typescript-eslint/no-shadow': ['error']
|
||||
}
|
||||
},
|
||||
{
|
||||
// Test files configuration - Fixed file pattern to match .ts files
|
||||
files: ['**/*test*.ts', '**/*spec*.ts', '**/tests/**/*.ts'],
|
||||
languageOptions: {
|
||||
parser: tsparser,
|
||||
parserOptions: {
|
||||
ecmaVersion: 2018,
|
||||
sourceType: 'module',
|
||||
project: ['./tsconfig.json', './packages/*/tsconfig.json']
|
||||
},
|
||||
globals: {
|
||||
...globals.node,
|
||||
...globals.es6,
|
||||
// Fixed Jest globals
|
||||
describe: 'readonly',
|
||||
it: 'readonly',
|
||||
test: 'readonly',
|
||||
expect: 'readonly',
|
||||
beforeEach: 'readonly',
|
||||
afterEach: 'readonly',
|
||||
beforeAll: 'readonly',
|
||||
afterAll: 'readonly',
|
||||
jest: 'readonly'
|
||||
}
|
||||
},
|
||||
plugins: {
|
||||
'@typescript-eslint': tseslint,
|
||||
jest: pluginJest
|
||||
},
|
||||
rules: {
|
||||
// Disable no-undef for test files since Jest globals are handled above
|
||||
'no-undef': 'off',
|
||||
// Relax some rules for test files
|
||||
'@typescript-eslint/no-explicit-any': 'off',
|
||||
'@typescript-eslint/no-non-null-assertion': 'off',
|
||||
'@typescript-eslint/explicit-function-return-type': 'off'
|
||||
}
|
||||
},
|
||||
{
|
||||
files: ['**/jest.config.js', '**/jest.setup.js'],
|
||||
languageOptions: {
|
||||
globals: {
|
||||
...globals.node,
|
||||
jest: 'readonly',
|
||||
module: 'writable'
|
||||
}
|
||||
},
|
||||
rules: {
|
||||
'@typescript-eslint/no-require-imports': 'off',
|
||||
'@typescript-eslint/no-var-requires': 'off',
|
||||
'import/no-commonjs': 'off'
|
||||
}
|
||||
}
|
||||
];
|
||||
@@ -4,9 +4,6 @@ metadata:
  labels:
    labeled-by: "extension"
spec:
  securityContext:
    runAsUser: 1000
    runAsGroup: 3000
  restartPolicy: Never
  containers:
  - name: $job # overwrites job container
@@ -4,7 +4,7 @@
  "state": {},
  "args": {
    "container": {
      "image": "node:14.16",
      "image": "node:22",
      "workingDirectory": "/__w/repo/repo",
      "createOptions": "--cpus 1",
      "environmentVariables": {
@@ -9,7 +9,7 @@
    }
  },
  "args": {
    "image": "node:14.16",
    "image": "node:22",
    "dockerfile": null,
    "entryPointArgs": [
      "-e",
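These example files are what the runner feeds each hook on stdin; the two hunks above only bump the example image from node:14.16 to node:22. A rough TypeScript shape for the fields visible in the snippets (field names are taken from the examples; optionality and the full definitions live in the `packages/hooklib` types and may differ):

```ts
// Approximate shape of the example inputs shown above; the authoritative
// definitions are the types exported by packages/hooklib.
interface ExampleJobContainer {
  image: string                         // e.g. "node:22"
  workingDirectory?: string             // e.g. "/__w/repo/repo"
  createOptions?: string                // e.g. "--cpus 1"
  environmentVariables?: Record<string, string>
}

interface ExampleContainerStepArgs {
  image: string
  dockerfile: string | null
  entryPointArgs?: string[]             // e.g. ["-e", ...]
}

interface ExampleHookInput {
  state: Record<string, unknown>
  args: { container: ExampleJobContainer } | ExampleContainerStepArgs
}
```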
package-lock.json (generated, 6221 changed lines): file diff suppressed because it is too large.
package.json (23 changed lines)
@@ -1,6 +1,6 @@
{
  "name": "hooks",
  "version": "0.6.0",
  "version": "0.8.0",
  "description": "Three projects are included - k8s: a kubernetes hook implementation that spins up pods dynamically to run a job - docker: A hook implementation of the runner's docker implementation - A hook lib, which contains shared typescript definitions and utilities that the other packages consume",
  "main": "",
  "directories": {
@@ -12,6 +12,7 @@
  "format": "prettier --write '**/*.ts'",
  "format-check": "prettier --check '**/*.ts'",
  "lint": "eslint packages/**/*.ts",
  "lint:fix": "eslint packages/**/*.ts --fix",
  "build-all": "npm run build --prefix packages/hooklib && npm run build --prefix packages/k8s && npm run build --prefix packages/docker"
},
"repository": {
@@ -25,12 +26,18 @@
},
"homepage": "https://github.com/actions/runner-container-hooks#readme",
"devDependencies": {
  "@types/jest": "^27.5.1",
  "@types/node": "^17.0.23",
  "@typescript-eslint/parser": "^5.18.0",
  "eslint": "^8.12.0",
  "eslint-plugin-github": "^4.3.6",
  "prettier": "^2.6.2",
  "typescript": "^4.6.3"
  "@eslint/js": "^9.31.0",
  "@types/jest": "^30.0.0",
  "@types/node": "^24.0.14",
  "@typescript-eslint/eslint-plugin": "^8.37.0",
  "@typescript-eslint/parser": "^8.37.0",
  "eslint": "^9.31.0",
  "eslint-plugin-github": "^6.0.0",
  "globals": "^15.12.0",
  "prettier": "^3.6.2",
  "typescript": "^5.8.3"
},
"dependencies": {
  "eslint-plugin-jest": "^29.0.1"
}
}
@@ -1,13 +1,26 @@
// eslint-disable-next-line import/no-commonjs
module.exports = {
  clearMocks: true,
  preset: 'ts-jest',
  moduleFileExtensions: ['js', 'ts'],
  testEnvironment: 'node',
  testMatch: ['**/*-test.ts'],
  testRunner: 'jest-circus/runner',
  verbose: true,
  transform: {
    '^.+\\.ts$': 'ts-jest'
    '^.+\\.ts$': [
      'ts-jest',
      {
        tsconfig: 'tsconfig.test.json'
      }
    ],
    // Transform ESM modules to CommonJS
    '^.+\\.(js|mjs)$': ['babel-jest', {
      presets: [['@babel/preset-env', { targets: { node: 'current' } }]]
    }]
  },
  setupFilesAfterEnv: ['./jest.setup.js'],
  verbose: true
  transformIgnorePatterns: [
    // Transform these ESM packages
    'node_modules/(?!(shlex|@kubernetes/client-node|openid-client|oauth4webapi|jose|uuid)/)'
  ],
  setupFilesAfterEnv: ['./jest.setup.js']
}
packages/docker/package-lock.json (generated, 11172 changed lines): file diff suppressed because it is too large.
@@ -5,26 +5,31 @@
"main": "lib/index.js",
"scripts": {
  "test": "jest --runInBand",
  "build": "npx tsc && npx ncc build"
  "build": "npx tsc && npx ncc build",
  "format": "prettier --write '**/*.ts'",
  "format-check": "prettier --check '**/*.ts'",
  "lint": "eslint src/**/*.ts"
},
"author": "",
"license": "MIT",
"dependencies": {
  "@actions/core": "^1.9.1",
  "@actions/core": "^1.11.1",
  "@actions/exec": "^1.1.1",
  "hooklib": "file:../hooklib",
  "shlex": "^2.1.2",
  "uuid": "^8.3.2"
  "shlex": "^3.0.0",
  "uuid": "^11.1.0"
},
"devDependencies": {
  "@types/jest": "^27.4.1",
  "@types/node": "^17.0.23",
  "@typescript-eslint/parser": "^5.18.0",
  "@vercel/ncc": "^0.33.4",
  "jest": "^27.5.1",
  "ts-jest": "^27.1.4",
  "ts-node": "^10.7.0",
  "tsconfig-paths": "^3.14.1",
  "typescript": "^4.6.3"
  "@babel/core": "^7.25.2",
  "@babel/preset-env": "^7.25.4",
  "@types/jest": "^30.0.0",
  "@types/node": "^24.0.14",
  "@typescript-eslint/parser": "^8.37.0",
  "@vercel/ncc": "^0.38.3",
  "jest": "^30.0.4",
  "ts-jest": "^29.4.0",
  "ts-node": "^10.9.2",
  "tsconfig-paths": "^4.2.0",
  "typescript": "^5.8.3"
}
}
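The docker package's runtime dependencies move to current majors of `shlex` and `uuid`; both also appear in the new jest `transformIgnorePatterns`, which suggests their published builds now need transforming for the CommonJS test setup. A small usage sketch (the container-name format is illustrative only, not the hook's actual naming scheme):

```ts
import { v4 as uuidv4 } from 'uuid'
import { split } from 'shlex'

// Illustrative only: derive a unique container name and parse user-supplied
// docker create options, which is the general way the hooks use these packages.
const containerName = `job-${uuidv4()}`
const createOptions = split('--cpus 1 --memory 2g')
console.log(containerName, createOptions) // [ '--cpus', '1', '--memory', '2g' ]
```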
@@ -43,18 +43,25 @@ export async function createContainer(
  if (args.environmentVariables) {
    for (const [key] of Object.entries(args.environmentVariables)) {
      dockerArgs.push('-e')
      dockerArgs.push(key)
      dockerArgs.push('-e', key)
    }
  }

  dockerArgs.push('-e', 'GITHUB_ACTIONS=true')
  // Use same behavior as the runner https://github.com/actions/runner/blob/27d9c886ab9a45e0013cb462529ac85d581f8c41/src/Runner.Worker/Container/DockerCommandManager.cs#L150
  if (!('CI' in (args.environmentVariables ?? {}))) {
    dockerArgs.push('-e', 'CI=true')
  }

  const mountVolumes = [
    ...(args.userMountVolumes || []),
    ...(args.systemMountVolumes || [])
  ]
  for (const mountVolume of mountVolumes) {
    dockerArgs.push(
      `-v=${mountVolume.sourceVolumePath}:${mountVolume.targetVolumePath}`
      `-v=${mountVolume.sourceVolumePath}:${mountVolume.targetVolumePath}${
        mountVolume.readOnly ? ':ro' : ''
      }`
    )
  }
  if (args.entryPoint) {
@@ -403,11 +410,16 @@ export async function containerRun(
  }
  if (args.environmentVariables) {
    for (const [key] of Object.entries(args.environmentVariables)) {
      dockerArgs.push('-e')
      dockerArgs.push(key)
      dockerArgs.push('-e', key)
    }
  }

  dockerArgs.push('-e', 'GITHUB_ACTIONS=true')
  // Use same behavior as the runner https://github.com/actions/runner/blob/27d9c886ab9a45e0013cb462529ac85d581f8c41/src/Runner.Worker/Container/DockerCommandManager.cs#L150
  if (!('CI' in (args.environmentVariables ?? {}))) {
    dockerArgs.push('-e', 'CI=true')
  }

  const mountVolumes = [
    ...(args.userMountVolumes || []),
    ...(args.systemMountVolumes || [])
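Both `createContainer` and `containerRun` now inject `GITHUB_ACTIONS=true`, add `CI=true` unless the job already defines `CI`, and append `:ro` for read-only mounts. A condensed, self-contained sketch of that argument-building logic (the helper name and types here are illustrative; the real logic lives inline in the two functions above):

```ts
interface MountVolume {
  sourceVolumePath: string
  targetVolumePath: string
  readOnly?: boolean
}

// Illustrative helper mirroring the docker-args logic shown in the diff.
function buildEnvAndMountArgs(
  environmentVariables: Record<string, string> = {},
  mountVolumes: MountVolume[] = []
): string[] {
  const dockerArgs: string[] = []
  // Only the variable names are forwarded; docker resolves the values from the process env.
  for (const key of Object.keys(environmentVariables)) {
    dockerArgs.push('-e', key)
  }
  dockerArgs.push('-e', 'GITHUB_ACTIONS=true')
  if (!('CI' in environmentVariables)) {
    dockerArgs.push('-e', 'CI=true')
  }
  for (const volume of mountVolumes) {
    const suffix = volume.readOnly ? ':ro' : ''
    dockerArgs.push(`-v=${volume.sourceVolumePath}:${volume.targetVolumePath}${suffix}`)
  }
  return dockerArgs
}
```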
@@ -31,9 +31,13 @@ export async function prepareJob(
    core.info('No containers exist, skipping hook invocation')
    exit(0)
  }
  const networkName = generateNetworkName()
  // Create network
  await networkCreate(networkName)

  let networkName = process.env.ACTIONS_RUNNER_NETWORK_DRIVER
  if (!networkName) {
    networkName = generateNetworkName()
    // Create network
    await networkCreate(networkName)
  }

  // Create Job Container
  let containerMetadata: ContainerMetadata | undefined = undefined
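With this change, `prepareJob` reuses an externally supplied network when `ACTIONS_RUNNER_NETWORK_DRIVER` is set and only creates one otherwise. A minimal sketch of that resolve-or-create decision, with the helpers passed in so the snippet stands alone (`networkCreate` and `generateNetworkName` stand in for the functions this file already imports):

```ts
// Sketch of the network selection added above.
async function resolveJobNetwork(
  generateNetworkName: () => string,
  networkCreate: (name: string) => Promise<void>
): Promise<string> {
  const preconfigured = process.env.ACTIONS_RUNNER_NETWORK_DRIVER
  if (preconfigured) {
    // Reuse the network the runner's operator already provides.
    return preconfigured
  }
  const networkName = generateNetworkName()
  await networkCreate(networkName)
  return networkName
}
```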
@@ -1,6 +1,5 @@
/* eslint-disable @typescript-eslint/no-var-requires */
/* eslint-disable @typescript-eslint/no-require-imports */
/* eslint-disable import/no-commonjs */
import * as core from '@actions/core'
import { env } from 'process'
// Import this way otherwise typescript has errors
@@ -75,4 +75,22 @@ describe('run script step', () => {
|
||||
runScriptStep(definitions.runScriptStep.args, prepareJobResponse.state)
|
||||
).resolves.not.toThrow()
|
||||
})
|
||||
|
||||
it('Should confirm that CI and GITHUB_ACTIONS are set', async () => {
|
||||
definitions.runScriptStep.args.entryPoint = '/bin/bash'
|
||||
definitions.runScriptStep.args.entryPointArgs = [
|
||||
'-c',
|
||||
`'if [[ ! $(env | grep "^CI=") = "CI=true" ]]; then exit 1; fi'`
|
||||
]
|
||||
await expect(
|
||||
runScriptStep(definitions.runScriptStep.args, prepareJobResponse.state)
|
||||
).resolves.not.toThrow()
|
||||
definitions.runScriptStep.args.entryPointArgs = [
|
||||
'-c',
|
||||
`'if [[ ! $(env | grep "^GITHUB_ACTIONS=") = "GITHUB_ACTIONS=true" ]]; then exit 1; fi'`
|
||||
]
|
||||
await expect(
|
||||
runScriptStep(definitions.runScriptStep.args, prepareJobResponse.state)
|
||||
).resolves.not.toThrow()
|
||||
})
|
||||
})
|
||||
|
||||
@@ -31,7 +31,7 @@ export default class TestSetup {
|
||||
private get allTestDirectories() {
|
||||
const resp = [this.testdir, this.runnerMockDir, this.runnerOutputDir]
|
||||
|
||||
for (const [key, value] of Object.entries(this.runnerMockSubdirs)) {
|
||||
for (const [, value] of Object.entries(this.runnerMockSubdirs)) {
|
||||
resp.push(`${this.runnerMockDir}/${value}`)
|
||||
}
|
||||
|
||||
@@ -42,12 +42,11 @@ export default class TestSetup {
|
||||
return resp
|
||||
}
|
||||
|
||||
public initialize(): void {
|
||||
initialize(): void {
|
||||
env['GITHUB_WORKSPACE'] = this.workingDirectory
|
||||
env['RUNNER_NAME'] = 'test'
|
||||
env[
|
||||
'RUNNER_TEMP'
|
||||
] = `${this.runnerMockDir}/${this.runnerMockSubdirs.workTemp}`
|
||||
env['RUNNER_TEMP'] =
|
||||
`${this.runnerMockDir}/${this.runnerMockSubdirs.workTemp}`
|
||||
|
||||
for (const dir of this.allTestDirectories) {
|
||||
fs.mkdirSync(dir, { recursive: true })
|
||||
@@ -59,7 +58,7 @@ export default class TestSetup {
|
||||
)
|
||||
}
|
||||
|
||||
public teardown(): void {
|
||||
teardown(): void {
|
||||
fs.rmdirSync(this.testdir, { recursive: true })
|
||||
}
|
||||
|
||||
@@ -108,21 +107,21 @@ export default class TestSetup {
|
||||
]
|
||||
}
|
||||
|
||||
public createOutputFile(name: string): string {
|
||||
createOutputFile(name: string): string {
|
||||
let filePath = path.join(this.runnerOutputDir, name || `${uuidv4()}.json`)
|
||||
fs.writeFileSync(filePath, '')
|
||||
return filePath
|
||||
}
|
||||
|
||||
public get workingDirectory(): string {
|
||||
get workingDirectory(): string {
|
||||
return `${this.runnerMockDir}/_work/${this.projectName}/${this.projectName}`
|
||||
}
|
||||
|
||||
public get containerWorkingDirectory(): string {
|
||||
get containerWorkingDirectory(): string {
|
||||
return `/__w/${this.projectName}/${this.projectName}`
|
||||
}
|
||||
|
||||
public initializeDockerAction(): string {
|
||||
initializeDockerAction(): string {
|
||||
const actionPath = `${this.testdir}/_actions/example-handle/example-repo/example-branch/mock-directory`
|
||||
fs.mkdirSync(actionPath, { recursive: true })
|
||||
this.writeDockerfile(actionPath)
|
||||
@@ -147,7 +146,7 @@ echo "::set-output name=time::$time"`
|
||||
fs.chmodSync(entryPointPath, 0o755)
|
||||
}
|
||||
|
||||
public getPrepareJobDefinition(): HookData {
|
||||
getPrepareJobDefinition(): HookData {
|
||||
const prepareJob = JSON.parse(
|
||||
fs.readFileSync(
|
||||
path.resolve(__dirname + '/../../../examples/prepare-job.json'),
|
||||
@@ -166,7 +165,7 @@ echo "::set-output name=time::$time"`
|
||||
return prepareJob
|
||||
}
|
||||
|
||||
public getRunScriptStepDefinition(): HookData {
|
||||
getRunScriptStepDefinition(): HookData {
|
||||
const runScriptStep = JSON.parse(
|
||||
fs.readFileSync(
|
||||
path.resolve(__dirname + '/../../../examples/run-script-step.json'),
|
||||
@@ -178,7 +177,7 @@ echo "::set-output name=time::$time"`
|
||||
return runScriptStep
|
||||
}
|
||||
|
||||
public getRunContainerStepDefinition(): HookData {
|
||||
getRunContainerStepDefinition(): HookData {
|
||||
const runContainerStep = JSON.parse(
|
||||
fs.readFileSync(
|
||||
path.resolve(__dirname + '/../../../examples/run-container-step.json'),
|
||||
|
||||
packages/docker/tsconfig.test.json (new file, 6 lines)
@@ -0,0 +1,6 @@
{
  "compilerOptions": {
    "allowJs": true
  },
  "extends": "./tsconfig.json"
}
packages/hooklib/package-lock.json (generated, 5443 changed lines): file diff suppressed because it is too large.
@@ -3,7 +3,7 @@
"version": "0.1.0",
"description": "",
"main": "lib/index.js",
"types": "index.d.ts",
"types": "lib/index.d.ts",
"scripts": {
  "test": "echo \"Error: no test specified\" && exit 1",
  "build": "tsc",
@@ -14,15 +14,14 @@
"author": "",
"license": "MIT",
"devDependencies": {
  "@types/node": "^17.0.23",
  "@typescript-eslint/parser": "^5.18.0",
  "@types/node": "^25.0.2",
  "@zeit/ncc": "^0.22.3",
  "eslint": "^8.12.0",
  "eslint-plugin-github": "^4.3.6",
  "prettier": "^2.6.2",
  "typescript": "^4.6.3"
  "eslint": "^9.39.2",
  "eslint-plugin-github": "^6.0.0",
  "prettier": "^3.7.4",
  "typescript": "^5.8.3"
},
"dependencies": {
  "@actions/core": "^1.9.1"
  "@actions/core": "^2.0.1"
}
}
@@ -22,9 +22,6 @@ rules:
- apiGroups: [""]
  resources: ["pods/log"]
  verbs: ["get", "list", "watch",]
- apiGroups: ["batch"]
  resources: ["jobs"]
  verbs: ["get", "list", "create", "delete"]
- apiGroups: [""]
  resources: ["secrets"]
  verbs: ["get", "list", "create", "delete"]
@@ -43,3 +40,5 @@ rules:
- Building container actions from a dockerfile is not supported at this time
- Container actions will not have access to the services network or job container network
- Docker [create options](https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idcontaineroptions) are not supported
- Container actions will have to specify the entrypoint, since the default entrypoint will be overridden to run the commands from the workflow.
- Container actions need to have the following binaries in their container image: `sh`, `env`, `tail`.
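The `batch`/`jobs` rule is dropped from the role here because container steps no longer go through a Kubernetes Job (see the run-container-step changes later in this diff, where a plain pod is created instead). A sketch of the remaining rules that are visible in the hunk above, mirrored as TypeScript data (this is illustrative; the canonical list is the `requiredPermissions` export in packages/k8s, and the unchanged pod and pod/exec rules are not shown in this hunk):

```ts
// Illustrative mirror of the RBAC rules shown above after removing batch/jobs.
interface RequiredPermission {
  group: string
  resource: string
  subresource?: string
  verbs: string[]
}

const visibleRules: RequiredPermission[] = [
  { group: '', resource: 'pods', subresource: 'log', verbs: ['get', 'list', 'watch'] },
  { group: '', resource: 'secrets', verbs: ['get', 'list', 'create', 'delete'] }
  // ...plus the pod and pod/exec rules that precede this hunk and are unchanged.
]
```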
@@ -1,13 +1,26 @@
// eslint-disable-next-line import/no-commonjs
module.exports = {
  clearMocks: true,
  preset: 'ts-jest',
  moduleFileExtensions: ['js', 'ts'],
  testEnvironment: 'node',
  testMatch: ['**/*-test.ts'],
  testRunner: 'jest-circus/runner',
  verbose: true,
  transform: {
    '^.+\\.ts$': 'ts-jest'
    '^.+\\.ts$': [
      'ts-jest',
      {
        tsconfig: 'tsconfig.test.json'
      }
    ],
    // Transform ESM modules to CommonJS
    '^.+\\.(js|mjs)$': ['babel-jest', {
      presets: [['@babel/preset-env', { targets: { node: 'current' } }]]
    }]
  },
  setupFilesAfterEnv: ['./jest.setup.js'],
  verbose: true
  transformIgnorePatterns: [
    // Transform these ESM packages
    'node_modules/(?!(shlex|@kubernetes/client-node|openid-client|oauth4webapi|jose|uuid)/)'
  ],
  setupFilesAfterEnv: ['./jest.setup.js']
}
@@ -1 +1,2 @@
// eslint-disable-next-line filenames/match-regex, no-undef
jest.setTimeout(500000)
packages/k8s/package-lock.json (generated, 10613 changed lines): file diff suppressed because it is too large.
@@ -13,20 +13,25 @@
"author": "",
"license": "MIT",
"dependencies": {
  "@actions/core": "^1.9.1",
  "@actions/core": "^1.11.1",
  "@actions/exec": "^1.1.1",
  "@actions/io": "^1.1.2",
  "@kubernetes/client-node": "^0.18.1",
  "@actions/io": "^1.1.3",
  "@kubernetes/client-node": "^1.3.0",
  "hooklib": "file:../hooklib",
  "js-yaml": "^4.1.0",
  "shlex": "^2.1.2"
  "shlex": "^3.0.0",
  "tar-fs": "^3.1.0",
  "uuid": "^11.1.0"
},
"devDependencies": {
  "@types/jest": "^27.4.1",
  "@types/node": "^17.0.23",
  "@vercel/ncc": "^0.33.4",
  "jest": "^27.5.1",
  "ts-jest": "^27.1.4",
  "typescript": "^4.6.3"
  "@babel/core": "^7.28.3",
  "@babel/preset-env": "^7.28.3",
  "@types/jest": "^30.0.0",
  "@types/node": "^24.3.0",
  "@vercel/ncc": "^0.38.3",
  "babel-jest": "^30.1.1",
  "jest": "^30.1.1",
  "ts-jest": "^29.4.1",
  "typescript": "^5.9.2"
}
}
@@ -1,32 +1,39 @@
|
||||
import * as core from '@actions/core'
|
||||
import * as io from '@actions/io'
|
||||
import * as k8s from '@kubernetes/client-node'
|
||||
import {
|
||||
JobContainerInfo,
|
||||
ContextPorts,
|
||||
PrepareJobArgs,
|
||||
writeToResponseFile
|
||||
writeToResponseFile,
|
||||
ServiceContainerInfo
|
||||
} from 'hooklib'
|
||||
import path from 'path'
|
||||
import {
|
||||
containerPorts,
|
||||
createPod,
|
||||
createJobPod,
|
||||
isPodContainerAlpine,
|
||||
prunePods,
|
||||
waitForPodPhases,
|
||||
getPrepareJobTimeoutSeconds
|
||||
getPrepareJobTimeoutSeconds,
|
||||
execCpToPod,
|
||||
execPodStep
|
||||
} from '../k8s'
|
||||
import {
|
||||
containerVolumes,
|
||||
CONTAINER_VOLUMES,
|
||||
DEFAULT_CONTAINER_ENTRY_POINT,
|
||||
DEFAULT_CONTAINER_ENTRY_POINT_ARGS,
|
||||
generateContainerName,
|
||||
mergeContainerWithOptions,
|
||||
readExtensionFromFile,
|
||||
PodPhase,
|
||||
fixArgs
|
||||
fixArgs,
|
||||
prepareJobScript
|
||||
} from '../k8s/utils'
|
||||
import { CONTAINER_EXTENSION_PREFIX, JOB_CONTAINER_NAME } from './constants'
|
||||
import {
|
||||
CONTAINER_EXTENSION_PREFIX,
|
||||
getJobPodName,
|
||||
JOB_CONTAINER_NAME
|
||||
} from './constants'
|
||||
import { dirname } from 'path'
|
||||
|
||||
export async function prepareJob(
|
||||
args: PrepareJobArgs,
|
||||
@@ -39,11 +46,9 @@ export async function prepareJob(
|
||||
await prunePods()
|
||||
|
||||
const extension = readExtensionFromFile()
|
||||
await copyExternalsToRoot()
|
||||
|
||||
let container: k8s.V1Container | undefined = undefined
|
||||
if (args.container?.image) {
|
||||
core.debug(`Using image '${args.container.image}' for job image`)
|
||||
container = createContainerSpec(
|
||||
args.container,
|
||||
JOB_CONTAINER_NAME,
|
||||
@@ -55,7 +60,6 @@ export async function prepareJob(
|
||||
let services: k8s.V1Container[] = []
|
||||
if (args.services?.length) {
|
||||
services = args.services.map(service => {
|
||||
core.debug(`Adding service '${service.image}' to pod definition`)
|
||||
return createContainerSpec(
|
||||
service,
|
||||
generateContainerName(service.image),
|
||||
@@ -71,7 +75,8 @@ export async function prepareJob(
|
||||
|
||||
let createdPod: k8s.V1Pod | undefined = undefined
|
||||
try {
|
||||
createdPod = await createPod(
|
||||
createdPod = await createJobPod(
|
||||
getJobPodName(),
|
||||
container,
|
||||
services,
|
||||
args.container.registry,
|
||||
@@ -91,6 +96,13 @@ export async function prepareJob(
|
||||
`Job pod created, waiting for it to come online ${createdPod?.metadata?.name}`
|
||||
)
|
||||
|
||||
const runnerWorkspace = dirname(process.env.RUNNER_WORKSPACE as string)
|
||||
|
||||
let prepareScript: { containerPath: string; runnerPath: string } | undefined
|
||||
if (args.container?.userMountVolumes?.length) {
|
||||
prepareScript = prepareJobScript(args.container.userMountVolumes || [])
|
||||
}
|
||||
|
||||
try {
|
||||
await waitForPodPhases(
|
||||
createdPod.metadata.name,
|
||||
@@ -103,6 +115,28 @@ export async function prepareJob(
|
||||
throw new Error(`pod failed to come online with error: ${err}`)
|
||||
}
|
||||
|
||||
await execCpToPod(createdPod.metadata.name, runnerWorkspace, '/__w')
|
||||
|
||||
if (prepareScript) {
|
||||
await execPodStep(
|
||||
['sh', '-e', prepareScript.containerPath],
|
||||
createdPod.metadata.name,
|
||||
JOB_CONTAINER_NAME
|
||||
)
|
||||
|
||||
const promises: Promise<void>[] = []
|
||||
for (const vol of args?.container?.userMountVolumes || []) {
|
||||
promises.push(
|
||||
execCpToPod(
|
||||
createdPod.metadata.name,
|
||||
vol.sourceVolumePath,
|
||||
vol.targetVolumePath
|
||||
)
|
||||
)
|
||||
}
|
||||
await Promise.all(promises)
|
||||
}
|
||||
|
||||
core.debug('Job pod is ready for traffic')
|
||||
|
||||
let isAlpine = false
|
||||
@@ -119,13 +153,14 @@ export async function prepareJob(
|
||||
throw new Error(`failed to determine if the pod is alpine: ${message}`)
|
||||
}
|
||||
core.debug(`Setting isAlpine to ${isAlpine}`)
|
||||
generateResponseFile(responseFile, createdPod, isAlpine)
|
||||
generateResponseFile(responseFile, args, createdPod, isAlpine)
|
||||
}
|
||||
|
||||
function generateResponseFile(
|
||||
responseFile: string,
|
||||
args: PrepareJobArgs,
|
||||
appPod: k8s.V1Pod,
|
||||
isAlpine
|
||||
isAlpine: boolean
|
||||
): void {
|
||||
if (!appPod.metadata?.name) {
|
||||
throw new Error('app pod must have metadata.name specified')
|
||||
@@ -156,40 +191,34 @@ function generateResponseFile(
|
||||
}
|
||||
}
|
||||
|
||||
const serviceContainers = appPod.spec?.containers.filter(
|
||||
c => c.name !== JOB_CONTAINER_NAME
|
||||
)
|
||||
if (serviceContainers?.length) {
|
||||
response.context['services'] = serviceContainers.map(c => {
|
||||
const ctxPorts: ContextPorts = {}
|
||||
if (c.ports?.length) {
|
||||
for (const port of c.ports) {
|
||||
ctxPorts[port.containerPort] = port.hostPort
|
||||
}
|
||||
}
|
||||
if (args.services?.length) {
|
||||
const serviceContainerNames =
|
||||
args.services?.map(s => generateContainerName(s.image)) || []
|
||||
|
||||
return {
|
||||
image: c.image,
|
||||
ports: ctxPorts
|
||||
}
|
||||
})
|
||||
response.context['services'] = appPod?.spec?.containers
|
||||
?.filter(c => serviceContainerNames.includes(c.name))
|
||||
.map(c => {
|
||||
const ctxPorts: ContextPorts = {}
|
||||
if (c.ports?.length) {
|
||||
for (const port of c.ports) {
|
||||
if (port.containerPort && port.hostPort) {
|
||||
ctxPorts[port.containerPort.toString()] = port.hostPort.toString()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
image: c.image,
|
||||
ports: ctxPorts
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
writeToResponseFile(responseFile, JSON.stringify(response))
|
||||
}
|
||||
|
||||
async function copyExternalsToRoot(): Promise<void> {
|
||||
const workspace = process.env['RUNNER_WORKSPACE']
|
||||
if (workspace) {
|
||||
await io.cp(
|
||||
path.join(workspace, '../../externals'),
|
||||
path.join(workspace, '../externals'),
|
||||
{ force: true, recursive: true, copySourceDirectory: false }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
export function createContainerSpec(
|
||||
container: JobContainerInfo,
|
||||
container: JobContainerInfo | ServiceContainerInfo,
|
||||
name: string,
|
||||
jobContainer = false,
|
||||
extension?: k8s.V1PodTemplateSpec
|
||||
@@ -204,31 +233,40 @@ export function createContainerSpec(
|
||||
image: container.image,
|
||||
ports: containerPorts(container)
|
||||
} as k8s.V1Container
|
||||
if (container.workingDirectory) {
|
||||
podContainer.workingDir = container.workingDirectory
|
||||
if (container['workingDirectory']) {
|
||||
podContainer.workingDir = container['workingDirectory']
|
||||
}
|
||||
|
||||
if (container.entryPoint) {
|
||||
podContainer.command = [container.entryPoint]
|
||||
}
|
||||
|
||||
if (container.entryPointArgs?.length > 0) {
|
||||
if (container.entryPointArgs && container.entryPointArgs.length > 0) {
|
||||
podContainer.args = fixArgs(container.entryPointArgs)
|
||||
}
|
||||
|
||||
podContainer.env = []
|
||||
for (const [key, value] of Object.entries(
|
||||
container['environmentVariables']
|
||||
container['environmentVariables'] || {}
|
||||
)) {
|
||||
if (value && key !== 'HOME') {
|
||||
podContainer.env.push({ name: key, value: value as string })
|
||||
podContainer.env.push({ name: key, value })
|
||||
}
|
||||
}
|
||||
|
||||
podContainer.volumeMounts = containerVolumes(
|
||||
container.userMountVolumes,
|
||||
jobContainer
|
||||
)
|
||||
podContainer.env.push({
|
||||
name: 'GITHUB_ACTIONS',
|
||||
value: 'true'
|
||||
})
|
||||
|
||||
if (!('CI' in (container['environmentVariables'] || {}))) {
|
||||
podContainer.env.push({
|
||||
name: 'CI',
|
||||
value: 'true'
|
||||
})
|
||||
}
|
||||
|
||||
podContainer.volumeMounts = CONTAINER_VOLUMES
|
||||
|
||||
if (!extension) {
|
||||
return podContainer
|
||||
|
||||
@@ -1,23 +1,31 @@
|
||||
import * as core from '@actions/core'
|
||||
import * as fs from 'fs'
|
||||
import * as k8s from '@kubernetes/client-node'
|
||||
import { RunContainerStepArgs } from 'hooklib'
|
||||
import { dirname } from 'path'
|
||||
import {
|
||||
createJob,
|
||||
createSecretForEnvs,
|
||||
getContainerJobPodName,
|
||||
getPodLogs,
|
||||
getPodStatus,
|
||||
waitForJobToComplete,
|
||||
createContainerStepPod,
|
||||
deletePod,
|
||||
execCpFromPod,
|
||||
execCpToPod,
|
||||
execPodStep,
|
||||
getPrepareJobTimeoutSeconds,
|
||||
waitForPodPhases
|
||||
} from '../k8s'
|
||||
import {
|
||||
containerVolumes,
|
||||
fixArgs,
|
||||
CONTAINER_VOLUMES,
|
||||
mergeContainerWithOptions,
|
||||
PodPhase,
|
||||
readExtensionFromFile
|
||||
readExtensionFromFile,
|
||||
DEFAULT_CONTAINER_ENTRY_POINT_ARGS,
|
||||
writeContainerStepScript
|
||||
} from '../k8s/utils'
|
||||
import { JOB_CONTAINER_EXTENSION_NAME, JOB_CONTAINER_NAME } from './constants'
|
||||
import {
|
||||
getJobPodName,
|
||||
getStepPodName,
|
||||
JOB_CONTAINER_EXTENSION_NAME,
|
||||
JOB_CONTAINER_NAME
|
||||
} from './constants'
|
||||
|
||||
export async function runContainerStep(
|
||||
stepContainer: RunContainerStepArgs
|
||||
@@ -26,106 +34,109 @@ export async function runContainerStep(
|
||||
throw new Error('Building container actions is not currently supported')
|
||||
}
|
||||
|
||||
let secretName: string | undefined = undefined
|
||||
if (stepContainer.environmentVariables) {
|
||||
secretName = await createSecretForEnvs(stepContainer.environmentVariables)
|
||||
if (!stepContainer.entryPoint) {
|
||||
throw new Error(
|
||||
'failed to start the container since the entrypoint is overwritten'
|
||||
)
|
||||
}
|
||||
|
||||
const envs = stepContainer.environmentVariables || {}
|
||||
envs['GITHUB_ACTIONS'] = 'true'
|
||||
if (!('CI' in envs)) {
|
||||
envs.CI = 'true'
|
||||
}
|
||||
|
||||
const extension = readExtensionFromFile()
|
||||
|
||||
core.debug(`Created secret ${secretName} for container job envs`)
|
||||
const container = createContainerSpec(stepContainer, secretName, extension)
|
||||
const container = createContainerSpec(stepContainer, extension)
|
||||
|
||||
let job: k8s.V1Job
|
||||
let pod: k8s.V1Pod
|
||||
try {
|
||||
job = await createJob(container, extension)
|
||||
pod = await createContainerStepPod(getStepPodName(), container, extension)
|
||||
} catch (err) {
|
||||
core.debug(`createJob failed: ${JSON.stringify(err)}`)
|
||||
const message = (err as any)?.response?.body?.message || err
|
||||
throw new Error(`failed to run script step: ${message}`)
|
||||
}
|
||||
|
||||
if (!job.metadata?.name) {
|
||||
if (!pod.metadata?.name) {
|
||||
throw new Error(
|
||||
`Expected job ${JSON.stringify(
|
||||
job
|
||||
pod
|
||||
)} to have correctly set the metadata.name`
|
||||
)
|
||||
}
|
||||
core.debug(`Job created, waiting for pod to start: ${job.metadata?.name}`)
|
||||
const podName = pod.metadata.name
|
||||
|
||||
let podName: string
|
||||
try {
|
||||
podName = await getContainerJobPodName(job.metadata.name)
|
||||
} catch (err) {
|
||||
core.debug(`getContainerJobPodName failed: ${JSON.stringify(err)}`)
|
||||
const message = (err as any)?.response?.body?.message || err
|
||||
throw new Error(`failed to get container job pod name: ${message}`)
|
||||
}
|
||||
|
||||
await waitForPodPhases(
|
||||
podName,
|
||||
new Set([
|
||||
PodPhase.COMPLETED,
|
||||
PodPhase.RUNNING,
|
||||
PodPhase.SUCCEEDED,
|
||||
PodPhase.FAILED
|
||||
]),
|
||||
new Set([PodPhase.PENDING, PodPhase.UNKNOWN])
|
||||
)
|
||||
core.debug('Container step is running or complete, pulling logs')
|
||||
|
||||
await getPodLogs(podName, JOB_CONTAINER_NAME)
|
||||
|
||||
core.debug('Waiting for container job to complete')
|
||||
await waitForJobToComplete(job.metadata.name)
|
||||
|
||||
// pod has failed so pull the status code from the container
|
||||
const status = await getPodStatus(podName)
|
||||
if (status?.phase === 'Succeeded') {
|
||||
return 0
|
||||
}
|
||||
if (!status?.containerStatuses?.length) {
|
||||
core.error(
|
||||
`Can't determine container status from response: ${JSON.stringify(
|
||||
status
|
||||
)}`
|
||||
await waitForPodPhases(
|
||||
podName,
|
||||
new Set([PodPhase.RUNNING]),
|
||||
new Set([PodPhase.PENDING, PodPhase.UNKNOWN]),
|
||||
getPrepareJobTimeoutSeconds()
|
||||
)
|
||||
return 1
|
||||
|
||||
const runnerWorkspace = dirname(process.env.RUNNER_WORKSPACE as string)
|
||||
const githubWorkspace = process.env.GITHUB_WORKSPACE as string
|
||||
const parts = githubWorkspace.split('/').slice(-2)
|
||||
if (parts.length !== 2) {
|
||||
throw new Error(`Invalid github workspace directory: ${githubWorkspace}`)
|
||||
}
|
||||
const relativeWorkspace = parts.join('/')
|
||||
|
||||
core.debug(
|
||||
`Copying files from pod ${getJobPodName()} to ${runnerWorkspace}/${relativeWorkspace}`
|
||||
)
|
||||
await execCpFromPod(getJobPodName(), `/__w`, `${runnerWorkspace}`)
|
||||
|
||||
const { containerPath, runnerPath } = writeContainerStepScript(
|
||||
`${runnerWorkspace}/__w/_temp`,
|
||||
githubWorkspace,
|
||||
stepContainer.entryPoint,
|
||||
stepContainer.entryPointArgs,
|
||||
envs
|
||||
)
|
||||
|
||||
await execCpToPod(podName, `${runnerWorkspace}/__w`, '/__w')
|
||||
|
||||
fs.rmSync(`${runnerWorkspace}/__w`, { recursive: true, force: true })
|
||||
|
||||
try {
|
||||
core.debug(`Executing container step script in pod ${podName}`)
|
||||
return await execPodStep(
|
||||
['sh', '-e', containerPath],
|
||||
pod.metadata.name,
|
||||
JOB_CONTAINER_NAME
|
||||
)
|
||||
} catch (err) {
|
||||
core.debug(`execPodStep failed: ${JSON.stringify(err)}`)
|
||||
const message = (err as any)?.response?.body?.message || err
|
||||
throw new Error(`failed to run script step: ${message}`)
|
||||
} finally {
|
||||
fs.rmSync(runnerPath, { force: true })
|
||||
}
|
||||
} catch (error) {
|
||||
core.error(`Failed to run container step: ${error}`)
|
||||
throw error
|
||||
} finally {
|
||||
await deletePod(podName).catch(err => {
|
||||
core.error(`Failed to delete step pod ${podName}: ${err}`)
|
||||
})
|
||||
}
|
||||
const exitCode =
|
||||
status.containerStatuses[status.containerStatuses.length - 1].state
|
||||
?.terminated?.exitCode
|
||||
return Number(exitCode) || 1
|
||||
}
|
||||
|
||||
function createContainerSpec(
|
||||
container: RunContainerStepArgs,
|
||||
secretName?: string,
|
||||
extension?: k8s.V1PodTemplateSpec
|
||||
): k8s.V1Container {
|
||||
const podContainer = new k8s.V1Container()
|
||||
podContainer.name = JOB_CONTAINER_NAME
|
||||
podContainer.image = container.image
|
||||
podContainer.workingDir = container.workingDirectory
|
||||
podContainer.command = container.entryPoint
|
||||
? [container.entryPoint]
|
||||
: undefined
|
||||
podContainer.args = container.entryPointArgs?.length
|
||||
? fixArgs(container.entryPointArgs)
|
||||
: undefined
|
||||
podContainer.workingDir = '/__w'
|
||||
podContainer.command = ['tail']
|
||||
podContainer.args = DEFAULT_CONTAINER_ENTRY_POINT_ARGS
|
||||
|
||||
if (secretName) {
|
||||
podContainer.envFrom = [
|
||||
{
|
||||
secretRef: {
|
||||
name: secretName,
|
||||
optional: false
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
podContainer.volumeMounts = containerVolumes(undefined, false, true)
|
||||
podContainer.volumeMounts = CONTAINER_VOLUMES
|
||||
|
||||
if (!extension) {
|
||||
return podContainer
|
||||
|
||||
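The reworked run-container-step above provisions a long-lived step pod (command `tail`, so it stays up), copies the runner's `/__w` tree in, executes a generated step script with `sh -e`, and always deletes the pod afterwards. A simplified sketch of that copy, exec, cleanup flow, with the helpers injected so the snippet is self-contained (the helper names mirror what this file imports from `../k8s`, but the signatures here are simplified assumptions and the copy-back and error-reporting steps are trimmed):

```ts
// Simplified sketch of the flow introduced above.
async function runStepInPod(
  podName: string,
  containerName: string,
  runnerWorkspace: string,
  containerScriptPath: string,
  helpers: {
    execCpToPod(pod: string, src: string, dst: string): Promise<void>
    execPodStep(cmd: string[], pod: string, container: string): Promise<number>
    deletePod(pod: string): Promise<void>
  }
): Promise<number> {
  try {
    // Stage the working directory into the step pod.
    await helpers.execCpToPod(podName, `${runnerWorkspace}/__w`, '/__w')
    // Run the generated step script; its exit code becomes the step result.
    return await helpers.execPodStep(['sh', '-e', containerScriptPath], podName, containerName)
  } finally {
    // The step pod is always cleaned up, even when the script fails.
    await helpers.deletePod(podName).catch(() => undefined)
  }
}
```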
@@ -2,17 +2,19 @@
|
||||
import * as fs from 'fs'
|
||||
import * as core from '@actions/core'
|
||||
import { RunScriptStepArgs } from 'hooklib'
|
||||
import { execPodStep } from '../k8s'
|
||||
import { writeEntryPointScript } from '../k8s/utils'
|
||||
import { execCpFromPod, execCpToPod, execPodStep } from '../k8s'
|
||||
import { writeRunScript, sleep, listDirAllCommand } from '../k8s/utils'
|
||||
import { JOB_CONTAINER_NAME } from './constants'
|
||||
import { dirname } from 'path'
|
||||
import * as shlex from 'shlex'
|
||||
|
||||
export async function runScriptStep(
|
||||
args: RunScriptStepArgs,
|
||||
state,
|
||||
responseFile
|
||||
state
|
||||
): Promise<void> {
|
||||
// Write the entrypoint first. This will be later copied to the workflow pod
|
||||
const { entryPoint, entryPointArgs, environmentVariables } = args
|
||||
const { containerPath, runnerPath } = writeEntryPointScript(
|
||||
const { containerPath, runnerPath } = writeRunScript(
|
||||
args.workingDirectory,
|
||||
entryPoint,
|
||||
entryPointArgs,
|
||||
@@ -20,6 +22,56 @@ export async function runScriptStep(
|
||||
environmentVariables
|
||||
)
|
||||
|
||||
const workdir = dirname(process.env.RUNNER_WORKSPACE as string)
|
||||
const runnerTemp = `${workdir}/_temp`
|
||||
const containerTemp = '/__w/_temp'
|
||||
const containerTempSrc = '/__w/_temp_pre'
|
||||
// Ensure base and staging dirs exist before copying
|
||||
await execPodStep(
|
||||
[
|
||||
'sh',
|
||||
'-c',
|
||||
'mkdir -p /__w && mkdir -p /__w/_temp && mkdir -p /__w/_temp_pre'
|
||||
],
|
||||
state.jobPod,
|
||||
JOB_CONTAINER_NAME
|
||||
)
|
||||
await execCpToPod(state.jobPod, runnerTemp, containerTempSrc)
|
||||
|
||||
// Copy GitHub directories from temp to /github
|
||||
// Merge strategy:
|
||||
// - Overwrite files in _runner_file_commands
|
||||
// - Append files not already present elsewhere
|
||||
const mergeCommands = [
|
||||
'set -e',
|
||||
'mkdir -p /__w/_temp /__w/_temp_pre',
|
||||
'SRC=/__w/_temp_pre',
|
||||
'DST=/__w/_temp',
|
||||
// Overwrite _runner_file_commands
|
||||
'cp -a "$SRC/_runner_file_commands/." "$DST/_runner_file_commands"',
|
||||
`find "$SRC" -type f ! -path "*/_runner_file_commands/*" -exec sh -c '
|
||||
rel="\${1#$2/}"
|
||||
target="$3/$rel"
|
||||
mkdir -p "$(dirname "$target")"
|
||||
cp -a "$1" "$target"
|
||||
' _ {} "$SRC" "$DST" \\;`,
|
||||
// Remove _temp_pre after merging
|
||||
'rm -rf /__w/_temp_pre'
|
||||
]
|
||||
|
||||
try {
|
||||
await execPodStep(
|
||||
['sh', '-c', mergeCommands.join(' && ')],
|
||||
state.jobPod,
|
||||
JOB_CONTAINER_NAME
|
||||
)
|
||||
} catch (err) {
|
||||
core.debug(`Failed to merge temp directories: ${JSON.stringify(err)}`)
|
||||
const message = (err as any)?.response?.body?.message || err
|
||||
throw new Error(`failed to merge temp dirs: ${message}`)
|
||||
}
|
||||
|
||||
// Execute the entrypoint script
|
||||
args.entryPoint = 'sh'
|
||||
args.entryPointArgs = ['-e', containerPath]
|
||||
try {
|
||||
@@ -33,6 +85,23 @@ export async function runScriptStep(
|
||||
const message = (err as any)?.response?.body?.message || err
|
||||
throw new Error(`failed to run script step: ${message}`)
|
||||
} finally {
|
||||
fs.rmSync(runnerPath)
|
||||
try {
|
||||
fs.rmSync(runnerPath, { force: true })
|
||||
} catch (removeErr) {
|
||||
core.debug(`Failed to remove file ${runnerPath}: ${removeErr}`)
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
core.debug(
|
||||
`Copying from job pod '${state.jobPod}' ${containerTemp} to ${runnerTemp}`
|
||||
)
|
||||
await execCpFromPod(
|
||||
state.jobPod,
|
||||
`${containerTemp}/_runner_file_commands`,
|
||||
`${workdir}/_temp`
|
||||
)
|
||||
} catch (error) {
|
||||
core.warning('Failed to copy _temp from pod')
|
||||
}
|
||||
}

@@ -1,5 +1,11 @@
import * as core from '@actions/core'
import { Command, getInputFromStdin, prepareJobArgs } from 'hooklib'
import {
  Command,
  getInputFromStdin,
  PrepareJobArgs,
  RunContainerStepArgs,
  RunScriptStepArgs
} from 'hooklib'
import {
  cleanupJob,
  prepareJob,
@@ -27,16 +33,16 @@ async function run(): Promise<void> {
  let exitCode = 0
  switch (command) {
    case Command.PrepareJob:
      await prepareJob(args as prepareJobArgs, responseFile)
      await prepareJob(args as PrepareJobArgs, responseFile)
      return process.exit(0)
    case Command.CleanupJob:
      await cleanupJob()
      return process.exit(0)
    case Command.RunScriptStep:
      await runScriptStep(args, state, null)
      await runScriptStep(args as RunScriptStepArgs, state)
      return process.exit(0)
    case Command.RunContainerStep:
      exitCode = await runContainerStep(args)
      exitCode = await runContainerStep(args as RunContainerStepArgs)
      return process.exit(exitCode)
    default:
      throw new Error(`Command not recognized: ${command}`)
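
The dispatch above reads a single JSON message from stdin via getInputFromStdin. A hedged sketch of that message's shape, inferred only from how command, args, state, and responseFile are consumed here; the field values are invented and the exact Command string constants live in hooklib:

// Hypothetical example input for the RunScriptStep branch above.
const exampleInput = {
  command: 'run_script_step',
  responseFile: null,
  args: {
    entryPoint: 'bash',
    entryPointArgs: ['-c', 'echo hello'],
    workingDirectory: '/__w/repo/repo',
    environmentVariables: {}
  },
  state: { jobPod: 'example-job-pod' }
}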
|
||||
|
||||
@@ -1,22 +1,29 @@
|
||||
import * as core from '@actions/core'
|
||||
import * as path from 'path'
|
||||
import { spawn } from 'child_process'
|
||||
import * as k8s from '@kubernetes/client-node'
|
||||
import { ContainerInfo, Registry } from 'hooklib'
|
||||
import tar from 'tar-fs'
|
||||
import * as stream from 'stream'
|
||||
import { WritableStreamBuffer } from 'stream-buffers'
|
||||
import { createHash } from 'crypto'
|
||||
import type { ContainerInfo, Registry } from 'hooklib'
|
||||
import {
|
||||
getJobPodName,
|
||||
getRunnerPodName,
|
||||
getSecretName,
|
||||
getStepPodName,
|
||||
getVolumeClaimName,
|
||||
JOB_CONTAINER_NAME,
|
||||
RunnerInstanceLabel
|
||||
} from '../hooks/constants'
|
||||
import {
|
||||
PodPhase,
|
||||
mergePodSpecWithOptions,
|
||||
mergeObjectMeta,
|
||||
useKubeScheduler,
|
||||
fixArgs
|
||||
fixArgs,
|
||||
listDirAllCommand,
|
||||
sleep,
|
||||
EXTERNALS_VOLUME_NAME,
|
||||
GITHUB_VOLUME_NAME,
|
||||
WORK_VOLUME
|
||||
} from './utils'
|
||||
import * as shlex from 'shlex'
|
||||
|
||||
const kc = new k8s.KubeConfig()
|
||||
|
||||
@@ -28,8 +35,6 @@ const k8sAuthorizationV1Api = kc.makeApiClient(k8s.AuthorizationV1Api)
|
||||
|
||||
const DEFAULT_WAIT_FOR_POD_TIME_SECONDS = 10 * 60 // 10 min
|
||||
|
||||
export const POD_VOLUME_NAME = 'work'
|
||||
|
||||
export const requiredPermissions = [
|
||||
{
|
||||
group: '',
|
||||
@@ -49,12 +54,6 @@ export const requiredPermissions = [
|
||||
resource: 'pods',
|
||||
subresource: 'log'
|
||||
},
|
||||
{
|
||||
group: 'batch',
|
||||
verbs: ['get', 'list', 'create', 'delete'],
|
||||
resource: 'jobs',
|
||||
subresource: ''
|
||||
},
|
||||
{
|
||||
group: '',
|
||||
verbs: ['create', 'delete', 'get', 'list'],
|
||||
@@ -63,7 +62,8 @@ export const requiredPermissions = [
|
||||
}
|
||||
]
|
||||
|
||||
export async function createPod(
|
||||
export async function createJobPod(
|
||||
name: string,
|
||||
jobContainer?: k8s.V1Container,
|
||||
services?: k8s.V1Container[],
|
||||
registry?: Registry,
|
||||
@@ -83,7 +83,7 @@ export async function createPod(
|
||||
appPod.kind = 'Pod'
|
||||
|
||||
appPod.metadata = new k8s.V1ObjectMeta()
|
||||
appPod.metadata.name = getJobPodName()
|
||||
appPod.metadata.name = name
|
||||
|
||||
const instanceLabel = new RunnerInstanceLabel()
|
||||
appPod.metadata.labels = {
|
||||
@@ -93,17 +93,68 @@ export async function createPod(
|
||||
|
||||
appPod.spec = new k8s.V1PodSpec()
|
||||
appPod.spec.containers = containers
|
||||
appPod.spec.restartPolicy = 'Never'
|
||||
|
||||
if (!useKubeScheduler()) {
|
||||
appPod.spec.nodeName = await getCurrentNodeName()
|
||||
appPod.spec.securityContext = {
|
||||
fsGroup: 1001
|
||||
}
|
||||
|
||||
const claimName = getVolumeClaimName()
|
||||
// Extract working directory from GITHUB_WORKSPACE
|
||||
// GITHUB_WORKSPACE is like /__w/repo-name/repo-name
|
||||
const githubWorkspace = process.env.GITHUB_WORKSPACE
|
||||
const workingDirPath = githubWorkspace?.split('/').slice(-2).join('/') ?? ''
|
||||
|
||||
const initCommands = [
|
||||
'mkdir -p /mnt/externals',
|
||||
'mkdir -p /mnt/work',
|
||||
'mkdir -p /mnt/github',
|
||||
'mv /home/runner/externals/* /mnt/externals/'
|
||||
]
|
||||
|
||||
if (workingDirPath) {
|
||||
initCommands.push(`mkdir -p /mnt/work/${workingDirPath}`)
|
||||
}
|
||||
|
||||
appPod.spec.initContainers = [
|
||||
{
|
||||
name: 'fs-init',
|
||||
image:
|
||||
process.env.ACTIONS_RUNNER_IMAGE ||
|
||||
'ghcr.io/actions/actions-runner:latest',
|
||||
command: ['sh', '-c', initCommands.join(' && ')],
|
||||
securityContext: {
|
||||
runAsGroup: 1001,
|
||||
runAsUser: 1001
|
||||
},
|
||||
volumeMounts: [
|
||||
{
|
||||
name: EXTERNALS_VOLUME_NAME,
|
||||
mountPath: '/mnt/externals'
|
||||
},
|
||||
{
|
||||
name: WORK_VOLUME,
|
||||
mountPath: '/mnt/work'
|
||||
},
|
||||
{
|
||||
name: GITHUB_VOLUME_NAME,
|
||||
mountPath: '/mnt/github'
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
|
||||
appPod.spec.restartPolicy = 'Never'
|
||||
|
||||
appPod.spec.volumes = [
|
||||
{
|
||||
name: 'work',
|
||||
persistentVolumeClaim: { claimName }
|
||||
name: EXTERNALS_VOLUME_NAME,
|
||||
emptyDir: {}
|
||||
},
|
||||
{
|
||||
name: GITHUB_VOLUME_NAME,
|
||||
emptyDir: {}
|
||||
},
|
||||
{
|
||||
name: WORK_VOLUME,
|
||||
emptyDir: {}
|
||||
}
|
||||
]
|
||||
|
||||
@@ -125,99 +176,71 @@ export async function createPod(
|
||||
mergePodSpecWithOptions(appPod.spec, extension.spec)
|
||||
}
|
||||
|
||||
const { body } = await k8sApi.createNamespacedPod(namespace(), appPod)
|
||||
return body
|
||||
return await k8sApi.createNamespacedPod({
|
||||
namespace: namespace(),
|
||||
body: appPod
|
||||
})
|
||||
}
|
||||
|
||||
export async function createJob(
|
||||
export async function createContainerStepPod(
|
||||
name: string,
|
||||
container: k8s.V1Container,
|
||||
extension?: k8s.V1PodTemplateSpec
|
||||
): Promise<k8s.V1Job> {
|
||||
const runnerInstanceLabel = new RunnerInstanceLabel()
|
||||
): Promise<k8s.V1Pod> {
|
||||
const appPod = new k8s.V1Pod()
|
||||
|
||||
const job = new k8s.V1Job()
|
||||
job.apiVersion = 'batch/v1'
|
||||
job.kind = 'Job'
|
||||
job.metadata = new k8s.V1ObjectMeta()
|
||||
job.metadata.name = getStepPodName()
|
||||
job.metadata.labels = { [runnerInstanceLabel.key]: runnerInstanceLabel.value }
|
||||
job.metadata.annotations = {}
|
||||
appPod.apiVersion = 'v1'
|
||||
appPod.kind = 'Pod'
|
||||
|
||||
job.spec = new k8s.V1JobSpec()
|
||||
job.spec.ttlSecondsAfterFinished = 300
|
||||
job.spec.backoffLimit = 0
|
||||
job.spec.template = new k8s.V1PodTemplateSpec()
|
||||
appPod.metadata = new k8s.V1ObjectMeta()
|
||||
appPod.metadata.name = name
|
||||
|
||||
job.spec.template.spec = new k8s.V1PodSpec()
|
||||
job.spec.template.metadata = new k8s.V1ObjectMeta()
|
||||
job.spec.template.metadata.labels = {}
|
||||
job.spec.template.metadata.annotations = {}
|
||||
job.spec.template.spec.containers = [container]
|
||||
job.spec.template.spec.restartPolicy = 'Never'
|
||||
|
||||
if (!useKubeScheduler()) {
|
||||
job.spec.template.spec.nodeName = await getCurrentNodeName()
|
||||
const instanceLabel = new RunnerInstanceLabel()
|
||||
appPod.metadata.labels = {
|
||||
[instanceLabel.key]: instanceLabel.value
|
||||
}
|
||||
appPod.metadata.annotations = {}
|
||||
|
||||
const claimName = getVolumeClaimName()
|
||||
job.spec.template.spec.volumes = [
|
||||
appPod.spec = new k8s.V1PodSpec()
|
||||
appPod.spec.containers = [container]
|
||||
|
||||
appPod.spec.restartPolicy = 'Never'
|
||||
|
||||
appPod.spec.volumes = [
|
||||
{
|
||||
name: 'work',
|
||||
persistentVolumeClaim: { claimName }
|
||||
name: EXTERNALS_VOLUME_NAME,
|
||||
emptyDir: {}
|
||||
},
|
||||
{
|
||||
name: GITHUB_VOLUME_NAME,
|
||||
emptyDir: {}
|
||||
},
|
||||
{
|
||||
name: WORK_VOLUME,
|
||||
emptyDir: {}
|
||||
}
|
||||
]
|
||||
|
||||
if (extension) {
|
||||
if (extension.metadata) {
|
||||
// apply metadata both to the job and the pod created by the job
|
||||
mergeObjectMeta(job, extension.metadata)
|
||||
mergeObjectMeta(job.spec.template, extension.metadata)
|
||||
}
|
||||
if (extension.spec) {
|
||||
mergePodSpecWithOptions(job.spec.template.spec, extension.spec)
|
||||
}
|
||||
if (extension?.metadata) {
|
||||
mergeObjectMeta(appPod, extension.metadata)
|
||||
}
|
||||
|
||||
const { body } = await k8sBatchV1Api.createNamespacedJob(namespace(), job)
|
||||
return body
|
||||
}
|
||||
|
||||
export async function getContainerJobPodName(jobName: string): Promise<string> {
|
||||
const selector = `job-name=${jobName}`
|
||||
const backOffManager = new BackOffManager(60)
|
||||
while (true) {
|
||||
const podList = await k8sApi.listNamespacedPod(
|
||||
namespace(),
|
||||
undefined,
|
||||
undefined,
|
||||
undefined,
|
||||
undefined,
|
||||
selector,
|
||||
1
|
||||
)
|
||||
|
||||
if (!podList.body.items?.length) {
|
||||
await backOffManager.backOff()
|
||||
continue
|
||||
}
|
||||
|
||||
if (!podList.body.items[0].metadata?.name) {
|
||||
throw new Error(
|
||||
`Failed to determine the name of the pod for job ${jobName}`
|
||||
)
|
||||
}
|
||||
return podList.body.items[0].metadata.name
|
||||
if (extension?.spec) {
|
||||
mergePodSpecWithOptions(appPod.spec, extension.spec)
|
||||
}
|
||||
|
||||
return await k8sApi.createNamespacedPod({
|
||||
namespace: namespace(),
|
||||
body: appPod
|
||||
})
|
||||
}
|
||||
|
||||
export async function deletePod(podName: string): Promise<void> {
|
||||
await k8sApi.deleteNamespacedPod(
|
||||
podName,
|
||||
namespace(),
|
||||
undefined,
|
||||
undefined,
|
||||
0
|
||||
)
|
||||
export async function deletePod(name: string): Promise<void> {
|
||||
await k8sApi.deleteNamespacedPod({
|
||||
name,
|
||||
namespace: namespace(),
|
||||
gracePeriodSeconds: 0
|
||||
})
|
||||
}
|
||||
|
||||
export async function execPodStep(
|
||||
@@ -225,11 +248,11 @@ export async function execPodStep(
  podName: string,
  containerName: string,
  stdin?: stream.Readable
): Promise<void> {
): Promise<number> {
  const exec = new k8s.Exec(kc)

  command = fixArgs(command)
  // Exec returns a websocket. If the websocket itself fails, reject the promise.
  // Otherwise the status callback fires once the command finishes, and the promise
  // is resolved or rejected based on that status.
  await new Promise(function (resolve, reject) {
  return await new Promise(function (resolve, reject) {
    exec
      .exec(
        namespace(),
@@ -241,9 +264,9 @@ export async function execPodStep(
        stdin ?? null,
        false /* tty */,
        resp => {
          // kube.exec returns an error if exit code is not 0, but we can't actually get the exit code
          core.debug(`execPodStep response: ${JSON.stringify(resp)}`)
          if (resp.status === 'Success') {
            resolve(resp.code)
            resolve(resp.code || 0)
          } else {
            core.debug(
              JSON.stringify({
@@ -251,16 +274,298 @@ export async function execPodStep(
                details: resp?.details
              })
            )
            reject(resp?.message)
            reject(new Error(resp?.message || 'execPodStep failed'))
          }
        }
      )
      // If exec.exec fails, explicitly reject the outer promise
      // eslint-disable-next-line github/no-then
      .catch(e => reject(e))
  })
}
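
Since execPodStep now resolves with the remote command's exit code instead of void, callers can branch on the result. A small usage sketch; the pod name is a placeholder:

// Usage sketch: treat a non-zero exit code as a failed step.
async function exampleInvocation(): Promise<void> {
  const exitCode = await execPodStep(
    ['sh', '-c', 'exit 5'],
    'example-job-pod',
    JOB_CONTAINER_NAME
  )
  if (exitCode !== 0) {
    throw new Error(`step failed with exit code ${exitCode}`)
  }
}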
|
||||
|
||||
export async function execCalculateOutputHashSorted(
|
||||
podName: string,
|
||||
containerName: string,
|
||||
command: string[]
|
||||
): Promise<string> {
|
||||
const exec = new k8s.Exec(kc)
|
||||
|
||||
let output = ''
|
||||
const outputWriter = new stream.Writable({
|
||||
write(chunk, _enc, cb) {
|
||||
try {
|
||||
output += chunk.toString('utf8')
|
||||
cb()
|
||||
} catch (e) {
|
||||
cb(e as Error)
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
exec
|
||||
.exec(
|
||||
namespace(),
|
||||
podName,
|
||||
containerName,
|
||||
command,
|
||||
outputWriter, // capture stdout
|
||||
process.stderr,
|
||||
null,
|
||||
false /* tty */,
|
||||
resp => {
|
||||
core.debug(`internalExecOutput response: ${JSON.stringify(resp)}`)
|
||||
if (resp.status === 'Success') {
|
||||
resolve()
|
||||
} else {
|
||||
core.debug(
|
||||
JSON.stringify({
|
||||
message: resp?.message,
|
||||
details: resp?.details
|
||||
})
|
||||
)
|
||||
reject(new Error(resp?.message || 'internalExecOutput failed'))
|
||||
}
|
||||
}
|
||||
)
|
||||
.catch(e => reject(e))
|
||||
})
|
||||
|
||||
outputWriter.end()
|
||||
|
||||
// Sort lines for consistent ordering across platforms
|
||||
const sortedOutput =
|
||||
output
|
||||
.split('\n')
|
||||
.filter(line => line.length > 0)
|
||||
.sort()
|
||||
.join('\n') + '\n'
|
||||
|
||||
const hash = createHash('sha256')
|
||||
hash.update(sortedOutput)
|
||||
return hash.digest('hex')
|
||||
}
|
||||
|
||||
export async function localCalculateOutputHashSorted(
|
||||
commands: string[]
|
||||
): Promise<string> {
|
||||
return await new Promise<string>((resolve, reject) => {
|
||||
const child = spawn(commands[0], commands.slice(1), {
|
||||
stdio: ['ignore', 'pipe', 'ignore']
|
||||
})
|
||||
|
||||
let output = ''
|
||||
child.stdout.on('data', chunk => {
|
||||
output += chunk.toString('utf8')
|
||||
})
|
||||
child.on('error', reject)
|
||||
child.on('close', (code: number) => {
|
||||
if (code === 0) {
|
||||
// Sort lines for consistent ordering across distributions/platforms
|
||||
const sortedOutput =
|
||||
output
|
||||
.split('\n')
|
||||
.filter(line => line.length > 0)
|
||||
.sort()
|
||||
.join('\n') + '\n'
|
||||
|
||||
const hash = createHash('sha256')
|
||||
hash.update(sortedOutput)
|
||||
resolve(hash.digest('hex'))
|
||||
} else {
|
||||
reject(new Error(`child process exited with code ${code}`))
|
||||
}
|
||||
})
|
||||
})
|
||||
}
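
Both hash helpers above normalize the directory listing the same way before hashing: drop empty lines, sort, rejoin with a trailing newline, then SHA-256, so the local and in-pod fingerprints stay comparable. A self-contained sketch of that normalization:

import { createHash } from 'crypto'

// Same normalization as the two helpers above, applied to a raw listing string.
function hashSortedListing(raw: string): string {
  const sorted =
    raw
      .split('\n')
      .filter(line => line.length > 0)
      .sort()
      .join('\n') + '\n'
  return createHash('sha256').update(sorted).digest('hex')
}

// '42 ./b\n7 ./a\n' and '7 ./a\n42 ./b\n' produce the same digest.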
|
||||
|
||||
export async function execCpToPod(
|
||||
podName: string,
|
||||
runnerPath: string,
|
||||
containerPath: string
|
||||
): Promise<void> {
|
||||
core.debug(`Copying ${runnerPath} to pod ${podName} at ${containerPath}`)
|
||||
|
||||
let attempt = 0
|
||||
while (true) {
|
||||
try {
|
||||
const exec = new k8s.Exec(kc)
|
||||
// Use tar to extract with --no-same-owner to avoid ownership issues.
|
||||
// Then use find to fix file and directory permissions, since tar alone may not restore usable modes.
|
||||
const command = [
|
||||
'sh',
|
||||
'-c',
|
||||
`tar xf - --no-same-owner -C ${shlex.quote(containerPath)} 2>/dev/null; ` +
|
||||
`find ${shlex.quote(containerPath)} -type f -exec chmod u+rw {} \\; 2>/dev/null; ` +
|
||||
`find ${shlex.quote(containerPath)} -type d -exec chmod u+rwx {} \\; 2>/dev/null`
|
||||
]
|
||||
const readStream = tar.pack(runnerPath)
|
||||
const errStream = new WritableStreamBuffer()
|
||||
await new Promise((resolve, reject) => {
|
||||
exec
|
||||
.exec(
|
||||
namespace(),
|
||||
podName,
|
||||
JOB_CONTAINER_NAME,
|
||||
command,
|
||||
null,
|
||||
errStream,
|
||||
readStream,
|
||||
false,
|
||||
async status => {
|
||||
if (errStream.size()) {
|
||||
reject(
|
||||
new Error(
|
||||
`Error from execCpToPod - status: ${status.status}, details: \n ${errStream.getContentsAsString()}`
|
||||
)
|
||||
)
|
||||
}
|
||||
resolve(status)
|
||||
}
|
||||
)
|
||||
.catch(e => reject(e))
|
||||
})
|
||||
break
|
||||
} catch (error) {
|
||||
core.debug(`cpToPod: Attempt ${attempt + 1} failed: ${error}`)
|
||||
attempt++
|
||||
if (attempt >= 30) {
|
||||
throw new Error(
|
||||
`cpToPod failed after ${attempt} attempts: ${JSON.stringify(error)}`
|
||||
)
|
||||
}
|
||||
await sleep(1000)
|
||||
}
|
||||
}
|
||||
|
||||
let attempts = 15
|
||||
const delay = 1000
|
||||
for (let i = 0; i < attempts; i++) {
|
||||
try {
|
||||
const want = await localCalculateOutputHashSorted([
|
||||
'sh',
|
||||
'-c',
|
||||
listDirAllCommand(runnerPath)
|
||||
])
|
||||
|
||||
const got = await execCalculateOutputHashSorted(
|
||||
podName,
|
||||
JOB_CONTAINER_NAME,
|
||||
['sh', '-c', listDirAllCommand(containerPath)]
|
||||
)
|
||||
|
||||
if (got !== want) {
|
||||
core.debug(
|
||||
`The hash of the directory does not match the expected value; want='${want}' got='${got}'`
|
||||
)
|
||||
await sleep(delay)
|
||||
continue
|
||||
}
|
||||
|
||||
break
|
||||
} catch (error) {
|
||||
core.debug(`Attempt ${i + 1} failed: ${error}`)
|
||||
await sleep(delay)
|
||||
}
|
||||
}
|
||||
}
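
The copy above is retried inline up to 30 times and then verified by comparing directory fingerprints. A hypothetical generic helper, shown only to make the retry pattern explicit (it reuses the core and sleep helpers already imported in this file and is not part of the source):

// Hypothetical: retry an async operation with a fixed delay between attempts.
async function withRetries<T>(
  op: () => Promise<T>,
  maxAttempts: number,
  delayMs: number
): Promise<T> {
  let lastError: unknown
  for (let attempt = 1; attempt <= maxAttempts; attempt++) {
    try {
      return await op()
    } catch (error) {
      lastError = error
      core.debug(`Attempt ${attempt} failed: ${error}`)
      await sleep(delayMs)
    }
  }
  throw new Error(
    `operation failed after ${maxAttempts} attempts: ${JSON.stringify(lastError)}`
  )
}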
|
||||
|
||||
export async function execCpFromPod(
|
||||
podName: string,
|
||||
containerPath: string,
|
||||
parentRunnerPath: string
|
||||
): Promise<void> {
|
||||
const targetRunnerPath = `${parentRunnerPath}/${path.basename(containerPath)}`
|
||||
core.debug(
|
||||
`Copying from pod ${podName} ${containerPath} to ${targetRunnerPath}`
|
||||
)
|
||||
|
||||
let attempt = 0
|
||||
while (true) {
|
||||
try {
|
||||
// make temporary directory
|
||||
const exec = new k8s.Exec(kc)
|
||||
const containerPaths = containerPath.split('/')
|
||||
const dirname = containerPaths.pop() as string
|
||||
const command = [
|
||||
'tar',
|
||||
'cf',
|
||||
'-',
|
||||
'-C',
|
||||
containerPaths.join('/') || '/',
|
||||
dirname
|
||||
]
|
||||
const writerStream = tar.extract(parentRunnerPath)
|
||||
const errStream = new WritableStreamBuffer()
|
||||
|
||||
await new Promise((resolve, reject) => {
|
||||
exec
|
||||
.exec(
|
||||
namespace(),
|
||||
podName,
|
||||
JOB_CONTAINER_NAME,
|
||||
command,
|
||||
writerStream,
|
||||
errStream,
|
||||
null,
|
||||
false,
|
||||
async status => {
|
||||
if (errStream.size()) {
|
||||
reject(
|
||||
new Error(
|
||||
`Error from cpFromPod - details: \n ${errStream.getContentsAsString()}`
|
||||
)
|
||||
)
|
||||
}
|
||||
resolve(status)
|
||||
}
|
||||
)
|
||||
.catch(e => reject(e))
|
||||
})
|
||||
break
|
||||
} catch (error) {
|
||||
core.debug(`Attempt ${attempt + 1} failed: ${error}`)
|
||||
attempt++
|
||||
if (attempt >= 30) {
|
||||
throw new Error(
|
||||
`execCpFromPod failed after ${attempt} attempts: ${JSON.stringify(error)}`
|
||||
)
|
||||
}
|
||||
await sleep(1000)
|
||||
}
|
||||
}
|
||||
|
||||
let attempts = 15
|
||||
const delay = 1000
|
||||
for (let i = 0; i < attempts; i++) {
|
||||
try {
|
||||
const want = await execCalculateOutputHashSorted(
|
||||
podName,
|
||||
JOB_CONTAINER_NAME,
|
||||
['sh', '-c', listDirAllCommand(containerPath)]
|
||||
)
|
||||
|
||||
const got = await localCalculateOutputHashSorted([
|
||||
'sh',
|
||||
'-c',
|
||||
listDirAllCommand(targetRunnerPath)
|
||||
])
|
||||
|
||||
if (got !== want) {
|
||||
core.debug(
|
||||
`The hash of the directory does not match the expected value; want='${want}' got='${got}'`
|
||||
)
|
||||
await sleep(delay)
|
||||
continue
|
||||
}
|
||||
|
||||
break
|
||||
} catch (error) {
|
||||
core.debug(`Attempt ${i + 1} failed: ${error}`)
|
||||
await sleep(delay)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export async function waitForJobToComplete(jobName: string): Promise<void> {
|
||||
const backOffManager = new BackOffManager()
|
||||
while (true) {
|
||||
@@ -269,7 +574,7 @@ export async function waitForJobToComplete(jobName: string): Promise<void> {
|
||||
return
|
||||
}
|
||||
} catch (error) {
|
||||
throw new Error(`job ${jobName} has failed`)
|
||||
throw new Error(`job ${jobName} has failed: ${JSON.stringify(error)}`)
|
||||
}
|
||||
await backOffManager.backOff()
|
||||
}
|
||||
@@ -310,8 +615,10 @@ export async function createDockerSecret(
|
||||
)
|
||||
}
|
||||
|
||||
const { body } = await k8sApi.createNamespacedSecret(namespace(), secret)
|
||||
return body
|
||||
return await k8sApi.createNamespacedSecret({
|
||||
namespace: namespace(),
|
||||
body: secret
|
||||
})
|
||||
}
|
||||
|
||||
export async function createSecretForEnvs(envs: {
|
||||
@@ -335,30 +642,33 @@ export async function createSecretForEnvs(envs: {
|
||||
secret.data[key] = Buffer.from(value).toString('base64')
|
||||
}
|
||||
|
||||
await k8sApi.createNamespacedSecret(namespace(), secret)
|
||||
await k8sApi.createNamespacedSecret({
|
||||
namespace: namespace(),
|
||||
body: secret
|
||||
})
|
||||
return secretName
|
||||
}
|
||||
|
||||
export async function deleteSecret(secretName: string): Promise<void> {
|
||||
await k8sApi.deleteNamespacedSecret(secretName, namespace())
|
||||
export async function deleteSecret(name: string): Promise<void> {
|
||||
await k8sApi.deleteNamespacedSecret({
|
||||
name,
|
||||
namespace: namespace()
|
||||
})
|
||||
}
|
||||
|
||||
export async function pruneSecrets(): Promise<void> {
|
||||
const secretList = await k8sApi.listNamespacedSecret(
|
||||
namespace(),
|
||||
undefined,
|
||||
undefined,
|
||||
undefined,
|
||||
undefined,
|
||||
new RunnerInstanceLabel().toString()
|
||||
)
|
||||
if (!secretList.body.items.length) {
|
||||
const secretList = await k8sApi.listNamespacedSecret({
|
||||
namespace: namespace(),
|
||||
labelSelector: new RunnerInstanceLabel().toString()
|
||||
})
|
||||
if (!secretList.items.length) {
|
||||
return
|
||||
}
|
||||
|
||||
await Promise.all(
|
||||
secretList.body.items.map(
|
||||
secret => secret.metadata?.name && deleteSecret(secret.metadata.name)
|
||||
secretList.items.map(
|
||||
async secret =>
|
||||
secret.metadata?.name && (await deleteSecret(secret.metadata.name))
|
||||
)
|
||||
)
|
||||
}
|
||||
@@ -386,7 +696,9 @@ export async function waitForPodPhases(
|
||||
await backOffManager.backOff()
|
||||
}
|
||||
} catch (error) {
|
||||
throw new Error(`Pod ${podName} is unhealthy with phase status ${phase}`)
|
||||
throw new Error(
|
||||
`Pod ${podName} is unhealthy with phase status ${phase}: ${JSON.stringify(error)}`
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -409,7 +721,7 @@ export function getPrepareJobTimeoutSeconds(): number {
|
||||
return timeoutSeconds
|
||||
}
|
||||
|
||||
async function getPodPhase(podName: string): Promise<PodPhase> {
|
||||
async function getPodPhase(name: string): Promise<PodPhase> {
|
||||
const podPhaseLookup = new Set<string>([
|
||||
PodPhase.PENDING,
|
||||
PodPhase.RUNNING,
|
||||
@@ -417,8 +729,10 @@ async function getPodPhase(podName: string): Promise<PodPhase> {
|
||||
PodPhase.FAILED,
|
||||
PodPhase.UNKNOWN
|
||||
])
|
||||
const { body } = await k8sApi.readNamespacedPod(podName, namespace())
|
||||
const pod = body
|
||||
const pod = await k8sApi.readNamespacedPod({
|
||||
name,
|
||||
namespace: namespace()
|
||||
})
|
||||
|
||||
if (!pod.status?.phase || !podPhaseLookup.has(pod.status.phase)) {
|
||||
return PodPhase.UNKNOWN
|
||||
@@ -426,11 +740,13 @@ async function getPodPhase(podName: string): Promise<PodPhase> {
|
||||
return pod.status?.phase as PodPhase
|
||||
}
|
||||
|
||||
async function isJobSucceeded(jobName: string): Promise<boolean> {
|
||||
const { body } = await k8sBatchV1Api.readNamespacedJob(jobName, namespace())
|
||||
const job = body
|
||||
async function isJobSucceeded(name: string): Promise<boolean> {
|
||||
const job = await k8sBatchV1Api.readNamespacedJob({
|
||||
name,
|
||||
namespace: namespace()
|
||||
})
|
||||
if (job.status?.failed) {
|
||||
throw new Error(`job ${jobName} has failed`)
|
||||
throw new Error(`job ${name} has failed`)
|
||||
}
|
||||
return !!job.status?.succeeded
|
||||
}
|
||||
@@ -450,31 +766,26 @@ export async function getPodLogs(
|
||||
process.stderr.write(err.message)
|
||||
})
|
||||
|
||||
const r = await log.log(namespace(), podName, containerName, logStream, {
|
||||
await log.log(namespace(), podName, containerName, logStream, {
|
||||
follow: true,
|
||||
tailLines: 50,
|
||||
pretty: false,
|
||||
timestamps: false
|
||||
})
|
||||
await new Promise(resolve => r.on('close', () => resolve(null)))
|
||||
await new Promise(resolve => logStream.on('end', () => resolve(null)))
|
||||
}
|
||||
|
||||
export async function prunePods(): Promise<void> {
|
||||
const podList = await k8sApi.listNamespacedPod(
|
||||
namespace(),
|
||||
undefined,
|
||||
undefined,
|
||||
undefined,
|
||||
undefined,
|
||||
new RunnerInstanceLabel().toString()
|
||||
)
|
||||
if (!podList.body.items.length) {
|
||||
const podList = await k8sApi.listNamespacedPod({
|
||||
namespace: namespace(),
|
||||
labelSelector: new RunnerInstanceLabel().toString()
|
||||
})
|
||||
if (!podList.items.length) {
|
||||
return
|
||||
}
|
||||
|
||||
await Promise.all(
|
||||
podList.body.items.map(
|
||||
pod => pod.metadata?.name && deletePod(pod.metadata.name)
|
||||
podList.items.map(
|
||||
async pod => pod.metadata?.name && (await deletePod(pod.metadata.name))
|
||||
)
|
||||
)
|
||||
}
|
||||
@@ -482,16 +793,16 @@ export async function prunePods(): Promise<void> {
|
||||
export async function getPodStatus(
|
||||
name: string
|
||||
): Promise<k8s.V1PodStatus | undefined> {
|
||||
const { body } = await k8sApi.readNamespacedPod(name, namespace())
|
||||
return body.status
|
||||
const pod = await k8sApi.readNamespacedPod({
|
||||
name,
|
||||
namespace: namespace()
|
||||
})
|
||||
return pod.status
|
||||
}
|
||||
|
||||
export async function isAuthPermissionsOK(): Promise<boolean> {
|
||||
const sar = new k8s.V1SelfSubjectAccessReview()
|
||||
const asyncs: Promise<{
|
||||
response: unknown
|
||||
body: k8s.V1SelfSubjectAccessReview
|
||||
}>[] = []
|
||||
const asyncs: Promise<k8s.V1SelfSubjectAccessReview>[] = []
|
||||
for (const resource of requiredPermissions) {
|
||||
for (const verb of resource.verbs) {
|
||||
sar.spec = new k8s.V1SelfSubjectAccessReviewSpec()
|
||||
@@ -501,11 +812,13 @@ export async function isAuthPermissionsOK(): Promise<boolean> {
|
||||
sar.spec.resourceAttributes.group = resource.group
|
||||
sar.spec.resourceAttributes.resource = resource.resource
|
||||
sar.spec.resourceAttributes.subresource = resource.subresource
|
||||
asyncs.push(k8sAuthorizationV1Api.createSelfSubjectAccessReview(sar))
|
||||
asyncs.push(
|
||||
k8sAuthorizationV1Api.createSelfSubjectAccessReview({ body: sar })
|
||||
)
|
||||
}
|
||||
}
|
||||
const responses = await Promise.all(asyncs)
|
||||
return responses.every(resp => resp.body.status?.allowed)
|
||||
return responses.every(resp => resp.status?.allowed)
|
||||
}
|
||||
|
||||
export async function isPodContainerAlpine(
|
||||
@@ -518,28 +831,18 @@ export async function isPodContainerAlpine(
|
||||
[
|
||||
'sh',
|
||||
'-c',
|
||||
`'[ $(cat /etc/*release* | grep -i -e "^ID=*alpine*" -c) != 0 ] || exit 1'`
|
||||
`[ $(cat /etc/*release* | grep -i -e "^ID=*alpine*" -c) != 0 ] || exit 1`
|
||||
],
|
||||
podName,
|
||||
containerName
|
||||
)
|
||||
} catch (err) {
|
||||
} catch {
|
||||
isAlpine = false
|
||||
}
|
||||
|
||||
return isAlpine
|
||||
}
|
||||
|
||||
async function getCurrentNodeName(): Promise<string> {
|
||||
const resp = await k8sApi.readNamespacedPod(getRunnerPodName(), namespace())
|
||||
|
||||
const nodeName = resp.body.spec?.nodeName
|
||||
if (!nodeName) {
|
||||
throw new Error('Failed to determine node name')
|
||||
}
|
||||
return nodeName
|
||||
}
|
||||
|
||||
export function namespace(): string {
|
||||
if (process.env['ACTIONS_RUNNER_KUBERNETES_NAMESPACE']) {
|
||||
return process.env['ACTIONS_RUNNER_KUBERNETES_NAMESPACE']
|
||||
@@ -623,6 +926,8 @@ export function containerPorts(
|
||||
}
|
||||
|
||||
export async function getPodByName(name): Promise<k8s.V1Pod> {
|
||||
const { body } = await k8sApi.readNamespacedPod(name, namespace())
|
||||
return body
|
||||
return await k8sApi.readNamespacedPod({
|
||||
name,
|
||||
namespace: namespace()
|
||||
})
|
||||
}
|
||||
|
||||
@@ -2,12 +2,10 @@ import * as k8s from '@kubernetes/client-node'
|
||||
import * as fs from 'fs'
|
||||
import * as yaml from 'js-yaml'
|
||||
import * as core from '@actions/core'
|
||||
import { Mount } from 'hooklib'
|
||||
import * as path from 'path'
|
||||
import { v1 as uuidv4 } from 'uuid'
|
||||
import { POD_VOLUME_NAME } from './index'
|
||||
import { CONTAINER_EXTENSION_PREFIX } from '../hooks/constants'
|
||||
import * as shlex from 'shlex'
|
||||
import { Mount } from 'hooklib'
|
||||
|
||||
export const DEFAULT_CONTAINER_ENTRY_POINT_ARGS = [`-f`, `/dev/null`]
|
||||
export const DEFAULT_CONTAINER_ENTRY_POINT = 'tail'
|
||||
@@ -15,96 +13,48 @@ export const DEFAULT_CONTAINER_ENTRY_POINT = 'tail'
|
||||
export const ENV_HOOK_TEMPLATE_PATH = 'ACTIONS_RUNNER_CONTAINER_HOOK_TEMPLATE'
|
||||
export const ENV_USE_KUBE_SCHEDULER = 'ACTIONS_RUNNER_USE_KUBE_SCHEDULER'
|
||||
|
||||
export function containerVolumes(
|
||||
userMountVolumes: Mount[] = [],
|
||||
jobContainer = true,
|
||||
containerAction = false
|
||||
): k8s.V1VolumeMount[] {
|
||||
const mounts: k8s.V1VolumeMount[] = [
|
||||
{
|
||||
name: POD_VOLUME_NAME,
|
||||
mountPath: '/__w'
|
||||
}
|
||||
]
|
||||
export const EXTERNALS_VOLUME_NAME = 'externals'
|
||||
export const GITHUB_VOLUME_NAME = 'github'
|
||||
export const WORK_VOLUME = 'work'
|
||||
|
||||
const workspacePath = process.env.GITHUB_WORKSPACE as string
|
||||
if (containerAction) {
|
||||
const i = workspacePath.lastIndexOf('_work/')
|
||||
const workspaceRelativePath = workspacePath.slice(i + '_work/'.length)
|
||||
mounts.push(
|
||||
{
|
||||
name: POD_VOLUME_NAME,
|
||||
mountPath: '/github/workspace',
|
||||
subPath: workspaceRelativePath
|
||||
},
|
||||
{
|
||||
name: POD_VOLUME_NAME,
|
||||
mountPath: '/github/file_commands',
|
||||
subPath: '_temp/_runner_file_commands'
|
||||
},
|
||||
{
|
||||
name: POD_VOLUME_NAME,
|
||||
mountPath: '/github/workflow',
|
||||
subPath: '_temp/_github_workflow'
|
||||
}
|
||||
)
|
||||
return mounts
|
||||
export const CONTAINER_VOLUMES: k8s.V1VolumeMount[] = [
|
||||
{
|
||||
name: EXTERNALS_VOLUME_NAME,
|
||||
mountPath: '/__e'
|
||||
},
|
||||
{
|
||||
name: WORK_VOLUME,
|
||||
mountPath: '/__w'
|
||||
},
|
||||
{
|
||||
name: GITHUB_VOLUME_NAME,
|
||||
mountPath: '/github'
|
||||
}
|
||||
]
|
||||
|
||||
if (!jobContainer) {
|
||||
return mounts
|
||||
export function prepareJobScript(userVolumeMounts: Mount[]): {
|
||||
containerPath: string
|
||||
runnerPath: string
|
||||
} {
|
||||
let mountDirs = userVolumeMounts.map(m => m.targetVolumePath).join(' ')
|
||||
|
||||
const content = `#!/bin/sh -l
|
||||
set -e
|
||||
cp -R /__w/_temp/_github_home /github/home
|
||||
cp -R /__w/_temp/_github_workflow /github/workflow
|
||||
mkdir -p ${mountDirs}
|
||||
`
|
||||
|
||||
const filename = `${uuidv4()}.sh`
|
||||
const entryPointPath = `${process.env.RUNNER_TEMP}/${filename}`
|
||||
fs.writeFileSync(entryPointPath, content)
|
||||
return {
|
||||
containerPath: `/__w/_temp/${filename}`,
|
||||
runnerPath: entryPointPath
|
||||
}
|
||||
|
||||
mounts.push(
|
||||
{
|
||||
name: POD_VOLUME_NAME,
|
||||
mountPath: '/__e',
|
||||
subPath: 'externals'
|
||||
},
|
||||
{
|
||||
name: POD_VOLUME_NAME,
|
||||
mountPath: '/github/home',
|
||||
subPath: '_temp/_github_home'
|
||||
},
|
||||
{
|
||||
name: POD_VOLUME_NAME,
|
||||
mountPath: '/github/workflow',
|
||||
subPath: '_temp/_github_workflow'
|
||||
}
|
||||
)
|
||||
|
||||
if (!userMountVolumes?.length) {
|
||||
return mounts
|
||||
}
|
||||
|
||||
for (const userVolume of userMountVolumes) {
|
||||
let sourceVolumePath = ''
|
||||
if (path.isAbsolute(userVolume.sourceVolumePath)) {
|
||||
if (!userVolume.sourceVolumePath.startsWith(workspacePath)) {
|
||||
throw new Error(
|
||||
'Volume mounts outside of the work folder are not supported'
|
||||
)
|
||||
}
|
||||
// source volume path should be relative path
|
||||
sourceVolumePath = userVolume.sourceVolumePath.slice(
|
||||
workspacePath.length + 1
|
||||
)
|
||||
} else {
|
||||
sourceVolumePath = userVolume.sourceVolumePath
|
||||
}
|
||||
|
||||
mounts.push({
|
||||
name: POD_VOLUME_NAME,
|
||||
mountPath: userVolume.targetVolumePath,
|
||||
subPath: sourceVolumePath,
|
||||
readOnly: userVolume.readOnly
|
||||
})
|
||||
}
|
||||
|
||||
return mounts
|
||||
}
|
||||
|
||||
export function writeEntryPointScript(
|
||||
export function writeRunScript(
|
||||
workingDirectory: string,
|
||||
entryPoint: string,
|
||||
entryPointArgs?: string[],
|
||||
@@ -118,33 +68,12 @@ export function writeEntryPointScript(
|
||||
typeof prependPath === 'string' ? prependPath : prependPath.join(':')
|
||||
exportPath = `export PATH=${prepend}:$PATH`
|
||||
}
|
||||
let environmentPrefix = ''
|
||||
|
||||
if (environmentVariables && Object.entries(environmentVariables).length) {
|
||||
const envBuffer: string[] = []
|
||||
for (const [key, value] of Object.entries(environmentVariables)) {
|
||||
if (
|
||||
key.includes(`=`) ||
|
||||
key.includes(`'`) ||
|
||||
key.includes(`"`) ||
|
||||
key.includes(`$`)
|
||||
) {
|
||||
throw new Error(
|
||||
`environment key ${key} is invalid - the key must not contain =, $, ', or "`
|
||||
)
|
||||
}
|
||||
envBuffer.push(
|
||||
`"${key}=${value
|
||||
.replace(/\\/g, '\\\\')
|
||||
.replace(/"/g, '\\"')
|
||||
.replace(/\$/g, '\\$')
|
||||
.replace(/`/g, '\\`')}"`
|
||||
)
|
||||
}
|
||||
environmentPrefix = `env ${envBuffer.join(' ')} `
|
||||
}
|
||||
let environmentPrefix = scriptEnv(environmentVariables)
|
||||
|
||||
const content = `#!/bin/sh -l
|
||||
set -e
|
||||
rm "$0" # remove script after running
|
||||
${exportPath}
|
||||
cd ${workingDirectory} && \
|
||||
exec ${environmentPrefix} ${entryPoint} ${
|
||||
@@ -160,9 +89,76 @@ exec ${environmentPrefix} ${entryPoint} ${
|
||||
}
|
||||
}
|
||||
|
||||
export function writeContainerStepScript(
|
||||
dst: string,
|
||||
workingDirectory: string,
|
||||
entryPoint: string,
|
||||
entryPointArgs?: string[],
|
||||
environmentVariables?: { [key: string]: string }
|
||||
): { containerPath: string; runnerPath: string } {
|
||||
let environmentPrefix = scriptEnv(environmentVariables)
|
||||
|
||||
const parts = workingDirectory.split('/').slice(-2)
|
||||
if (parts.length !== 2) {
|
||||
throw new Error(`Invalid working directory: ${workingDirectory}`)
|
||||
}
|
||||
|
||||
const content = `#!/bin/sh -l
|
||||
rm "$0" # remove script after running
|
||||
mv /__w/_temp/_github_home /github/home && \
|
||||
mv /__w/_temp/_github_workflow /github/workflow && \
|
||||
mv /__w/_temp/_runner_file_commands /github/file_commands || true && \
|
||||
mv /__w/${parts.join('/')}/ /github/workspace && \
|
||||
cd /github/workspace && \
|
||||
exec ${environmentPrefix} ${entryPoint} ${
|
||||
entryPointArgs?.length ? entryPointArgs.join(' ') : ''
|
||||
}
|
||||
`
|
||||
const filename = `${uuidv4()}.sh`
|
||||
const entryPointPath = `${dst}/${filename}`
|
||||
core.debug(`Writing container step script to ${entryPointPath}`)
|
||||
fs.writeFileSync(entryPointPath, content)
|
||||
return {
|
||||
containerPath: `/__w/_temp/${filename}`,
|
||||
runnerPath: entryPointPath
|
||||
}
|
||||
}
|
||||
|
||||
function scriptEnv(envs?: { [key: string]: string }): string {
  if (!envs || !Object.entries(envs).length) {
    return ''
  }
  const envBuffer: string[] = []
  for (const [key, value] of Object.entries(envs)) {
    if (
      key.includes(`=`) ||
      key.includes(`'`) ||
      key.includes(`"`) ||
      key.includes(`$`)
    ) {
      throw new Error(
        `environment key ${key} is invalid - the key must not contain =, $, ', or "`
      )
    }
    envBuffer.push(
      `"${key}=${value
        .replace(/\\/g, '\\\\')
        .replace(/"/g, '\\"')
        .replace(/\$/g, '\\$')
        .replace(/\`/g, '\\\`')}"\`
    )
  }

  if (!envBuffer?.length) {
    return ''
  }

  return `env ${envBuffer.join(' ')} `
}
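
To make the escaping concrete: a value containing double quotes and a dollar sign comes out of scriptEnv with those characters backslash-escaped inside the env prefix. Illustrative expectation only, not a test from this repository:

// scriptEnv({ GREETING: 'say "hi" for $5' })
//   => 'env "GREETING=say \"hi\" for \$5" '   (note the trailing space)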
|
||||
|
||||
export function generateContainerName(image: string): string {
|
||||
const nameWithTag = image.split('/').pop()
|
||||
const name = nameWithTag?.split(':').at(0)
|
||||
const name = nameWithTag?.split(':')[0]
|
||||
|
||||
if (!name) {
|
||||
throw new Error(`Image definition '${image}' is invalid`)
|
||||
@@ -292,5 +288,18 @@ function mergeLists<T>(base?: T[], from?: T[]): T[] {
|
||||
}
|
||||
|
||||
export function fixArgs(args: string[]): string[] {
  // Preserve shell command strings passed via `sh -c` without re-tokenizing.
  // Retokenizing would split the script into multiple args, breaking `sh -c`.
  if (args.length >= 2 && args[0] === 'sh' && args[1] === '-c') {
    return args
  }
  return shlex.split(args.join(' '))
}
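
A quick illustration of both branches of fixArgs; the second result is what shlex-style splitting would produce, so treat the values as a sketch:

// `sh -c` scripts pass through untouched:
// fixArgs(['sh', '-c', 'echo "hello world" && ls -la'])
//   => ['sh', '-c', 'echo "hello world" && ls -la']
//
// Anything else is re-joined and tokenized shell-style:
// fixArgs(['echo', '"hello world"'])
//   => ['echo', 'hello world']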

export async function sleep(ms: number): Promise<void> {
  return new Promise(resolve => setTimeout(resolve, ms))
}

export function listDirAllCommand(dir: string): string {
  return `cd ${shlex.quote(dir)} && find . -not -path '*/_runner_hook_responses*' -exec stat -c '%s %n' {} \\;`
}
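
The listing command prints one `size name` pair per entry, and those lines feed the sorted-hash helpers above to fingerprint a directory tree on both sides of a copy. Rough illustration (paths and sizes invented):

// listDirAllCommand('/__w/_temp') expands to roughly:
//   cd /__w/_temp && find . -not -path '*/_runner_hook_responses*' -exec stat -c '%s %n' {} \;
// and prints lines such as:
//   4096 .
//   87 ./_runner_file_commands/save_state_abc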
|
||||
|
||||
@@ -3,6 +3,7 @@ import { cleanupJob, prepareJob } from '../src/hooks'
|
||||
import { RunnerInstanceLabel } from '../src/hooks/constants'
|
||||
import { namespace } from '../src/k8s'
|
||||
import { TestHelper } from './test-setup'
|
||||
import { PrepareJobArgs } from 'hooklib'
|
||||
|
||||
let testHelper: TestHelper
|
||||
|
||||
@@ -14,7 +15,10 @@ describe('Cleanup Job', () => {
|
||||
const prepareJobOutputFilePath = testHelper.createFile(
|
||||
'prepare-job-output.json'
|
||||
)
|
||||
await prepareJob(prepareJobData.args, prepareJobOutputFilePath)
|
||||
await prepareJob(
|
||||
prepareJobData.args as PrepareJobArgs,
|
||||
prepareJobOutputFilePath
|
||||
)
|
||||
})
|
||||
|
||||
afterEach(async () => {
|
||||
@@ -32,16 +36,12 @@ describe('Cleanup Job', () => {
|
||||
kc.loadFromDefault()
|
||||
const k8sApi = kc.makeApiClient(k8s.CoreV1Api)
|
||||
|
||||
const podList = await k8sApi.listNamespacedPod(
|
||||
namespace(),
|
||||
undefined,
|
||||
undefined,
|
||||
undefined,
|
||||
undefined,
|
||||
new RunnerInstanceLabel().toString()
|
||||
)
|
||||
const podList = await k8sApi.listNamespacedPod({
|
||||
namespace: namespace(),
|
||||
labelSelector: new RunnerInstanceLabel().toString()
|
||||
})
|
||||
|
||||
expect(podList.body.items.length).toBe(0)
|
||||
expect(podList.items.length).toBe(0)
|
||||
})
|
||||
|
||||
it('should have no runner linked secrets', async () => {
|
||||
@@ -51,15 +51,11 @@ describe('Cleanup Job', () => {
|
||||
kc.loadFromDefault()
|
||||
const k8sApi = kc.makeApiClient(k8s.CoreV1Api)
|
||||
|
||||
const secretList = await k8sApi.listNamespacedSecret(
|
||||
namespace(),
|
||||
undefined,
|
||||
undefined,
|
||||
undefined,
|
||||
undefined,
|
||||
new RunnerInstanceLabel().toString()
|
||||
)
|
||||
const secretList = await k8sApi.listNamespacedSecret({
|
||||
namespace: namespace(),
|
||||
labelSelector: new RunnerInstanceLabel().toString()
|
||||
})
|
||||
|
||||
expect(secretList.body.items.length).toBe(0)
|
||||
expect(secretList.items.length).toBe(0)
|
||||
})
|
||||
})
|
||||
|
||||
@@ -6,6 +6,7 @@ import {
|
||||
runScriptStep
|
||||
} from '../src/hooks'
|
||||
import { TestHelper } from './test-setup'
|
||||
import { RunContainerStepArgs, RunScriptStepArgs } from 'hooklib'
|
||||
|
||||
jest.useRealTimers()
|
||||
|
||||
@@ -25,6 +26,7 @@ describe('e2e', () => {
|
||||
afterEach(async () => {
|
||||
await testHelper.cleanup()
|
||||
})
|
||||
|
||||
it('should prepare job, run script step, run container step then cleanup without errors', async () => {
|
||||
await expect(
|
||||
prepareJob(prepareJobData.args, prepareJobOutputFilePath)
|
||||
@@ -36,13 +38,16 @@ describe('e2e', () => {
|
||||
const prepareJobOutputData = JSON.parse(prepareJobOutputJson.toString())
|
||||
|
||||
await expect(
|
||||
runScriptStep(scriptStepData.args, prepareJobOutputData.state, null)
|
||||
runScriptStep(
|
||||
scriptStepData.args as RunScriptStepArgs,
|
||||
prepareJobOutputData.state
|
||||
)
|
||||
).resolves.not.toThrow()
|
||||
|
||||
const runContainerStepData = testHelper.getRunContainerStepDefinition()
|
||||
|
||||
await expect(
|
||||
runContainerStep(runContainerStepData.args)
|
||||
runContainerStep(runContainerStepData.args as RunContainerStepArgs)
|
||||
).resolves.not.toThrow()
|
||||
|
||||
await expect(cleanupJob()).resolves.not.toThrow()
|
||||
|
||||
@@ -1,9 +1,8 @@
|
||||
import * as fs from 'fs'
|
||||
import { containerPorts, POD_VOLUME_NAME } from '../src/k8s'
|
||||
import { containerPorts } from '../src/k8s'
|
||||
import {
|
||||
containerVolumes,
|
||||
generateContainerName,
|
||||
writeEntryPointScript,
|
||||
writeRunScript,
|
||||
mergePodSpecWithOptions,
|
||||
mergeContainerWithOptions,
|
||||
readExtensionFromFile,
|
||||
@@ -27,91 +26,55 @@ describe('k8s utils', () => {
|
||||
|
||||
it('should not throw', () => {
|
||||
expect(() =>
|
||||
writeEntryPointScript(
|
||||
'/test',
|
||||
'sh',
|
||||
['-e', 'script.sh'],
|
||||
['/prepend/path'],
|
||||
{
|
||||
SOME_ENV: 'SOME_VALUE'
|
||||
}
|
||||
)
|
||||
writeRunScript('/test', 'sh', ['-e', 'script.sh'], ['/prepend/path'], {
|
||||
SOME_ENV: 'SOME_VALUE'
|
||||
})
|
||||
).not.toThrow()
|
||||
})
|
||||
|
||||
it('should throw if RUNNER_TEMP is not set', () => {
|
||||
delete process.env.RUNNER_TEMP
|
||||
expect(() =>
|
||||
writeEntryPointScript(
|
||||
'/test',
|
||||
'sh',
|
||||
['-e', 'script.sh'],
|
||||
['/prepend/path'],
|
||||
{
|
||||
SOME_ENV: 'SOME_VALUE'
|
||||
}
|
||||
)
|
||||
writeRunScript('/test', 'sh', ['-e', 'script.sh'], ['/prepend/path'], {
|
||||
SOME_ENV: 'SOME_VALUE'
|
||||
})
|
||||
).toThrow()
|
||||
})
|
||||
|
||||
it('should throw if environment variable name contains double quote', () => {
|
||||
expect(() =>
|
||||
writeEntryPointScript(
|
||||
'/test',
|
||||
'sh',
|
||||
['-e', 'script.sh'],
|
||||
['/prepend/path'],
|
||||
{
|
||||
'SOME"_ENV': 'SOME_VALUE'
|
||||
}
|
||||
)
|
||||
writeRunScript('/test', 'sh', ['-e', 'script.sh'], ['/prepend/path'], {
|
||||
'SOME"_ENV': 'SOME_VALUE'
|
||||
})
|
||||
).toThrow()
|
||||
})
|
||||
|
||||
it('should throw if environment variable name contains =', () => {
|
||||
expect(() =>
|
||||
writeEntryPointScript(
|
||||
'/test',
|
||||
'sh',
|
||||
['-e', 'script.sh'],
|
||||
['/prepend/path'],
|
||||
{
|
||||
'SOME=ENV': 'SOME_VALUE'
|
||||
}
|
||||
)
|
||||
writeRunScript('/test', 'sh', ['-e', 'script.sh'], ['/prepend/path'], {
|
||||
'SOME=ENV': 'SOME_VALUE'
|
||||
})
|
||||
).toThrow()
|
||||
})
|
||||
|
||||
it('should throw if environment variable name contains single quote', () => {
|
||||
expect(() =>
|
||||
writeEntryPointScript(
|
||||
'/test',
|
||||
'sh',
|
||||
['-e', 'script.sh'],
|
||||
['/prepend/path'],
|
||||
{
|
||||
"SOME'_ENV": 'SOME_VALUE'
|
||||
}
|
||||
)
|
||||
writeRunScript('/test', 'sh', ['-e', 'script.sh'], ['/prepend/path'], {
|
||||
"SOME'_ENV": 'SOME_VALUE'
|
||||
})
|
||||
).toThrow()
|
||||
})
|
||||
|
||||
it('should throw if environment variable name contains dollar', () => {
|
||||
expect(() =>
|
||||
writeEntryPointScript(
|
||||
'/test',
|
||||
'sh',
|
||||
['-e', 'script.sh'],
|
||||
['/prepend/path'],
|
||||
{
|
||||
SOME_$_ENV: 'SOME_VALUE'
|
||||
}
|
||||
)
|
||||
writeRunScript('/test', 'sh', ['-e', 'script.sh'], ['/prepend/path'], {
|
||||
SOME_$_ENV: 'SOME_VALUE'
|
||||
})
|
||||
).toThrow()
|
||||
})
|
||||
|
||||
it('should escape double quote, dollar and backslash in environment variable values', () => {
|
||||
const { runnerPath } = writeEntryPointScript(
|
||||
const { runnerPath } = writeRunScript(
|
||||
'/test',
|
||||
'sh',
|
||||
['-e', 'script.sh'],
|
||||
@@ -130,7 +93,7 @@ describe('k8s utils', () => {
|
||||
})
|
||||
|
||||
it('should return object with containerPath and runnerPath', () => {
|
||||
const { containerPath, runnerPath } = writeEntryPointScript(
|
||||
const { containerPath, runnerPath } = writeRunScript(
|
||||
'/test',
|
||||
'sh',
|
||||
['-e', 'script.sh'],
|
||||
@@ -145,7 +108,7 @@ describe('k8s utils', () => {
|
||||
})
|
||||
|
||||
it('should write entrypoint path and the file should exist', () => {
|
||||
const { runnerPath } = writeEntryPointScript(
|
||||
const { runnerPath } = writeRunScript(
|
||||
'/test',
|
||||
'sh',
|
||||
['-e', 'script.sh'],
|
||||
@@ -168,88 +131,6 @@ describe('k8s utils', () => {
|
||||
await testHelper.cleanup()
|
||||
})
|
||||
|
||||
it('should throw if container action and GITHUB_WORKSPACE env is not set', () => {
|
||||
delete process.env.GITHUB_WORKSPACE
|
||||
expect(() => containerVolumes([], true, true)).toThrow()
|
||||
expect(() => containerVolumes([], false, true)).toThrow()
|
||||
})
|
||||
|
||||
it('should always have work mount', () => {
|
||||
let volumes = containerVolumes([], true, true)
|
||||
expect(volumes.find(e => e.mountPath === '/__w')).toBeTruthy()
|
||||
volumes = containerVolumes([], true, false)
|
||||
expect(volumes.find(e => e.mountPath === '/__w')).toBeTruthy()
|
||||
volumes = containerVolumes([], false, true)
|
||||
expect(volumes.find(e => e.mountPath === '/__w')).toBeTruthy()
|
||||
volumes = containerVolumes([], false, false)
|
||||
expect(volumes.find(e => e.mountPath === '/__w')).toBeTruthy()
|
||||
})
|
||||
|
||||
it('should always have /github/workflow mount if working on container job or container action', () => {
|
||||
let volumes = containerVolumes([], true, true)
|
||||
expect(volumes.find(e => e.mountPath === '/github/workflow')).toBeTruthy()
|
||||
volumes = containerVolumes([], true, false)
|
||||
expect(volumes.find(e => e.mountPath === '/github/workflow')).toBeTruthy()
|
||||
volumes = containerVolumes([], false, true)
|
||||
expect(volumes.find(e => e.mountPath === '/github/workflow')).toBeTruthy()
|
||||
|
||||
volumes = containerVolumes([], false, false)
|
||||
expect(
|
||||
volumes.find(e => e.mountPath === '/github/workflow')
|
||||
).toBeUndefined()
|
||||
})
|
||||
|
||||
it('should have container action volumes', () => {
|
||||
let volumes = containerVolumes([], true, true)
|
||||
let workspace = volumes.find(e => e.mountPath === '/github/workspace')
|
||||
let fileCommands = volumes.find(
|
||||
e => e.mountPath === '/github/file_commands'
|
||||
)
|
||||
expect(workspace).toBeTruthy()
|
||||
expect(workspace?.subPath).toBe('repo/repo')
|
||||
expect(fileCommands).toBeTruthy()
|
||||
expect(fileCommands?.subPath).toBe('_temp/_runner_file_commands')
|
||||
|
||||
volumes = containerVolumes([], false, true)
|
||||
workspace = volumes.find(e => e.mountPath === '/github/workspace')
|
||||
fileCommands = volumes.find(e => e.mountPath === '/github/file_commands')
|
||||
expect(workspace).toBeTruthy()
|
||||
expect(workspace?.subPath).toBe('repo/repo')
|
||||
expect(fileCommands).toBeTruthy()
|
||||
expect(fileCommands?.subPath).toBe('_temp/_runner_file_commands')
|
||||
})
|
||||
|
||||
it('should have externals, github home mounts if job container', () => {
|
||||
const volumes = containerVolumes()
|
||||
expect(volumes.find(e => e.mountPath === '/__e')).toBeTruthy()
|
||||
expect(volumes.find(e => e.mountPath === '/github/home')).toBeTruthy()
|
||||
})
|
||||
|
||||
it('should throw if user volume source volume path is not in workspace', () => {
|
||||
expect(() =>
|
||||
containerVolumes(
|
||||
[
|
||||
{
|
||||
sourceVolumePath: '/outside/of/workdir'
|
||||
}
|
||||
],
|
||||
true,
|
||||
false
|
||||
)
|
||||
).toThrow()
|
||||
})
|
||||
|
||||
it(`all volumes should have name ${POD_VOLUME_NAME}`, () => {
|
||||
let volumes = containerVolumes([], true, true)
|
||||
expect(volumes.every(e => e.name === POD_VOLUME_NAME)).toBeTruthy()
|
||||
volumes = containerVolumes([], true, false)
|
||||
expect(volumes.every(e => e.name === POD_VOLUME_NAME)).toBeTruthy()
|
||||
volumes = containerVolumes([], false, true)
|
||||
expect(volumes.every(e => e.name === POD_VOLUME_NAME)).toBeTruthy()
|
||||
volumes = containerVolumes([], false, false)
|
||||
expect(volumes.every(e => e.name === POD_VOLUME_NAME)).toBeTruthy()
|
||||
})
|
||||
|
||||
it('should parse container ports', () => {
|
||||
const tt = [
|
||||
{
|
||||
@@ -394,7 +275,7 @@ metadata:
|
||||
spec:
|
||||
containers:
|
||||
- name: test
|
||||
image: node:14.16
|
||||
image: node:22
|
||||
- name: job
|
||||
image: ubuntu:latest`
|
||||
)
|
||||
@@ -407,7 +288,7 @@ spec:
|
||||
|
||||
it('should merge container spec', () => {
|
||||
const base = {
|
||||
image: 'node:14.16',
|
||||
image: 'node:22',
|
||||
name: 'test',
|
||||
env: [
|
||||
{
|
||||
@@ -462,7 +343,7 @@ spec:
|
||||
const base = {
|
||||
containers: [
|
||||
{
|
||||
image: 'node:14.16',
|
||||
image: 'node:22',
|
||||
name: 'test',
|
||||
env: [
|
||||
{
|
||||
|
||||
@@ -3,15 +3,9 @@ import * as path from 'path'
|
||||
import { cleanupJob } from '../src/hooks'
|
||||
import { createContainerSpec, prepareJob } from '../src/hooks/prepare-job'
|
||||
import { TestHelper } from './test-setup'
|
||||
import {
|
||||
ENV_HOOK_TEMPLATE_PATH,
|
||||
ENV_USE_KUBE_SCHEDULER,
|
||||
generateContainerName,
|
||||
readExtensionFromFile
|
||||
} from '../src/k8s/utils'
|
||||
import { getPodByName } from '../src/k8s'
|
||||
import { ENV_HOOK_TEMPLATE_PATH, generateContainerName } from '../src/k8s/utils'
|
||||
import { execPodStep, getPodByName } from '../src/k8s'
|
||||
import { V1Container } from '@kubernetes/client-node'
|
||||
import * as yaml from 'js-yaml'
|
||||
import { JOB_CONTAINER_NAME } from '../src/hooks/constants'
|
||||
|
||||
jest.useRealTimers()
|
||||
@@ -47,32 +41,82 @@ describe('Prepare job', () => {
|
||||
})
|
||||
|
||||
it('should prepare job with absolute path for userVolumeMount', async () => {
|
||||
const userVolumeMount = path.join(
|
||||
process.env.GITHUB_WORKSPACE as string,
|
||||
'myvolume'
|
||||
)
|
||||
fs.mkdirSync(userVolumeMount, { recursive: true })
|
||||
fs.writeFileSync(path.join(userVolumeMount, 'file.txt'), 'hello')
|
||||
prepareJobData.args.container.userMountVolumes = [
|
||||
{
|
||||
sourceVolumePath: path.join(
|
||||
process.env.GITHUB_WORKSPACE as string,
|
||||
'/myvolume'
|
||||
),
|
||||
targetVolumePath: '/volume_mount',
|
||||
sourceVolumePath: userVolumeMount,
|
||||
targetVolumePath: '/__w/myvolume',
|
||||
readOnly: false
|
||||
}
|
||||
]
|
||||
await expect(
|
||||
prepareJob(prepareJobData.args, prepareJobOutputFilePath)
|
||||
).resolves.not.toThrow()
|
||||
|
||||
const content = JSON.parse(
|
||||
fs.readFileSync(prepareJobOutputFilePath).toString()
|
||||
)
|
||||
|
||||
await execPodStep(
|
||||
['sh', '-c', '[ "$(cat /__w/myvolume/file.txt)" = "hello" ] || exit 5'],
|
||||
content!.state!.jobPod,
|
||||
JOB_CONTAINER_NAME
|
||||
).then(output => {
|
||||
expect(output).toBe(0)
|
||||
})
|
||||
})
|
||||
|
||||
it('should throw an exception if the user volume mount is absolute path outside of GITHUB_WORKSPACE', async () => {
|
||||
prepareJobData.args.container.userMountVolumes = [
|
||||
{
|
||||
sourceVolumePath: '/somewhere/not/in/gh-workspace',
|
||||
targetVolumePath: '/containermount',
|
||||
readOnly: false
|
||||
}
|
||||
]
|
||||
await expect(
|
||||
prepareJob(prepareJobData.args, prepareJobOutputFilePath)
|
||||
).rejects.toThrow()
|
||||
it('should prepare job with envs CI and GITHUB_ACTIONS', async () => {
|
||||
await prepareJob(prepareJobData.args, prepareJobOutputFilePath)
|
||||
|
||||
const content = JSON.parse(
|
||||
fs.readFileSync(prepareJobOutputFilePath).toString()
|
||||
)
|
||||
|
||||
const got = await getPodByName(content.state.jobPod)
|
||||
expect(got.spec?.containers[0].env).toEqual(
|
||||
expect.arrayContaining([
|
||||
{ name: 'CI', value: 'true' },
|
||||
{ name: 'GITHUB_ACTIONS', value: 'true' }
|
||||
])
|
||||
)
|
||||
expect(got.spec?.containers[1].env).toEqual(
|
||||
expect.arrayContaining([
|
||||
{ name: 'CI', value: 'true' },
|
||||
{ name: 'GITHUB_ACTIONS', value: 'true' }
|
||||
])
|
||||
)
|
||||
})
|
||||
|
||||
it('should not override CI env var if already set', async () => {
|
||||
prepareJobData.args.container.environmentVariables = {
|
||||
CI: 'false'
|
||||
}
|
||||
|
||||
await prepareJob(prepareJobData.args, prepareJobOutputFilePath)
|
||||
|
||||
const content = JSON.parse(
|
||||
fs.readFileSync(prepareJobOutputFilePath).toString()
|
||||
)
|
||||
|
||||
const got = await getPodByName(content.state.jobPod)
|
||||
expect(got.spec?.containers[0].env).toEqual(
|
||||
expect.arrayContaining([
|
||||
{ name: 'CI', value: 'false' },
|
||||
{ name: 'GITHUB_ACTIONS', value: 'true' }
|
||||
])
|
||||
)
|
||||
expect(got.spec?.containers[1].env).toEqual(
|
||||
expect.arrayContaining([
|
||||
{ name: 'CI', value: 'true' },
|
||||
{ name: 'GITHUB_ACTIONS', value: 'true' }
|
||||
])
|
||||
)
|
||||
})
|
||||
|
||||
it('should not run prepare job without the job container', async () => {
|
||||
@@ -120,12 +164,11 @@ describe('Prepare job', () => {
|
||||
|
||||
expect(got.metadata?.annotations?.['annotated-by']).toBe('extension')
|
||||
expect(got.metadata?.labels?.['labeled-by']).toBe('extension')
|
||||
expect(got.spec?.securityContext?.runAsUser).toBe(1000)
|
||||
expect(got.spec?.securityContext?.runAsGroup).toBe(3000)
|
||||
expect(got.spec?.restartPolicy).toBe('Never')
|
||||
|
||||
// job container
|
||||
expect(got.spec?.containers[0].name).toBe(JOB_CONTAINER_NAME)
|
||||
expect(got.spec?.containers[0].image).toBe('node:14.16')
|
||||
expect(got.spec?.containers[0].image).toBe('node:22')
|
||||
expect(got.spec?.containers[0].command).toEqual(['sh'])
|
||||
expect(got.spec?.containers[0].args).toEqual(['-c', 'sleep 50'])
|
||||
|
||||
@@ -133,9 +176,13 @@ describe('Prepare job', () => {
|
||||
expect(got.spec?.containers[1].image).toBe('redis')
|
||||
expect(got.spec?.containers[1].command).toBeFalsy()
|
||||
expect(got.spec?.containers[1].args).toBeFalsy()
|
||||
expect(got.spec?.containers[1].env).toEqual([
|
||||
{ name: 'ENV2', value: 'value2' }
|
||||
])
|
||||
expect(got.spec?.containers[1].env).toEqual(
|
||||
expect.arrayContaining([
|
||||
{ name: 'CI', value: 'true' },
|
||||
{ name: 'GITHUB_ACTIONS', value: 'true' },
|
||||
{ name: 'ENV2', value: 'value2' }
|
||||
])
|
||||
)
|
||||
expect(got.spec?.containers[1].resources).toEqual({
|
||||
requests: { memory: '1Mi', cpu: '1' },
|
||||
limits: { memory: '1Gi', cpu: '2' }
|
||||
@@ -147,15 +194,24 @@ describe('Prepare job', () => {
|
||||
expect(got.spec?.containers[2].args).toEqual(['-c', 'sleep 60'])
|
||||
})
|
||||
|
||||
it('should not throw exception using kube scheduler', async () => {
|
||||
// only for ReadWriteMany volumes or single node cluster
|
||||
process.env[ENV_USE_KUBE_SCHEDULER] = 'true'
|
||||
it('should put only job and services in output context file', async () => {
|
||||
process.env[ENV_HOOK_TEMPLATE_PATH] = path.join(
|
||||
__dirname,
|
||||
'../../../examples/extension.yaml'
|
||||
)
|
||||
|
||||
await expect(
|
||||
prepareJob(prepareJobData.args, prepareJobOutputFilePath)
|
||||
).resolves.not.toThrow()
|
||||
|
||||
delete process.env[ENV_USE_KUBE_SCHEDULER]
|
||||
const content = JSON.parse(
|
||||
fs.readFileSync(prepareJobOutputFilePath).toString()
|
||||
)
|
||||
|
||||
expect(content.state.jobPod).toBeTruthy()
|
||||
expect(content.context.container).toBeTruthy()
|
||||
expect(content.context.services).toBeTruthy()
|
||||
expect(content.context.services.length).toBe(1)
|
||||
})
|
||||
|
||||
test.each([undefined, null, []])(
|
||||
@@ -171,4 +227,20 @@ describe('Prepare job', () => {
|
||||
expect(() => content.context.services[0].image).not.toThrow()
|
||||
}
|
||||
)
|
||||
|
||||
it('should prepare job with container with non-root user', async () => {
|
||||
prepareJobData.args!.container!.image =
|
||||
'ghcr.io/actions/actions-runner:latest' // known to use user 1001
|
||||
await expect(
|
||||
prepareJob(prepareJobData.args, prepareJobOutputFilePath)
|
||||
).resolves.not.toThrow()
|
||||
|
||||
const content = JSON.parse(
|
||||
fs.readFileSync(prepareJobOutputFilePath).toString()
|
||||
)
|
||||
expect(content.state.jobPod).toBeTruthy()
|
||||
expect(content.context.container.image).toBe(
|
||||
'ghcr.io/actions/actions-runner:latest'
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
@@ -1,4 +1,4 @@
import { runContainerStep } from '../src/hooks'
import { prepareJob, runContainerStep } from '../src/hooks'
import { TestHelper } from './test-setup'
import { ENV_HOOK_TEMPLATE_PATH } from '../src/k8s/utils'
import * as fs from 'fs'
@@ -10,11 +10,16 @@ jest.useRealTimers()
let testHelper: TestHelper

let runContainerStepData: any
let prepareJobData: any
let prepareJobOutputFilePath: string

describe('Run container step', () => {
beforeEach(async () => {
testHelper = new TestHelper()
await testHelper.initialize()
prepareJobData = testHelper.getPrepareJobDefinition()
prepareJobOutputFilePath = testHelper.createFile('prepare-job-output.json')
await prepareJob(prepareJobData.args, prepareJobOutputFilePath)
runContainerStepData = testHelper.getRunContainerStepDefinition()
})

@@ -22,11 +27,6 @@ describe('Run container step', () => {
await testHelper.cleanup()
})

it('should not throw', async () => {
const exitCode = await runContainerStep(runContainerStepData.args)
expect(exitCode).toBe(0)
})

it('should run pod with extensions applied', async () => {
const extension = {
metadata: {
@@ -42,7 +42,7 @@ describe('Run container step', () => {
{
name: JOB_CONTAINER_EXTENSION_NAME,
command: ['sh'],
args: ['-c', 'echo test']
args: ['-c', 'sleep 10000']
},
{
name: 'side-container',
@@ -51,11 +51,7 @@ describe('Run container step', () => {
args: ['-c', 'echo test']
}
],
restartPolicy: 'Never',
securityContext: {
runAsUser: 1000,
runAsGroup: 3000
}
restartPolicy: 'Never'
}
}

@@ -78,4 +74,15 @@ describe('Run container step', () => {
runContainerStep(runContainerStepData.args)
).resolves.not.toThrow()
})

it('should run container step with envs CI and GITHUB_ACTIONS', async () => {
runContainerStepData.args.entryPoint = 'bash'
runContainerStepData.args.entryPointArgs = [
'-c',
"'if [[ -z $GITHUB_ACTIONS ]] || [[ -z $CI ]]; then exit 1; fi'"
]
await expect(
runContainerStep(runContainerStepData.args)
).resolves.not.toThrow()
})
})
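The new env-check test above overrides the step's entryPoint and entryPointArgs so the script exits non-zero when CI or GITHUB_ACTIONS is missing inside the step container. A short sketch of the same pattern as a standalone helper, assuming the TestHelper and hook imports shown in this diff (the function name is illustrative):

```typescript
// Sketch only: reuses the calls shown above; checkRunnerEnvsInStep is hypothetical.
import { prepareJob, runContainerStep } from '../src/hooks'
import { TestHelper } from './test-setup'

async function checkRunnerEnvsInStep(): Promise<number> {
  const testHelper = new TestHelper()
  await testHelper.initialize()
  try {
    // A job pod must be prepared first, as in the beforeEach above.
    const prepareJobData = testHelper.getPrepareJobDefinition()
    const outputFile = testHelper.createFile('prepare-job-output.json')
    await prepareJob(prepareJobData.args, outputFile)

    const stepData = testHelper.getRunContainerStepDefinition()
    stepData.args.entryPoint = 'bash'
    stepData.args.entryPointArgs = [
      '-c',
      "'if [[ -z $GITHUB_ACTIONS ]] || [[ -z $CI ]]; then exit 1; fi'"
    ]
    // Returns the container's exit code (0 when both variables are present).
    return await runContainerStep(stepData.args)
  } finally {
    await testHelper.cleanup()
  }
}
```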
@@ -1,6 +1,7 @@
import * as fs from 'fs'
import { cleanupJob, prepareJob, runScriptStep } from '../src/hooks'
import { TestHelper } from './test-setup'
import { PrepareJobArgs, RunScriptStepArgs } from 'hooklib'

jest.useRealTimers()

@@ -8,7 +9,9 @@ let testHelper: TestHelper

let prepareJobOutputData: any

let runScriptStepDefinition
let runScriptStepDefinition: {
args: RunScriptStepArgs
}

describe('Run script step', () => {
beforeEach(async () => {
@@ -19,9 +22,14 @@ describe('Run script step', () => {
)

const prepareJobData = testHelper.getPrepareJobDefinition()
runScriptStepDefinition = testHelper.getRunScriptStepDefinition()
runScriptStepDefinition = testHelper.getRunScriptStepDefinition() as {
args: RunScriptStepArgs
}

await prepareJob(prepareJobData.args, prepareJobOutputFilePath)
await prepareJob(
prepareJobData.args as PrepareJobArgs,
prepareJobOutputFilePath
)
const outputContent = fs.readFileSync(prepareJobOutputFilePath)
prepareJobOutputData = JSON.parse(outputContent.toString())
})
@@ -37,22 +45,14 @@ describe('Run script step', () => {

it('should not throw an exception', async () => {
await expect(
runScriptStep(
runScriptStepDefinition.args,
prepareJobOutputData.state,
null
)
runScriptStep(runScriptStepDefinition.args, prepareJobOutputData.state)
).resolves.not.toThrow()
})

it('should fail if the working directory does not exist', async () => {
runScriptStepDefinition.args.workingDirectory = '/foo/bar'
await expect(
runScriptStep(
runScriptStepDefinition.args,
prepareJobOutputData.state,
null
)
runScriptStep(runScriptStepDefinition.args, prepareJobOutputData.state)
).rejects.toThrow()
})

@@ -64,16 +64,12 @@ describe('Run script step', () => {
"'if [[ -z $NODE_ENV ]]; then exit 1; fi'"
]
await expect(
runScriptStep(
runScriptStepDefinition.args,
prepareJobOutputData.state,
null
)
runScriptStep(runScriptStepDefinition.args, prepareJobOutputData.state)
).resolves.not.toThrow()
})

it('Should have path variable changed in container with prepend path string', async () => {
runScriptStepDefinition.args.prependPath = '/some/path'
runScriptStepDefinition.args.prependPath = ['/some/path']
runScriptStepDefinition.args.entryPoint = '/bin/bash'
runScriptStepDefinition.args.entryPointArgs = [
'-c',
@@ -81,11 +77,7 @@ describe('Run script step', () => {
]

await expect(
runScriptStep(
runScriptStepDefinition.args,
prepareJobOutputData.state,
null
)
runScriptStep(runScriptStepDefinition.args, prepareJobOutputData.state)
).resolves.not.toThrow()
})

@@ -103,11 +95,7 @@ describe('Run script step', () => {
]

await expect(
runScriptStep(
runScriptStepDefinition.args,
prepareJobOutputData.state,
null
)
runScriptStep(runScriptStepDefinition.args, prepareJobOutputData.state)
).resolves.not.toThrow()
})

@@ -122,11 +110,7 @@ describe('Run script step', () => {
]

await expect(
runScriptStep(
runScriptStepDefinition.args,
prepareJobOutputData.state,
null
)
runScriptStep(runScriptStepDefinition.args, prepareJobOutputData.state)
).resolves.not.toThrow()
})
})
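Every call site in this file drops the trailing null argument, so runScriptStep now takes only the step args and the prepared-job state. A minimal sketch of the updated call shape, assuming the imports and TestHelper methods used above (the wrapper function is illustrative, not part of the diff):

```typescript
// Sketch only: shows the two-argument runScriptStep call used after this change.
import * as fs from 'fs'
import { prepareJob, runScriptStep } from '../src/hooks'
import { PrepareJobArgs, RunScriptStepArgs } from 'hooklib'
import { TestHelper } from './test-setup'

async function runExampleScriptStep(testHelper: TestHelper): Promise<void> {
  // Prepare the job pod and read back the state it writes to the output file.
  const prepareJobData = testHelper.getPrepareJobDefinition()
  const outputFile = testHelper.createFile('prepare-job-output.json')
  await prepareJob(prepareJobData.args as PrepareJobArgs, outputFile)
  const state = JSON.parse(fs.readFileSync(outputFile).toString()).state

  const step = testHelper.getRunScriptStepDefinition() as {
    args: RunScriptStepArgs
  }
  // The third parameter is gone: only the step args and the job state are passed.
  await runScriptStep(step.args, state)
}
```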
@@ -9,87 +9,97 @@ const kc = new k8s.KubeConfig()
kc.loadFromDefault()

const k8sApi = kc.makeApiClient(k8s.CoreV1Api)
const k8sStorageApi = kc.makeApiClient(k8s.StorageV1Api)

export class TestHelper {
private tempDirPath: string
private podName: string
private runnerWorkdir: string
private runnerTemp: string

constructor() {
this.tempDirPath = `${__dirname}/_temp/runner`
this.runnerWorkdir = `${this.tempDirPath}/_work`
this.runnerTemp = `${this.tempDirPath}/_work/_temp`
this.podName = uuidv4().replace(/-/g, '')
}

public async initialize(): Promise<void> {
async initialize(): Promise<void> {
process.env['ACTIONS_RUNNER_POD_NAME'] = `${this.podName}`
process.env['RUNNER_WORKSPACE'] = `${this.tempDirPath}/_work/repo`
process.env['RUNNER_TEMP'] = `${this.tempDirPath}/_work/_temp`
process.env['GITHUB_WORKSPACE'] = `${this.tempDirPath}/_work/repo/repo`
process.env['RUNNER_WORKSPACE'] = `${this.runnerWorkdir}/repo`
process.env['RUNNER_TEMP'] = `${this.runnerTemp}`
process.env['GITHUB_WORKSPACE'] = `${this.runnerWorkdir}/repo/repo`
process.env['ACTIONS_RUNNER_KUBERNETES_NAMESPACE'] = 'default'

fs.mkdirSync(`${this.tempDirPath}/_work/repo/repo`, { recursive: true })
fs.mkdirSync(`${this.runnerWorkdir}/repo/repo`, { recursive: true })
fs.mkdirSync(`${this.tempDirPath}/externals`, { recursive: true })
fs.mkdirSync(process.env.RUNNER_TEMP, { recursive: true })
fs.mkdirSync(this.runnerTemp, { recursive: true })
fs.mkdirSync(`${this.runnerTemp}/_github_workflow`, { recursive: true })
fs.mkdirSync(`${this.runnerTemp}/_github_home`, { recursive: true })
fs.mkdirSync(`${this.runnerTemp}/_runner_file_commands`, {
recursive: true
})

fs.copyFileSync(
path.resolve(`${__dirname}/../../../examples/example-script.sh`),
`${process.env.RUNNER_TEMP}/example-script.sh`
`${this.runnerTemp}/example-script.sh`
)

await this.cleanupK8sResources()
try {
await this.createTestVolume()
await this.createTestJobPod()
} catch (e) {
console.log(e)
}
}

public async cleanup(): Promise<void> {
async cleanup(): Promise<void> {
try {
await this.cleanupK8sResources()
fs.rmSync(this.tempDirPath, { recursive: true })
} catch {}
} catch {
// Ignore errors during cleanup
}
}
public async cleanupK8sResources() {

async cleanupK8sResources(): Promise<void> {
await k8sApi
.deleteNamespacedPersistentVolumeClaim(
`${this.podName}-work`,
'default',
undefined,
undefined,
0
)
.catch(e => {})
await k8sApi.deletePersistentVolume(`${this.podName}-pv`).catch(e => {})
await k8sStorageApi.deleteStorageClass('local-storage').catch(e => {})
.deleteNamespacedPod({
name: this.podName,
namespace: 'default',
gracePeriodSeconds: 0
})
.catch((e: k8s.ApiException<any>) => {
if (e.code !== 404) {
console.error(JSON.stringify(e))
}
})
await k8sApi
.deleteNamespacedPod(this.podName, 'default', undefined, undefined, 0)
.catch(e => {})
await k8sApi
.deleteNamespacedPod(
`${this.podName}-workflow`,
'default',
undefined,
undefined,
0
)
.catch(e => {})
.deleteNamespacedPod({
name: `${this.podName}-workflow`,
namespace: 'default',
gracePeriodSeconds: 0
})
.catch((e: k8s.ApiException<any>) => {
if (e.code !== 404) {
console.error(JSON.stringify(e))
}
})
}
public createFile(fileName?: string): string {
createFile(fileName?: string): string {
const filePath = `${this.tempDirPath}/${fileName || uuidv4()}`
fs.writeFileSync(filePath, '')
return filePath
}

public removeFile(fileName: string): void {
removeFile(fileName: string): void {
const filePath = `${this.tempDirPath}/${fileName}`
fs.rmSync(filePath)
}

public async createTestJobPod() {
async createTestJobPod(): Promise<void> {
const container = {
name: 'nginx',
image: 'nginx:latest',
name: 'runner',
image: 'ghcr.io/actions/actions-runner:latest',
imagePullPolicy: 'IfNotPresent'
} as k8s.V1Container

@@ -99,59 +109,18 @@ export class TestHelper {
},
spec: {
restartPolicy: 'Never',
containers: [container]
containers: [container],
securityContext: {
runAsUser: 1001,
runAsGroup: 1001,
fsGroup: 1001
}
}
} as k8s.V1Pod
await k8sApi.createNamespacedPod('default', pod)
await k8sApi.createNamespacedPod({ namespace: 'default', body: pod })
}

public async createTestVolume() {
var sc: k8s.V1StorageClass = {
metadata: {
name: 'local-storage'
},
provisioner: 'kubernetes.io/no-provisioner',
volumeBindingMode: 'Immediate'
}
await k8sStorageApi.createStorageClass(sc)

var volume: k8s.V1PersistentVolume = {
metadata: {
name: `${this.podName}-pv`
},
spec: {
storageClassName: 'local-storage',
capacity: {
storage: '2Gi'
},
volumeMode: 'Filesystem',
accessModes: ['ReadWriteOnce'],
hostPath: {
path: `${this.tempDirPath}/_work`
}
}
}
await k8sApi.createPersistentVolume(volume)
var volumeClaim: k8s.V1PersistentVolumeClaim = {
metadata: {
name: `${this.podName}-work`
},
spec: {
accessModes: ['ReadWriteOnce'],
volumeMode: 'Filesystem',
storageClassName: 'local-storage',
volumeName: `${this.podName}-pv`,
resources: {
requests: {
storage: '1Gi'
}
}
}
}
await k8sApi.createNamespacedPersistentVolumeClaim('default', volumeClaim)
}

public getPrepareJobDefinition(): HookData {
getPrepareJobDefinition(): HookData {
const prepareJob = JSON.parse(
fs.readFileSync(
path.resolve(__dirname + '/../../../examples/prepare-job.json'),
@@ -168,7 +137,7 @@ export class TestHelper {
return prepareJob
}

public getRunScriptStepDefinition(): HookData {
getRunScriptStepDefinition(): HookData {
const runScriptStep = JSON.parse(
fs.readFileSync(
path.resolve(__dirname + '/../../../examples/run-script-step.json'),
@@ -180,7 +149,7 @@ export class TestHelper {
return runScriptStep
}

public getRunContainerStepDefinition(): HookData {
getRunContainerStepDefinition(): HookData {
const runContainerStep = JSON.parse(
fs.readFileSync(
path.resolve(__dirname + '/../../../examples/run-container-step.json'),
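The test-setup changes above switch the Kubernetes client calls from positional parameters to single request objects and make cleanup tolerate already-deleted resources. A brief sketch of that calling convention, assuming a @kubernetes/client-node version whose CoreV1Api accepts request objects as shown in the diff (the pod name is a placeholder):

```typescript
// Sketch only: mirrors the object-style client calls and 404-tolerant cleanup
// used in the diff above; 'example-pod' is a placeholder name.
import * as k8s from '@kubernetes/client-node'

const kc = new k8s.KubeConfig()
kc.loadFromDefault()
const coreApi = kc.makeApiClient(k8s.CoreV1Api)

async function deletePodIgnoringNotFound(podName: string): Promise<void> {
  await coreApi
    .deleteNamespacedPod({
      name: podName,
      namespace: 'default',
      gracePeriodSeconds: 0
    })
    .catch((e: k8s.ApiException<any>) => {
      // A 404 just means the pod is already gone; anything else is worth logging.
      if (e.code !== 404) {
        console.error(JSON.stringify(e))
      }
    })
}

// e.g. await deletePodIgnoringNotFound('example-pod')
```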
@@ -5,7 +5,8 @@
"outDir": "./lib",
"rootDir": "./src"
},
"esModuleInterop": true, /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */
"include": [
"./src"
"src/**/*",
]
}
}
packages/k8s/tsconfig.test.json (new file)
@@ -0,0 +1,6 @@
{
"compilerOptions": {
"allowJs": true
},
"extends": "./tsconfig.json"
}
@@ -1,16 +1,15 @@
## Features

- Extend the hook extension to allow service container modifications [#134]
- k8s: remove dependency on the runner's volume [#244]

## Bugs

- Mount /github/workflow to docker action pods [#137]
- Read logs from failed job container [#135]
- docker: fix readOnly volumes in createContainer [#236]

## Misc

- Bump jose from 4.11.4 to 4.15.5 [#142]
- ADR-0134 - superseding ADR-0096 [#136]
- bump all dependencies [#234] [#240] [#239] [#238]
- bump actions [#254]

## SHA-256 Checksums