Compare commits


7 Commits

Author        SHA1        Message                                             Date
Nikola Jokic  70841b6972  bump docker dep as well                             2025-04-16 14:43:46 +02:00
Nikola Jokic  1c2ae5d20a  rework                                              2025-04-16 14:25:04 +02:00
Nikola Jokic  928f63d88a  exclude eslint.config.js                            2025-04-16 13:42:50 +02:00
Nikola Jokic  3bda7ef21e  wip                                                 2025-04-16 10:26:22 +02:00
Nikola Jokic  3b0e87c9a7  fmt                                                 2025-04-15 14:53:53 +02:00
Nikola Jokic  a7349e7d70  fix errors and bump client node to stable version   2025-04-15 14:53:21 +02:00
Nikola Jokic  ff583c8917  bump all dependencies                               2025-04-15 14:29:29 +02:00
46 changed files with 4291 additions and 9610 deletions

View File

@@ -1,28 +0,0 @@
version: 2
updates:
# Group updates into a single PR per workspace package
- package-ecosystem: npm
directory: "/packages/docker"
schedule:
interval: weekly
groups:
all-dependencies:
patterns:
- "*"
- package-ecosystem: npm
directory: "/packages/hooklib"
schedule:
interval: weekly
groups:
all-dependencies:
patterns:
- "*"
- package-ecosystem: npm
directory: "/packages/k8s"
schedule:
interval: weekly
groups:
all-dependencies:
patterns:
- "*"

View File

@@ -6,50 +6,14 @@ on:
     paths-ignore:
       - '**.md'
   workflow_dispatch:
 jobs:
-  format-and-lint:
-    name: Format & Lint Checks
+  build:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v5
-      - run: npm install
-        name: Install dependencies
-      - run: npm run bootstrap
-        name: Bootstrap the packages
-      - run: npm run build-all
-        name: Build packages
-      - run: npm run format-check
-        name: Check formatting
-      - name: Check linter
-        run: |
-          npm run lint
-          git diff --exit-code -- . ':!packages/k8s/tests/test-kind.yaml'
-  docker-tests:
-    name: Docker Hook Tests
-    runs-on: ubuntu-latest
-    needs: format-and-lint
-    steps:
-      - uses: actions/checkout@v5
-      - run: npm install
-        name: Install dependencies
-      - run: npm run bootstrap
-        name: Bootstrap the packages
-      - run: npm run build-all
-        name: Build packages
-      - name: Run Docker tests
-        run: npm run test --prefix packages/docker
-  k8s-tests:
-    name: Kubernetes Hook Tests
-    runs-on: ubuntu-latest
-    needs: format-and-lint
-    steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v4
       - run: sed -i "s|{{PATHTOREPO}}|$(pwd)|" packages/k8s/tests/test-kind.yaml
         name: Setup kind cluster yaml config
-      - uses: helm/kind-action@v1.12.0
+      - uses: helm/kind-action@v1.2.0
        with:
          config: packages/k8s/tests/test-kind.yaml
      - run: npm install
@@ -58,5 +22,10 @@ jobs:
        name: Bootstrap the packages
      - run: npm run build-all
        name: Build packages
-      - name: Run Kubernetes tests
-        run: npm run test --prefix packages/k8s
+      - run: npm run format-check
+      - name: Check linter
+        run: |
+          npm run lint
+          git diff --exit-code -- ':!packages/k8s/tests/test-kind.yaml'
+      - name: Run tests
+        run: npm run test

View File

@@ -38,7 +38,7 @@ jobs:
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v5
+        uses: actions/checkout@v3
       # Initializes the CodeQL tools for scanning.
       - name: Initialize CodeQL

View File

@@ -10,7 +10,7 @@ jobs:
   build:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v4
       - name: Install dependencies
         run: npm install
@@ -21,7 +21,7 @@
       - name: Build packages
         run: npm run build-all
-      - uses: actions/github-script@v8
+      - uses: actions/github-script@v7
        id: releaseVersion
        with:
          result-encoding: string
@@ -47,7 +47,7 @@ jobs:
      - name: Create release notes
        id: releaseNotes
-        uses: actions/github-script@v8
+        uses: actions/github-script@v7
        with:
          script: |
            const fs = require('fs');

View File

@@ -1 +1 @@
-* @actions/actions-compute @nikola-jokic
+* @actions/actions-launch

View File

@@ -3,24 +3,6 @@ The Runner Container Hooks repo provides a set of packages that implement the co
 More information on how to implement your own hooks can be found in the [adr](https://github.com/actions/runner/pull/1891). The `examples` folder provides example inputs for each hook.
-### Note
-Thank you for your interest in this GitHub action, however, right now we are not taking contributions.
-We continue to focus our resources on strategic areas that help our customers be successful while making developers' lives easier. While GitHub Actions remains a key part of this vision, we are allocating resources towards other areas of Actions and are not taking contributions to this repository at this time. The GitHub public roadmap is the best place to follow along for any updates on features we're working on and what stage they're in.
-We are taking the following steps to better direct requests related to GitHub Actions, including:
-1. We will be directing questions and support requests to our [Community Discussions area](https://github.com/orgs/community/discussions/categories/actions)
-2. High Priority bugs can be reported through Community Discussions or you can report these to our support team https://support.github.com/contact/bug-report.
-3. Security Issues should be handled as per our [security.md](security.md)
-We will still provide security updates for this project and fix major breaking changes during this time.
-You are welcome to still raise bugs in this repo.
 ## Background
 Three projects are included in the `packages` folder
@@ -28,6 +10,10 @@ Three projects are included in the `packages` folder
 - docker: A hook implementation of the runner's docker implementation. More details can be found in the [readme](./packages/docker/README.md)
 - hooklib: a shared library which contains typescript definitions and utilities that the other projects consume
+### Requirements
+We welcome contributions. See [how to contribute to get started](./CONTRIBUTING.md).
 ## License
 This project is licensed under the terms of the MIT open source license. Please refer to [MIT](./LICENSE.md) for the full terms.

View File

@@ -1,49 +1,61 @@
const eslint = require('@eslint/js'); import { defineConfig, globalIgnores } from 'eslint/config'
const tseslint = require('@typescript-eslint/eslint-plugin'); import typescriptEslint from '@typescript-eslint/eslint-plugin'
const tsparser = require('@typescript-eslint/parser'); import globals from 'globals'
const globals = require('globals'); import tsParser from '@typescript-eslint/parser'
const pluginJest = require('eslint-plugin-jest'); import github from 'eslint-plugin-github'
module.exports = [ export default defineConfig([
eslint.configs.recommended, globalIgnores(['**/dist/', '**/lib/', '**/node_modules/', '**/tests/**/*', 'eslint.config.js']),
github.getFlatConfigs().recommended,
{ {
files: ['**/*.ts'], plugins: {
'@typescript-eslint': typescriptEslint
},
files: ['packages/**/*.ts'],
languageOptions: { languageOptions: {
parser: tsparser, globals: globals.node,
parser: tsParser,
ecmaVersion: 9,
sourceType: 'module',
parserOptions: { parserOptions: {
ecmaVersion: 2018, project: './tsconfig.json'
sourceType: 'module',
project: ['./tsconfig.json', './packages/*/tsconfig.json']
},
globals: {
...globals.node,
...globals.es6
} }
}, },
plugins: {
'@typescript-eslint': tseslint,
},
rules: { rules: {
// Disabled rules from original config
'eslint-comments/no-use': 'off', 'eslint-comments/no-use': 'off',
'import/no-namespace': 'off', 'import/no-namespace': 'off',
'no-constant-condition': 'off', 'no-constant-condition': 'off',
'no-unused-vars': 'off', 'no-unused-vars': 'off',
'i18n-text/no-en': 'off', 'i18n-text/no-en': 'off',
'camelcase': 'off',
'semi': 'off',
'no-shadow': 'off',
// TypeScript ESLint rules
'@typescript-eslint/no-unused-vars': 'error', '@typescript-eslint/no-unused-vars': 'error',
'@typescript-eslint/explicit-member-accessibility': ['error', { accessibility: 'no-public' }],
'@typescript-eslint/explicit-member-accessibility': [
'error',
{
accessibility: 'no-public'
}
],
'@typescript-eslint/no-require-imports': 'error', '@typescript-eslint/no-require-imports': 'error',
'@typescript-eslint/array-type': 'error', '@typescript-eslint/array-type': 'error',
'@typescript-eslint/await-thenable': 'error', '@typescript-eslint/await-thenable': 'error',
'@typescript-eslint/explicit-function-return-type': ['error', { allowExpressions: true }], camelcase: 'off',
'@typescript-eslint/explicit-function-return-type': [
'error',
{
allowExpressions: true
}
],
'@typescript-eslint/no-array-constructor': 'error', '@typescript-eslint/no-array-constructor': 'error',
'@typescript-eslint/no-empty-interface': 'error', '@typescript-eslint/no-empty-interface': 'error',
'@typescript-eslint/no-explicit-any': 'off', // Fixed: removed duplicate and kept only this one '@typescript-eslint/no-explicit-any': 'warn',
'@typescript-eslint/no-extraneous-class': 'error', '@typescript-eslint/no-extraneous-class': 'error',
'@typescript-eslint/no-floating-promises': 'error', '@typescript-eslint/no-floating-promises': 'error',
'@typescript-eslint/no-for-in-array': 'error', '@typescript-eslint/no-for-in-array': 'error',
@@ -63,60 +75,8 @@ module.exports = [
'@typescript-eslint/require-array-sort-compare': 'error', '@typescript-eslint/require-array-sort-compare': 'error',
'@typescript-eslint/restrict-plus-operands': 'error', '@typescript-eslint/restrict-plus-operands': 'error',
'@typescript-eslint/unbound-method': 'error', '@typescript-eslint/unbound-method': 'error',
'no-shadow': 'off',
'@typescript-eslint/no-shadow': ['error'] '@typescript-eslint/no-shadow': ['error']
} }
},
{
// Test files configuration - Fixed file pattern to match .ts files
files: ['**/*test*.ts', '**/*spec*.ts', '**/tests/**/*.ts'],
languageOptions: {
parser: tsparser,
parserOptions: {
ecmaVersion: 2018,
sourceType: 'module',
project: ['./tsconfig.json', './packages/*/tsconfig.json']
},
globals: {
...globals.node,
...globals.es6,
// Fixed Jest globals
describe: 'readonly',
it: 'readonly',
test: 'readonly',
expect: 'readonly',
beforeEach: 'readonly',
afterEach: 'readonly',
beforeAll: 'readonly',
afterAll: 'readonly',
jest: 'readonly'
}
},
plugins: {
'@typescript-eslint': tseslint,
jest: pluginJest
},
rules: {
// Disable no-undef for test files since Jest globals are handled above
'no-undef': 'off',
// Relax some rules for test files
'@typescript-eslint/no-explicit-any': 'off',
'@typescript-eslint/no-non-null-assertion': 'off',
'@typescript-eslint/explicit-function-return-type': 'off'
}
},
{
files: ['**/jest.config.js', '**/jest.setup.js'],
languageOptions: {
globals: {
...globals.node,
jest: 'readonly',
module: 'writable'
}
},
rules: {
'@typescript-eslint/no-require-imports': 'off',
'@typescript-eslint/no-var-requires': 'off',
'import/no-commonjs': 'off'
}
} }
]; ])

View File

@@ -4,6 +4,9 @@ metadata:
   labels:
     labeled-by: "extension"
 spec:
+  securityContext:
+    runAsUser: 1000
+    runAsGroup: 3000
   restartPolicy: Never
   containers:
     - name: $job # overwrites job container

View File

@@ -4,7 +4,7 @@
   "state": {},
   "args": {
     "container": {
-      "image": "node:22",
+      "image": "node:14.16",
       "workingDirectory": "/__w/repo/repo",
       "createOptions": "--cpus 1",
       "environmentVariables": {

View File

@@ -9,7 +9,7 @@
     }
   },
   "args": {
-    "image": "node:22",
+    "image": "node:14.16",
    "dockerfile": null,
    "entryPointArgs": [
      "-e",

package-lock.json (generated, 1310 changed lines)

File diff suppressed because it is too large

View File

@@ -1,6 +1,6 @@
 {
   "name": "hooks",
-  "version": "0.8.0",
+  "version": "0.6.2",
   "description": "Three projects are included - k8s: a kubernetes hook implementation that spins up pods dynamically to run a job - docker: A hook implementation of the runner's docker implementation - A hook lib, which contains shared typescript definitions and utilities that the other packages consume",
   "main": "",
   "directories": {
@@ -11,8 +11,7 @@
     "bootstrap": "npm install --prefix packages/hooklib && npm install --prefix packages/k8s && npm install --prefix packages/docker",
     "format": "prettier --write '**/*.ts'",
     "format-check": "prettier --check '**/*.ts'",
-    "lint": "eslint packages/**/*.ts",
-    "lint:fix": "eslint packages/**/*.ts --fix",
+    "lint": "eslint",
     "build-all": "npm run build --prefix packages/hooklib && npm run build --prefix packages/k8s && npm run build --prefix packages/docker"
   },
   "repository": {
@@ -26,18 +25,12 @@
   },
   "homepage": "https://github.com/actions/runner-container-hooks#readme",
   "devDependencies": {
-    "@eslint/js": "^9.31.0",
-    "@types/jest": "^30.0.0",
-    "@types/node": "^24.0.14",
-    "@typescript-eslint/eslint-plugin": "^8.37.0",
-    "@typescript-eslint/parser": "^8.37.0",
-    "eslint": "^9.31.0",
+    "@types/jest": "^29.5.14",
+    "@types/node": "^22.14.1",
+    "@typescript-eslint/parser": "^8.30.1",
+    "eslint": "^9.24.0",
     "eslint-plugin-github": "^6.0.0",
-    "globals": "^15.12.0",
-    "prettier": "^3.6.2",
+    "prettier": "^3.5.3",
     "typescript": "^5.8.3"
-  },
-  "dependencies": {
-    "eslint-plugin-jest": "^29.0.1"
   }
 }

View File

@@ -1,26 +0,0 @@
module.exports = {
clearMocks: true,
preset: 'ts-jest',
moduleFileExtensions: ['js', 'ts'],
testEnvironment: 'node',
testMatch: ['**/*-test.ts'],
testRunner: 'jest-circus/runner',
verbose: true,
transform: {
'^.+\\.ts$': [
'ts-jest',
{
tsconfig: 'tsconfig.test.json'
}
],
// Transform ESM modules to CommonJS
'^.+\\.(js|mjs)$': ['babel-jest', {
presets: [['@babel/preset-env', { targets: { node: 'current' } }]]
}]
},
transformIgnorePatterns: [
// Transform these ESM packages
'node_modules/(?!(shlex|@kubernetes/client-node|openid-client|oauth4webapi|jose|uuid)/)'
],
setupFilesAfterEnv: ['./jest.setup.js']
}

View File

@@ -0,0 +1,12 @@
{
"clearMocks": true,
"moduleFileExtensions": ["js", "ts"],
"testEnvironment": "node",
"testMatch": ["**/*-test.ts"],
"testRunner": "jest-circus/runner",
"transform": {
"^.+\\.ts$": "ts-jest"
},
"verbose": true,
"testTimeout": 500000
}

View File

@@ -1 +0,0 @@
jest.setTimeout(500000)

File diff suppressed because it is too large

View File

@@ -5,29 +5,24 @@
   "main": "lib/index.js",
   "scripts": {
     "test": "jest --runInBand",
-    "build": "npx tsc && npx ncc build",
-    "format": "prettier --write '**/*.ts'",
-    "format-check": "prettier --check '**/*.ts'",
-    "lint": "eslint src/**/*.ts"
+    "build": "npx tsc && npx ncc build"
   },
   "author": "",
   "license": "MIT",
   "dependencies": {
     "@actions/core": "^1.11.1",
-    "@actions/exec": "^2.0.0",
+    "@actions/exec": "^1.1.1",
     "hooklib": "file:../hooklib",
-    "shlex": "^3.0.0",
-    "uuid": "^13.0.0"
+    "shlex": "^2.1.2",
+    "uuid": "^11.1.0"
   },
   "devDependencies": {
-    "@babel/core": "^7.28.5",
-    "@babel/preset-env": "^7.28.5",
-    "@types/jest": "^30.0.0",
-    "@types/node": "^24.0.14",
-    "@typescript-eslint/parser": "^8.49.0",
+    "@types/jest": "^29.5.14",
+    "@types/node": "^22.14.1",
+    "@typescript-eslint/parser": "^8.30.1",
     "@vercel/ncc": "^0.38.3",
-    "jest": "^30.0.4",
-    "ts-jest": "^29.4.6",
+    "jest": "^29.7.0",
+    "ts-jest": "^29.3.2",
     "ts-node": "^10.9.2",
     "tsconfig-paths": "^4.2.0",
     "typescript": "^5.8.3"

View File

@@ -43,25 +43,18 @@ export async function createContainer(
   if (args.environmentVariables) {
     for (const [key] of Object.entries(args.environmentVariables)) {
-      dockerArgs.push('-e', key)
+      dockerArgs.push('-e')
+      dockerArgs.push(key)
     }
   }
-  dockerArgs.push('-e', 'GITHUB_ACTIONS=true')
-  // Use same behavior as the runner https://github.com/actions/runner/blob/27d9c886ab9a45e0013cb462529ac85d581f8c41/src/Runner.Worker/Container/DockerCommandManager.cs#L150
-  if (!('CI' in (args.environmentVariables ?? {}))) {
-    dockerArgs.push('-e', 'CI=true')
-  }
   const mountVolumes = [
     ...(args.userMountVolumes || []),
     ...(args.systemMountVolumes || [])
   ]
   for (const mountVolume of mountVolumes) {
     dockerArgs.push(
-      `-v=${mountVolume.sourceVolumePath}:${mountVolume.targetVolumePath}${
-        mountVolume.readOnly ? ':ro' : ''
-      }`
+      `-v=${mountVolume.sourceVolumePath}:${mountVolume.targetVolumePath}`
     )
   }
   if (args.entryPoint) {
@@ -410,16 +403,11 @@ export async function containerRun(
   }
   if (args.environmentVariables) {
     for (const [key] of Object.entries(args.environmentVariables)) {
-      dockerArgs.push('-e', key)
+      dockerArgs.push('-e')
+      dockerArgs.push(key)
     }
   }
-  dockerArgs.push('-e', 'GITHUB_ACTIONS=true')
-  // Use same behavior as the runner https://github.com/actions/runner/blob/27d9c886ab9a45e0013cb462529ac85d581f8c41/src/Runner.Worker/Container/DockerCommandManager.cs#L150
-  if (!('CI' in (args.environmentVariables ?? {}))) {
-    dockerArgs.push('-e', 'CI=true')
-  }
   const mountVolumes = [
     ...(args.userMountVolumes || []),
     ...(args.systemMountVolumes || [])

View File

@@ -1,5 +1,6 @@
 /* eslint-disable @typescript-eslint/no-var-requires */
 /* eslint-disable @typescript-eslint/no-require-imports */
+/* eslint-disable import/no-commonjs */
 import * as core from '@actions/core'
 import { env } from 'process'
 // Import this way otherwise typescript has errors

View File

@@ -75,22 +75,4 @@ describe('run script step', () => {
       runScriptStep(definitions.runScriptStep.args, prepareJobResponse.state)
     ).resolves.not.toThrow()
   })
-  it('Should confirm that CI and GITHUB_ACTIONS are set', async () => {
-    definitions.runScriptStep.args.entryPoint = '/bin/bash'
-    definitions.runScriptStep.args.entryPointArgs = [
-      '-c',
-      `'if [[ ! $(env | grep "^CI=") = "CI=true" ]]; then exit 1; fi'`
-    ]
-    await expect(
-      runScriptStep(definitions.runScriptStep.args, prepareJobResponse.state)
-    ).resolves.not.toThrow()
-    definitions.runScriptStep.args.entryPointArgs = [
-      '-c',
-      `'if [[ ! $(env | grep "^GITHUB_ACTIONS=") = "GITHUB_ACTIONS=true" ]]; then exit 1; fi'`
-    ]
-    await expect(
-      runScriptStep(definitions.runScriptStep.args, prepareJobResponse.state)
-    ).resolves.not.toThrow()
-  })
 })

View File

@@ -31,7 +31,7 @@ export default class TestSetup {
   private get allTestDirectories() {
     const resp = [this.testdir, this.runnerMockDir, this.runnerOutputDir]
-    for (const [, value] of Object.entries(this.runnerMockSubdirs)) {
+    for (const [key, value] of Object.entries(this.runnerMockSubdirs)) {
       resp.push(`${this.runnerMockDir}/${value}`)
     }
@@ -42,7 +42,7 @@ export default class TestSetup {
     return resp
   }
-  initialize(): void {
+  public initialize(): void {
     env['GITHUB_WORKSPACE'] = this.workingDirectory
     env['RUNNER_NAME'] = 'test'
     env['RUNNER_TEMP'] =
@@ -58,7 +58,7 @@ export default class TestSetup {
     )
   }
-  teardown(): void {
+  public teardown(): void {
    fs.rmdirSync(this.testdir, { recursive: true })
  }
@@ -107,21 +107,21 @@ export default class TestSetup {
    ]
  }
-  createOutputFile(name: string): string {
+  public createOutputFile(name: string): string {
    let filePath = path.join(this.runnerOutputDir, name || `${uuidv4()}.json`)
    fs.writeFileSync(filePath, '')
    return filePath
  }
-  get workingDirectory(): string {
+  public get workingDirectory(): string {
    return `${this.runnerMockDir}/_work/${this.projectName}/${this.projectName}`
  }
-  get containerWorkingDirectory(): string {
+  public get containerWorkingDirectory(): string {
    return `/__w/${this.projectName}/${this.projectName}`
  }
-  initializeDockerAction(): string {
+  public initializeDockerAction(): string {
    const actionPath = `${this.testdir}/_actions/example-handle/example-repo/example-branch/mock-directory`
    fs.mkdirSync(actionPath, { recursive: true })
    this.writeDockerfile(actionPath)
@@ -146,7 +146,7 @@ echo "::set-output name=time::$time"`
    fs.chmodSync(entryPointPath, 0o755)
  }
-  getPrepareJobDefinition(): HookData {
+  public getPrepareJobDefinition(): HookData {
    const prepareJob = JSON.parse(
      fs.readFileSync(
        path.resolve(__dirname + '/../../../examples/prepare-job.json'),
@@ -165,7 +165,7 @@ echo "::set-output name=time::$time"`
    return prepareJob
  }
-  getRunScriptStepDefinition(): HookData {
+  public getRunScriptStepDefinition(): HookData {
    const runScriptStep = JSON.parse(
      fs.readFileSync(
        path.resolve(__dirname + '/../../../examples/run-script-step.json'),
@@ -177,7 +177,7 @@ echo "::set-output name=time::$time"`
    return runScriptStep
  }
-  getRunContainerStepDefinition(): HookData {
+  public getRunContainerStepDefinition(): HookData {
    const runContainerStep = JSON.parse(
      fs.readFileSync(
        path.resolve(__dirname + '/../../../examples/run-container-step.json'),

View File

@@ -1,6 +0,0 @@
{
"compilerOptions": {
"allowJs": true
},
"extends": "./tsconfig.json"
}

File diff suppressed because it is too large

View File

@@ -3,7 +3,7 @@
   "version": "0.1.0",
   "description": "",
   "main": "lib/index.js",
-  "types": "lib/index.d.ts",
+  "types": "index.d.ts",
   "scripts": {
     "test": "echo \"Error: no test specified\" && exit 1",
     "build": "tsc",
@@ -14,11 +14,12 @@
   "author": "",
   "license": "MIT",
   "devDependencies": {
-    "@types/node": "^24.0.14",
+    "@types/node": "^22.14.1",
+    "@typescript-eslint/parser": "^8.30.1",
     "@zeit/ncc": "^0.22.3",
-    "eslint": "^9.31.0",
+    "eslint": "^9.24.0",
     "eslint-plugin-github": "^6.0.0",
-    "prettier": "^3.6.2",
+    "prettier": "^3.5.3",
     "typescript": "^5.8.3"
   },
   "dependencies": {

View File

@@ -22,6 +22,9 @@ rules:
   - apiGroups: [""]
     resources: ["pods/log"]
     verbs: ["get", "list", "watch",]
+  - apiGroups: ["batch"]
+    resources: ["jobs"]
+    verbs: ["get", "list", "create", "delete"]
   - apiGroups: [""]
     resources: ["secrets"]
     verbs: ["get", "list", "create", "delete"]
@@ -40,5 +43,3 @@ rules:
 - Building container actions from a dockerfile is not supported at this time
 - Container actions will not have access to the services network or job container network
 - Docker [create options](https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idcontaineroptions) are not supported
-- Container actions will have to specify the entrypoint, since the default entrypoint will be overridden to run the commands from the workflow.
-- Container actions need to have the following binaries in their container image: `sh`, `env`, `tail`.

View File

@@ -1,26 +0,0 @@
module.exports = {
clearMocks: true,
preset: 'ts-jest',
moduleFileExtensions: ['js', 'ts'],
testEnvironment: 'node',
testMatch: ['**/*-test.ts'],
testRunner: 'jest-circus/runner',
verbose: true,
transform: {
'^.+\\.ts$': [
'ts-jest',
{
tsconfig: 'tsconfig.test.json'
}
],
// Transform ESM modules to CommonJS
'^.+\\.(js|mjs)$': ['babel-jest', {
presets: [['@babel/preset-env', { targets: { node: 'current' } }]]
}]
},
transformIgnorePatterns: [
// Transform these ESM packages
'node_modules/(?!(shlex|@kubernetes/client-node|openid-client|oauth4webapi|jose|uuid)/)'
],
setupFilesAfterEnv: ['./jest.setup.js']
}

View File

@@ -0,0 +1,12 @@
{
"clearMocks": true,
"moduleFileExtensions": ["js", "ts"],
"testEnvironment": "node",
"testMatch": ["**/*-test.ts"],
"testRunner": "jest-circus/runner",
"transform": {
"^.+\\.ts$": "ts-jest"
},
"verbose": true,
"testTimeout": 500000
}

View File

@@ -1,2 +0,0 @@
// eslint-disable-next-line filenames/match-regex, no-undef
jest.setTimeout(500000)

File diff suppressed because it is too large

View File

@@ -8,7 +8,8 @@
     "build": "tsc && npx ncc build",
     "format": "prettier --write '**/*.ts'",
     "format-check": "prettier --check '**/*.ts'",
-    "lint": "eslint src/**/*.ts"
+    "lint": "eslint src/**/*.ts",
+    "lint:fix": "eslint src/**/*.ts --fix"
   },
   "author": "",
   "license": "MIT",
@@ -16,22 +17,18 @@
     "@actions/core": "^1.11.1",
     "@actions/exec": "^1.1.1",
     "@actions/io": "^1.1.3",
-    "@kubernetes/client-node": "^1.3.0",
+    "@kubernetes/client-node": "^1.1.2",
     "hooklib": "file:../hooklib",
     "js-yaml": "^4.1.0",
-    "shlex": "^3.0.0",
-    "tar-fs": "^3.1.0",
+    "shlex": "^2.1.2",
     "uuid": "^11.1.0"
   },
   "devDependencies": {
-    "@babel/core": "^7.28.3",
-    "@babel/preset-env": "^7.28.3",
-    "@types/jest": "^30.0.0",
-    "@types/node": "^24.3.0",
+    "@types/jest": "^29.5.14",
+    "@types/node": "^22.14.1",
     "@vercel/ncc": "^0.38.3",
-    "babel-jest": "^30.1.1",
-    "jest": "^30.1.1",
-    "ts-jest": "^29.4.1",
-    "typescript": "^5.9.2"
+    "jest": "^29.7.0",
+    "ts-jest": "^29.3.2",
+    "typescript": "^5.8.3"
   }
 }

View File

@@ -1,39 +1,32 @@
import * as core from '@actions/core' import * as core from '@actions/core'
import * as io from '@actions/io'
import * as k8s from '@kubernetes/client-node' import * as k8s from '@kubernetes/client-node'
import { import {
JobContainerInfo, JobContainerInfo,
ContextPorts, ContextPorts,
PrepareJobArgs, PrepareJobArgs,
writeToResponseFile, writeToResponseFile
ServiceContainerInfo
} from 'hooklib' } from 'hooklib'
import path from 'path'
import { import {
containerPorts, containerPorts,
createJobPod, createPod,
isPodContainerAlpine, isPodContainerAlpine,
prunePods, prunePods,
waitForPodPhases, waitForPodPhases,
getPrepareJobTimeoutSeconds, getPrepareJobTimeoutSeconds
execCpToPod,
execPodStep
} from '../k8s' } from '../k8s'
import { import {
CONTAINER_VOLUMES, containerVolumes,
DEFAULT_CONTAINER_ENTRY_POINT, DEFAULT_CONTAINER_ENTRY_POINT,
DEFAULT_CONTAINER_ENTRY_POINT_ARGS, DEFAULT_CONTAINER_ENTRY_POINT_ARGS,
generateContainerName, generateContainerName,
mergeContainerWithOptions, mergeContainerWithOptions,
readExtensionFromFile, readExtensionFromFile,
PodPhase, PodPhase,
fixArgs, fixArgs
prepareJobScript
} from '../k8s/utils' } from '../k8s/utils'
import { import { CONTAINER_EXTENSION_PREFIX, JOB_CONTAINER_NAME } from './constants'
CONTAINER_EXTENSION_PREFIX,
getJobPodName,
JOB_CONTAINER_NAME
} from './constants'
import { dirname } from 'path'
export async function prepareJob( export async function prepareJob(
args: PrepareJobArgs, args: PrepareJobArgs,
@@ -46,9 +39,11 @@ export async function prepareJob(
await prunePods() await prunePods()
const extension = readExtensionFromFile() const extension = readExtensionFromFile()
await copyExternalsToRoot()
let container: k8s.V1Container | undefined = undefined let container: k8s.V1Container | undefined = undefined
if (args.container?.image) { if (args.container?.image) {
core.debug(`Using image '${args.container.image}' for job image`)
container = createContainerSpec( container = createContainerSpec(
args.container, args.container,
JOB_CONTAINER_NAME, JOB_CONTAINER_NAME,
@@ -60,6 +55,7 @@ export async function prepareJob(
let services: k8s.V1Container[] = [] let services: k8s.V1Container[] = []
if (args.services?.length) { if (args.services?.length) {
services = args.services.map(service => { services = args.services.map(service => {
core.debug(`Adding service '${service.image}' to pod definition`)
return createContainerSpec( return createContainerSpec(
service, service,
generateContainerName(service.image), generateContainerName(service.image),
@@ -75,8 +71,7 @@ export async function prepareJob(
let createdPod: k8s.V1Pod | undefined = undefined let createdPod: k8s.V1Pod | undefined = undefined
try { try {
createdPod = await createJobPod( createdPod = await createPod(
getJobPodName(),
container, container,
services, services,
args.container.registry, args.container.registry,
@@ -96,13 +91,6 @@ export async function prepareJob(
`Job pod created, waiting for it to come online ${createdPod?.metadata?.name}` `Job pod created, waiting for it to come online ${createdPod?.metadata?.name}`
) )
const runnerWorkspace = dirname(process.env.RUNNER_WORKSPACE as string)
let prepareScript: { containerPath: string; runnerPath: string } | undefined
if (args.container?.userMountVolumes?.length) {
prepareScript = prepareJobScript(args.container.userMountVolumes || [])
}
try { try {
await waitForPodPhases( await waitForPodPhases(
createdPod.metadata.name, createdPod.metadata.name,
@@ -115,28 +103,6 @@ export async function prepareJob(
throw new Error(`pod failed to come online with error: ${err}`) throw new Error(`pod failed to come online with error: ${err}`)
} }
await execCpToPod(createdPod.metadata.name, runnerWorkspace, '/__w')
if (prepareScript) {
await execPodStep(
['sh', '-e', prepareScript.containerPath],
createdPod.metadata.name,
JOB_CONTAINER_NAME
)
const promises: Promise<void>[] = []
for (const vol of args?.container?.userMountVolumes || []) {
promises.push(
execCpToPod(
createdPod.metadata.name,
vol.sourceVolumePath,
vol.targetVolumePath
)
)
}
await Promise.all(promises)
}
core.debug('Job pod is ready for traffic') core.debug('Job pod is ready for traffic')
let isAlpine = false let isAlpine = false
@@ -160,7 +126,7 @@ function generateResponseFile(
responseFile: string, responseFile: string,
args: PrepareJobArgs, args: PrepareJobArgs,
appPod: k8s.V1Pod, appPod: k8s.V1Pod,
isAlpine: boolean isAlpine
): void { ): void {
if (!appPod.metadata?.name) { if (!appPod.metadata?.name) {
throw new Error('app pod must have metadata.name specified') throw new Error('app pod must have metadata.name specified')
@@ -201,9 +167,7 @@ function generateResponseFile(
const ctxPorts: ContextPorts = {} const ctxPorts: ContextPorts = {}
if (c.ports?.length) { if (c.ports?.length) {
for (const port of c.ports) { for (const port of c.ports) {
if (port.containerPort && port.hostPort) { ctxPorts[port.containerPort] = port.hostPort
ctxPorts[port.containerPort.toString()] = port.hostPort.toString()
}
} }
} }
@@ -217,8 +181,19 @@ function generateResponseFile(
writeToResponseFile(responseFile, JSON.stringify(response)) writeToResponseFile(responseFile, JSON.stringify(response))
} }
async function copyExternalsToRoot(): Promise<void> {
const workspace = process.env['RUNNER_WORKSPACE']
if (workspace) {
await io.cp(
path.join(workspace, '../../externals'),
path.join(workspace, '../externals'),
{ force: true, recursive: true, copySourceDirectory: false }
)
}
}
export function createContainerSpec( export function createContainerSpec(
container: JobContainerInfo | ServiceContainerInfo, container: JobContainerInfo,
name: string, name: string,
jobContainer = false, jobContainer = false,
extension?: k8s.V1PodTemplateSpec extension?: k8s.V1PodTemplateSpec
@@ -233,40 +208,31 @@ export function createContainerSpec(
image: container.image, image: container.image,
ports: containerPorts(container) ports: containerPorts(container)
} as k8s.V1Container } as k8s.V1Container
if (container['workingDirectory']) { if (container.workingDirectory) {
podContainer.workingDir = container['workingDirectory'] podContainer.workingDir = container.workingDirectory
} }
if (container.entryPoint) { if (container.entryPoint) {
podContainer.command = [container.entryPoint] podContainer.command = [container.entryPoint]
} }
if (container.entryPointArgs && container.entryPointArgs.length > 0) { if (container.entryPointArgs?.length > 0) {
podContainer.args = fixArgs(container.entryPointArgs) podContainer.args = fixArgs(container.entryPointArgs)
} }
podContainer.env = [] podContainer.env = []
for (const [key, value] of Object.entries( for (const [key, value] of Object.entries(
container['environmentVariables'] || {} container['environmentVariables']
)) { )) {
if (value && key !== 'HOME') { if (value && key !== 'HOME') {
podContainer.env.push({ name: key, value }) podContainer.env.push({ name: key, value: value as string })
} }
} }
podContainer.env.push({ podContainer.volumeMounts = containerVolumes(
name: 'GITHUB_ACTIONS', container.userMountVolumes,
value: 'true' jobContainer
}) )
if (!('CI' in (container['environmentVariables'] || {}))) {
podContainer.env.push({
name: 'CI',
value: 'true'
})
}
podContainer.volumeMounts = CONTAINER_VOLUMES
if (!extension) { if (!extension) {
return podContainer return podContainer

View File

@@ -1,31 +1,23 @@
import * as core from '@actions/core' import * as core from '@actions/core'
import * as fs from 'fs'
import * as k8s from '@kubernetes/client-node' import * as k8s from '@kubernetes/client-node'
import { RunContainerStepArgs } from 'hooklib' import { RunContainerStepArgs } from 'hooklib'
import { dirname } from 'path'
import { import {
createContainerStepPod, createJob,
deletePod, createSecretForEnvs,
execCpFromPod, getContainerJobPodName,
execCpToPod, getPodLogs,
execPodStep, getPodStatus,
getPrepareJobTimeoutSeconds, waitForJobToComplete,
waitForPodPhases waitForPodPhases
} from '../k8s' } from '../k8s'
import { import {
CONTAINER_VOLUMES, containerVolumes,
fixArgs,
mergeContainerWithOptions, mergeContainerWithOptions,
PodPhase, PodPhase,
readExtensionFromFile, readExtensionFromFile
DEFAULT_CONTAINER_ENTRY_POINT_ARGS,
writeContainerStepScript
} from '../k8s/utils' } from '../k8s/utils'
import { import { JOB_CONTAINER_EXTENSION_NAME, JOB_CONTAINER_NAME } from './constants'
getJobPodName,
getStepPodName,
JOB_CONTAINER_EXTENSION_NAME,
JOB_CONTAINER_NAME
} from './constants'
export async function runContainerStep( export async function runContainerStep(
stepContainer: RunContainerStepArgs stepContainer: RunContainerStepArgs
@@ -34,109 +26,112 @@ export async function runContainerStep(
throw new Error('Building container actions is not currently supported') throw new Error('Building container actions is not currently supported')
} }
if (!stepContainer.entryPoint) { let secretName: string | undefined = undefined
throw new Error( if (stepContainer.environmentVariables) {
'failed to start the container since the entrypoint is overwritten' try {
) secretName = await createSecretForEnvs(stepContainer.environmentVariables)
} } catch (err) {
core.debug(`createSecretForEnvs failed: ${JSON.stringify(err)}`)
const envs = stepContainer.environmentVariables || {} const message = (err as any)?.response?.body?.message || err
envs['GITHUB_ACTIONS'] = 'true' throw new Error(`failed to create script environment: ${message}`)
if (!('CI' in envs)) { }
envs.CI = 'true'
} }
const extension = readExtensionFromFile() const extension = readExtensionFromFile()
const container = createContainerSpec(stepContainer, extension) core.debug(`Created secret ${secretName} for container job envs`)
const container = createContainerSpec(stepContainer, secretName, extension)
let pod: k8s.V1Pod let job: k8s.V1Job
try { try {
pod = await createContainerStepPod(getStepPodName(), container, extension) job = await createJob(container, extension)
} catch (err) { } catch (err) {
core.debug(`createJob failed: ${JSON.stringify(err)}`) core.debug(`createJob failed: ${JSON.stringify(err)}`)
const message = (err as any)?.response?.body?.message || err const message = (err as any)?.response?.body?.message || err
throw new Error(`failed to run script step: ${message}`) throw new Error(`failed to run script step: ${message}`)
} }
if (!pod.metadata?.name) { if (!job.metadata?.name) {
throw new Error( throw new Error(
`Expected job ${JSON.stringify( `Expected job ${JSON.stringify(
pod job
)} to have correctly set the metadata.name` )} to have correctly set the metadata.name`
) )
} }
const podName = pod.metadata.name core.debug(`Job created, waiting for pod to start: ${job.metadata?.name}`)
let podName: string
try { try {
await waitForPodPhases( podName = await getContainerJobPodName(job.metadata.name)
podName, } catch (err) {
new Set([PodPhase.RUNNING]), core.debug(`getContainerJobPodName failed: ${JSON.stringify(err)}`)
new Set([PodPhase.PENDING, PodPhase.UNKNOWN]), const message = (err as any)?.response?.body?.message || err
getPrepareJobTimeoutSeconds() throw new Error(`failed to get container job pod name: ${message}`)
)
const runnerWorkspace = dirname(process.env.RUNNER_WORKSPACE as string)
const githubWorkspace = process.env.GITHUB_WORKSPACE as string
const parts = githubWorkspace.split('/').slice(-2)
if (parts.length !== 2) {
throw new Error(`Invalid github workspace directory: ${githubWorkspace}`)
}
const relativeWorkspace = parts.join('/')
core.debug(
`Copying files from pod ${getJobPodName()} to ${runnerWorkspace}/${relativeWorkspace}`
)
await execCpFromPod(getJobPodName(), `/__w`, `${runnerWorkspace}`)
const { containerPath, runnerPath } = writeContainerStepScript(
`${runnerWorkspace}/__w/_temp`,
githubWorkspace,
stepContainer.entryPoint,
stepContainer.entryPointArgs,
envs
)
await execCpToPod(podName, `${runnerWorkspace}/__w`, '/__w')
fs.rmSync(`${runnerWorkspace}/__w`, { recursive: true, force: true })
try {
core.debug(`Executing container step script in pod ${podName}`)
return await execPodStep(
['sh', '-e', containerPath],
pod.metadata.name,
JOB_CONTAINER_NAME
)
} catch (err) {
core.debug(`execPodStep failed: ${JSON.stringify(err)}`)
const message = (err as any)?.response?.body?.message || err
throw new Error(`failed to run script step: ${message}`)
} finally {
fs.rmSync(runnerPath, { force: true })
}
} catch (error) {
core.error(`Failed to run container step: ${error}`)
throw error
} finally {
await deletePod(podName).catch(err => {
core.error(`Failed to delete step pod ${podName}: ${err}`)
})
} }
await waitForPodPhases(
podName,
new Set([
PodPhase.COMPLETED,
PodPhase.RUNNING,
PodPhase.SUCCEEDED,
PodPhase.FAILED
]),
new Set([PodPhase.PENDING, PodPhase.UNKNOWN])
)
core.debug('Container step is running or complete, pulling logs')
await getPodLogs(podName, JOB_CONTAINER_NAME)
core.debug('Waiting for container job to complete')
await waitForJobToComplete(job.metadata.name)
// pod has failed so pull the status code from the container
const status = await getPodStatus(podName)
if (status?.phase === 'Succeeded') {
return 0
}
if (!status?.containerStatuses?.length) {
core.error(
`Can't determine container status from response: ${JSON.stringify(
status
)}`
)
return 1
}
const exitCode =
status.containerStatuses[status.containerStatuses.length - 1].state
?.terminated?.exitCode
return Number(exitCode) || 1
} }
function createContainerSpec( function createContainerSpec(
container: RunContainerStepArgs, container: RunContainerStepArgs,
secretName?: string,
extension?: k8s.V1PodTemplateSpec extension?: k8s.V1PodTemplateSpec
): k8s.V1Container { ): k8s.V1Container {
const podContainer = new k8s.V1Container() const podContainer = new k8s.V1Container()
podContainer.name = JOB_CONTAINER_NAME podContainer.name = JOB_CONTAINER_NAME
podContainer.image = container.image podContainer.image = container.image
podContainer.workingDir = '/__w' podContainer.workingDir = container.workingDirectory
podContainer.command = ['tail'] podContainer.command = container.entryPoint
podContainer.args = DEFAULT_CONTAINER_ENTRY_POINT_ARGS ? [container.entryPoint]
: undefined
podContainer.args = container.entryPointArgs?.length
? fixArgs(container.entryPointArgs)
: undefined
podContainer.volumeMounts = CONTAINER_VOLUMES if (secretName) {
podContainer.envFrom = [
{
secretRef: {
name: secretName,
optional: false
}
}
]
}
podContainer.volumeMounts = containerVolumes(undefined, false, true)
if (!extension) { if (!extension) {
return podContainer return podContainer

View File

@@ -2,19 +2,17 @@
import * as fs from 'fs' import * as fs from 'fs'
import * as core from '@actions/core' import * as core from '@actions/core'
import { RunScriptStepArgs } from 'hooklib' import { RunScriptStepArgs } from 'hooklib'
import { execCpFromPod, execCpToPod, execPodStep } from '../k8s' import { execPodStep } from '../k8s'
import { writeRunScript, sleep, listDirAllCommand } from '../k8s/utils' import { writeEntryPointScript } from '../k8s/utils'
import { JOB_CONTAINER_NAME } from './constants' import { JOB_CONTAINER_NAME } from './constants'
import { dirname } from 'path'
import * as shlex from 'shlex'
export async function runScriptStep( export async function runScriptStep(
args: RunScriptStepArgs, args: RunScriptStepArgs,
state state,
responseFile
): Promise<void> { ): Promise<void> {
// Write the entrypoint first. This will be later coppied to the workflow pod
const { entryPoint, entryPointArgs, environmentVariables } = args const { entryPoint, entryPointArgs, environmentVariables } = args
const { containerPath, runnerPath } = writeRunScript( const { containerPath, runnerPath } = writeEntryPointScript(
args.workingDirectory, args.workingDirectory,
entryPoint, entryPoint,
entryPointArgs, entryPointArgs,
@@ -22,55 +20,6 @@ export async function runScriptStep(
environmentVariables environmentVariables
) )
const workdir = dirname(process.env.RUNNER_WORKSPACE as string)
const runnerTemp = `${workdir}/_temp`
const containerTemp = '/__w/_temp'
const containerTempSrc = '/__w/_temp_pre'
// Ensure base and staging dirs exist before copying
await execPodStep(
[
'sh',
'-c',
'mkdir -p /__w && mkdir -p /__w/_temp && mkdir -p /__w/_temp_pre'
],
state.jobPod,
JOB_CONTAINER_NAME
)
await execCpToPod(state.jobPod, runnerTemp, containerTempSrc)
// Copy GitHub directories from temp to /github
// Merge strategy:
// - Overwrite files in _runner_file_commands
// - Append files not already present elsewhere
const mergeCommands = [
'set -e',
'mkdir -p /__w/_temp /__w/_temp_pre',
'SRC=/__w/_temp_pre',
'DST=/__w/_temp',
// Overwrite _runner_file_commands
`find "$SRC" -type f ! -path "*/_runner_file_commands/*" -exec sh -c '
rel="\${1#$2/}"
target="$3/$rel"
mkdir -p "$(dirname "$target")"
cp -a "$1" "$target"
' _ {} "$SRC" "$DST" \\;`,
// Remove _temp_pre after merging
'rm -rf /__w/_temp_pre'
]
try {
await execPodStep(
['sh', '-c', mergeCommands.join(' && ')],
state.jobPod,
JOB_CONTAINER_NAME
)
} catch (err) {
core.debug(`Failed to merge temp directories: ${JSON.stringify(err)}`)
const message = (err as any)?.response?.body?.message || err
throw new Error(`failed to merge temp dirs: ${message}`)
}
// Execute the entrypoint script
args.entryPoint = 'sh' args.entryPoint = 'sh'
args.entryPointArgs = ['-e', containerPath] args.entryPointArgs = ['-e', containerPath]
try { try {
@@ -84,23 +33,6 @@ export async function runScriptStep(
const message = (err as any)?.response?.body?.message || err const message = (err as any)?.response?.body?.message || err
throw new Error(`failed to run script step: ${message}`) throw new Error(`failed to run script step: ${message}`)
} finally { } finally {
try { fs.rmSync(runnerPath)
fs.rmSync(runnerPath, { force: true })
} catch (removeErr) {
core.debug(`Failed to remove file ${runnerPath}: ${removeErr}`)
}
}
try {
core.debug(
`Copying from job pod '${state.jobPod}' ${containerTemp} to ${runnerTemp}`
)
await execCpFromPod(
state.jobPod,
`${containerTemp}/_runner_file_commands`,
`${workdir}/_temp`
)
} catch (error) {
core.warning('Failed to copy _temp from pod')
} }
} }

View File

@@ -1,11 +1,5 @@
 import * as core from '@actions/core'
-import {
-  Command,
-  getInputFromStdin,
-  PrepareJobArgs,
-  RunContainerStepArgs,
-  RunScriptStepArgs
-} from 'hooklib'
+import { Command, getInputFromStdin, prepareJobArgs } from 'hooklib'
 import {
   cleanupJob,
   prepareJob,
@@ -33,16 +27,16 @@ async function run(): Promise<void> {
     let exitCode = 0
     switch (command) {
       case Command.PrepareJob:
-        await prepareJob(args as PrepareJobArgs, responseFile)
+        await prepareJob(args as prepareJobArgs, responseFile)
         return process.exit(0)
       case Command.CleanupJob:
        await cleanupJob()
        return process.exit(0)
      case Command.RunScriptStep:
-        await runScriptStep(args as RunScriptStepArgs, state)
+        await runScriptStep(args, state, null)
        return process.exit(0)
      case Command.RunContainerStep:
-        exitCode = await runContainerStep(args as RunContainerStepArgs)
+        exitCode = await runContainerStep(args)
        return process.exit(exitCode)
      default:
        throw new Error(`Command not recognized: ${command}`)

View File

@@ -1,29 +1,22 @@
import * as core from '@actions/core' import * as core from '@actions/core'
import * as path from 'path'
import { spawn } from 'child_process'
import * as k8s from '@kubernetes/client-node' import * as k8s from '@kubernetes/client-node'
import tar from 'tar-fs' import { ContainerInfo, Registry } from 'hooklib'
import * as stream from 'stream' import * as stream from 'stream'
import { WritableStreamBuffer } from 'stream-buffers'
import { createHash } from 'crypto'
import type { ContainerInfo, Registry } from 'hooklib'
import { import {
getJobPodName,
getRunnerPodName,
getSecretName, getSecretName,
JOB_CONTAINER_NAME, getStepPodName,
getVolumeClaimName,
RunnerInstanceLabel RunnerInstanceLabel
} from '../hooks/constants' } from '../hooks/constants'
import { import {
PodPhase, PodPhase,
mergePodSpecWithOptions, mergePodSpecWithOptions,
mergeObjectMeta, mergeObjectMeta,
fixArgs, useKubeScheduler,
listDirAllCommand, fixArgs
sleep,
EXTERNALS_VOLUME_NAME,
GITHUB_VOLUME_NAME,
WORK_VOLUME
} from './utils' } from './utils'
import * as shlex from 'shlex'
const kc = new k8s.KubeConfig() const kc = new k8s.KubeConfig()
@@ -35,6 +28,8 @@ const k8sAuthorizationV1Api = kc.makeApiClient(k8s.AuthorizationV1Api)
const DEFAULT_WAIT_FOR_POD_TIME_SECONDS = 10 * 60 // 10 min const DEFAULT_WAIT_FOR_POD_TIME_SECONDS = 10 * 60 // 10 min
export const POD_VOLUME_NAME = 'work'
export const requiredPermissions = [ export const requiredPermissions = [
{ {
group: '', group: '',
@@ -54,6 +49,12 @@ export const requiredPermissions = [
resource: 'pods', resource: 'pods',
subresource: 'log' subresource: 'log'
}, },
{
group: 'batch',
verbs: ['get', 'list', 'create', 'delete'],
resource: 'jobs',
subresource: ''
},
{ {
group: '', group: '',
verbs: ['create', 'delete', 'get', 'list'], verbs: ['create', 'delete', 'get', 'list'],
@@ -62,8 +63,7 @@ export const requiredPermissions = [
} }
] ]
export async function createJobPod( export async function createPod(
name: string,
jobContainer?: k8s.V1Container, jobContainer?: k8s.V1Container,
services?: k8s.V1Container[], services?: k8s.V1Container[],
registry?: Registry, registry?: Registry,
@@ -83,7 +83,7 @@ export async function createJobPod(
appPod.kind = 'Pod' appPod.kind = 'Pod'
appPod.metadata = new k8s.V1ObjectMeta() appPod.metadata = new k8s.V1ObjectMeta()
appPod.metadata.name = name appPod.metadata.name = getJobPodName()
const instanceLabel = new RunnerInstanceLabel() const instanceLabel = new RunnerInstanceLabel()
appPod.metadata.labels = { appPod.metadata.labels = {
@@ -93,68 +93,19 @@ export async function createJobPod(
appPod.spec = new k8s.V1PodSpec() appPod.spec = new k8s.V1PodSpec()
appPod.spec.containers = containers appPod.spec.containers = containers
appPod.spec.securityContext = {
fsGroup: 1001
}
// Extract working directory from GITHUB_WORKSPACE
// GITHUB_WORKSPACE is like /__w/repo-name/repo-name
const githubWorkspace = process.env.GITHUB_WORKSPACE
const workingDirPath = githubWorkspace?.split('/').slice(-2).join('/') ?? ''
const initCommands = [
'mkdir -p /mnt/externals',
'mkdir -p /mnt/work',
'mkdir -p /mnt/github',
'mv /home/runner/externals/* /mnt/externals/'
]
if (workingDirPath) {
initCommands.push(`mkdir -p /mnt/work/${workingDirPath}`)
}
appPod.spec.initContainers = [
{
name: 'fs-init',
image:
process.env.ACTIONS_RUNNER_IMAGE ||
'ghcr.io/actions/actions-runner:latest',
command: ['sh', '-c', initCommands.join(' && ')],
securityContext: {
runAsGroup: 1001,
runAsUser: 1001
},
volumeMounts: [
{
name: EXTERNALS_VOLUME_NAME,
mountPath: '/mnt/externals'
},
{
name: WORK_VOLUME,
mountPath: '/mnt/work'
},
{
name: GITHUB_VOLUME_NAME,
mountPath: '/mnt/github'
}
]
}
]
appPod.spec.restartPolicy = 'Never' appPod.spec.restartPolicy = 'Never'
const nodeName = await getCurrentNodeName()
if (useKubeScheduler()) {
appPod.spec.affinity = await getPodAffinity(nodeName)
} else {
appPod.spec.nodeName = nodeName
}
const claimName = getVolumeClaimName()
appPod.spec.volumes = [ appPod.spec.volumes = [
{ {
name: EXTERNALS_VOLUME_NAME, name: 'work',
emptyDir: {} persistentVolumeClaim: { claimName }
},
{
name: GITHUB_VOLUME_NAME,
emptyDir: {}
},
{
name: WORK_VOLUME,
emptyDir: {}
} }
] ]
@@ -182,62 +133,91 @@ export async function createJobPod(
}) })
} }
export async function createContainerStepPod( export async function createJob(
name: string,
container: k8s.V1Container, container: k8s.V1Container,
extension?: k8s.V1PodTemplateSpec extension?: k8s.V1PodTemplateSpec
): Promise<k8s.V1Pod> { ): Promise<k8s.V1Job> {
const appPod = new k8s.V1Pod() const runnerInstanceLabel = new RunnerInstanceLabel()
appPod.apiVersion = 'v1' const job = new k8s.V1Job()
appPod.kind = 'Pod' job.apiVersion = 'batch/v1'
job.kind = 'Job'
job.metadata = new k8s.V1ObjectMeta()
job.metadata.name = getStepPodName()
job.metadata.labels = { [runnerInstanceLabel.key]: runnerInstanceLabel.value }
job.metadata.annotations = {}
appPod.metadata = new k8s.V1ObjectMeta() job.spec = new k8s.V1JobSpec()
appPod.metadata.name = name job.spec.ttlSecondsAfterFinished = 300
job.spec.backoffLimit = 0
job.spec.template = new k8s.V1PodTemplateSpec()
const instanceLabel = new RunnerInstanceLabel() job.spec.template.spec = new k8s.V1PodSpec()
appPod.metadata.labels = { job.spec.template.metadata = new k8s.V1ObjectMeta()
[instanceLabel.key]: instanceLabel.value job.spec.template.metadata.labels = {}
job.spec.template.metadata.annotations = {}
job.spec.template.spec.containers = [container]
job.spec.template.spec.restartPolicy = 'Never'
const nodeName = await getCurrentNodeName()
if (useKubeScheduler()) {
job.spec.template.spec.affinity = await getPodAffinity(nodeName)
} else {
job.spec.template.spec.nodeName = nodeName
} }
appPod.metadata.annotations = {}
appPod.spec = new k8s.V1PodSpec() const claimName = getVolumeClaimName()
appPod.spec.containers = [container] job.spec.template.spec.volumes = [
appPod.spec.restartPolicy = 'Never'
appPod.spec.volumes = [
{ {
name: EXTERNALS_VOLUME_NAME, name: 'work',
emptyDir: {} persistentVolumeClaim: { claimName }
},
{
name: GITHUB_VOLUME_NAME,
emptyDir: {}
},
{
name: WORK_VOLUME,
emptyDir: {}
} }
] ]
if (extension?.metadata) { if (extension) {
mergeObjectMeta(appPod, extension.metadata) if (extension.metadata) {
// apply metadata both to the job and the pod created by the job
mergeObjectMeta(job, extension.metadata)
mergeObjectMeta(job.spec.template, extension.metadata)
}
if (extension.spec) {
mergePodSpecWithOptions(job.spec.template.spec, extension.spec)
}
} }
if (extension?.spec) { return await k8sBatchV1Api.createNamespacedJob({
mergePodSpecWithOptions(appPod.spec, extension.spec)
}
return await k8sApi.createNamespacedPod({
namespace: namespace(), namespace: namespace(),
body: appPod body: job
}) })
} }
export async function deletePod(name: string): Promise<void> { export async function getContainerJobPodName(jobName: string): Promise<string> {
const selector = `job-name=${jobName}`
const backOffManager = new BackOffManager(60)
while (true) {
const podList = await k8sApi.listNamespacedPod({
namespace: namespace(),
labelSelector: selector,
limit: 1
})
if (!podList.items?.length) {
await backOffManager.backOff()
continue
}
if (!podList.items[0].metadata?.name) {
throw new Error(
`Failed to determine the name of the pod for job ${jobName}`
)
}
return podList.items[0].metadata.name
}
}
export async function deletePod(podName: string): Promise<void> {
await k8sApi.deleteNamespacedPod({ await k8sApi.deleteNamespacedPod({
name, name: podName,
namespace: namespace(), namespace: namespace(),
gracePeriodSeconds: 0 gracePeriodSeconds: 0
}) })
@@ -248,11 +228,11 @@ export async function execPodStep(
podName: string, podName: string,
containerName: string, containerName: string,
stdin?: stream.Readable stdin?: stream.Readable
): Promise<number> { ): Promise<void> {
const exec = new k8s.Exec(kc) const exec = new k8s.Exec(kc)
command = fixArgs(command) command = fixArgs(command)
return await new Promise(function (resolve, reject) { // Exec returns a websocket. If websocket fails, we should reject the promise. Otherwise, websocket will call a callback. Since at that point, websocket is not failing, we can safely resolve or reject the promise.
await new Promise(function (resolve, reject) {
exec exec
.exec( .exec(
namespace(), namespace(),
@@ -264,9 +244,9 @@ export async function execPodStep(
        stdin ?? null,
        false /* tty */,
        resp => {
-          core.debug(`execPodStep response: ${JSON.stringify(resp)}`)
+          // kube.exec returns an error if exit code is not 0, but we can't actually get the exit code
          if (resp.status === 'Success') {
-            resolve(resp.code || 0)
+            resolve(resp.code)
          } else {
            core.debug(
              JSON.stringify({
@@ -274,298 +254,16 @@ export async function execPodStep(
                details: resp?.details
              })
            )
-            reject(new Error(resp?.message || 'execPodStep failed'))
+            reject(resp?.message)
          }
        }
      )
-      // If exec.exec fails, explicitly reject the outer promise
-      // eslint-disable-next-line github/no-then
      .catch(e => reject(e))
  })
}
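A minimal usage sketch of the exit-code variant of execPodStep above (the `Promise<number>` signature). The pod name is a placeholder and the call is assumed to run inside an async function:

// Run a command in the job container and surface its exit code.
const exitCode = await execPodStep(
  ['sh', '-c', 'test -d /__w'],
  jobPodName, // placeholder: the pod created for this job
  JOB_CONTAINER_NAME
)
if (exitCode !== 0) {
  throw new Error(`command failed with exit code ${exitCode}`)
}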
export async function execCalculateOutputHashSorted(
podName: string,
containerName: string,
command: string[]
): Promise<string> {
const exec = new k8s.Exec(kc)
let output = ''
const outputWriter = new stream.Writable({
write(chunk, _enc, cb) {
try {
output += chunk.toString('utf8')
cb()
} catch (e) {
cb(e as Error)
}
}
})
await new Promise<void>((resolve, reject) => {
exec
.exec(
namespace(),
podName,
containerName,
command,
outputWriter, // capture stdout
process.stderr,
null,
false /* tty */,
resp => {
core.debug(`internalExecOutput response: ${JSON.stringify(resp)}`)
if (resp.status === 'Success') {
resolve()
} else {
core.debug(
JSON.stringify({
message: resp?.message,
details: resp?.details
})
)
reject(new Error(resp?.message || 'internalExecOutput failed'))
}
}
)
.catch(e => reject(e))
})
outputWriter.end()
// Sort lines for consistent ordering across platforms
const sortedOutput =
output
.split('\n')
.filter(line => line.length > 0)
.sort()
.join('\n') + '\n'
const hash = createHash('sha256')
hash.update(sortedOutput)
return hash.digest('hex')
}
export async function localCalculateOutputHashSorted(
commands: string[]
): Promise<string> {
return await new Promise<string>((resolve, reject) => {
const child = spawn(commands[0], commands.slice(1), {
stdio: ['ignore', 'pipe', 'ignore']
})
let output = ''
child.stdout.on('data', chunk => {
output += chunk.toString('utf8')
})
child.on('error', reject)
child.on('close', (code: number) => {
if (code === 0) {
// Sort lines for consistent ordering across distributions/platforms
const sortedOutput =
output
.split('\n')
.filter(line => line.length > 0)
.sort()
.join('\n') + '\n'
const hash = createHash('sha256')
hash.update(sortedOutput)
resolve(hash.digest('hex'))
} else {
reject(new Error(`child process exited with code ${code}`))
}
})
})
}
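As a usage sketch only (assuming listDirAllCommand from ./utils and JOB_CONTAINER_NAME from ../hooks/constants are in scope, and that srcDir, destDir and podName are placeholders inside an async function), the two helpers above can be paired to check that a directory copied into a pod matches its source, which is how the copy routines below use them:

// Hash a sorted `stat` listing locally and inside the pod; equal hashes mean
// the copy is complete and consistent.
const want = await localCalculateOutputHashSorted([
  'sh',
  '-c',
  listDirAllCommand(srcDir)
])
const got = await execCalculateOutputHashSorted(podName, JOB_CONTAINER_NAME, [
  'sh',
  '-c',
  listDirAllCommand(destDir)
])
if (want !== got) {
  core.debug(`directory copy not yet consistent: want='${want}' got='${got}'`)
}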
export async function execCpToPod(
podName: string,
runnerPath: string,
containerPath: string
): Promise<void> {
core.debug(`Copying ${runnerPath} to pod ${podName} at ${containerPath}`)
let attempt = 0
while (true) {
try {
const exec = new k8s.Exec(kc)
// Use tar to extract with --no-same-owner to avoid ownership issues.
// Then use find to fix permissions. The -m flag helps but we also need to fix permissions after.
const command = [
'sh',
'-c',
`tar xf - --no-same-owner -C ${shlex.quote(containerPath)} 2>/dev/null; ` +
`find ${shlex.quote(containerPath)} -type f -exec chmod u+rw {} \\; 2>/dev/null; ` +
`find ${shlex.quote(containerPath)} -type d -exec chmod u+rwx {} \\; 2>/dev/null`
]
const readStream = tar.pack(runnerPath)
const errStream = new WritableStreamBuffer()
await new Promise((resolve, reject) => {
exec
.exec(
namespace(),
podName,
JOB_CONTAINER_NAME,
command,
null,
errStream,
readStream,
false,
async status => {
if (errStream.size()) {
reject(
new Error(
`Error from execCpToPod - status: ${status.status}, details: \n ${errStream.getContentsAsString()}`
)
)
}
resolve(status)
}
)
.catch(e => reject(e))
})
break
} catch (error) {
core.debug(`cpToPod: Attempt ${attempt + 1} failed: ${error}`)
attempt++
if (attempt >= 30) {
throw new Error(
`cpToPod failed after ${attempt} attempts: ${JSON.stringify(error)}`
)
}
await sleep(1000)
}
}
let attempts = 15
const delay = 1000
for (let i = 0; i < attempts; i++) {
try {
const want = await localCalculateOutputHashSorted([
'sh',
'-c',
listDirAllCommand(runnerPath)
])
const got = await execCalculateOutputHashSorted(
podName,
JOB_CONTAINER_NAME,
['sh', '-c', listDirAllCommand(containerPath)]
)
if (got !== want) {
core.debug(
`The hash of the directory does not match the expected value; want='${want}' got='${got}'`
)
await sleep(delay)
continue
}
break
} catch (error) {
core.debug(`Attempt ${i + 1} failed: ${error}`)
await sleep(delay)
}
}
}
export async function execCpFromPod(
podName: string,
containerPath: string,
parentRunnerPath: string
): Promise<void> {
const targetRunnerPath = `${parentRunnerPath}/${path.basename(containerPath)}`
core.debug(
`Copying from pod ${podName} ${containerPath} to ${targetRunnerPath}`
)
let attempt = 0
while (true) {
try {
// make temporary directory
const exec = new k8s.Exec(kc)
const containerPaths = containerPath.split('/')
const dirname = containerPaths.pop() as string
const command = [
'tar',
'cf',
'-',
'-C',
containerPaths.join('/') || '/',
dirname
]
const writerStream = tar.extract(parentRunnerPath)
const errStream = new WritableStreamBuffer()
await new Promise((resolve, reject) => {
exec
.exec(
namespace(),
podName,
JOB_CONTAINER_NAME,
command,
writerStream,
errStream,
null,
false,
async status => {
if (errStream.size()) {
reject(
new Error(
`Error from cpFromPod - details: \n ${errStream.getContentsAsString()}`
)
)
}
resolve(status)
}
)
.catch(e => reject(e))
})
break
} catch (error) {
core.debug(`Attempt ${attempt + 1} failed: ${error}`)
attempt++
if (attempt >= 30) {
throw new Error(
`execCpFromPod failed after ${attempt} attempts: ${JSON.stringify(error)}`
)
}
await sleep(1000)
}
}
let attempts = 15
const delay = 1000
for (let i = 0; i < attempts; i++) {
try {
const want = await execCalculateOutputHashSorted(
podName,
JOB_CONTAINER_NAME,
['sh', '-c', listDirAllCommand(containerPath)]
)
const got = await localCalculateOutputHashSorted([
'sh',
'-c',
listDirAllCommand(targetRunnerPath)
])
if (got !== want) {
core.debug(
`The hash of the directory does not match the expected value; want='${want}' got='${got}'`
)
await sleep(delay)
continue
}
break
} catch (error) {
core.debug(`Attempt ${i + 1} failed: ${error}`)
await sleep(delay)
}
}
}
export async function waitForJobToComplete(jobName: string): Promise<void> {
  const backOffManager = new BackOffManager()
  while (true) {
@@ -642,16 +340,13 @@ export async function createSecretForEnvs(envs: {
    secret.data[key] = Buffer.from(value).toString('base64')
  }

-  await k8sApi.createNamespacedSecret({
-    namespace: namespace(),
-    body: secret
-  })
+  await k8sApi.createNamespacedSecret({ namespace: namespace(), body: secret })
  return secretName
}

-export async function deleteSecret(name: string): Promise<void> {
+export async function deleteSecret(secretName: string): Promise<void> {
  await k8sApi.deleteNamespacedSecret({
-    name,
+    name: secretName,
    namespace: namespace()
  })
}
@@ -668,7 +363,7 @@ export async function pruneSecrets(): Promise<void> {
  await Promise.all(
    secretList.items.map(
      async secret =>
-        secret.metadata?.name && (await deleteSecret(secret.metadata.name))
+        secret.metadata?.name && deleteSecret(secret.metadata.name)
    )
  )
}
@@ -721,7 +416,7 @@ export function getPrepareJobTimeoutSeconds(): number {
  return timeoutSeconds
}

-async function getPodPhase(name: string): Promise<PodPhase> {
+async function getPodPhase(podName: string): Promise<PodPhase> {
  const podPhaseLookup = new Set<string>([
    PodPhase.PENDING,
    PodPhase.RUNNING,
@@ -730,7 +425,7 @@ async function getPodPhase(name: string): Promise<PodPhase> {
    PodPhase.UNKNOWN
  ])
  const pod = await k8sApi.readNamespacedPod({
-    name,
+    name: podName,
    namespace: namespace()
  })
@@ -740,13 +435,13 @@ async function getPodPhase(name: string): Promise<PodPhase> {
  return pod.status?.phase as PodPhase
}

-async function isJobSucceeded(name: string): Promise<boolean> {
+async function isJobSucceeded(jobName: string): Promise<boolean> {
  const job = await k8sBatchV1Api.readNamespacedJob({
-    name,
+    name: jobName,
    namespace: namespace()
  })
  if (job.status?.failed) {
-    throw new Error(`job ${name} has failed`)
+    throw new Error(`job ${jobName} has failed`)
  }
  return !!job.status?.succeeded
}
@@ -771,7 +466,7 @@ export async function getPodLogs(
    pretty: false,
    timestamps: false
  })
-  await new Promise(resolve => logStream.on('end', () => resolve(null)))
+  await new Promise(resolve => logStream.on('close', () => resolve(null)))
}
export async function prunePods(): Promise<void> {
@@ -785,7 +480,7 @@ export async function prunePods(): Promise<void> {
  await Promise.all(
    podList.items.map(
-      async pod => pod.metadata?.name && (await deletePod(pod.metadata.name))
+      async pod => pod.metadata?.name && deletePod(pod.metadata.name)
    )
  )
}
@@ -793,10 +488,7 @@ export async function prunePods(): Promise<void> {
export async function getPodStatus(
  name: string
): Promise<k8s.V1PodStatus | undefined> {
-  const pod = await k8sApi.readNamespacedPod({
-    name,
-    namespace: namespace()
-  })
+  const pod = await k8sApi.readNamespacedPod({ name, namespace: namespace() })
  return pod.status
}
@@ -831,7 +523,7 @@ export async function isPodContainerAlpine(
    [
      'sh',
      '-c',
-      `[ $(cat /etc/*release* | grep -i -e "^ID=*alpine*" -c) != 0 ] || exit 1`
+      `'[ $(cat /etc/*release* | grep -i -e "^ID=*alpine*" -c) != 0 ] || exit 1'`
    ],
    podName,
    containerName
@@ -843,6 +535,39 @@ export async function isPodContainerAlpine(
  return isAlpine
}
async function getCurrentNodeName(): Promise<string> {
const resp = await k8sApi.readNamespacedPod({
name: getRunnerPodName(),
namespace: namespace()
})
const nodeName = resp.spec?.nodeName
if (!nodeName) {
throw new Error('Failed to determine node name')
}
return nodeName
}
async function getPodAffinity(nodeName: string): Promise<k8s.V1Affinity> {
const affinity = new k8s.V1Affinity()
affinity.nodeAffinity = new k8s.V1NodeAffinity()
affinity.nodeAffinity.requiredDuringSchedulingIgnoredDuringExecution =
new k8s.V1NodeSelector()
affinity.nodeAffinity.requiredDuringSchedulingIgnoredDuringExecution.nodeSelectorTerms =
[
{
matchExpressions: [
{
key: 'kubernetes.io/hostname',
operator: 'In',
values: [nodeName]
}
]
}
]
return affinity
}
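For orientation, a short sketch (illustrative only; `pod` is a placeholder V1Pod) of how the two helpers above are combined, mirroring the scheduling logic used when the job pod is created earlier in this file:

const nodeName = await getCurrentNodeName()
if (useKubeScheduler()) {
  // keep the pod on the runner's node, but let the scheduler place it
  pod.spec!.affinity = await getPodAffinity(nodeName)
} else {
  // pin the pod directly to the runner's node
  pod.spec!.nodeName = nodeName
}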
export function namespace(): string {
  if (process.env['ACTIONS_RUNNER_KUBERNETES_NAMESPACE']) {
    return process.env['ACTIONS_RUNNER_KUBERNETES_NAMESPACE']
@@ -926,8 +651,5 @@ export function containerPorts(
}

export async function getPodByName(name): Promise<k8s.V1Pod> {
-  return await k8sApi.readNamespacedPod({
-    name,
-    namespace: namespace()
-  })
+  return await k8sApi.readNamespacedPod({ name, namespace: namespace() })
}


@@ -2,10 +2,12 @@ import * as k8s from '@kubernetes/client-node'
import * as fs from 'fs'
import * as yaml from 'js-yaml'
import * as core from '@actions/core'
+import { Mount } from 'hooklib'
+import * as path from 'path'
import { v1 as uuidv4 } from 'uuid'
+import { POD_VOLUME_NAME } from './index'
import { CONTAINER_EXTENSION_PREFIX } from '../hooks/constants'
import * as shlex from 'shlex'
-import { Mount } from 'hooklib'

export const DEFAULT_CONTAINER_ENTRY_POINT_ARGS = [`-f`, `/dev/null`]
export const DEFAULT_CONTAINER_ENTRY_POINT = 'tail'
@@ -13,48 +15,101 @@ export const DEFAULT_CONTAINER_ENTRY_POINT = 'tail'
export const ENV_HOOK_TEMPLATE_PATH = 'ACTIONS_RUNNER_CONTAINER_HOOK_TEMPLATE' export const ENV_HOOK_TEMPLATE_PATH = 'ACTIONS_RUNNER_CONTAINER_HOOK_TEMPLATE'
export const ENV_USE_KUBE_SCHEDULER = 'ACTIONS_RUNNER_USE_KUBE_SCHEDULER' export const ENV_USE_KUBE_SCHEDULER = 'ACTIONS_RUNNER_USE_KUBE_SCHEDULER'
export const EXTERNALS_VOLUME_NAME = 'externals' export function containerVolumes(
export const GITHUB_VOLUME_NAME = 'github' userMountVolumes: Mount[] = [],
export const WORK_VOLUME = 'work' jobContainer = true,
containerAction = false
): k8s.V1VolumeMount[] {
const mounts: k8s.V1VolumeMount[] = [
{
name: POD_VOLUME_NAME,
mountPath: '/__w'
}
]
export const CONTAINER_VOLUMES: k8s.V1VolumeMount[] = [ const workspacePath = process.env.GITHUB_WORKSPACE as string
{ if (containerAction) {
name: EXTERNALS_VOLUME_NAME, const i = workspacePath.lastIndexOf('_work/')
mountPath: '/__e' const workspaceRelativePath = workspacePath.slice(i + '_work/'.length)
}, mounts.push(
{ {
name: WORK_VOLUME, name: POD_VOLUME_NAME,
mountPath: '/__w' mountPath: '/github/workspace',
}, subPath: workspaceRelativePath
{ },
name: GITHUB_VOLUME_NAME, {
mountPath: '/github' name: POD_VOLUME_NAME,
mountPath: '/github/file_commands',
subPath: '_temp/_runner_file_commands'
},
{
name: POD_VOLUME_NAME,
mountPath: '/github/home',
subPath: '_temp/_github_home'
},
{
name: POD_VOLUME_NAME,
mountPath: '/github/workflow',
subPath: '_temp/_github_workflow'
}
)
return mounts
} }
]
export function prepareJobScript(userVolumeMounts: Mount[]): { if (!jobContainer) {
containerPath: string return mounts
runnerPath: string
} {
let mountDirs = userVolumeMounts.map(m => m.targetVolumePath).join(' ')
const content = `#!/bin/sh -l
set -e
cp -R /__w/_temp/_github_home /github/home
cp -R /__w/_temp/_github_workflow /github/workflow
mkdir -p ${mountDirs}
`
const filename = `${uuidv4()}.sh`
const entryPointPath = `${process.env.RUNNER_TEMP}/${filename}`
fs.writeFileSync(entryPointPath, content)
return {
containerPath: `/__w/_temp/${filename}`,
runnerPath: entryPointPath
} }
mounts.push(
{
name: POD_VOLUME_NAME,
mountPath: '/__e',
subPath: 'externals'
},
{
name: POD_VOLUME_NAME,
mountPath: '/github/home',
subPath: '_temp/_github_home'
},
{
name: POD_VOLUME_NAME,
mountPath: '/github/workflow',
subPath: '_temp/_github_workflow'
}
)
if (!userMountVolumes?.length) {
return mounts
}
for (const userVolume of userMountVolumes) {
let sourceVolumePath = ''
if (path.isAbsolute(userVolume.sourceVolumePath)) {
if (!userVolume.sourceVolumePath.startsWith(workspacePath)) {
throw new Error(
'Volume mounts outside of the work folder are not supported'
)
}
// source volume path should be relative path
sourceVolumePath = userVolume.sourceVolumePath.slice(
workspacePath.length + 1
)
} else {
sourceVolumePath = userVolume.sourceVolumePath
}
mounts.push({
name: POD_VOLUME_NAME,
mountPath: userVolume.targetVolumePath,
subPath: sourceVolumePath,
readOnly: userVolume.readOnly
})
}
return mounts
} }
export function writeRunScript( export function writeEntryPointScript(
workingDirectory: string, workingDirectory: string,
entryPoint: string, entryPoint: string,
entryPointArgs?: string[], entryPointArgs?: string[],
@@ -68,12 +123,33 @@ export function writeRunScript(
typeof prependPath === 'string' ? prependPath : prependPath.join(':') typeof prependPath === 'string' ? prependPath : prependPath.join(':')
exportPath = `export PATH=${prepend}:$PATH` exportPath = `export PATH=${prepend}:$PATH`
} }
let environmentPrefix = ''
let environmentPrefix = scriptEnv(environmentVariables) if (environmentVariables && Object.entries(environmentVariables).length) {
const envBuffer: string[] = []
for (const [key, value] of Object.entries(environmentVariables)) {
if (
key.includes(`=`) ||
key.includes(`'`) ||
key.includes(`"`) ||
key.includes(`$`)
) {
throw new Error(
`environment key ${key} is invalid - the key must not contain =, $, ', or "`
)
}
envBuffer.push(
`"${key}=${value
.replace(/\\/g, '\\\\')
.replace(/"/g, '\\"')
.replace(/\$/g, '\\$')
.replace(/`/g, '\\`')}"`
)
}
environmentPrefix = `env ${envBuffer.join(' ')} `
}
const content = `#!/bin/sh -l const content = `#!/bin/sh -l
set -e
rm "$0" # remove script after running
${exportPath} ${exportPath}
cd ${workingDirectory} && \ cd ${workingDirectory} && \
exec ${environmentPrefix} ${entryPoint} ${ exec ${environmentPrefix} ${entryPoint} ${
@@ -89,76 +165,9 @@ exec ${environmentPrefix} ${entryPoint} ${
} }
} }
export function writeContainerStepScript(
dst: string,
workingDirectory: string,
entryPoint: string,
entryPointArgs?: string[],
environmentVariables?: { [key: string]: string }
): { containerPath: string; runnerPath: string } {
let environmentPrefix = scriptEnv(environmentVariables)
const parts = workingDirectory.split('/').slice(-2)
if (parts.length !== 2) {
throw new Error(`Invalid working directory: ${workingDirectory}`)
}
const content = `#!/bin/sh -l
rm "$0" # remove script after running
mv /__w/_temp/_github_home /github/home && \
mv /__w/_temp/_github_workflow /github/workflow && \
mv /__w/_temp/_runner_file_commands /github/file_commands || true && \
mv /__w/${parts.join('/')}/ /github/workspace && \
cd /github/workspace && \
exec ${environmentPrefix} ${entryPoint} ${
entryPointArgs?.length ? entryPointArgs.join(' ') : ''
}
`
const filename = `${uuidv4()}.sh`
const entryPointPath = `${dst}/${filename}`
core.debug(`Writing container step script to ${entryPointPath}`)
fs.writeFileSync(entryPointPath, content)
return {
containerPath: `/__w/_temp/${filename}`,
runnerPath: entryPointPath
}
}
function scriptEnv(envs?: { [key: string]: string }): string {
if (!envs || !Object.entries(envs).length) {
return ''
}
const envBuffer: string[] = []
for (const [key, value] of Object.entries(envs)) {
if (
key.includes(`=`) ||
key.includes(`'`) ||
key.includes(`"`) ||
key.includes(`$`)
) {
throw new Error(
`environment key ${key} is invalid - the key must not contain =, $, ', or "`
)
}
envBuffer.push(
`"${key}=${value
.replace(/\\/g, '\\\\')
.replace(/"/g, '\\"')
.replace(/\$/g, '\\$')
.replace(/`/g, '\\`')}"`
)
}
if (!envBuffer?.length) {
return ''
}
return `env ${envBuffer.join(' ')} `
}
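For illustration only (the variable name and value below are made up), the prefix scriptEnv builds for a value containing quotes and a dollar sign looks roughly like this, given the escaping above:

// scriptEnv({ GREETING: 'say "hi" to $USER' })
// returns approximately: env "GREETING=say \"hi\" to \$USER"
const environmentPrefix = scriptEnv({ GREETING: 'say "hi" to $USER' })
// writeRunScript and writeContainerStepScript splice it in front of the entrypoint:
//   exec <environmentPrefix> <entryPoint> <entryPointArgs>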
export function generateContainerName(image: string): string {
  const nameWithTag = image.split('/').pop()
-  const name = nameWithTag?.split(':')[0]
+  const name = nameWithTag?.split(':').at(0)
  if (!name) {
    throw new Error(`Image definition '${image}' is invalid`)
@@ -288,18 +297,5 @@ function mergeLists<T>(base?: T[], from?: T[]): T[] {
}

export function fixArgs(args: string[]): string[] {
-  // Preserve shell command strings passed via `sh -c` without re-tokenizing.
-  // Retokenizing would split the script into multiple args, breaking `sh -c`.
-  if (args.length >= 2 && args[0] === 'sh' && args[1] === '-c') {
-    return args
-  }
  return shlex.split(args.join(' '))
}
export async function sleep(ms: number): Promise<void> {
return new Promise(resolve => setTimeout(resolve, ms))
}
export function listDirAllCommand(dir: string): string {
return `cd ${shlex.quote(dir)} && find . -not -path '*/_runner_hook_responses*' -exec stat -c '%s %n' {} \\;`
}
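A small illustration of the `sh -c` special case in fixArgs above (inputs are made up):

// Shell scripts passed as `sh -c <script>` are returned untouched, so the
// script stays a single argument:
fixArgs(['sh', '-c', 'echo "hello world" && exit 0'])
// -> ['sh', '-c', 'echo "hello world" && exit 0']
// Everything else is re-tokenized with shlex:
fixArgs(['node', '--version'])
// -> ['node', '--version']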


@@ -3,7 +3,6 @@ import { cleanupJob, prepareJob } from '../src/hooks'
import { RunnerInstanceLabel } from '../src/hooks/constants'
import { namespace } from '../src/k8s'
import { TestHelper } from './test-setup'
-import { PrepareJobArgs } from 'hooklib'

let testHelper: TestHelper
@@ -15,10 +14,7 @@ describe('Cleanup Job', () => {
    const prepareJobOutputFilePath = testHelper.createFile(
      'prepare-job-output.json'
    )
-    await prepareJob(
-      prepareJobData.args as PrepareJobArgs,
-      prepareJobOutputFilePath
-    )
+    await prepareJob(prepareJobData.args, prepareJobOutputFilePath)
  })

  afterEach(async () => {


@@ -6,7 +6,6 @@ import {
  runScriptStep
} from '../src/hooks'
import { TestHelper } from './test-setup'
-import { RunContainerStepArgs, RunScriptStepArgs } from 'hooklib'

jest.useRealTimers()
@@ -26,7 +25,6 @@ describe('e2e', () => {
  afterEach(async () => {
    await testHelper.cleanup()
  })

  it('should prepare job, run script step, run container step then cleanup without errors', async () => {
    await expect(
      prepareJob(prepareJobData.args, prepareJobOutputFilePath)
@@ -38,16 +36,13 @@ describe('e2e', () => {
    const prepareJobOutputData = JSON.parse(prepareJobOutputJson.toString())

    await expect(
-      runScriptStep(
-        scriptStepData.args as RunScriptStepArgs,
-        prepareJobOutputData.state
-      )
+      runScriptStep(scriptStepData.args, prepareJobOutputData.state, null)
    ).resolves.not.toThrow()

    const runContainerStepData = testHelper.getRunContainerStepDefinition()
    await expect(
-      runContainerStep(runContainerStepData.args as RunContainerStepArgs)
+      runContainerStep(runContainerStepData.args)
    ).resolves.not.toThrow()

    await expect(cleanupJob()).resolves.not.toThrow()


@@ -1,8 +1,9 @@
import * as fs from 'fs'
-import { containerPorts } from '../src/k8s'
+import { containerPorts, POD_VOLUME_NAME } from '../src/k8s'
import {
+  containerVolumes,
  generateContainerName,
-  writeRunScript,
+  writeEntryPointScript,
  mergePodSpecWithOptions,
  mergeContainerWithOptions,
  readExtensionFromFile,
@@ -26,55 +27,91 @@ describe('k8s utils', () => {
it('should not throw', () => { it('should not throw', () => {
expect(() => expect(() =>
writeRunScript('/test', 'sh', ['-e', 'script.sh'], ['/prepend/path'], { writeEntryPointScript(
SOME_ENV: 'SOME_VALUE' '/test',
}) 'sh',
['-e', 'script.sh'],
['/prepend/path'],
{
SOME_ENV: 'SOME_VALUE'
}
)
).not.toThrow() ).not.toThrow()
}) })
it('should throw if RUNNER_TEMP is not set', () => { it('should throw if RUNNER_TEMP is not set', () => {
delete process.env.RUNNER_TEMP delete process.env.RUNNER_TEMP
expect(() => expect(() =>
writeRunScript('/test', 'sh', ['-e', 'script.sh'], ['/prepend/path'], { writeEntryPointScript(
SOME_ENV: 'SOME_VALUE' '/test',
}) 'sh',
['-e', 'script.sh'],
['/prepend/path'],
{
SOME_ENV: 'SOME_VALUE'
}
)
).toThrow() ).toThrow()
}) })
it('should throw if environment variable name contains double quote', () => { it('should throw if environment variable name contains double quote', () => {
expect(() => expect(() =>
writeRunScript('/test', 'sh', ['-e', 'script.sh'], ['/prepend/path'], { writeEntryPointScript(
'SOME"_ENV': 'SOME_VALUE' '/test',
}) 'sh',
['-e', 'script.sh'],
['/prepend/path'],
{
'SOME"_ENV': 'SOME_VALUE'
}
)
).toThrow() ).toThrow()
}) })
it('should throw if environment variable name contains =', () => { it('should throw if environment variable name contains =', () => {
expect(() => expect(() =>
writeRunScript('/test', 'sh', ['-e', 'script.sh'], ['/prepend/path'], { writeEntryPointScript(
'SOME=ENV': 'SOME_VALUE' '/test',
}) 'sh',
['-e', 'script.sh'],
['/prepend/path'],
{
'SOME=ENV': 'SOME_VALUE'
}
)
).toThrow() ).toThrow()
}) })
it('should throw if environment variable name contains single quote', () => { it('should throw if environment variable name contains single quote', () => {
expect(() => expect(() =>
writeRunScript('/test', 'sh', ['-e', 'script.sh'], ['/prepend/path'], { writeEntryPointScript(
"SOME'_ENV": 'SOME_VALUE' '/test',
}) 'sh',
['-e', 'script.sh'],
['/prepend/path'],
{
"SOME'_ENV": 'SOME_VALUE'
}
)
).toThrow() ).toThrow()
}) })
it('should throw if environment variable name contains dollar', () => { it('should throw if environment variable name contains dollar', () => {
expect(() => expect(() =>
writeRunScript('/test', 'sh', ['-e', 'script.sh'], ['/prepend/path'], { writeEntryPointScript(
SOME_$_ENV: 'SOME_VALUE' '/test',
}) 'sh',
['-e', 'script.sh'],
['/prepend/path'],
{
SOME_$_ENV: 'SOME_VALUE'
}
)
).toThrow() ).toThrow()
}) })
it('should escape double quote, dollar and backslash in environment variable values', () => { it('should escape double quote, dollar and backslash in environment variable values', () => {
const { runnerPath } = writeRunScript( const { runnerPath } = writeEntryPointScript(
'/test', '/test',
'sh', 'sh',
['-e', 'script.sh'], ['-e', 'script.sh'],
@@ -93,7 +130,7 @@ describe('k8s utils', () => {
}) })
it('should return object with containerPath and runnerPath', () => { it('should return object with containerPath and runnerPath', () => {
const { containerPath, runnerPath } = writeRunScript( const { containerPath, runnerPath } = writeEntryPointScript(
'/test', '/test',
'sh', 'sh',
['-e', 'script.sh'], ['-e', 'script.sh'],
@@ -108,7 +145,7 @@ describe('k8s utils', () => {
}) })
it('should write entrypoint path and the file should exist', () => { it('should write entrypoint path and the file should exist', () => {
const { runnerPath } = writeRunScript( const { runnerPath } = writeEntryPointScript(
'/test', '/test',
'sh', 'sh',
['-e', 'script.sh'], ['-e', 'script.sh'],
@@ -131,6 +168,88 @@ describe('k8s utils', () => {
await testHelper.cleanup() await testHelper.cleanup()
}) })
it('should throw if container action and GITHUB_WORKSPACE env is not set', () => {
delete process.env.GITHUB_WORKSPACE
expect(() => containerVolumes([], true, true)).toThrow()
expect(() => containerVolumes([], false, true)).toThrow()
})
it('should always have work mount', () => {
let volumes = containerVolumes([], true, true)
expect(volumes.find(e => e.mountPath === '/__w')).toBeTruthy()
volumes = containerVolumes([], true, false)
expect(volumes.find(e => e.mountPath === '/__w')).toBeTruthy()
volumes = containerVolumes([], false, true)
expect(volumes.find(e => e.mountPath === '/__w')).toBeTruthy()
volumes = containerVolumes([], false, false)
expect(volumes.find(e => e.mountPath === '/__w')).toBeTruthy()
})
it('should always have /github/workflow mount if working on container job or container action', () => {
let volumes = containerVolumes([], true, true)
expect(volumes.find(e => e.mountPath === '/github/workflow')).toBeTruthy()
volumes = containerVolumes([], true, false)
expect(volumes.find(e => e.mountPath === '/github/workflow')).toBeTruthy()
volumes = containerVolumes([], false, true)
expect(volumes.find(e => e.mountPath === '/github/workflow')).toBeTruthy()
volumes = containerVolumes([], false, false)
expect(
volumes.find(e => e.mountPath === '/github/workflow')
).toBeUndefined()
})
it('should have container action volumes', () => {
let volumes = containerVolumes([], true, true)
let workspace = volumes.find(e => e.mountPath === '/github/workspace')
let fileCommands = volumes.find(
e => e.mountPath === '/github/file_commands'
)
expect(workspace).toBeTruthy()
expect(workspace?.subPath).toBe('repo/repo')
expect(fileCommands).toBeTruthy()
expect(fileCommands?.subPath).toBe('_temp/_runner_file_commands')
volumes = containerVolumes([], false, true)
workspace = volumes.find(e => e.mountPath === '/github/workspace')
fileCommands = volumes.find(e => e.mountPath === '/github/file_commands')
expect(workspace).toBeTruthy()
expect(workspace?.subPath).toBe('repo/repo')
expect(fileCommands).toBeTruthy()
expect(fileCommands?.subPath).toBe('_temp/_runner_file_commands')
})
it('should have externals, github home mounts if job container', () => {
const volumes = containerVolumes()
expect(volumes.find(e => e.mountPath === '/__e')).toBeTruthy()
expect(volumes.find(e => e.mountPath === '/github/home')).toBeTruthy()
})
it('should throw if user volume source volume path is not in workspace', () => {
expect(() =>
containerVolumes(
[
{
sourceVolumePath: '/outside/of/workdir'
}
],
true,
false
)
).toThrow()
})
it(`all volumes should have name ${POD_VOLUME_NAME}`, () => {
let volumes = containerVolumes([], true, true)
expect(volumes.every(e => e.name === POD_VOLUME_NAME)).toBeTruthy()
volumes = containerVolumes([], true, false)
expect(volumes.every(e => e.name === POD_VOLUME_NAME)).toBeTruthy()
volumes = containerVolumes([], false, true)
expect(volumes.every(e => e.name === POD_VOLUME_NAME)).toBeTruthy()
volumes = containerVolumes([], false, false)
expect(volumes.every(e => e.name === POD_VOLUME_NAME)).toBeTruthy()
})
  it('should parse container ports', () => {
    const tt = [
      {
@@ -275,7 +394,7 @@ metadata:
spec:
  containers:
  - name: test
-    image: node:22
+    image: node:14.16
  - name: job
    image: ubuntu:latest`
      )
@@ -288,7 +407,7 @@ spec:
  it('should merge container spec', () => {
    const base = {
-      image: 'node:22',
+      image: 'node:14.16',
      name: 'test',
      env: [
        {
@@ -343,7 +462,7 @@ spec:
    const base = {
      containers: [
        {
-          image: 'node:22',
+          image: 'node:14.16',
          name: 'test',
          env: [
            {


@@ -3,9 +3,15 @@ import * as path from 'path'
import { cleanupJob } from '../src/hooks' import { cleanupJob } from '../src/hooks'
import { createContainerSpec, prepareJob } from '../src/hooks/prepare-job' import { createContainerSpec, prepareJob } from '../src/hooks/prepare-job'
import { TestHelper } from './test-setup' import { TestHelper } from './test-setup'
import { ENV_HOOK_TEMPLATE_PATH, generateContainerName } from '../src/k8s/utils' import {
import { execPodStep, getPodByName } from '../src/k8s' ENV_HOOK_TEMPLATE_PATH,
ENV_USE_KUBE_SCHEDULER,
generateContainerName,
readExtensionFromFile
} from '../src/k8s/utils'
import { getPodByName } from '../src/k8s'
import { V1Container } from '@kubernetes/client-node' import { V1Container } from '@kubernetes/client-node'
import * as yaml from 'js-yaml'
import { JOB_CONTAINER_NAME } from '../src/hooks/constants' import { JOB_CONTAINER_NAME } from '../src/hooks/constants'
jest.useRealTimers() jest.useRealTimers()
@@ -41,82 +47,32 @@ describe('Prepare job', () => {
}) })
it('should prepare job with absolute path for userVolumeMount', async () => { it('should prepare job with absolute path for userVolumeMount', async () => {
const userVolumeMount = path.join(
process.env.GITHUB_WORKSPACE as string,
'myvolume'
)
fs.mkdirSync(userVolumeMount, { recursive: true })
fs.writeFileSync(path.join(userVolumeMount, 'file.txt'), 'hello')
prepareJobData.args.container.userMountVolumes = [ prepareJobData.args.container.userMountVolumes = [
{ {
sourceVolumePath: userVolumeMount, sourceVolumePath: path.join(
targetVolumePath: '/__w/myvolume', process.env.GITHUB_WORKSPACE as string,
'/myvolume'
),
targetVolumePath: '/volume_mount',
readOnly: false readOnly: false
} }
] ]
await expect( await expect(
prepareJob(prepareJobData.args, prepareJobOutputFilePath) prepareJob(prepareJobData.args, prepareJobOutputFilePath)
).resolves.not.toThrow() ).resolves.not.toThrow()
const content = JSON.parse(
fs.readFileSync(prepareJobOutputFilePath).toString()
)
await execPodStep(
['sh', '-c', '[ "$(cat /__w/myvolume/file.txt)" = "hello" ] || exit 5'],
content!.state!.jobPod,
JOB_CONTAINER_NAME
).then(output => {
expect(output).toBe(0)
})
}) })
it('should prepare job with envs CI and GITHUB_ACTIONS', async () => { it('should throw an exception if the user volume mount is absolute path outside of GITHUB_WORKSPACE', async () => {
await prepareJob(prepareJobData.args, prepareJobOutputFilePath) prepareJobData.args.container.userMountVolumes = [
{
const content = JSON.parse( sourceVolumePath: '/somewhere/not/in/gh-workspace',
fs.readFileSync(prepareJobOutputFilePath).toString() targetVolumePath: '/containermount',
) readOnly: false
}
const got = await getPodByName(content.state.jobPod) ]
expect(got.spec?.containers[0].env).toEqual( await expect(
expect.arrayContaining([ prepareJob(prepareJobData.args, prepareJobOutputFilePath)
{ name: 'CI', value: 'true' }, ).rejects.toThrow()
{ name: 'GITHUB_ACTIONS', value: 'true' }
])
)
expect(got.spec?.containers[1].env).toEqual(
expect.arrayContaining([
{ name: 'CI', value: 'true' },
{ name: 'GITHUB_ACTIONS', value: 'true' }
])
)
})
it('should not override CI env var if already set', async () => {
prepareJobData.args.container.environmentVariables = {
CI: 'false'
}
await prepareJob(prepareJobData.args, prepareJobOutputFilePath)
const content = JSON.parse(
fs.readFileSync(prepareJobOutputFilePath).toString()
)
const got = await getPodByName(content.state.jobPod)
expect(got.spec?.containers[0].env).toEqual(
expect.arrayContaining([
{ name: 'CI', value: 'false' },
{ name: 'GITHUB_ACTIONS', value: 'true' }
])
)
expect(got.spec?.containers[1].env).toEqual(
expect.arrayContaining([
{ name: 'CI', value: 'true' },
{ name: 'GITHUB_ACTIONS', value: 'true' }
])
)
}) })
it('should not run prepare job without the job container', async () => { it('should not run prepare job without the job container', async () => {
@@ -164,11 +120,12 @@ describe('Prepare job', () => {
expect(got.metadata?.annotations?.['annotated-by']).toBe('extension') expect(got.metadata?.annotations?.['annotated-by']).toBe('extension')
expect(got.metadata?.labels?.['labeled-by']).toBe('extension') expect(got.metadata?.labels?.['labeled-by']).toBe('extension')
expect(got.spec?.restartPolicy).toBe('Never') expect(got.spec?.securityContext?.runAsUser).toBe(1000)
expect(got.spec?.securityContext?.runAsGroup).toBe(3000)
// job container // job container
expect(got.spec?.containers[0].name).toBe(JOB_CONTAINER_NAME) expect(got.spec?.containers[0].name).toBe(JOB_CONTAINER_NAME)
expect(got.spec?.containers[0].image).toBe('node:22') expect(got.spec?.containers[0].image).toBe('node:14.16')
expect(got.spec?.containers[0].command).toEqual(['sh']) expect(got.spec?.containers[0].command).toEqual(['sh'])
expect(got.spec?.containers[0].args).toEqual(['-c', 'sleep 50']) expect(got.spec?.containers[0].args).toEqual(['-c', 'sleep 50'])
@@ -176,13 +133,9 @@ describe('Prepare job', () => {
expect(got.spec?.containers[1].image).toBe('redis') expect(got.spec?.containers[1].image).toBe('redis')
expect(got.spec?.containers[1].command).toBeFalsy() expect(got.spec?.containers[1].command).toBeFalsy()
expect(got.spec?.containers[1].args).toBeFalsy() expect(got.spec?.containers[1].args).toBeFalsy()
expect(got.spec?.containers[1].env).toEqual( expect(got.spec?.containers[1].env).toEqual([
expect.arrayContaining([ { name: 'ENV2', value: 'value2' }
{ name: 'CI', value: 'true' }, ])
{ name: 'GITHUB_ACTIONS', value: 'true' },
{ name: 'ENV2', value: 'value2' }
])
)
expect(got.spec?.containers[1].resources).toEqual({ expect(got.spec?.containers[1].resources).toEqual({
requests: { memory: '1Mi', cpu: '1' }, requests: { memory: '1Mi', cpu: '1' },
limits: { memory: '1Gi', cpu: '2' } limits: { memory: '1Gi', cpu: '2' }
@@ -214,6 +167,17 @@ describe('Prepare job', () => {
expect(content.context.services.length).toBe(1) expect(content.context.services.length).toBe(1)
}) })
it('should not throw exception using kube scheduler', async () => {
// only for ReadWriteMany volumes or single node cluster
process.env[ENV_USE_KUBE_SCHEDULER] = 'true'
await expect(
prepareJob(prepareJobData.args, prepareJobOutputFilePath)
).resolves.not.toThrow()
delete process.env[ENV_USE_KUBE_SCHEDULER]
})
test.each([undefined, null, []])( test.each([undefined, null, []])(
'should not throw exception when portMapping=%p', 'should not throw exception when portMapping=%p',
async pm => { async pm => {
@@ -227,20 +191,4 @@ describe('Prepare job', () => {
expect(() => content.context.services[0].image).not.toThrow() expect(() => content.context.services[0].image).not.toThrow()
} }
) )
it('should prepare job with container with non-root user', async () => {
prepareJobData.args!.container!.image =
'ghcr.io/actions/actions-runner:latest' // known to use user 1001
await expect(
prepareJob(prepareJobData.args, prepareJobOutputFilePath)
).resolves.not.toThrow()
const content = JSON.parse(
fs.readFileSync(prepareJobOutputFilePath).toString()
)
expect(content.state.jobPod).toBeTruthy()
expect(content.context.container.image).toBe(
'ghcr.io/actions/actions-runner:latest'
)
})
}) })


@@ -1,4 +1,4 @@
import { prepareJob, runContainerStep } from '../src/hooks' import { runContainerStep } from '../src/hooks'
import { TestHelper } from './test-setup' import { TestHelper } from './test-setup'
import { ENV_HOOK_TEMPLATE_PATH } from '../src/k8s/utils' import { ENV_HOOK_TEMPLATE_PATH } from '../src/k8s/utils'
import * as fs from 'fs' import * as fs from 'fs'
@@ -10,16 +10,11 @@ jest.useRealTimers()
let testHelper: TestHelper let testHelper: TestHelper
let runContainerStepData: any let runContainerStepData: any
let prepareJobData: any
let prepareJobOutputFilePath: string
describe('Run container step', () => { describe('Run container step', () => {
beforeEach(async () => { beforeEach(async () => {
testHelper = new TestHelper() testHelper = new TestHelper()
await testHelper.initialize() await testHelper.initialize()
prepareJobData = testHelper.getPrepareJobDefinition()
prepareJobOutputFilePath = testHelper.createFile('prepare-job-output.json')
await prepareJob(prepareJobData.args, prepareJobOutputFilePath)
runContainerStepData = testHelper.getRunContainerStepDefinition() runContainerStepData = testHelper.getRunContainerStepDefinition()
}) })
@@ -27,6 +22,11 @@ describe('Run container step', () => {
await testHelper.cleanup() await testHelper.cleanup()
}) })
it('should not throw', async () => {
const exitCode = await runContainerStep(runContainerStepData.args)
expect(exitCode).toBe(0)
})
it('should run pod with extensions applied', async () => { it('should run pod with extensions applied', async () => {
const extension = { const extension = {
metadata: { metadata: {
@@ -42,7 +42,7 @@ describe('Run container step', () => {
{ {
name: JOB_CONTAINER_EXTENSION_NAME, name: JOB_CONTAINER_EXTENSION_NAME,
command: ['sh'], command: ['sh'],
args: ['-c', 'sleep 10000'] args: ['-c', 'echo test']
}, },
{ {
name: 'side-container', name: 'side-container',
@@ -51,7 +51,11 @@ describe('Run container step', () => {
args: ['-c', 'echo test'] args: ['-c', 'echo test']
} }
], ],
restartPolicy: 'Never' restartPolicy: 'Never',
securityContext: {
runAsUser: 1000,
runAsGroup: 3000
}
} }
} }
@@ -74,15 +78,4 @@ describe('Run container step', () => {
runContainerStep(runContainerStepData.args) runContainerStep(runContainerStepData.args)
).resolves.not.toThrow() ).resolves.not.toThrow()
}) })
it('should run container step with envs CI and GITHUB_ACTIONS', async () => {
runContainerStepData.args.entryPoint = 'bash'
runContainerStepData.args.entryPointArgs = [
'-c',
"'if [[ -z $GITHUB_ACTIONS ]] || [[ -z $CI ]]; then exit 1; fi'"
]
await expect(
runContainerStep(runContainerStepData.args)
).resolves.not.toThrow()
})
}) })


@@ -1,7 +1,6 @@
import * as fs from 'fs' import * as fs from 'fs'
import { cleanupJob, prepareJob, runScriptStep } from '../src/hooks' import { cleanupJob, prepareJob, runScriptStep } from '../src/hooks'
import { TestHelper } from './test-setup' import { TestHelper } from './test-setup'
import { PrepareJobArgs, RunScriptStepArgs } from 'hooklib'
jest.useRealTimers() jest.useRealTimers()
@@ -9,9 +8,7 @@ let testHelper: TestHelper
let prepareJobOutputData: any let prepareJobOutputData: any
let runScriptStepDefinition: { let runScriptStepDefinition
args: RunScriptStepArgs
}
describe('Run script step', () => { describe('Run script step', () => {
beforeEach(async () => { beforeEach(async () => {
@@ -22,14 +19,9 @@ describe('Run script step', () => {
) )
const prepareJobData = testHelper.getPrepareJobDefinition() const prepareJobData = testHelper.getPrepareJobDefinition()
runScriptStepDefinition = testHelper.getRunScriptStepDefinition() as { runScriptStepDefinition = testHelper.getRunScriptStepDefinition()
args: RunScriptStepArgs
}
await prepareJob( await prepareJob(prepareJobData.args, prepareJobOutputFilePath)
prepareJobData.args as PrepareJobArgs,
prepareJobOutputFilePath
)
const outputContent = fs.readFileSync(prepareJobOutputFilePath) const outputContent = fs.readFileSync(prepareJobOutputFilePath)
prepareJobOutputData = JSON.parse(outputContent.toString()) prepareJobOutputData = JSON.parse(outputContent.toString())
}) })
@@ -45,14 +37,22 @@ describe('Run script step', () => {
it('should not throw an exception', async () => { it('should not throw an exception', async () => {
await expect( await expect(
runScriptStep(runScriptStepDefinition.args, prepareJobOutputData.state) runScriptStep(
runScriptStepDefinition.args,
prepareJobOutputData.state,
null
)
).resolves.not.toThrow() ).resolves.not.toThrow()
}) })
it('should fail if the working directory does not exist', async () => { it('should fail if the working directory does not exist', async () => {
runScriptStepDefinition.args.workingDirectory = '/foo/bar' runScriptStepDefinition.args.workingDirectory = '/foo/bar'
await expect( await expect(
runScriptStep(runScriptStepDefinition.args, prepareJobOutputData.state) runScriptStep(
runScriptStepDefinition.args,
prepareJobOutputData.state,
null
)
).rejects.toThrow() ).rejects.toThrow()
}) })
@@ -64,12 +64,16 @@ describe('Run script step', () => {
"'if [[ -z $NODE_ENV ]]; then exit 1; fi'" "'if [[ -z $NODE_ENV ]]; then exit 1; fi'"
] ]
await expect( await expect(
runScriptStep(runScriptStepDefinition.args, prepareJobOutputData.state) runScriptStep(
runScriptStepDefinition.args,
prepareJobOutputData.state,
null
)
).resolves.not.toThrow() ).resolves.not.toThrow()
}) })
it('Should have path variable changed in container with prepend path string', async () => { it('Should have path variable changed in container with prepend path string', async () => {
runScriptStepDefinition.args.prependPath = ['/some/path'] runScriptStepDefinition.args.prependPath = '/some/path'
runScriptStepDefinition.args.entryPoint = '/bin/bash' runScriptStepDefinition.args.entryPoint = '/bin/bash'
runScriptStepDefinition.args.entryPointArgs = [ runScriptStepDefinition.args.entryPointArgs = [
'-c', '-c',
@@ -77,7 +81,11 @@ describe('Run script step', () => {
] ]
await expect( await expect(
runScriptStep(runScriptStepDefinition.args, prepareJobOutputData.state) runScriptStep(
runScriptStepDefinition.args,
prepareJobOutputData.state,
null
)
).resolves.not.toThrow() ).resolves.not.toThrow()
}) })
@@ -95,7 +103,11 @@ describe('Run script step', () => {
] ]
await expect( await expect(
runScriptStep(runScriptStepDefinition.args, prepareJobOutputData.state) runScriptStep(
runScriptStepDefinition.args,
prepareJobOutputData.state,
null
)
).resolves.not.toThrow() ).resolves.not.toThrow()
}) })
@@ -110,7 +122,11 @@ describe('Run script step', () => {
] ]
await expect( await expect(
runScriptStep(runScriptStepDefinition.args, prepareJobOutputData.state) runScriptStep(
runScriptStepDefinition.args,
prepareJobOutputData.state,
null
)
).resolves.not.toThrow() ).resolves.not.toThrow()
}) })
}) })


@@ -9,97 +9,87 @@ const kc = new k8s.KubeConfig()
kc.loadFromDefault() kc.loadFromDefault()
const k8sApi = kc.makeApiClient(k8s.CoreV1Api) const k8sApi = kc.makeApiClient(k8s.CoreV1Api)
const k8sStorageApi = kc.makeApiClient(k8s.StorageV1Api)
export class TestHelper { export class TestHelper {
private tempDirPath: string private tempDirPath: string
private podName: string private podName: string
private runnerWorkdir: string
private runnerTemp: string
constructor() { constructor() {
this.tempDirPath = `${__dirname}/_temp/runner` this.tempDirPath = `${__dirname}/_temp/runner`
this.runnerWorkdir = `${this.tempDirPath}/_work`
this.runnerTemp = `${this.tempDirPath}/_work/_temp`
this.podName = uuidv4().replace(/-/g, '') this.podName = uuidv4().replace(/-/g, '')
} }
async initialize(): Promise<void> { public async initialize(): Promise<void> {
process.env['ACTIONS_RUNNER_POD_NAME'] = `${this.podName}` process.env['ACTIONS_RUNNER_POD_NAME'] = `${this.podName}`
process.env['RUNNER_WORKSPACE'] = `${this.runnerWorkdir}/repo` process.env['RUNNER_WORKSPACE'] = `${this.tempDirPath}/_work/repo`
process.env['RUNNER_TEMP'] = `${this.runnerTemp}` process.env['RUNNER_TEMP'] = `${this.tempDirPath}/_work/_temp`
process.env['GITHUB_WORKSPACE'] = `${this.runnerWorkdir}/repo/repo` process.env['GITHUB_WORKSPACE'] = `${this.tempDirPath}/_work/repo/repo`
process.env['ACTIONS_RUNNER_KUBERNETES_NAMESPACE'] = 'default' process.env['ACTIONS_RUNNER_KUBERNETES_NAMESPACE'] = 'default'
fs.mkdirSync(`${this.runnerWorkdir}/repo/repo`, { recursive: true }) fs.mkdirSync(`${this.tempDirPath}/_work/repo/repo`, { recursive: true })
fs.mkdirSync(`${this.tempDirPath}/externals`, { recursive: true }) fs.mkdirSync(`${this.tempDirPath}/externals`, { recursive: true })
fs.mkdirSync(this.runnerTemp, { recursive: true }) fs.mkdirSync(process.env.RUNNER_TEMP, { recursive: true })
fs.mkdirSync(`${this.runnerTemp}/_github_workflow`, { recursive: true })
fs.mkdirSync(`${this.runnerTemp}/_github_home`, { recursive: true })
fs.mkdirSync(`${this.runnerTemp}/_runner_file_commands`, {
recursive: true
})
fs.copyFileSync( fs.copyFileSync(
path.resolve(`${__dirname}/../../../examples/example-script.sh`), path.resolve(`${__dirname}/../../../examples/example-script.sh`),
`${this.runnerTemp}/example-script.sh` `${process.env.RUNNER_TEMP}/example-script.sh`
) )
await this.cleanupK8sResources() await this.cleanupK8sResources()
try { try {
await this.createTestVolume()
await this.createTestJobPod() await this.createTestJobPod()
} catch (e) { } catch (e) {
console.log(e) console.log(e)
} }
} }
async cleanup(): Promise<void> { public async cleanup(): Promise<void> {
try { try {
await this.cleanupK8sResources() await this.cleanupK8sResources()
fs.rmSync(this.tempDirPath, { recursive: true }) fs.rmSync(this.tempDirPath, { recursive: true })
} catch { } catch {}
// Ignore errors during cleanup
}
} }
public async cleanupK8sResources() {
async cleanupK8sResources(): Promise<void> {
await k8sApi await k8sApi
.deleteNamespacedPod({ .deleteNamespacedPersistentVolumeClaim(
name: this.podName, `${this.podName}-work`,
namespace: 'default', 'default',
gracePeriodSeconds: 0 undefined,
}) undefined,
.catch((e: k8s.ApiException<any>) => { 0
if (e.code !== 404) { )
console.error(JSON.stringify(e)) .catch(e => {})
} await k8sApi.deletePersistentVolume(`${this.podName}-pv`).catch(e => {})
}) await k8sStorageApi.deleteStorageClass('local-storage').catch(e => {})
await k8sApi await k8sApi
.deleteNamespacedPod({ .deleteNamespacedPod(this.podName, 'default', undefined, undefined, 0)
name: `${this.podName}-workflow`, .catch(e => {})
namespace: 'default', await k8sApi
gracePeriodSeconds: 0 .deleteNamespacedPod(
}) `${this.podName}-workflow`,
.catch((e: k8s.ApiException<any>) => { 'default',
if (e.code !== 404) { undefined,
console.error(JSON.stringify(e)) undefined,
} 0
}) )
.catch(e => {})
} }
createFile(fileName?: string): string { public createFile(fileName?: string): string {
const filePath = `${this.tempDirPath}/${fileName || uuidv4()}` const filePath = `${this.tempDirPath}/${fileName || uuidv4()}`
fs.writeFileSync(filePath, '') fs.writeFileSync(filePath, '')
return filePath return filePath
} }
removeFile(fileName: string): void { public removeFile(fileName: string): void {
const filePath = `${this.tempDirPath}/${fileName}` const filePath = `${this.tempDirPath}/${fileName}`
fs.rmSync(filePath) fs.rmSync(filePath)
} }
async createTestJobPod(): Promise<void> { public async createTestJobPod() {
const container = { const container = {
name: 'runner', name: 'nginx',
image: 'ghcr.io/actions/actions-runner:latest', image: 'nginx:latest',
imagePullPolicy: 'IfNotPresent' imagePullPolicy: 'IfNotPresent'
} as k8s.V1Container } as k8s.V1Container
@@ -109,18 +99,59 @@ export class TestHelper {
}, },
spec: { spec: {
restartPolicy: 'Never', restartPolicy: 'Never',
containers: [container], containers: [container]
securityContext: {
runAsUser: 1001,
runAsGroup: 1001,
fsGroup: 1001
}
} }
} as k8s.V1Pod } as k8s.V1Pod
await k8sApi.createNamespacedPod({ namespace: 'default', body: pod }) await k8sApi.createNamespacedPod('default', pod)
} }
getPrepareJobDefinition(): HookData { public async createTestVolume() {
var sc: k8s.V1StorageClass = {
metadata: {
name: 'local-storage'
},
provisioner: 'kubernetes.io/no-provisioner',
volumeBindingMode: 'Immediate'
}
await k8sStorageApi.createStorageClass(sc)
var volume: k8s.V1PersistentVolume = {
metadata: {
name: `${this.podName}-pv`
},
spec: {
storageClassName: 'local-storage',
capacity: {
storage: '2Gi'
},
volumeMode: 'Filesystem',
accessModes: ['ReadWriteOnce'],
hostPath: {
path: `${this.tempDirPath}/_work`
}
}
}
await k8sApi.createPersistentVolume(volume)
var volumeClaim: k8s.V1PersistentVolumeClaim = {
metadata: {
name: `${this.podName}-work`
},
spec: {
accessModes: ['ReadWriteOnce'],
volumeMode: 'Filesystem',
storageClassName: 'local-storage',
volumeName: `${this.podName}-pv`,
resources: {
requests: {
storage: '1Gi'
}
}
}
}
await k8sApi.createNamespacedPersistentVolumeClaim('default', volumeClaim)
}
public getPrepareJobDefinition(): HookData {
const prepareJob = JSON.parse( const prepareJob = JSON.parse(
fs.readFileSync( fs.readFileSync(
path.resolve(__dirname + '/../../../examples/prepare-job.json'), path.resolve(__dirname + '/../../../examples/prepare-job.json'),
@@ -137,7 +168,7 @@ export class TestHelper {
return prepareJob return prepareJob
} }
getRunScriptStepDefinition(): HookData { public getRunScriptStepDefinition(): HookData {
const runScriptStep = JSON.parse( const runScriptStep = JSON.parse(
fs.readFileSync( fs.readFileSync(
path.resolve(__dirname + '/../../../examples/run-script-step.json'), path.resolve(__dirname + '/../../../examples/run-script-step.json'),
@@ -149,7 +180,7 @@ export class TestHelper {
return runScriptStep return runScriptStep
} }
getRunContainerStepDefinition(): HookData { public getRunContainerStepDefinition(): HookData {
const runContainerStep = JSON.parse( const runContainerStep = JSON.parse(
fs.readFileSync( fs.readFileSync(
path.resolve(__dirname + '/../../../examples/run-container-step.json'), path.resolve(__dirname + '/../../../examples/run-container-step.json'),


@@ -5,8 +5,7 @@
"outDir": "./lib", "outDir": "./lib",
"rootDir": "./src" "rootDir": "./src"
}, },
"esModuleInterop": true, /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */
"include": [ "include": [
"src/**/*", "./src"
] ]
} }


@@ -1,6 +0,0 @@
{
"compilerOptions": {
"allowJs": true
},
"extends": "./tsconfig.json"
}


@@ -1,15 +1,10 @@
-## Features
-- k8s: remove dependency on the runner's volume [#244]
-## Bugs
-- docker: fix readOnly volumes in createContainer [#236]
+<!-- ## Features -->
+<!-- ## Bugs -->
## Misc
-- bump all dependencies [#234] [#240] [#239] [#238]
-- bump actions [#254]
+- Bump `@kubernetes/client-node` from 0.18.1 to 0.22.0 in /packages/k8s [#182]
## SHA-256 Checksums