Remove dependency on the runner's volume (#244)

* bump actions

* experiment using init container to prepare working environment

* rm script before continuing

* fix

* Update packages/k8s/src/hooks/run-script-step.ts

Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>

* leverage exec stat instead of printf

* npm update

* document the new constraint

---------

Co-authored-by: DenisPalnitsky <DenisPalnitsky@users.noreply.github.com>
Author: Nikola Jokic
Date: 2025-10-02 16:23:07 +02:00
Committed by: GitHub
Parent: c67938c536
Commit: 96c35e7cc6

17 changed files with 1831 additions and 9036 deletions

View File

@@ -4,9 +4,6 @@ metadata:
   labels:
     labeled-by: "extension"
 spec:
-  securityContext:
-    runAsUser: 1000
-    runAsGroup: 3000
   restartPolicy: Never
   containers:
   - name: $job # overwrites job container

package-lock.json (generated, 3630 changed lines)

File diff suppressed because it is too large

View File

@@ -22,9 +22,6 @@ rules:
   - apiGroups: [""]
     resources: ["pods/log"]
     verbs: ["get", "list", "watch",]
-  - apiGroups: ["batch"]
-    resources: ["jobs"]
-    verbs: ["get", "list", "create", "delete"]
   - apiGroups: [""]
     resources: ["secrets"]
     verbs: ["get", "list", "create", "delete"]
@@ -43,3 +40,4 @@ rules:
 - Building container actions from a dockerfile is not supported at this time
 - Container actions will not have access to the services network or job container network
 - Docker [create options](https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idcontaineroptions) are not supported
+- Container actions will have to specify the entrypoint, since the default entrypoint will be overridden to run the commands from the workflow.
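For illustration, a minimal TypeScript sketch of what that last constraint means on the hook side, assuming the shapes used elsewhere in this commit (a `RunContainerStepArgs`-like object, the `/__e/tail` keep-alive command staged by the init container); this is not the hook's literal implementation:

```typescript
import * as k8s from '@kubernetes/client-node'

// Hypothetical shape mirroring hooklib's RunContainerStepArgs for this sketch.
interface ContainerStepArgsLike {
  image: string
  entryPoint?: string
  entryPointArgs?: string[]
}

function stepContainerSpec(step: ContainerStepArgsLike): k8s.V1Container {
  if (!step.entryPoint) {
    // The image's default ENTRYPOINT never runs: the hook replaces it with a
    // keep-alive command and later execs the declared entrypoint via a script.
    throw new Error('container actions must specify an entrypoint')
  }
  const c = new k8s.V1Container()
  c.name = 'job'
  c.image = step.image
  c.workingDir = '/__w'
  c.command = ['/__e/tail'] // tail binary staged into the shared externals volume
  c.args = ['-f', '/dev/null']
  return c
}
```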

File diff suppressed because it is too large

View File

@@ -20,17 +20,18 @@
     "hooklib": "file:../hooklib",
     "js-yaml": "^4.1.0",
     "shlex": "^3.0.0",
+    "tar-fs": "^3.1.0",
     "uuid": "^11.1.0"
   },
   "devDependencies": {
-    "@babel/core": "^7.25.2",
-    "@babel/preset-env": "^7.25.4",
+    "@babel/core": "^7.28.3",
+    "@babel/preset-env": "^7.28.3",
     "@types/jest": "^30.0.0",
-    "@types/node": "^24.0.14",
+    "@types/node": "^24.3.0",
     "@vercel/ncc": "^0.38.3",
-    "babel-jest": "^30.0.4",
-    "jest": "^30.0.4",
-    "ts-jest": "^29.4.0",
-    "typescript": "^5.8.3"
+    "babel-jest": "^30.1.1",
+    "jest": "^30.1.1",
+    "ts-jest": "^29.4.1",
+    "typescript": "^5.9.2"
   }
 }

View File

@@ -1,5 +1,4 @@
import * as core from '@actions/core' import * as core from '@actions/core'
import * as io from '@actions/io'
import * as k8s from '@kubernetes/client-node' import * as k8s from '@kubernetes/client-node'
import { import {
JobContainerInfo, JobContainerInfo,
@@ -8,26 +7,33 @@ import {
writeToResponseFile, writeToResponseFile,
ServiceContainerInfo ServiceContainerInfo
} from 'hooklib' } from 'hooklib'
import path from 'path'
import { import {
containerPorts, containerPorts,
createPod, createJobPod,
isPodContainerAlpine, isPodContainerAlpine,
prunePods, prunePods,
waitForPodPhases, waitForPodPhases,
getPrepareJobTimeoutSeconds getPrepareJobTimeoutSeconds,
execCpToPod,
execPodStep
} from '../k8s' } from '../k8s'
import { import {
containerVolumes, CONTAINER_VOLUMES,
DEFAULT_CONTAINER_ENTRY_POINT, DEFAULT_CONTAINER_ENTRY_POINT,
DEFAULT_CONTAINER_ENTRY_POINT_ARGS, DEFAULT_CONTAINER_ENTRY_POINT_ARGS,
generateContainerName, generateContainerName,
mergeContainerWithOptions, mergeContainerWithOptions,
readExtensionFromFile, readExtensionFromFile,
PodPhase, PodPhase,
fixArgs fixArgs,
prepareJobScript
} from '../k8s/utils' } from '../k8s/utils'
import { CONTAINER_EXTENSION_PREFIX, JOB_CONTAINER_NAME } from './constants' import {
CONTAINER_EXTENSION_PREFIX,
getJobPodName,
JOB_CONTAINER_NAME
} from './constants'
import { dirname } from 'path'
export async function prepareJob( export async function prepareJob(
args: PrepareJobArgs, args: PrepareJobArgs,
@@ -40,11 +46,9 @@ export async function prepareJob(
await prunePods() await prunePods()
const extension = readExtensionFromFile() const extension = readExtensionFromFile()
await copyExternalsToRoot()
let container: k8s.V1Container | undefined = undefined let container: k8s.V1Container | undefined = undefined
if (args.container?.image) { if (args.container?.image) {
core.debug(`Using image '${args.container.image}' for job image`)
container = createContainerSpec( container = createContainerSpec(
args.container, args.container,
JOB_CONTAINER_NAME, JOB_CONTAINER_NAME,
@@ -56,7 +60,6 @@ export async function prepareJob(
let services: k8s.V1Container[] = [] let services: k8s.V1Container[] = []
if (args.services?.length) { if (args.services?.length) {
services = args.services.map(service => { services = args.services.map(service => {
core.debug(`Adding service '${service.image}' to pod definition`)
return createContainerSpec( return createContainerSpec(
service, service,
generateContainerName(service.image), generateContainerName(service.image),
@@ -72,7 +75,8 @@ export async function prepareJob(
let createdPod: k8s.V1Pod | undefined = undefined let createdPod: k8s.V1Pod | undefined = undefined
try { try {
createdPod = await createPod( createdPod = await createJobPod(
getJobPodName(),
container, container,
services, services,
args.container.registry, args.container.registry,
@@ -92,6 +96,13 @@ export async function prepareJob(
`Job pod created, waiting for it to come online ${createdPod?.metadata?.name}` `Job pod created, waiting for it to come online ${createdPod?.metadata?.name}`
) )
const runnerWorkspace = dirname(process.env.RUNNER_WORKSPACE as string)
let prepareScript: { containerPath: string; runnerPath: string } | undefined
if (args.container?.userMountVolumes?.length) {
prepareScript = prepareJobScript(args.container.userMountVolumes || [])
}
try { try {
await waitForPodPhases( await waitForPodPhases(
createdPod.metadata.name, createdPod.metadata.name,
@@ -104,6 +115,28 @@ export async function prepareJob(
throw new Error(`pod failed to come online with error: ${err}`) throw new Error(`pod failed to come online with error: ${err}`)
} }
await execCpToPod(createdPod.metadata.name, runnerWorkspace, '/__w')
if (prepareScript) {
await execPodStep(
['sh', '-e', prepareScript.containerPath],
createdPod.metadata.name,
JOB_CONTAINER_NAME
)
const promises: Promise<void>[] = []
for (const vol of args?.container?.userMountVolumes || []) {
promises.push(
execCpToPod(
createdPod.metadata.name,
vol.sourceVolumePath,
vol.targetVolumePath
)
)
}
await Promise.all(promises)
}
core.debug('Job pod is ready for traffic') core.debug('Job pod is ready for traffic')
let isAlpine = false let isAlpine = false
@@ -127,7 +160,7 @@ function generateResponseFile(
responseFile: string, responseFile: string,
args: PrepareJobArgs, args: PrepareJobArgs,
appPod: k8s.V1Pod, appPod: k8s.V1Pod,
isAlpine isAlpine: boolean
): void { ): void {
if (!appPod.metadata?.name) { if (!appPod.metadata?.name) {
throw new Error('app pod must have metadata.name specified') throw new Error('app pod must have metadata.name specified')
@@ -184,17 +217,6 @@ function generateResponseFile(
writeToResponseFile(responseFile, JSON.stringify(response)) writeToResponseFile(responseFile, JSON.stringify(response))
} }
async function copyExternalsToRoot(): Promise<void> {
const workspace = process.env['RUNNER_WORKSPACE']
if (workspace) {
await io.cp(
path.join(workspace, '../../externals'),
path.join(workspace, '../externals'),
{ force: true, recursive: true, copySourceDirectory: false }
)
}
}
export function createContainerSpec( export function createContainerSpec(
container: JobContainerInfo | ServiceContainerInfo, container: JobContainerInfo | ServiceContainerInfo,
name: string, name: string,
@@ -244,10 +266,7 @@ export function createContainerSpec(
}) })
} }
podContainer.volumeMounts = containerVolumes( podContainer.volumeMounts = CONTAINER_VOLUMES
container.userMountVolumes,
jobContainer
)
if (!extension) { if (!extension) {
return podContainer return podContainer

View File

@@ -1,23 +1,31 @@
import * as core from '@actions/core' import * as core from '@actions/core'
import * as fs from 'fs'
import * as k8s from '@kubernetes/client-node' import * as k8s from '@kubernetes/client-node'
import { RunContainerStepArgs } from 'hooklib' import { RunContainerStepArgs } from 'hooklib'
import { dirname } from 'path'
import { import {
createJob, createContainerStepPod,
createSecretForEnvs, deletePod,
getContainerJobPodName, execCpFromPod,
getPodLogs, execCpToPod,
getPodStatus, execPodStep,
waitForJobToComplete, getPrepareJobTimeoutSeconds,
waitForPodPhases waitForPodPhases
} from '../k8s' } from '../k8s'
import { import {
containerVolumes, CONTAINER_VOLUMES,
fixArgs,
mergeContainerWithOptions, mergeContainerWithOptions,
PodPhase, PodPhase,
readExtensionFromFile readExtensionFromFile,
DEFAULT_CONTAINER_ENTRY_POINT_ARGS,
writeContainerStepScript
} from '../k8s/utils' } from '../k8s/utils'
import { JOB_CONTAINER_EXTENSION_NAME, JOB_CONTAINER_NAME } from './constants' import {
getJobPodName,
getStepPodName,
JOB_CONTAINER_EXTENSION_NAME,
JOB_CONTAINER_NAME
} from './constants'
export async function runContainerStep( export async function runContainerStep(
stepContainer: RunContainerStepArgs stepContainer: RunContainerStepArgs
@@ -26,119 +34,109 @@ export async function runContainerStep(
throw new Error('Building container actions is not currently supported') throw new Error('Building container actions is not currently supported')
} }
let secretName: string | undefined = undefined if (!stepContainer.entryPoint) {
if (stepContainer.environmentVariables) { throw new Error(
try { 'failed to start the container since the entrypoint is overwritten'
const envs = JSON.parse( )
JSON.stringify(stepContainer.environmentVariables) }
)
envs['GITHUB_ACTIONS'] = 'true' const envs = stepContainer.environmentVariables || {}
if (!('CI' in envs)) { envs['GITHUB_ACTIONS'] = 'true'
envs.CI = 'true' if (!('CI' in envs)) {
} envs.CI = 'true'
secretName = await createSecretForEnvs(envs)
} catch (err) {
core.debug(`createSecretForEnvs failed: ${JSON.stringify(err)}`)
const message = (err as any)?.response?.body?.message || err
throw new Error(`failed to create script environment: ${message}`)
}
} }
const extension = readExtensionFromFile() const extension = readExtensionFromFile()
core.debug(`Created secret ${secretName} for container job envs`) const container = createContainerSpec(stepContainer, extension)
const container = createContainerSpec(stepContainer, secretName, extension)
let job: k8s.V1Job let pod: k8s.V1Pod
try { try {
job = await createJob(container, extension) pod = await createContainerStepPod(getStepPodName(), container, extension)
} catch (err) { } catch (err) {
core.debug(`createJob failed: ${JSON.stringify(err)}`) core.debug(`createJob failed: ${JSON.stringify(err)}`)
const message = (err as any)?.response?.body?.message || err const message = (err as any)?.response?.body?.message || err
throw new Error(`failed to run script step: ${message}`) throw new Error(`failed to run script step: ${message}`)
} }
if (!job.metadata?.name) { if (!pod.metadata?.name) {
throw new Error( throw new Error(
`Expected job ${JSON.stringify( `Expected job ${JSON.stringify(
job pod
)} to have correctly set the metadata.name` )} to have correctly set the metadata.name`
) )
} }
core.debug(`Job created, waiting for pod to start: ${job.metadata?.name}`) const podName = pod.metadata.name
let podName: string
try { try {
podName = await getContainerJobPodName(job.metadata.name) await waitForPodPhases(
} catch (err) { podName,
core.debug(`getContainerJobPodName failed: ${JSON.stringify(err)}`) new Set([PodPhase.RUNNING]),
const message = (err as any)?.response?.body?.message || err new Set([PodPhase.PENDING, PodPhase.UNKNOWN]),
throw new Error(`failed to get container job pod name: ${message}`) getPrepareJobTimeoutSeconds()
}
await waitForPodPhases(
podName,
new Set([
PodPhase.COMPLETED,
PodPhase.RUNNING,
PodPhase.SUCCEEDED,
PodPhase.FAILED
]),
new Set([PodPhase.PENDING, PodPhase.UNKNOWN])
)
core.debug('Container step is running or complete, pulling logs')
await getPodLogs(podName, JOB_CONTAINER_NAME)
core.debug('Waiting for container job to complete')
await waitForJobToComplete(job.metadata.name)
// pod has failed so pull the status code from the container
const status = await getPodStatus(podName)
if (status?.phase === 'Succeeded') {
return 0
}
if (!status?.containerStatuses?.length) {
core.error(
`Can't determine container status from response: ${JSON.stringify(
status
)}`
) )
return 1
const runnerWorkspace = dirname(process.env.RUNNER_WORKSPACE as string)
const githubWorkspace = process.env.GITHUB_WORKSPACE as string
const parts = githubWorkspace.split('/').slice(-2)
if (parts.length !== 2) {
throw new Error(`Invalid github workspace directory: ${githubWorkspace}`)
}
const relativeWorkspace = parts.join('/')
core.debug(
`Copying files from pod ${getJobPodName()} to ${runnerWorkspace}/${relativeWorkspace}`
)
await execCpFromPod(getJobPodName(), `/__w`, `${runnerWorkspace}`)
const { containerPath, runnerPath } = writeContainerStepScript(
`${runnerWorkspace}/__w/_temp`,
githubWorkspace,
stepContainer.entryPoint,
stepContainer.entryPointArgs,
envs
)
await execCpToPod(podName, `${runnerWorkspace}/__w`, '/__w')
fs.rmSync(`${runnerWorkspace}/__w`, { recursive: true, force: true })
try {
core.debug(`Executing container step script in pod ${podName}`)
return await execPodStep(
['/__e/sh', '-e', containerPath],
pod.metadata.name,
JOB_CONTAINER_NAME
)
} catch (err) {
core.debug(`execPodStep failed: ${JSON.stringify(err)}`)
const message = (err as any)?.response?.body?.message || err
throw new Error(`failed to run script step: ${message}`)
} finally {
fs.rmSync(runnerPath, { force: true })
}
} catch (error) {
core.error(`Failed to run container step: ${error}`)
throw error
} finally {
await deletePod(podName).catch(err => {
core.error(`Failed to delete step pod ${podName}: ${err}`)
})
} }
const exitCode =
status.containerStatuses[status.containerStatuses.length - 1].state
?.terminated?.exitCode
return Number(exitCode) || 1
} }
function createContainerSpec( function createContainerSpec(
container: RunContainerStepArgs, container: RunContainerStepArgs,
secretName?: string,
extension?: k8s.V1PodTemplateSpec extension?: k8s.V1PodTemplateSpec
): k8s.V1Container { ): k8s.V1Container {
const podContainer = new k8s.V1Container() const podContainer = new k8s.V1Container()
podContainer.name = JOB_CONTAINER_NAME podContainer.name = JOB_CONTAINER_NAME
podContainer.image = container.image podContainer.image = container.image
podContainer.workingDir = container.workingDirectory podContainer.workingDir = '/__w'
podContainer.command = container.entryPoint podContainer.command = ['/__e/tail']
? [container.entryPoint] podContainer.args = DEFAULT_CONTAINER_ENTRY_POINT_ARGS
: undefined
podContainer.args = container.entryPointArgs?.length
? fixArgs(container.entryPointArgs)
: undefined
if (secretName) { podContainer.volumeMounts = CONTAINER_VOLUMES
podContainer.envFrom = [
{
secretRef: {
name: secretName,
optional: false
}
}
]
}
podContainer.volumeMounts = containerVolumes(undefined, false, true)
if (!extension) { if (!extension) {
return podContainer return podContainer
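The `/__e/tail` command above only works because the step pod stages a shell and a keep-alive binary into the shared `externals` emptyDir before the step container starts. A condensed sketch of that `fs-init` init container, assuming the image and mount names used later in this commit's `createContainerStepPod` (ACTIONS_RUNNER_IMAGE, `/mnt/externals`); retry logic and the security context are omitted:

```typescript
import * as k8s from '@kubernetes/client-node'

function fsInitContainer(externalsVolumeName: string): k8s.V1Container {
  const init = new k8s.V1Container()
  init.name = 'fs-init'
  init.image =
    process.env.ACTIONS_RUNNER_IMAGE || 'ghcr.io/actions/actions-runner:latest'
  // Copy statically needed binaries into the shared emptyDir so the step
  // container can invoke them from /__e even if its image ships no shell.
  init.command = [
    'bash',
    '-c',
    'sudo cp $(which sh) /mnt/externals/sh && ' +
      'sudo cp $(which tail) /mnt/externals/tail && ' +
      'sudo cp $(which env) /mnt/externals/env && ' +
      'sudo chmod -R 777 /mnt/externals'
  ]
  init.volumeMounts = [
    { name: externalsVolumeName, mountPath: '/mnt/externals' }
  ]
  return init
}
```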

View File

@@ -2,17 +2,18 @@
 import * as fs from 'fs'
 import * as core from '@actions/core'
 import { RunScriptStepArgs } from 'hooklib'
-import { execPodStep } from '../k8s'
-import { writeEntryPointScript } from '../k8s/utils'
+import { execCpFromPod, execCpToPod, execPodStep } from '../k8s'
+import { writeRunScript, sleep, listDirAllCommand } from '../k8s/utils'
 import { JOB_CONTAINER_NAME } from './constants'
+import { dirname } from 'path'
 
 export async function runScriptStep(
   args: RunScriptStepArgs,
-  state,
-  responseFile
+  state
 ): Promise<void> {
+  // Write the entrypoint first. This will be later coppied to the workflow pod
   const { entryPoint, entryPointArgs, environmentVariables } = args
-  const { containerPath, runnerPath } = writeEntryPointScript(
+  const { containerPath, runnerPath } = writeRunScript(
     args.workingDirectory,
     entryPoint,
     entryPointArgs,
@@ -20,6 +21,12 @@ export async function runScriptStep(
     environmentVariables
   )
 
+  const workdir = dirname(process.env.RUNNER_WORKSPACE as string)
+  const containerTemp = '/__w/_temp'
+  const runnerTemp = `${workdir}/_temp`
+  await execCpToPod(state.jobPod, runnerTemp, containerTemp)
+
+  // Execute the entrypoint script
   args.entryPoint = 'sh'
   args.entryPointArgs = ['-e', containerPath]
   try {
@@ -33,6 +40,19 @@ export async function runScriptStep(
     const message = (err as any)?.response?.body?.message || err
     throw new Error(`failed to run script step: ${message}`)
   } finally {
-    fs.rmSync(runnerPath)
+    try {
+      fs.rmSync(runnerPath, { force: true })
+    } catch (removeErr) {
+      core.debug(`Failed to remove file ${runnerPath}: ${removeErr}`)
+    }
+  }
+
+  try {
+    core.debug(
+      `Copying from job pod '${state.jobPod}' ${containerTemp} to ${runnerTemp}`
+    )
+    await execCpFromPod(state.jobPod, containerTemp, workdir)
+  } catch (error) {
+    core.warning('Failed to copy _temp from pod')
   }
 }

View File

@@ -39,7 +39,7 @@ async function run(): Promise<void> {
         await cleanupJob()
         return process.exit(0)
       case Command.RunScriptStep:
-        await runScriptStep(args as RunScriptStepArgs, state, null)
+        await runScriptStep(args as RunScriptStepArgs, state)
         return process.exit(0)
       case Command.RunContainerStep:
         exitCode = await runContainerStep(args as RunContainerStepArgs)

View File

@@ -1,21 +1,26 @@
import * as core from '@actions/core' import * as core from '@actions/core'
import * as path from 'path'
import { spawn } from 'child_process'
import * as k8s from '@kubernetes/client-node' import * as k8s from '@kubernetes/client-node'
import tar from 'tar-fs'
import * as stream from 'stream' import * as stream from 'stream'
import { WritableStreamBuffer } from 'stream-buffers'
import { createHash } from 'crypto'
import type { ContainerInfo, Registry } from 'hooklib' import type { ContainerInfo, Registry } from 'hooklib'
import { import {
getJobPodName,
getRunnerPodName,
getSecretName, getSecretName,
getStepPodName, JOB_CONTAINER_NAME,
getVolumeClaimName,
RunnerInstanceLabel RunnerInstanceLabel
} from '../hooks/constants' } from '../hooks/constants'
import { import {
PodPhase, PodPhase,
mergePodSpecWithOptions, mergePodSpecWithOptions,
mergeObjectMeta, mergeObjectMeta,
useKubeScheduler, fixArgs,
fixArgs listDirAllCommand,
sleep,
EXTERNALS_VOLUME_NAME,
GITHUB_VOLUME_NAME
} from './utils' } from './utils'
const kc = new k8s.KubeConfig() const kc = new k8s.KubeConfig()
@@ -28,8 +33,6 @@ const k8sAuthorizationV1Api = kc.makeApiClient(k8s.AuthorizationV1Api)
const DEFAULT_WAIT_FOR_POD_TIME_SECONDS = 10 * 60 // 10 min const DEFAULT_WAIT_FOR_POD_TIME_SECONDS = 10 * 60 // 10 min
export const POD_VOLUME_NAME = 'work'
export const requiredPermissions = [ export const requiredPermissions = [
{ {
group: '', group: '',
@@ -49,12 +52,6 @@ export const requiredPermissions = [
resource: 'pods', resource: 'pods',
subresource: 'log' subresource: 'log'
}, },
{
group: 'batch',
verbs: ['get', 'list', 'create', 'delete'],
resource: 'jobs',
subresource: ''
},
{ {
group: '', group: '',
verbs: ['create', 'delete', 'get', 'list'], verbs: ['create', 'delete', 'get', 'list'],
@@ -63,7 +60,8 @@ export const requiredPermissions = [
} }
] ]
export async function createPod( export async function createJobPod(
name: string,
jobContainer?: k8s.V1Container, jobContainer?: k8s.V1Container,
services?: k8s.V1Container[], services?: k8s.V1Container[],
registry?: Registry, registry?: Registry,
@@ -83,7 +81,7 @@ export async function createPod(
appPod.kind = 'Pod' appPod.kind = 'Pod'
appPod.metadata = new k8s.V1ObjectMeta() appPod.metadata = new k8s.V1ObjectMeta()
appPod.metadata.name = getJobPodName() appPod.metadata.name = name
const instanceLabel = new RunnerInstanceLabel() const instanceLabel = new RunnerInstanceLabel()
appPod.metadata.labels = { appPod.metadata.labels = {
@@ -93,19 +91,36 @@ export async function createPod(
appPod.spec = new k8s.V1PodSpec() appPod.spec = new k8s.V1PodSpec()
appPod.spec.containers = containers appPod.spec.containers = containers
appPod.spec.initContainers = [
{
name: 'fs-init',
image:
process.env.ACTIONS_RUNNER_IMAGE ||
'ghcr.io/actions/actions-runner:latest',
command: ['sh', '-c', 'sudo mv /home/runner/externals/* /mnt/externals'],
securityContext: {
runAsGroup: 1001,
runAsUser: 1001
},
volumeMounts: [
{
name: EXTERNALS_VOLUME_NAME,
mountPath: '/mnt/externals'
}
]
}
]
appPod.spec.restartPolicy = 'Never' appPod.spec.restartPolicy = 'Never'
const nodeName = await getCurrentNodeName()
if (useKubeScheduler()) {
appPod.spec.affinity = await getPodAffinity(nodeName)
} else {
appPod.spec.nodeName = nodeName
}
const claimName = getVolumeClaimName()
appPod.spec.volumes = [ appPod.spec.volumes = [
{ {
name: 'work', name: EXTERNALS_VOLUME_NAME,
persistentVolumeClaim: { claimName } emptyDir: {}
},
{
name: GITHUB_VOLUME_NAME,
emptyDir: {}
} }
] ]
@@ -133,88 +148,82 @@ export async function createPod(
}) })
} }
export async function createJob( export async function createContainerStepPod(
name: string,
container: k8s.V1Container, container: k8s.V1Container,
extension?: k8s.V1PodTemplateSpec extension?: k8s.V1PodTemplateSpec
): Promise<k8s.V1Job> { ): Promise<k8s.V1Pod> {
const runnerInstanceLabel = new RunnerInstanceLabel() const appPod = new k8s.V1Pod()
const job = new k8s.V1Job() appPod.apiVersion = 'v1'
job.apiVersion = 'batch/v1' appPod.kind = 'Pod'
job.kind = 'Job'
job.metadata = new k8s.V1ObjectMeta()
job.metadata.name = getStepPodName()
job.metadata.labels = { [runnerInstanceLabel.key]: runnerInstanceLabel.value }
job.metadata.annotations = {}
job.spec = new k8s.V1JobSpec() appPod.metadata = new k8s.V1ObjectMeta()
job.spec.ttlSecondsAfterFinished = 300 appPod.metadata.name = name
job.spec.backoffLimit = 0
job.spec.template = new k8s.V1PodTemplateSpec()
job.spec.template.spec = new k8s.V1PodSpec() const instanceLabel = new RunnerInstanceLabel()
job.spec.template.metadata = new k8s.V1ObjectMeta() appPod.metadata.labels = {
job.spec.template.metadata.labels = {} [instanceLabel.key]: instanceLabel.value
job.spec.template.metadata.annotations = {}
job.spec.template.spec.containers = [container]
job.spec.template.spec.restartPolicy = 'Never'
const nodeName = await getCurrentNodeName()
if (useKubeScheduler()) {
job.spec.template.spec.affinity = await getPodAffinity(nodeName)
} else {
job.spec.template.spec.nodeName = nodeName
} }
appPod.metadata.annotations = {}
const claimName = getVolumeClaimName() appPod.spec = new k8s.V1PodSpec()
job.spec.template.spec.volumes = [ appPod.spec.containers = [container]
appPod.spec.initContainers = [
{ {
name: 'work', name: 'fs-init',
persistentVolumeClaim: { claimName } image:
process.env.ACTIONS_RUNNER_IMAGE ||
'ghcr.io/actions/actions-runner:latest',
command: [
'bash',
'-c',
`sudo cp $(which sh) /mnt/externals/sh \
&& sudo cp $(which tail) /mnt/externals/tail \
&& sudo cp $(which env) /mnt/externals/env \
&& sudo chmod -R 777 /mnt/externals`
],
securityContext: {
runAsGroup: 1001,
runAsUser: 1001,
privileged: true
},
volumeMounts: [
{
name: EXTERNALS_VOLUME_NAME,
mountPath: '/mnt/externals'
}
]
} }
] ]
if (extension) { appPod.spec.restartPolicy = 'Never'
if (extension.metadata) {
// apply metadata both to the job and the pod created by the job appPod.spec.volumes = [
mergeObjectMeta(job, extension.metadata) {
mergeObjectMeta(job.spec.template, extension.metadata) name: EXTERNALS_VOLUME_NAME,
} emptyDir: {}
if (extension.spec) { },
mergePodSpecWithOptions(job.spec.template.spec, extension.spec) {
name: GITHUB_VOLUME_NAME,
emptyDir: {}
} }
]
if (extension?.metadata) {
mergeObjectMeta(appPod, extension.metadata)
} }
return await k8sBatchV1Api.createNamespacedJob({ if (extension?.spec) {
mergePodSpecWithOptions(appPod.spec, extension.spec)
}
return await k8sApi.createNamespacedPod({
namespace: namespace(), namespace: namespace(),
body: job body: appPod
}) })
} }
export async function getContainerJobPodName(jobName: string): Promise<string> {
const selector = `job-name=${jobName}`
const backOffManager = new BackOffManager(60)
while (true) {
const podList = await k8sApi.listNamespacedPod({
namespace: namespace(),
labelSelector: selector,
limit: 1
})
if (!podList.items?.length) {
await backOffManager.backOff()
continue
}
if (!podList.items[0].metadata?.name) {
throw new Error(
`Failed to determine the name of the pod for job ${jobName}`
)
}
return podList.items[0].metadata.name
}
}
export async function deletePod(name: string): Promise<void> { export async function deletePod(name: string): Promise<void> {
await k8sApi.deleteNamespacedPod({ await k8sApi.deleteNamespacedPod({
name, name,
@@ -228,11 +237,11 @@ export async function execPodStep(
podName: string, podName: string,
containerName: string, containerName: string,
stdin?: stream.Readable stdin?: stream.Readable
): Promise<void> { ): Promise<number> {
const exec = new k8s.Exec(kc) const exec = new k8s.Exec(kc)
command = fixArgs(command) command = fixArgs(command)
// Exec returns a websocket. If websocket fails, we should reject the promise. Otherwise, websocket will call a callback. Since at that point, websocket is not failing, we can safely resolve or reject the promise. return await new Promise(function (resolve, reject) {
await new Promise(function (resolve, reject) {
exec exec
.exec( .exec(
namespace(), namespace(),
@@ -244,9 +253,9 @@ export async function execPodStep(
stdin ?? null, stdin ?? null,
false /* tty */, false /* tty */,
resp => { resp => {
// kube.exec returns an error if exit code is not 0, but we can't actually get the exit code core.debug(`execPodStep response: ${JSON.stringify(resp)}`)
if (resp.status === 'Success') { if (resp.status === 'Success') {
resolve(resp.code) resolve(resp.code || 0)
} else { } else {
core.debug( core.debug(
JSON.stringify({ JSON.stringify({
@@ -254,15 +263,271 @@ export async function execPodStep(
details: resp?.details details: resp?.details
}) })
) )
reject(resp?.message) reject(new Error(resp?.message || 'execPodStep failed'))
} }
} }
) )
// If exec.exec fails, explicitly reject the outer promise
.catch(e => reject(e)) .catch(e => reject(e))
}) })
} }
export async function execCalculateOutputHash(
podName: string,
containerName: string,
command: string[]
): Promise<string> {
const exec = new k8s.Exec(kc)
// Create a writable stream that updates a SHA-256 hash with stdout data
const hash = createHash('sha256')
const hashWriter = new stream.Writable({
write(chunk, _enc, cb) {
try {
hash.update(chunk.toString('utf8') as Buffer)
cb()
} catch (e) {
cb(e as Error)
}
}
})
await new Promise<void>((resolve, reject) => {
exec
.exec(
namespace(),
podName,
containerName,
command,
hashWriter, // capture stdout for hashing
process.stderr,
null,
false /* tty */,
resp => {
core.debug(`internalExecOutput response: ${JSON.stringify(resp)}`)
if (resp.status === 'Success') {
resolve()
} else {
core.debug(
JSON.stringify({
message: resp?.message,
details: resp?.details
})
)
reject(new Error(resp?.message || 'internalExecOutput failed'))
}
}
)
.catch(e => reject(e))
})
// finalize hash and return digest
hashWriter.end()
return hash.digest('hex')
}
export async function localCalculateOutputHash(
commands: string[]
): Promise<string> {
return await new Promise<string>((resolve, reject) => {
const hash = createHash('sha256')
const child = spawn(commands[0], commands.slice(1), {
stdio: ['ignore', 'pipe', 'ignore']
})
child.stdout.on('data', chunk => {
hash.update(chunk)
})
child.on('error', reject)
child.on('close', (code: number) => {
if (code === 0) {
resolve(hash.digest('hex'))
} else {
reject(new Error(`child process exited with code ${code}`))
}
})
})
}
export async function execCpToPod(
podName: string,
runnerPath: string,
containerPath: string
): Promise<void> {
core.debug(`Copying ${runnerPath} to pod ${podName} at ${containerPath}`)
let attempt = 0
while (true) {
try {
const exec = new k8s.Exec(kc)
const command = ['tar', 'xf', '-', '-C', containerPath]
const readStream = tar.pack(runnerPath)
const errStream = new WritableStreamBuffer()
await new Promise((resolve, reject) => {
exec
.exec(
namespace(),
podName,
JOB_CONTAINER_NAME,
command,
null,
errStream,
readStream,
false,
async status => {
if (errStream.size()) {
reject(
new Error(
`Error from cpFromPod - details: \n ${errStream.getContentsAsString()}`
)
)
}
resolve(status)
}
)
.catch(e => reject(e))
})
break
} catch (error) {
core.debug(`cpToPod: Attempt ${attempt + 1} failed: ${error}`)
attempt++
if (attempt >= 30) {
throw new Error(
`cpToPod failed after ${attempt} attempts: ${JSON.stringify(error)}`
)
}
await sleep(1000)
}
}
const want = await localCalculateOutputHash([
'sh',
'-c',
listDirAllCommand(runnerPath)
])
let attempts = 15
const delay = 1000
for (let i = 0; i < attempts; i++) {
try {
const got = await execCalculateOutputHash(podName, JOB_CONTAINER_NAME, [
'sh',
'-c',
listDirAllCommand(containerPath)
])
if (got !== want) {
core.debug(
`The hash of the directory does not match the expected value; want='${want}' got='${got}'`
)
await sleep(delay)
continue
}
break
} catch (error) {
core.debug(`Attempt ${i + 1} failed: ${error}`)
await sleep(delay)
}
}
}
export async function execCpFromPod(
podName: string,
containerPath: string,
parentRunnerPath: string
): Promise<void> {
const targetRunnerPath = `${parentRunnerPath}/${path.basename(containerPath)}`
core.debug(
`Copying from pod ${podName} ${containerPath} to ${targetRunnerPath}`
)
const want = await execCalculateOutputHash(podName, JOB_CONTAINER_NAME, [
'sh',
'-c',
listDirAllCommand(containerPath)
])
let attempt = 0
while (true) {
try {
// make temporary directory
const exec = new k8s.Exec(kc)
const containerPaths = containerPath.split('/')
const dirname = containerPaths.pop() as string
const command = [
'tar',
'cf',
'-',
'-C',
containerPaths.join('/') || '/',
dirname
]
const writerStream = tar.extract(parentRunnerPath)
const errStream = new WritableStreamBuffer()
await new Promise((resolve, reject) => {
exec
.exec(
namespace(),
podName,
JOB_CONTAINER_NAME,
command,
writerStream,
errStream,
null,
false,
async status => {
if (errStream.size()) {
reject(
new Error(
`Error from cpFromPod - details: \n ${errStream.getContentsAsString()}`
)
)
}
resolve(status)
}
)
.catch(e => reject(e))
})
break
} catch (error) {
core.debug(`Attempt ${attempt + 1} failed: ${error}`)
attempt++
if (attempt >= 30) {
throw new Error(
`execCpFromPod failed after ${attempt} attempts: ${JSON.stringify(error)}`
)
}
await sleep(1000)
}
}
let attempts = 15
const delay = 1000
for (let i = 0; i < attempts; i++) {
try {
const got = await localCalculateOutputHash([
'sh',
'-c',
listDirAllCommand(targetRunnerPath)
])
if (got !== want) {
core.debug(
`The hash of the directory does not match the expected value; want='${want}' got='${got}'`
)
await sleep(delay)
continue
}
break
} catch (error) {
core.debug(`Attempt ${i + 1} failed: ${error}`)
await sleep(delay)
}
}
}
export async function waitForJobToComplete(jobName: string): Promise<void> { export async function waitForJobToComplete(jobName: string): Promise<void> {
const backOffManager = new BackOffManager() const backOffManager = new BackOffManager()
while (true) { while (true) {
@@ -540,39 +805,6 @@ export async function isPodContainerAlpine(
return isAlpine return isAlpine
} }
async function getCurrentNodeName(): Promise<string> {
const resp = await k8sApi.readNamespacedPod({
name: getRunnerPodName(),
namespace: namespace()
})
const nodeName = resp.spec?.nodeName
if (!nodeName) {
throw new Error('Failed to determine node name')
}
return nodeName
}
async function getPodAffinity(nodeName: string): Promise<k8s.V1Affinity> {
const affinity = new k8s.V1Affinity()
affinity.nodeAffinity = new k8s.V1NodeAffinity()
affinity.nodeAffinity.requiredDuringSchedulingIgnoredDuringExecution =
new k8s.V1NodeSelector()
affinity.nodeAffinity.requiredDuringSchedulingIgnoredDuringExecution.nodeSelectorTerms =
[
{
matchExpressions: [
{
key: 'kubernetes.io/hostname',
operator: 'In',
values: [nodeName]
}
]
}
]
return affinity
}
export function namespace(): string { export function namespace(): string {
if (process.env['ACTIONS_RUNNER_KUBERNETES_NAMESPACE']) { if (process.env['ACTIONS_RUNNER_KUBERNETES_NAMESPACE']) {
return process.env['ACTIONS_RUNNER_KUBERNETES_NAMESPACE'] return process.env['ACTIONS_RUNNER_KUBERNETES_NAMESPACE']
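The `execCpToPod`/`execCpFromPod` helpers introduced in this file stream a tar archive over the Kubernetes exec API instead of relying on a shared PersistentVolumeClaim. A condensed sketch of the push direction, using the same libraries as the diff (@kubernetes/client-node, tar-fs, stream-buffers) and omitting the retry loop and the stat-based verification:

```typescript
import * as k8s from '@kubernetes/client-node'
import tar from 'tar-fs'
import { WritableStreamBuffer } from 'stream-buffers'

async function cpToPodOnce(
  kc: k8s.KubeConfig,
  ns: string,
  podName: string,
  containerName: string,
  runnerPath: string,
  containerPath: string
): Promise<void> {
  const exec = new k8s.Exec(kc)
  const readStream = tar.pack(runnerPath) // tar the local directory on the fly
  const errStream = new WritableStreamBuffer()
  await new Promise((resolve, reject) => {
    exec
      .exec(
        ns,
        podName,
        containerName,
        ['tar', 'xf', '-', '-C', containerPath], // untar from stdin inside the pod
        null,
        errStream,
        readStream,
        false /* tty */,
        status => {
          if (errStream.size()) {
            reject(new Error(errStream.getContentsAsString() || 'tar failed'))
          } else {
            resolve(status)
          }
        }
      )
      .catch(reject)
  })
}
```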

View File

@@ -2,12 +2,10 @@ import * as k8s from '@kubernetes/client-node'
import * as fs from 'fs' import * as fs from 'fs'
import * as yaml from 'js-yaml' import * as yaml from 'js-yaml'
import * as core from '@actions/core' import * as core from '@actions/core'
import { Mount } from 'hooklib'
import * as path from 'path'
import { v1 as uuidv4 } from 'uuid' import { v1 as uuidv4 } from 'uuid'
import { POD_VOLUME_NAME } from './index'
import { CONTAINER_EXTENSION_PREFIX } from '../hooks/constants' import { CONTAINER_EXTENSION_PREFIX } from '../hooks/constants'
import * as shlex from 'shlex' import * as shlex from 'shlex'
import { Mount } from 'hooklib'
export const DEFAULT_CONTAINER_ENTRY_POINT_ARGS = [`-f`, `/dev/null`] export const DEFAULT_CONTAINER_ENTRY_POINT_ARGS = [`-f`, `/dev/null`]
export const DEFAULT_CONTAINER_ENTRY_POINT = 'tail' export const DEFAULT_CONTAINER_ENTRY_POINT = 'tail'
@@ -15,101 +13,43 @@ export const DEFAULT_CONTAINER_ENTRY_POINT = 'tail'
export const ENV_HOOK_TEMPLATE_PATH = 'ACTIONS_RUNNER_CONTAINER_HOOK_TEMPLATE' export const ENV_HOOK_TEMPLATE_PATH = 'ACTIONS_RUNNER_CONTAINER_HOOK_TEMPLATE'
export const ENV_USE_KUBE_SCHEDULER = 'ACTIONS_RUNNER_USE_KUBE_SCHEDULER' export const ENV_USE_KUBE_SCHEDULER = 'ACTIONS_RUNNER_USE_KUBE_SCHEDULER'
export function containerVolumes( export const EXTERNALS_VOLUME_NAME = 'externals'
userMountVolumes: Mount[] = [], export const GITHUB_VOLUME_NAME = 'github'
jobContainer = true,
containerAction = false
): k8s.V1VolumeMount[] {
const mounts: k8s.V1VolumeMount[] = [
{
name: POD_VOLUME_NAME,
mountPath: '/__w'
}
]
const workspacePath = process.env.GITHUB_WORKSPACE as string export const CONTAINER_VOLUMES: k8s.V1VolumeMount[] = [
if (containerAction) { {
const i = workspacePath.lastIndexOf('_work/') name: EXTERNALS_VOLUME_NAME,
const workspaceRelativePath = workspacePath.slice(i + '_work/'.length) mountPath: '/__e'
mounts.push( },
{ {
name: POD_VOLUME_NAME, name: GITHUB_VOLUME_NAME,
mountPath: '/github/workspace', mountPath: '/github'
subPath: workspaceRelativePath
},
{
name: POD_VOLUME_NAME,
mountPath: '/github/file_commands',
subPath: '_temp/_runner_file_commands'
},
{
name: POD_VOLUME_NAME,
mountPath: '/github/home',
subPath: '_temp/_github_home'
},
{
name: POD_VOLUME_NAME,
mountPath: '/github/workflow',
subPath: '_temp/_github_workflow'
}
)
return mounts
} }
]
if (!jobContainer) { export function prepareJobScript(userVolumeMounts: Mount[]): {
return mounts containerPath: string
runnerPath: string
} {
let mountDirs = userVolumeMounts.map(m => m.targetVolumePath).join(' ')
const content = `#!/bin/sh -l
set -e
cp -R /__w/_temp/_github_home /github/home
cp -R /__w/_temp/_github_workflow /github/workflow
mkdir -p ${mountDirs}
`
const filename = `${uuidv4()}.sh`
const entryPointPath = `${process.env.RUNNER_TEMP}/${filename}`
fs.writeFileSync(entryPointPath, content)
return {
containerPath: `/__w/_temp/${filename}`,
runnerPath: entryPointPath
} }
mounts.push(
{
name: POD_VOLUME_NAME,
mountPath: '/__e',
subPath: 'externals'
},
{
name: POD_VOLUME_NAME,
mountPath: '/github/home',
subPath: '_temp/_github_home'
},
{
name: POD_VOLUME_NAME,
mountPath: '/github/workflow',
subPath: '_temp/_github_workflow'
}
)
if (!userMountVolumes?.length) {
return mounts
}
for (const userVolume of userMountVolumes) {
let sourceVolumePath = ''
if (path.isAbsolute(userVolume.sourceVolumePath)) {
if (!userVolume.sourceVolumePath.startsWith(workspacePath)) {
throw new Error(
'Volume mounts outside of the work folder are not supported'
)
}
// source volume path should be relative path
sourceVolumePath = userVolume.sourceVolumePath.slice(
workspacePath.length + 1
)
} else {
sourceVolumePath = userVolume.sourceVolumePath
}
mounts.push({
name: POD_VOLUME_NAME,
mountPath: userVolume.targetVolumePath,
subPath: sourceVolumePath,
readOnly: userVolume.readOnly
})
}
return mounts
} }
export function writeEntryPointScript( export function writeRunScript(
workingDirectory: string, workingDirectory: string,
entryPoint: string, entryPoint: string,
entryPointArgs?: string[], entryPointArgs?: string[],
@@ -123,33 +63,12 @@ export function writeEntryPointScript(
typeof prependPath === 'string' ? prependPath : prependPath.join(':') typeof prependPath === 'string' ? prependPath : prependPath.join(':')
exportPath = `export PATH=${prepend}:$PATH` exportPath = `export PATH=${prepend}:$PATH`
} }
let environmentPrefix = ''
if (environmentVariables && Object.entries(environmentVariables).length) { let environmentPrefix = scriptEnv(environmentVariables)
const envBuffer: string[] = []
for (const [key, value] of Object.entries(environmentVariables)) {
if (
key.includes(`=`) ||
key.includes(`'`) ||
key.includes(`"`) ||
key.includes(`$`)
) {
throw new Error(
`environment key ${key} is invalid - the key must not contain =, $, ', or "`
)
}
envBuffer.push(
`"${key}=${value
.replace(/\\/g, '\\\\')
.replace(/"/g, '\\"')
.replace(/\$/g, '\\$')
.replace(/`/g, '\\`')}"`
)
}
environmentPrefix = `env ${envBuffer.join(' ')} `
}
const content = `#!/bin/sh -l const content = `#!/bin/sh -l
set -e
rm "$0" # remove script after running
${exportPath} ${exportPath}
cd ${workingDirectory} && \ cd ${workingDirectory} && \
exec ${environmentPrefix} ${entryPoint} ${ exec ${environmentPrefix} ${entryPoint} ${
@@ -165,6 +84,73 @@ exec ${environmentPrefix} ${entryPoint} ${
} }
} }
export function writeContainerStepScript(
dst: string,
workingDirectory: string,
entryPoint: string,
entryPointArgs?: string[],
environmentVariables?: { [key: string]: string }
): { containerPath: string; runnerPath: string } {
let environmentPrefix = scriptEnv(environmentVariables)
const parts = workingDirectory.split('/').slice(-2)
if (parts.length !== 2) {
throw new Error(`Invalid working directory: ${workingDirectory}`)
}
const content = `#!/bin/sh -l
rm "$0" # remove script after running
mv /__w/_temp/_github_home /github/home && \
mv /__w/_temp/_github_workflow /github/workflow && \
mv /__w/_temp/_runner_file_commands /github/file_commands && \
mv /__w/${parts.join('/')}/ /github/workspace && \
cd /github/workspace && \
exec ${environmentPrefix} ${entryPoint} ${
entryPointArgs?.length ? entryPointArgs.join(' ') : ''
}
`
const filename = `${uuidv4()}.sh`
const entryPointPath = `${dst}/${filename}`
core.debug(`Writing container step script to ${entryPointPath}`)
fs.writeFileSync(entryPointPath, content)
return {
containerPath: `/__w/_temp/${filename}`,
runnerPath: entryPointPath
}
}
function scriptEnv(envs?: { [key: string]: string }): string {
if (!envs || !Object.entries(envs).length) {
return ''
}
const envBuffer: string[] = []
for (const [key, value] of Object.entries(envs)) {
if (
key.includes(`=`) ||
key.includes(`'`) ||
key.includes(`"`) ||
key.includes(`$`)
) {
throw new Error(
`environment key ${key} is invalid - the key must not contain =, $, ', or "`
)
}
envBuffer.push(
`"${key}=${value
.replace(/\\/g, '\\\\')
.replace(/"/g, '\\"')
.replace(/\$/g, '\\$')
.replace(/`/g, '\\`')}"`
)
}
if (!envBuffer?.length) {
return ''
}
return `env ${envBuffer.join(' ')} `
}
export function generateContainerName(image: string): string { export function generateContainerName(image: string): string {
const nameWithTag = image.split('/').pop() const nameWithTag = image.split('/').pop()
const name = nameWithTag?.split(':')[0] const name = nameWithTag?.split(':')[0]
@@ -299,3 +285,11 @@ function mergeLists<T>(base?: T[], from?: T[]): T[] {
export function fixArgs(args: string[]): string[] { export function fixArgs(args: string[]): string[] {
return shlex.split(args.join(' ')) return shlex.split(args.join(' '))
} }
export async function sleep(ms: number): Promise<void> {
return new Promise(resolve => setTimeout(resolve, ms))
}
export function listDirAllCommand(dir: string): string {
return `cd ${shlex.quote(dir)} && find . -not -path '*/_runner_hook_responses*' -exec stat -c '%b %n' {} \\;`
}
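`listDirAllCommand` feeds the copy verification referenced in the commit message ("leverage exec stat instead of printf"): the runner and the pod hash the same `find ... stat -c '%b %n'` listing and the copy is re-checked until the digests agree. A condensed local-side sketch, assuming the pod side uses the `execCalculateOutputHash` helper from this commit:

```typescript
import { createHash } from 'crypto'
import { execFileSync } from 'child_process'
import * as shlex from 'shlex'

// Mirrors listDirAllCommand above: a stable listing of block counts and names.
function listDirAllCommand(dir: string): string {
  return `cd ${shlex.quote(dir)} && find . -not -path '*/_runner_hook_responses*' -exec stat -c '%b %n' {} \\;`
}

function localListingDigest(dir: string): string {
  const out = execFileSync('sh', ['-c', listDirAllCommand(dir)])
  return createHash('sha256').update(out).digest('hex')
}

// Usage idea: compare against the digest computed inside the pod and retry on mismatch.
//   const want = localListingDigest(runnerTemp)
//   const got = await execCalculateOutputHash(podName, JOB_CONTAINER_NAME,
//     ['sh', '-c', listDirAllCommand('/__w/_temp')])
//   if (got !== want) { /* sleep, then copy and check again */ }
```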

View File

@@ -39,8 +39,7 @@ describe('e2e', () => {
     await expect(
       runScriptStep(
         scriptStepData.args as RunScriptStepArgs,
-        prepareJobOutputData.state,
-        null
+        prepareJobOutputData.state
       )
     ).resolves.not.toThrow()

View File

@@ -1,9 +1,8 @@
import * as fs from 'fs' import * as fs from 'fs'
import { containerPorts, POD_VOLUME_NAME } from '../src/k8s' import { containerPorts } from '../src/k8s'
import { import {
containerVolumes,
generateContainerName, generateContainerName,
writeEntryPointScript, writeRunScript,
mergePodSpecWithOptions, mergePodSpecWithOptions,
mergeContainerWithOptions, mergeContainerWithOptions,
readExtensionFromFile, readExtensionFromFile,
@@ -27,91 +26,55 @@ describe('k8s utils', () => {
it('should not throw', () => { it('should not throw', () => {
expect(() => expect(() =>
writeEntryPointScript( writeRunScript('/test', 'sh', ['-e', 'script.sh'], ['/prepend/path'], {
'/test', SOME_ENV: 'SOME_VALUE'
'sh', })
['-e', 'script.sh'],
['/prepend/path'],
{
SOME_ENV: 'SOME_VALUE'
}
)
).not.toThrow() ).not.toThrow()
}) })
it('should throw if RUNNER_TEMP is not set', () => { it('should throw if RUNNER_TEMP is not set', () => {
delete process.env.RUNNER_TEMP delete process.env.RUNNER_TEMP
expect(() => expect(() =>
writeEntryPointScript( writeRunScript('/test', 'sh', ['-e', 'script.sh'], ['/prepend/path'], {
'/test', SOME_ENV: 'SOME_VALUE'
'sh', })
['-e', 'script.sh'],
['/prepend/path'],
{
SOME_ENV: 'SOME_VALUE'
}
)
).toThrow() ).toThrow()
}) })
it('should throw if environment variable name contains double quote', () => { it('should throw if environment variable name contains double quote', () => {
expect(() => expect(() =>
writeEntryPointScript( writeRunScript('/test', 'sh', ['-e', 'script.sh'], ['/prepend/path'], {
'/test', 'SOME"_ENV': 'SOME_VALUE'
'sh', })
['-e', 'script.sh'],
['/prepend/path'],
{
'SOME"_ENV': 'SOME_VALUE'
}
)
).toThrow() ).toThrow()
}) })
it('should throw if environment variable name contains =', () => { it('should throw if environment variable name contains =', () => {
expect(() => expect(() =>
writeEntryPointScript( writeRunScript('/test', 'sh', ['-e', 'script.sh'], ['/prepend/path'], {
'/test', 'SOME=ENV': 'SOME_VALUE'
'sh', })
['-e', 'script.sh'],
['/prepend/path'],
{
'SOME=ENV': 'SOME_VALUE'
}
)
).toThrow() ).toThrow()
}) })
it('should throw if environment variable name contains single quote', () => { it('should throw if environment variable name contains single quote', () => {
expect(() => expect(() =>
writeEntryPointScript( writeRunScript('/test', 'sh', ['-e', 'script.sh'], ['/prepend/path'], {
'/test', "SOME'_ENV": 'SOME_VALUE'
'sh', })
['-e', 'script.sh'],
['/prepend/path'],
{
"SOME'_ENV": 'SOME_VALUE'
}
)
).toThrow() ).toThrow()
}) })
it('should throw if environment variable name contains dollar', () => { it('should throw if environment variable name contains dollar', () => {
expect(() => expect(() =>
writeEntryPointScript( writeRunScript('/test', 'sh', ['-e', 'script.sh'], ['/prepend/path'], {
'/test', SOME_$_ENV: 'SOME_VALUE'
'sh', })
['-e', 'script.sh'],
['/prepend/path'],
{
SOME_$_ENV: 'SOME_VALUE'
}
)
).toThrow() ).toThrow()
}) })
it('should escape double quote, dollar and backslash in environment variable values', () => { it('should escape double quote, dollar and backslash in environment variable values', () => {
const { runnerPath } = writeEntryPointScript( const { runnerPath } = writeRunScript(
'/test', '/test',
'sh', 'sh',
['-e', 'script.sh'], ['-e', 'script.sh'],
@@ -130,7 +93,7 @@ describe('k8s utils', () => {
}) })
it('should return object with containerPath and runnerPath', () => { it('should return object with containerPath and runnerPath', () => {
const { containerPath, runnerPath } = writeEntryPointScript( const { containerPath, runnerPath } = writeRunScript(
'/test', '/test',
'sh', 'sh',
['-e', 'script.sh'], ['-e', 'script.sh'],
@@ -145,7 +108,7 @@ describe('k8s utils', () => {
}) })
it('should write entrypoint path and the file should exist', () => { it('should write entrypoint path and the file should exist', () => {
const { runnerPath } = writeEntryPointScript( const { runnerPath } = writeRunScript(
'/test', '/test',
'sh', 'sh',
['-e', 'script.sh'], ['-e', 'script.sh'],
@@ -168,90 +131,6 @@ describe('k8s utils', () => {
await testHelper.cleanup() await testHelper.cleanup()
}) })
it('should throw if container action and GITHUB_WORKSPACE env is not set', () => {
delete process.env.GITHUB_WORKSPACE
expect(() => containerVolumes([], true, true)).toThrow()
expect(() => containerVolumes([], false, true)).toThrow()
})
it('should always have work mount', () => {
let volumes = containerVolumes([], true, true)
expect(volumes.find(e => e.mountPath === '/__w')).toBeTruthy()
volumes = containerVolumes([], true, false)
expect(volumes.find(e => e.mountPath === '/__w')).toBeTruthy()
volumes = containerVolumes([], false, true)
expect(volumes.find(e => e.mountPath === '/__w')).toBeTruthy()
volumes = containerVolumes([], false, false)
expect(volumes.find(e => e.mountPath === '/__w')).toBeTruthy()
})
it('should always have /github/workflow mount if working on container job or container action', () => {
let volumes = containerVolumes([], true, true)
expect(volumes.find(e => e.mountPath === '/github/workflow')).toBeTruthy()
volumes = containerVolumes([], true, false)
expect(volumes.find(e => e.mountPath === '/github/workflow')).toBeTruthy()
volumes = containerVolumes([], false, true)
expect(volumes.find(e => e.mountPath === '/github/workflow')).toBeTruthy()
volumes = containerVolumes([], false, false)
expect(
volumes.find(e => e.mountPath === '/github/workflow')
).toBeUndefined()
})
it('should have container action volumes', () => {
let volumes = containerVolumes([], true, true)
let workspace = volumes.find(e => e.mountPath === '/github/workspace')
let fileCommands = volumes.find(
e => e.mountPath === '/github/file_commands'
)
expect(workspace).toBeTruthy()
expect(workspace?.subPath).toBe('repo/repo')
expect(fileCommands).toBeTruthy()
expect(fileCommands?.subPath).toBe('_temp/_runner_file_commands')
volumes = containerVolumes([], false, true)
workspace = volumes.find(e => e.mountPath === '/github/workspace')
fileCommands = volumes.find(e => e.mountPath === '/github/file_commands')
expect(workspace).toBeTruthy()
expect(workspace?.subPath).toBe('repo/repo')
expect(fileCommands).toBeTruthy()
expect(fileCommands?.subPath).toBe('_temp/_runner_file_commands')
})
it('should have externals, github home mounts if job container', () => {
const volumes = containerVolumes()
expect(volumes.find(e => e.mountPath === '/__e')).toBeTruthy()
expect(volumes.find(e => e.mountPath === '/github/home')).toBeTruthy()
})
it('should throw if user volume source volume path is not in workspace', () => {
expect(() =>
containerVolumes(
[
{
sourceVolumePath: '/outside/of/workdir',
targetVolumePath: '/some/target/path',
readOnly: false
}
],
true,
false
)
).toThrow()
})
it(`all volumes should have name ${POD_VOLUME_NAME}`, () => {
let volumes = containerVolumes([], true, true)
expect(volumes.every(e => e.name === POD_VOLUME_NAME)).toBeTruthy()
volumes = containerVolumes([], true, false)
expect(volumes.every(e => e.name === POD_VOLUME_NAME)).toBeTruthy()
volumes = containerVolumes([], false, true)
expect(volumes.every(e => e.name === POD_VOLUME_NAME)).toBeTruthy()
volumes = containerVolumes([], false, false)
expect(volumes.every(e => e.name === POD_VOLUME_NAME)).toBeTruthy()
})
it('should parse container ports', () => { it('should parse container ports', () => {
const tt = [ const tt = [
{ {

View File

@@ -3,12 +3,8 @@ import * as path from 'path'
import { cleanupJob } from '../src/hooks' import { cleanupJob } from '../src/hooks'
import { createContainerSpec, prepareJob } from '../src/hooks/prepare-job' import { createContainerSpec, prepareJob } from '../src/hooks/prepare-job'
import { TestHelper } from './test-setup' import { TestHelper } from './test-setup'
import { import { ENV_HOOK_TEMPLATE_PATH, generateContainerName } from '../src/k8s/utils'
ENV_HOOK_TEMPLATE_PATH, import { execPodStep, getPodByName } from '../src/k8s'
ENV_USE_KUBE_SCHEDULER,
generateContainerName
} from '../src/k8s/utils'
import { getPodByName } from '../src/k8s'
import { V1Container } from '@kubernetes/client-node' import { V1Container } from '@kubernetes/client-node'
import { JOB_CONTAINER_NAME } from '../src/hooks/constants' import { JOB_CONTAINER_NAME } from '../src/hooks/constants'
@@ -45,19 +41,38 @@ describe('Prepare job', () => {
}) })
it('should prepare job with absolute path for userVolumeMount', async () => { it('should prepare job with absolute path for userVolumeMount', async () => {
const userVolumeMount = path.join(
process.env.GITHUB_WORKSPACE as string,
'myvolume'
)
fs.mkdirSync(userVolumeMount)
fs.writeFileSync(path.join(userVolumeMount, 'file.txt'), 'hello')
prepareJobData.args.container.userMountVolumes = [ prepareJobData.args.container.userMountVolumes = [
{ {
sourceVolumePath: path.join( sourceVolumePath: userVolumeMount,
process.env.GITHUB_WORKSPACE as string, targetVolumePath: '/__w/myvolume',
'/myvolume'
),
targetVolumePath: '/volume_mount',
readOnly: false readOnly: false
} }
] ]
await expect( await expect(
prepareJob(prepareJobData.args, prepareJobOutputFilePath) prepareJob(prepareJobData.args, prepareJobOutputFilePath)
).resolves.not.toThrow() ).resolves.not.toThrow()
const content = JSON.parse(
fs.readFileSync(prepareJobOutputFilePath).toString()
)
await execPodStep(
[
'sh',
'-c',
'\'[ "$(cat /__w/myvolume/file.txt)" = "hello" ] || exit 5\''
],
content!.state!.jobPod,
JOB_CONTAINER_NAME
).then(output => {
expect(output).toBe(0)
})
}) })
it('should prepare job with envs CI and GITHUB_ACTIONS', async () => { it('should prepare job with envs CI and GITHUB_ACTIONS', async () => {
@@ -108,19 +123,6 @@ describe('Prepare job', () => {
) )
}) })
it('should throw an exception if the user volume mount is absolute path outside of GITHUB_WORKSPACE', async () => {
prepareJobData.args.container.userMountVolumes = [
{
sourceVolumePath: '/somewhere/not/in/gh-workspace',
targetVolumePath: '/containermount',
readOnly: false
}
]
await expect(
prepareJob(prepareJobData.args, prepareJobOutputFilePath)
).rejects.toThrow()
})
it('should not run prepare job without the job container', async () => { it('should not run prepare job without the job container', async () => {
prepareJobData.args.container = undefined prepareJobData.args.container = undefined
await expect( await expect(
@@ -166,8 +168,7 @@ describe('Prepare job', () => {
expect(got.metadata?.annotations?.['annotated-by']).toBe('extension') expect(got.metadata?.annotations?.['annotated-by']).toBe('extension')
expect(got.metadata?.labels?.['labeled-by']).toBe('extension') expect(got.metadata?.labels?.['labeled-by']).toBe('extension')
expect(got.spec?.securityContext?.runAsUser).toBe(1000) expect(got.spec?.restartPolicy).toBe('Never')
expect(got.spec?.securityContext?.runAsGroup).toBe(3000)
// job container // job container
expect(got.spec?.containers[0].name).toBe(JOB_CONTAINER_NAME) expect(got.spec?.containers[0].name).toBe(JOB_CONTAINER_NAME)
@@ -217,17 +218,6 @@ describe('Prepare job', () => {
expect(content.context.services.length).toBe(1) expect(content.context.services.length).toBe(1)
}) })
it('should not throw exception using kube scheduler', async () => {
// only for ReadWriteMany volumes or single node cluster
process.env[ENV_USE_KUBE_SCHEDULER] = 'true'
await expect(
prepareJob(prepareJobData.args, prepareJobOutputFilePath)
).resolves.not.toThrow()
delete process.env[ENV_USE_KUBE_SCHEDULER]
})
test.each([undefined, null, []])( test.each([undefined, null, []])(
'should not throw exception when portMapping=%p', 'should not throw exception when portMapping=%p',
async pm => { async pm => {

View File

@@ -1,4 +1,4 @@
import { runContainerStep } from '../src/hooks' import { prepareJob, runContainerStep } from '../src/hooks'
import { TestHelper } from './test-setup' import { TestHelper } from './test-setup'
import { ENV_HOOK_TEMPLATE_PATH } from '../src/k8s/utils' import { ENV_HOOK_TEMPLATE_PATH } from '../src/k8s/utils'
import * as fs from 'fs' import * as fs from 'fs'
@@ -10,11 +10,16 @@ jest.useRealTimers()
let testHelper: TestHelper let testHelper: TestHelper
let runContainerStepData: any let runContainerStepData: any
let prepareJobData: any
let prepareJobOutputFilePath: string
describe('Run container step', () => { describe('Run container step', () => {
beforeEach(async () => { beforeEach(async () => {
testHelper = new TestHelper() testHelper = new TestHelper()
await testHelper.initialize() await testHelper.initialize()
prepareJobData = testHelper.getPrepareJobDefinition()
prepareJobOutputFilePath = testHelper.createFile('prepare-job-output.json')
await prepareJob(prepareJobData.args, prepareJobOutputFilePath)
runContainerStepData = testHelper.getRunContainerStepDefinition() runContainerStepData = testHelper.getRunContainerStepDefinition()
}) })
@@ -22,11 +27,6 @@ describe('Run container step', () => {
await testHelper.cleanup() await testHelper.cleanup()
}) })
it('should not throw', async () => {
const exitCode = await runContainerStep(runContainerStepData.args)
expect(exitCode).toBe(0)
})
it('should run pod with extensions applied', async () => { it('should run pod with extensions applied', async () => {
const extension = { const extension = {
metadata: { metadata: {
@@ -42,7 +42,7 @@ describe('Run container step', () => {
{ {
name: JOB_CONTAINER_EXTENSION_NAME, name: JOB_CONTAINER_EXTENSION_NAME,
command: ['sh'], command: ['sh'],
args: ['-c', 'echo test'] args: ['-c', 'sleep 10000']
}, },
{ {
name: 'side-container', name: 'side-container',
@@ -51,11 +51,7 @@ describe('Run container step', () => {
args: ['-c', 'echo test'] args: ['-c', 'echo test']
} }
], ],
restartPolicy: 'Never', restartPolicy: 'Never'
securityContext: {
runAsUser: 1000,
runAsGroup: 3000
}
} }
} }

View File

@@ -1,7 +1,7 @@
 import * as fs from 'fs'
 import { cleanupJob, prepareJob, runScriptStep } from '../src/hooks'
 import { TestHelper } from './test-setup'
-import { PrepareJobArgs } from 'hooklib'
+import { PrepareJobArgs, RunScriptStepArgs } from 'hooklib'
 jest.useRealTimers()
@@ -9,7 +9,9 @@ let testHelper: TestHelper
 let prepareJobOutputData: any
-let runScriptStepDefinition
+let runScriptStepDefinition: {
+  args: RunScriptStepArgs
+}
 describe('Run script step', () => {
   beforeEach(async () => {
@@ -20,7 +22,9 @@ describe('Run script step', () => {
     )
     const prepareJobData = testHelper.getPrepareJobDefinition()
-    runScriptStepDefinition = testHelper.getRunScriptStepDefinition()
+    runScriptStepDefinition = testHelper.getRunScriptStepDefinition() as {
+      args: RunScriptStepArgs
+    }
     await prepareJob(
       prepareJobData.args as PrepareJobArgs,
@@ -41,22 +45,14 @@ describe('Run script step', () => {
   it('should not throw an exception', async () => {
     await expect(
-      runScriptStep(
-        runScriptStepDefinition.args,
-        prepareJobOutputData.state,
-        null
-      )
+      runScriptStep(runScriptStepDefinition.args, prepareJobOutputData.state)
     ).resolves.not.toThrow()
   })
   it('should fail if the working directory does not exist', async () => {
     runScriptStepDefinition.args.workingDirectory = '/foo/bar'
     await expect(
-      runScriptStep(
-        runScriptStepDefinition.args,
-        prepareJobOutputData.state,
-        null
-      )
+      runScriptStep(runScriptStepDefinition.args, prepareJobOutputData.state)
     ).rejects.toThrow()
   })
@@ -68,16 +64,12 @@ describe('Run script step', () => {
       "'if [[ -z $NODE_ENV ]]; then exit 1; fi'"
     ]
     await expect(
-      runScriptStep(
-        runScriptStepDefinition.args,
-        prepareJobOutputData.state,
-        null
-      )
+      runScriptStep(runScriptStepDefinition.args, prepareJobOutputData.state)
     ).resolves.not.toThrow()
   })
   it('Should have path variable changed in container with prepend path string', async () => {
-    runScriptStepDefinition.args.prependPath = '/some/path'
+    runScriptStepDefinition.args.prependPath = ['/some/path']
     runScriptStepDefinition.args.entryPoint = '/bin/bash'
     runScriptStepDefinition.args.entryPointArgs = [
       '-c',
@@ -85,11 +77,7 @@ describe('Run script step', () => {
     ]
     await expect(
-      runScriptStep(
-        runScriptStepDefinition.args,
-        prepareJobOutputData.state,
-        null
-      )
+      runScriptStep(runScriptStepDefinition.args, prepareJobOutputData.state)
     ).resolves.not.toThrow()
   })
@@ -107,11 +95,7 @@ describe('Run script step', () => {
     ]
     await expect(
-      runScriptStep(
-        runScriptStepDefinition.args,
-        prepareJobOutputData.state,
-        null
-      )
+      runScriptStep(runScriptStepDefinition.args, prepareJobOutputData.state)
     ).resolves.not.toThrow()
   })
@@ -126,11 +110,7 @@ describe('Run script step', () => {
     ]
     await expect(
-      runScriptStep(
-        runScriptStepDefinition.args,
-        prepareJobOutputData.state,
-        null
-      )
+      runScriptStep(runScriptStepDefinition.args, prepareJobOutputData.state)
     ).resolves.not.toThrow()
   })
 })
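
The recurring edit in this file is the call shape: runScriptStep now takes two arguments, the step args and the prepare-job state, and prependPath is a string array instead of a single string. A short sketch of the updated usage, assuming the exported signature matches what these tests exercise and that the prepare-job output file holds the JSON state the tests read back:

import * as fs from 'fs'
import { prepareJob, runScriptStep } from '../src/hooks'
import { PrepareJobArgs, RunScriptStepArgs } from 'hooklib'
import { TestHelper } from './test-setup'

// Sketch only: names and call shapes are taken from the diff above.
async function runExampleScriptStep(): Promise<void> {
  const helper = new TestHelper()
  await helper.initialize()

  // Prepare the job pod and read the state the hook wrote to its output file
  const prepareJobData = helper.getPrepareJobDefinition()
  const outputFile = helper.createFile('prepare-job-output.json')
  await prepareJob(prepareJobData.args as PrepareJobArgs, outputFile)
  const prepareJobOutput = JSON.parse(fs.readFileSync(outputFile).toString())

  // Two-argument form; the old third (null) argument is gone
  const definition = helper.getRunScriptStepDefinition() as {
    args: RunScriptStepArgs
  }
  definition.args.prependPath = ['/some/path'] // now string[], not string
  await runScriptStep(definition.args, prepareJobOutput.state)

  await helper.cleanup()
}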

@@ -9,35 +9,43 @@ const kc = new k8s.KubeConfig()
 kc.loadFromDefault()
 const k8sApi = kc.makeApiClient(k8s.CoreV1Api)
-const k8sStorageApi = kc.makeApiClient(k8s.StorageV1Api)
 export class TestHelper {
   private tempDirPath: string
   private podName: string
+  private runnerWorkdir: string
+  private runnerTemp: string
   constructor() {
     this.tempDirPath = `${__dirname}/_temp/runner`
+    this.runnerWorkdir = `${this.tempDirPath}/_work`
+    this.runnerTemp = `${this.tempDirPath}/_work/_temp`
     this.podName = uuidv4().replace(/-/g, '')
   }
   async initialize(): Promise<void> {
     process.env['ACTIONS_RUNNER_POD_NAME'] = `${this.podName}`
-    process.env['RUNNER_WORKSPACE'] = `${this.tempDirPath}/_work/repo`
-    process.env['RUNNER_TEMP'] = `${this.tempDirPath}/_work/_temp`
-    process.env['GITHUB_WORKSPACE'] = `${this.tempDirPath}/_work/repo/repo`
+    process.env['RUNNER_WORKSPACE'] = `${this.runnerWorkdir}/repo`
+    process.env['RUNNER_TEMP'] = `${this.runnerTemp}`
+    process.env['GITHUB_WORKSPACE'] = `${this.runnerWorkdir}/repo/repo`
     process.env['ACTIONS_RUNNER_KUBERNETES_NAMESPACE'] = 'default'
-    fs.mkdirSync(`${this.tempDirPath}/_work/repo/repo`, { recursive: true })
+    fs.mkdirSync(`${this.runnerWorkdir}/repo/repo`, { recursive: true })
     fs.mkdirSync(`${this.tempDirPath}/externals`, { recursive: true })
-    fs.mkdirSync(process.env.RUNNER_TEMP, { recursive: true })
+    fs.mkdirSync(this.runnerTemp, { recursive: true })
+    fs.mkdirSync(`${this.runnerTemp}/_github_workflow`, { recursive: true })
+    fs.mkdirSync(`${this.runnerTemp}/_github_home`, { recursive: true })
+    fs.mkdirSync(`${this.runnerTemp}/_runner_file_commands`, {
+      recursive: true
+    })
     fs.copyFileSync(
       path.resolve(`${__dirname}/../../../examples/example-script.sh`),
-      `${process.env.RUNNER_TEMP}/example-script.sh`
+      `${this.runnerTemp}/example-script.sh`
     )
     await this.cleanupK8sResources()
     try {
-      await this.createTestVolume()
       await this.createTestJobPod()
     } catch (e) {
       console.log(e)
@@ -54,33 +62,16 @@ export class TestHelper {
   }
   async cleanupK8sResources(): Promise<void> {
-    await k8sApi
-      .deleteNamespacedPersistentVolumeClaim({
-        name: `${this.podName}-work`,
-        namespace: 'default',
-        gracePeriodSeconds: 0
-      })
-      .catch(e => {
-        console.error(e)
-      })
-    await k8sApi
-      .deletePersistentVolume({ name: `${this.podName}-pv` })
-      .catch(e => {
-        console.error(e)
-      })
-    await k8sStorageApi
-      .deleteStorageClass({ name: 'local-storage' })
-      .catch(e => {
-        console.error(e)
-      })
     await k8sApi
       .deleteNamespacedPod({
         name: this.podName,
        namespace: 'default',
        gracePeriodSeconds: 0
      })
-      .catch(e => {
-        console.error(e)
+      .catch((e: k8s.ApiException<any>) => {
+        if (e.code !== 404) {
+          console.error(JSON.stringify(e))
+        }
       })
     await k8sApi
       .deleteNamespacedPod({
@@ -88,8 +79,10 @@ export class TestHelper {
         namespace: 'default',
         gracePeriodSeconds: 0
       })
-      .catch(e => {
-        console.error(e)
+      .catch((e: k8s.ApiException<any>) => {
+        if (e.code !== 404) {
+          console.error(JSON.stringify(e))
+        }
       })
   }
   createFile(fileName?: string): string {
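
Both pod deletions above now follow the same pattern: a 404 from the API server means the pod is already gone and is ignored, while any other failure is logged. A small sketch of that pattern in isolation, built only from the client calls that already appear in this file:

import * as k8s from '@kubernetes/client-node'

const kc = new k8s.KubeConfig()
kc.loadFromDefault()
const k8sApi = kc.makeApiClient(k8s.CoreV1Api)

// Delete a pod but tolerate "not found": the cleanup only cares that the
// pod is absent afterwards, not that this particular call removed it.
async function deletePodIfPresent(
  name: string,
  namespace = 'default'
): Promise<void> {
  await k8sApi
    .deleteNamespacedPod({ name, namespace, gracePeriodSeconds: 0 })
    .catch((e: k8s.ApiException<any>) => {
      if (e.code !== 404) {
        console.error(JSON.stringify(e))
      }
    })
}
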
@@ -105,8 +98,8 @@ export class TestHelper {
   async createTestJobPod(): Promise<void> {
     const container = {
-      name: 'nginx',
-      image: 'nginx:latest',
+      name: 'runner',
+      image: 'ghcr.io/actions/actions-runner:latest',
       imagePullPolicy: 'IfNotPresent'
     } as k8s.V1Container
@@ -116,61 +109,17 @@ export class TestHelper {
       },
       spec: {
         restartPolicy: 'Never',
-        containers: [container]
+        containers: [container],
+        securityContext: {
+          runAsUser: 1001,
+          runAsGroup: 1001,
+          fsGroup: 1001
+        }
       }
     } as k8s.V1Pod
     await k8sApi.createNamespacedPod({ namespace: 'default', body: pod })
   }
-  async createTestVolume(): Promise<void> {
-    var sc: k8s.V1StorageClass = {
-      metadata: {
-        name: 'local-storage'
-      },
-      provisioner: 'kubernetes.io/no-provisioner',
-      volumeBindingMode: 'Immediate'
-    }
-    await k8sStorageApi.createStorageClass({ body: sc })
-    var volume: k8s.V1PersistentVolume = {
-      metadata: {
-        name: `${this.podName}-pv`
-      },
-      spec: {
-        storageClassName: 'local-storage',
-        capacity: {
-          storage: '2Gi'
-        },
-        volumeMode: 'Filesystem',
-        accessModes: ['ReadWriteOnce'],
-        hostPath: {
-          path: `${this.tempDirPath}/_work`
-        }
-      }
-    }
-    await k8sApi.createPersistentVolume({ body: volume })
-    var volumeClaim: k8s.V1PersistentVolumeClaim = {
-      metadata: {
-        name: `${this.podName}-work`
-      },
-      spec: {
-        accessModes: ['ReadWriteOnce'],
-        volumeMode: 'Filesystem',
-        storageClassName: 'local-storage',
-        volumeName: `${this.podName}-pv`,
-        resources: {
-          requests: {
-            storage: '1Gi'
-          }
-        }
-      }
-    }
-    await k8sApi.createNamespacedPersistentVolumeClaim({
-      namespace: 'default',
-      body: volumeClaim
-    })
-  }
   getPrepareJobDefinition(): HookData {
     const prepareJob = JSON.parse(
       fs.readFileSync(