Remove dependency on the runner's volume (#244)

* bump actions

* experiment using init container to prepare working environment (see the init-container sketch below)

* rm script before continuing

* fix

* Update packages/k8s/src/hooks/run-script-step.ts

Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>

* leverage exec stat instead of printf (see the exec sketch below)

* npm update

* document the new constraint

---------

Co-authored-by: DenisPalnitsky <DenisPalnitsky@users.noreply.github.com>
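
For context on the init-container bullet above: the general idea is to stop mounting the runner's own volume into the job pod and instead let an init container populate a pod-local volume before the job container starts. A minimal sketch of that shape, using @kubernetes/client-node types, follows; the container names, image, command, and the /__w mount path are illustrative assumptions, not the hook's actual values.

import * as k8s from '@kubernetes/client-node'

// Hypothetical sketch only: share an emptyDir between an init container and
// the job container, and let the init container prepare the working
// directories before the job starts. Names, image, command, and mount path
// are illustrative assumptions.
const workVolume: k8s.V1Volume = {
  name: 'work',
  emptyDir: {}
}

const prepareWorkspace: k8s.V1Container = {
  name: 'prepare-workspace',
  image: 'ghcr.io/actions/actions-runner:latest',
  command: ['sh', '-c', 'mkdir -p /__w/_temp /__w/repo/repo'],
  volumeMounts: [{ name: 'work', mountPath: '/__w' }]
}

const jobPod: k8s.V1Pod = {
  metadata: { name: 'example-job-pod' },
  spec: {
    restartPolicy: 'Never',
    initContainers: [prepareWorkspace],
    containers: [
      {
        name: 'job',
        image: 'ghcr.io/actions/actions-runner:latest',
        volumeMounts: [{ name: 'work', mountPath: '/__w' }]
      }
    ],
    volumes: [workVolume]
  }
}

// Creating the pod would then go through the CoreV1Api, e.g.:
// await k8sApi.createNamespacedPod({ namespace: 'default', body: jobPod })
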
Author: Nikola Jokic
Date: 2025-10-02 16:23:07 +02:00
Committed by: GitHub
Parent: c67938c536
Commit: 96c35e7cc6
17 changed files with 1831 additions and 9036 deletions
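
Similarly, "leverage exec stat instead of printf" refers to checking for a file inside the job container through the Kubernetes exec API rather than shelling out a printf-based probe. A rough sketch of that pattern with the @kubernetes/client-node Exec helper is below; the function, its parameters, and the probed path are placeholders, not the hook's real implementation in run-script-step.ts.

import * as k8s from '@kubernetes/client-node'
import { Writable } from 'stream'

// Hypothetical sketch only: run `stat <path>` in the container via the exec
// API and treat a "Success" status as "the file exists".
async function fileExistsInContainer(
  kc: k8s.KubeConfig,
  namespace: string,
  podName: string,
  containerName: string,
  filePath: string
): Promise<boolean> {
  const exec = new k8s.Exec(kc)
  // Discard stdout/stderr; only the exit status matters here.
  const devNull = new Writable({ write: (_chunk, _enc, cb) => cb() })

  return new Promise((resolve, reject) => {
    exec
      .exec(
        namespace,
        podName,
        containerName,
        ['stat', filePath],
        devNull, // stdout
        devNull, // stderr
        null, // stdin
        false, // tty
        status => resolve(status.status === 'Success')
      )
      .catch(reject)
  })
}

A caller would pass the same KubeConfig the hook already loads, plus the job pod and container names.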


@@ -9,35 +9,43 @@ const kc = new k8s.KubeConfig()
 kc.loadFromDefault()
 const k8sApi = kc.makeApiClient(k8s.CoreV1Api)
 const k8sStorageApi = kc.makeApiClient(k8s.StorageV1Api)
 export class TestHelper {
   private tempDirPath: string
   private podName: string
+  private runnerWorkdir: string
+  private runnerTemp: string
   constructor() {
     this.tempDirPath = `${__dirname}/_temp/runner`
+    this.runnerWorkdir = `${this.tempDirPath}/_work`
+    this.runnerTemp = `${this.tempDirPath}/_work/_temp`
     this.podName = uuidv4().replace(/-/g, '')
   }
   async initialize(): Promise<void> {
     process.env['ACTIONS_RUNNER_POD_NAME'] = `${this.podName}`
-    process.env['RUNNER_WORKSPACE'] = `${this.tempDirPath}/_work/repo`
-    process.env['RUNNER_TEMP'] = `${this.tempDirPath}/_work/_temp`
-    process.env['GITHUB_WORKSPACE'] = `${this.tempDirPath}/_work/repo/repo`
+    process.env['RUNNER_WORKSPACE'] = `${this.runnerWorkdir}/repo`
+    process.env['RUNNER_TEMP'] = `${this.runnerTemp}`
+    process.env['GITHUB_WORKSPACE'] = `${this.runnerWorkdir}/repo/repo`
     process.env['ACTIONS_RUNNER_KUBERNETES_NAMESPACE'] = 'default'
-    fs.mkdirSync(`${this.tempDirPath}/_work/repo/repo`, { recursive: true })
+    fs.mkdirSync(`${this.runnerWorkdir}/repo/repo`, { recursive: true })
     fs.mkdirSync(`${this.tempDirPath}/externals`, { recursive: true })
-    fs.mkdirSync(process.env.RUNNER_TEMP, { recursive: true })
+    fs.mkdirSync(this.runnerTemp, { recursive: true })
+    fs.mkdirSync(`${this.runnerTemp}/_github_workflow`, { recursive: true })
+    fs.mkdirSync(`${this.runnerTemp}/_github_home`, { recursive: true })
+    fs.mkdirSync(`${this.runnerTemp}/_runner_file_commands`, {
+      recursive: true
+    })
     fs.copyFileSync(
       path.resolve(`${__dirname}/../../../examples/example-script.sh`),
-      `${process.env.RUNNER_TEMP}/example-script.sh`
+      `${this.runnerTemp}/example-script.sh`
     )
     await this.cleanupK8sResources()
     try {
-      await this.createTestVolume()
       await this.createTestJobPod()
     } catch (e) {
       console.log(e)
@@ -54,33 +62,16 @@ export class TestHelper {
   }
   async cleanupK8sResources(): Promise<void> {
     await k8sApi
-      .deleteNamespacedPersistentVolumeClaim({
-        name: `${this.podName}-work`,
-        namespace: 'default',
-        gracePeriodSeconds: 0
-      })
-      .catch(e => {
-        console.error(e)
-      })
-    await k8sApi
-      .deletePersistentVolume({ name: `${this.podName}-pv` })
-      .catch(e => {
-        console.error(e)
-      })
-    await k8sStorageApi
-      .deleteStorageClass({ name: 'local-storage' })
-      .catch(e => {
-        console.error(e)
-      })
-    await k8sApi
       .deleteNamespacedPod({
         name: this.podName,
         namespace: 'default',
         gracePeriodSeconds: 0
       })
-      .catch(e => {
-        console.error(e)
+      .catch((e: k8s.ApiException<any>) => {
+        if (e.code !== 404) {
+          console.error(JSON.stringify(e))
+        }
       })
     await k8sApi
       .deleteNamespacedPod({
@@ -88,8 +79,10 @@ export class TestHelper {
         namespace: 'default',
         gracePeriodSeconds: 0
       })
-      .catch(e => {
-        console.error(e)
+      .catch((e: k8s.ApiException<any>) => {
+        if (e.code !== 404) {
+          console.error(JSON.stringify(e))
+        }
       })
   }
   createFile(fileName?: string): string {
@@ -105,8 +98,8 @@ export class TestHelper {
   async createTestJobPod(): Promise<void> {
     const container = {
-      name: 'nginx',
-      image: 'nginx:latest',
+      name: 'runner',
+      image: 'ghcr.io/actions/actions-runner:latest',
       imagePullPolicy: 'IfNotPresent'
     } as k8s.V1Container
@@ -116,61 +109,17 @@ export class TestHelper {
       },
       spec: {
         restartPolicy: 'Never',
-        containers: [container]
+        containers: [container],
+        securityContext: {
+          runAsUser: 1001,
+          runAsGroup: 1001,
+          fsGroup: 1001
+        }
       }
     } as k8s.V1Pod
     await k8sApi.createNamespacedPod({ namespace: 'default', body: pod })
   }
-  async createTestVolume(): Promise<void> {
-    var sc: k8s.V1StorageClass = {
-      metadata: {
-        name: 'local-storage'
-      },
-      provisioner: 'kubernetes.io/no-provisioner',
-      volumeBindingMode: 'Immediate'
-    }
-    await k8sStorageApi.createStorageClass({ body: sc })
-    var volume: k8s.V1PersistentVolume = {
-      metadata: {
-        name: `${this.podName}-pv`
-      },
-      spec: {
-        storageClassName: 'local-storage',
-        capacity: {
-          storage: '2Gi'
-        },
-        volumeMode: 'Filesystem',
-        accessModes: ['ReadWriteOnce'],
-        hostPath: {
-          path: `${this.tempDirPath}/_work`
-        }
-      }
-    }
-    await k8sApi.createPersistentVolume({ body: volume })
-    var volumeClaim: k8s.V1PersistentVolumeClaim = {
-      metadata: {
-        name: `${this.podName}-work`
-      },
-      spec: {
-        accessModes: ['ReadWriteOnce'],
-        volumeMode: 'Filesystem',
-        storageClassName: 'local-storage',
-        volumeName: `${this.podName}-pv`,
-        resources: {
-          requests: {
-            storage: '1Gi'
-          }
-        }
-      }
-    }
-    await k8sApi.createNamespacedPersistentVolumeClaim({
-      namespace: 'default',
-      body: volumeClaim
-    })
-  }
   getPrepareJobDefinition(): HookData {
     const prepareJob = JSON.parse(
       fs.readFileSync(