Merge pull request #10 from actions/nikola-jokic/docker-test-refactor

Added prepend path support and refactored tests, adding an isAlpine test and a basic run-script-step test
Thomas Boop
2022-06-08 09:40:49 -04:00
committed by GitHub
11 changed files with 203 additions and 165 deletions

View File

@@ -5,7 +5,7 @@
"args": { "args": {
"container": { "container": {
"image": "node:14.16", "image": "node:14.16",
"workingDirectory": "/__w/thboop-test2/thboop-test2", "workingDirectory": "/__w/repo/repo",
"createOptions": "--cpus 1", "createOptions": "--cpus 1",
"environmentVariables": { "environmentVariables": {
"NODE_ENV": "development" "NODE_ENV": "development"

View File

@@ -16,7 +16,7 @@
"echo \"hello world2\"" "echo \"hello world2\""
], ],
"entryPoint": "bash", "entryPoint": "bash",
"workingDirectory": "/__w/thboop-test2/thboop-test2", "workingDirectory": "/__w/repo/repo",
"createOptions": "--cpus 1", "createOptions": "--cpus 1",
"environmentVariables": { "environmentVariables": {
"NODE_ENV": "development" "NODE_ENV": "development"

View File

@@ -21,6 +21,6 @@
"/foo/bar", "/foo/bar",
"bar/foo" "bar/foo"
], ],
"workingDirectory": "/__w/thboop-test2/thboop-test2" "workingDirectory": "/__w/repo/repo"
} }
} }

View File

@@ -2,10 +2,8 @@ import * as core from '@actions/core'
 import * as fs from 'fs'
 import {
   ContainerInfo,
-  JobContainerInfo,
   RunContainerStepArgs,
-  ServiceContainerInfo,
-  StepContainerInfo
+  ServiceContainerInfo
 } from 'hooklib/lib'
 import path from 'path'
 import { env } from 'process'
@@ -55,7 +53,7 @@ export async function createContainer(
   const mountVolumes = [
     ...(args.userMountVolumes || []),
-    ...((args as JobContainerInfo | StepContainerInfo).systemMountVolumes || [])
+    ...(args.systemMountVolumes || [])
   ]
   for (const mountVolume of mountVolumes) {
     dockerArgs.push(
@@ -328,8 +326,9 @@ export async function containerExecStep(
     }
   }
-  // Todo figure out prepend path and update it here
-  // (we need to pass path in as -e Path={fullpath}) where {fullpath is the prepend path added to the current containers path}
+  if (args.prependPath?.length) {
+    dockerArgs.push('-e', `"PATH=${args.prependPath.join(':')}:$PATH"`)
+  }
   dockerArgs.push(containerId)
   dockerArgs.push(args.entryPoint)
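The new branch above replaces the old TODO: when the step carries a prependPath, containerExecStep now passes a single -e flag that puts those directories ahead of the container's existing PATH. A minimal sketch of the resulting value, outside the real hook and using the prependPath from examples/run-script-step.json (the surrounding exec arguments here are placeholders, not the hook's actual ones):

// Sketch only: mirrors the PATH override added in containerExecStep above.
const args = { prependPath: ['/foo/bar', 'bar/foo'] } // values from examples/run-script-step.json
const dockerArgs: string[] = ['exec', '-i'] // placeholder prefix; the real hook builds more flags
if (args.prependPath?.length) {
  // Builds the value "PATH=/foo/bar:bar/foo:$PATH"
  dockerArgs.push('-e', `"PATH=${args.prependPath.join(':')}:$PATH"`)
}
console.log(dockerArgs.join(' ')) // exec -i -e "PATH=/foo/bar:bar/foo:$PATH"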

View File

@@ -1,51 +1,30 @@
 import * as fs from 'fs'
-import * as path from 'path'
-import { v4 as uuidv4 } from 'uuid'
 import { cleanupJob, prepareJob } from '../src/hooks'
 import TestSetup from './test-setup'

-const prepareJobInputPath = path.resolve(
-  `${__dirname}/../../../examples/prepare-job.json`
-)
-const tmpOutputDir = `${__dirname}/${uuidv4()}`
-let prepareJobOutputPath: string
-let prepareJobData: any
-
 let testSetup: TestSetup

 jest.useRealTimers()

 describe('cleanup job', () => {
-  beforeAll(() => {
-    fs.mkdirSync(tmpOutputDir, { recursive: true })
-  })
-
-  afterAll(() => {
-    fs.rmSync(tmpOutputDir, { recursive: true })
-  })
-
   beforeEach(async () => {
-    const prepareJobRawData = fs.readFileSync(prepareJobInputPath, 'utf8')
-    prepareJobData = JSON.parse(prepareJobRawData.toString())
-    prepareJobOutputPath = `${tmpOutputDir}/prepare-job-output-${uuidv4()}.json`
-    fs.writeFileSync(prepareJobOutputPath, '')
-
     testSetup = new TestSetup()
     testSetup.initialize()
-
-    prepareJobData.args.container.userMountVolumes = testSetup.userMountVolumes
-    prepareJobData.args.container.systemMountVolumes =
-      testSetup.systemMountVolumes
-    prepareJobData.args.container.workingDirectory = testSetup.workingDirectory
-
-    await prepareJob(prepareJobData.args, prepareJobOutputPath)
+    const prepareJobDefinition = JSON.parse(
+      fs.readFileSync(
+        `${__dirname}/../../../examples/prepare-job.json`,
+        'utf-8'
+      )
+    )
+    const prepareJobOutput = testSetup.createOutputFile(
+      'prepare-job-output.json'
+    )
+    await prepareJob(prepareJobDefinition.args, prepareJobOutput)
   })

   afterEach(() => {
-    fs.rmSync(prepareJobOutputPath, { force: true })
     testSetup.teardown()
   })

View File

@@ -1,6 +1,5 @@
 import * as fs from 'fs'
 import * as path from 'path'
-import { v4 as uuidv4 } from 'uuid'
 import {
   cleanupJob,
   prepareJob,
@@ -9,94 +8,83 @@ import {
 } from '../src/hooks'
 import TestSetup from './test-setup'

-const prepareJobJson = fs.readFileSync(
-  path.resolve(__dirname + '/../../../examples/prepare-job.json'),
-  'utf8'
-)
-
-const containerStepJson = fs.readFileSync(
-  path.resolve(__dirname + '/../../../examples/run-container-step.json'),
-  'utf8'
-)
-
-const tmpOutputDir = `${__dirname}/_temp/${uuidv4()}`
-
-let prepareJobData: any
-let scriptStepJson: any
-let scriptStepData: any
-let containerStepData: any
-
-let prepareJobOutputFilePath: string
+const definitions = {
+  prepareJob: JSON.parse(
+    fs.readFileSync(
+      path.resolve(__dirname + '/../../../examples/prepare-job.json'),
+      'utf8'
+    )
+  ),
+  runContainerStep: JSON.parse(
+    fs.readFileSync(
+      path.resolve(__dirname + '/../../../examples/run-container-step.json'),
+      'utf8'
+    )
+  ),
+  runScriptStep: JSON.parse(
+    fs.readFileSync(
+      path.resolve(__dirname + '/../../../examples/run-script-step.json'),
+      'utf-8'
+    )
+  )
+}

 let testSetup: TestSetup

 describe('e2e', () => {
-  beforeAll(() => {
-    fs.mkdirSync(tmpOutputDir, { recursive: true })
-  })
-
-  afterAll(() => {
-    fs.rmSync(tmpOutputDir, { recursive: true })
-  })
-
   beforeEach(() => {
-    // init dirs
     testSetup = new TestSetup()
     testSetup.initialize()
-
-    prepareJobData = JSON.parse(prepareJobJson)
-    prepareJobData.args.container.userMountVolumes = testSetup.userMountVolumes
-    prepareJobData.args.container.systemMountVolumes =
+    definitions.prepareJob.args.container.systemMountVolumes =
       testSetup.systemMountVolumes
-    prepareJobData.args.container.workingDirectory = testSetup.workingDirectory
-
-    scriptStepJson = fs.readFileSync(
-      path.resolve(__dirname + '/../../../examples/run-script-step.json'),
-      'utf8'
-    )
-    scriptStepData = JSON.parse(scriptStepJson)
-    scriptStepData.args.workingDirectory = testSetup.workingDirectory
-
-    containerStepData = JSON.parse(containerStepJson)
-    containerStepData.args.workingDirectory = testSetup.workingDirectory
-    containerStepData.args.userMountVolumes = testSetup.userMountVolumes
-    containerStepData.args.systemMountVolumes = testSetup.systemMountVolumes
-
-    prepareJobOutputFilePath = `${tmpOutputDir}/prepare-job-output-${uuidv4()}.json`
-    fs.writeFileSync(prepareJobOutputFilePath, '')
   })

   afterEach(() => {
-    fs.rmSync(prepareJobOutputFilePath, { force: true })
     testSetup.teardown()
   })

   it('should prepare job, then run script step, then run container step then cleanup', async () => {
+    const prepareJobOutput = testSetup.createOutputFile(
+      'prepare-job-output.json'
+    )
     await expect(
-      prepareJob(prepareJobData.args, prepareJobOutputFilePath)
+      prepareJob(definitions.prepareJob.args, prepareJobOutput)
     ).resolves.not.toThrow()
-    let rawState = fs.readFileSync(prepareJobOutputFilePath, 'utf-8')
+    let rawState = fs.readFileSync(prepareJobOutput, 'utf-8')
     let resp = JSON.parse(rawState)
     await expect(
-      runScriptStep(scriptStepData.args, resp.state)
+      runScriptStep(definitions.runScriptStep.args, resp.state)
     ).resolves.not.toThrow()
     await expect(
-      runContainerStep(containerStepData.args, resp.state)
+      runContainerStep(definitions.runContainerStep.args, resp.state)
     ).resolves.not.toThrow()
     await expect(cleanupJob()).resolves.not.toThrow()
   })

   it('should prepare job, then run script step, then run container step with Dockerfile then cleanup', async () => {
+    const prepareJobOutput = testSetup.createOutputFile(
+      'prepare-job-output.json'
+    )
     await expect(
-      prepareJob(prepareJobData.args, prepareJobOutputFilePath)
+      prepareJob(definitions.prepareJob.args, prepareJobOutput)
     ).resolves.not.toThrow()
-    let rawState = fs.readFileSync(prepareJobOutputFilePath, 'utf-8')
+    let rawState = fs.readFileSync(prepareJobOutput, 'utf-8')
     let resp = JSON.parse(rawState)
     await expect(
-      runScriptStep(scriptStepData.args, resp.state)
+      runScriptStep(definitions.runScriptStep.args, resp.state)
     ).resolves.not.toThrow()
-    const dockerfilePath = `${tmpOutputDir}/Dockerfile`
+    const dockerfilePath = `${testSetup.workingDirectory}/Dockerfile`
     fs.writeFileSync(
       dockerfilePath,
       `FROM ubuntu:latest
@@ -104,13 +92,17 @@ ENV TEST=test
 ENTRYPOINT [ "tail", "-f", "/dev/null" ]
 `
     )
-    const containerStepDataCopy = JSON.parse(JSON.stringify(containerStepData))
-    process.env.GITHUB_WORKSPACE = tmpOutputDir
+    const containerStepDataCopy = JSON.parse(
+      JSON.stringify(definitions.runContainerStep)
+    )
     containerStepDataCopy.args.dockerfile = 'Dockerfile'
+    containerStepDataCopy.args.context = '.'
     await expect(
       runContainerStep(containerStepDataCopy.args, resp.state)
     ).resolves.not.toThrow()
     await expect(cleanupJob()).resolves.not.toThrow()
   })
 })

View File

@@ -1,40 +1,24 @@
 import * as fs from 'fs'
-import { v4 as uuidv4 } from 'uuid'
 import { prepareJob } from '../src/hooks'
 import TestSetup from './test-setup'

 jest.useRealTimers()

-let prepareJobOutputPath: string
-let prepareJobData: any
-
-const tmpOutputDir = `${__dirname}/_temp/${uuidv4()}`
-const prepareJobInputPath = `${__dirname}/../../../examples/prepare-job.json`
+const prepareJobDefinition = JSON.parse(
+  fs.readFileSync(`${__dirname}/../../../examples/prepare-job.json`, 'utf-8')
+)

 let testSetup: TestSetup

 describe('prepare job', () => {
-  beforeAll(() => {
-    fs.mkdirSync(tmpOutputDir, { recursive: true })
-  })
-
-  afterAll(() => {
-    fs.rmSync(tmpOutputDir, { recursive: true })
-  })
-
-  beforeEach(async () => {
+  beforeEach(() => {
     testSetup = new TestSetup()
     testSetup.initialize()
-    let prepareJobRawData = fs.readFileSync(prepareJobInputPath, 'utf8')
-    prepareJobData = JSON.parse(prepareJobRawData.toString())
-    prepareJobData.args.container.userMountVolumes = testSetup.userMountVolumes
-    prepareJobData.args.container.systemMountVolumes =
+    prepareJobDefinition.args.container.systemMountVolumes =
       testSetup.systemMountVolumes
-    prepareJobData.args.container.workingDirectory = testSetup.workingDirectory
-
-    prepareJobOutputPath = `${tmpOutputDir}/prepare-job-output-${uuidv4()}.json`
-    fs.writeFileSync(prepareJobOutputPath, '')
+    prepareJobDefinition.args.container.workingDirectory =
+      testSetup.workingDirectory
   })

   afterEach(() => {
@@ -42,38 +26,68 @@ describe('prepare job', () => {
   })

   it('should not throw', async () => {
+    const prepareJobOutput = testSetup.createOutputFile(
+      'prepare-job-output.json'
+    )
     await expect(
-      prepareJob(prepareJobData.args, prepareJobOutputPath)
+      prepareJob(prepareJobDefinition.args, prepareJobOutput)
     ).resolves.not.toThrow()
-    expect(() => fs.readFileSync(prepareJobOutputPath, 'utf-8')).not.toThrow()
+    expect(() => fs.readFileSync(prepareJobOutput, 'utf-8')).not.toThrow()
   })

   it('should have JSON output written to a file', async () => {
-    await prepareJob(prepareJobData.args, prepareJobOutputPath)
-    const prepareJobOutputContent = fs.readFileSync(
-      prepareJobOutputPath,
-      'utf-8'
-    )
+    const prepareJobOutput = testSetup.createOutputFile(
+      'prepare-job-output.json'
+    )
+    await prepareJob(prepareJobDefinition.args, prepareJobOutput)
+    const prepareJobOutputContent = fs.readFileSync(prepareJobOutput, 'utf-8')
     expect(() => JSON.parse(prepareJobOutputContent)).not.toThrow()
   })

   it('should have context written to a file', async () => {
-    await prepareJob(prepareJobData.args, prepareJobOutputPath)
-    const prepareJobOutputContent = fs.readFileSync(
-      prepareJobOutputPath,
-      'utf-8'
-    )
-    const parsedPrepareJobOutput = JSON.parse(prepareJobOutputContent)
+    const prepareJobOutput = testSetup.createOutputFile(
+      'prepare-job-output.json'
+    )
+    await prepareJob(prepareJobDefinition.args, prepareJobOutput)
+    const parsedPrepareJobOutput = JSON.parse(
+      fs.readFileSync(prepareJobOutput, 'utf-8')
+    )
     expect(parsedPrepareJobOutput.context).toBeDefined()
   })

-  it('should have container ids written to file', async () => {
-    await prepareJob(prepareJobData.args, prepareJobOutputPath)
-    const prepareJobOutputContent = fs.readFileSync(
-      prepareJobOutputPath,
-      'utf-8'
-    )
+  it('should have isAlpine field set correctly', async () => {
+    let prepareJobOutput = testSetup.createOutputFile(
+      'prepare-job-output-alpine.json'
+    )
+    const prepareJobArgsClone = JSON.parse(
+      JSON.stringify(prepareJobDefinition.args)
+    )
+    prepareJobArgsClone.container.image = 'alpine:latest'
+    await prepareJob(prepareJobArgsClone, prepareJobOutput)
+    let parsedPrepareJobOutput = JSON.parse(
+      fs.readFileSync(prepareJobOutput, 'utf-8')
+    )
+    expect(parsedPrepareJobOutput.isAlpine).toBe(true)
+
+    prepareJobOutput = testSetup.createOutputFile(
+      'prepare-job-output-ubuntu.json'
+    )
+    prepareJobArgsClone.container.image = 'ubuntu:latest'
+    await prepareJob(prepareJobArgsClone, prepareJobOutput)
+    parsedPrepareJobOutput = JSON.parse(
+      fs.readFileSync(prepareJobOutput, 'utf-8')
+    )
+    expect(parsedPrepareJobOutput.isAlpine).toBe(false)
+  })
+
+  it('should have container ids written to file', async () => {
+    const prepareJobOutput = testSetup.createOutputFile(
+      'prepare-job-output.json'
+    )
+    await prepareJob(prepareJobDefinition.args, prepareJobOutput)
+    const prepareJobOutputContent = fs.readFileSync(prepareJobOutput, 'utf-8')
     const parsedPrepareJobOutput = JSON.parse(prepareJobOutputContent)
     expect(parsedPrepareJobOutput.context.container.id).toBeDefined()
@@ -82,11 +96,11 @@ describe('prepare job', () => {
   })

   it('should have ports for context written in form [containerPort]:[hostPort]', async () => {
-    await prepareJob(prepareJobData.args, prepareJobOutputPath)
-    const prepareJobOutputContent = fs.readFileSync(
-      prepareJobOutputPath,
-      'utf-8'
-    )
+    const prepareJobOutput = testSetup.createOutputFile(
+      'prepare-job-output.json'
+    )
+    await prepareJob(prepareJobDefinition.args, prepareJobOutput)
+    const prepareJobOutputContent = fs.readFileSync(prepareJobOutput, 'utf-8')
     const parsedPrepareJobOutput = JSON.parse(prepareJobOutputContent)
     const mainContainerPorts = parsedPrepareJobOutput.context.container.ports
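The new isAlpine assertions above only check the value that prepareJob reports; this diff does not show how the docker hook derives it. Purely as an illustration of one generic detection technique, and not necessarily what this repository does, an Alpine-based container can be recognized by probing for /etc/alpine-release after the container starts:

// Illustration only (not taken from this PR): generic Alpine detection for a running container.
import { exec } from 'child_process'
import { promisify } from 'util'

const execAsync = promisify(exec)

async function isAlpineContainer(containerId: string): Promise<boolean> {
  try {
    // Exits non-zero (and rejects) when the file is absent.
    await execAsync(`docker exec ${containerId} sh -c "test -e /etc/alpine-release"`)
    return true
  } catch {
    return false
  }
}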

View File

@@ -0,0 +1,47 @@
+import * as fs from 'fs'
+import { PrepareJobResponse } from 'hooklib/lib'
+import * as path from 'path'
+import { prepareJob, runScriptStep } from '../src/hooks'
+import TestSetup from './test-setup'
+
+jest.useRealTimers()
+
+let testSetup: TestSetup
+
+const definitions = {
+  prepareJob: JSON.parse(
+    fs.readFileSync(
+      path.resolve(__dirname + '/../../../examples/prepare-job.json'),
+      'utf8'
+    )
+  ),
+  runScriptStep: JSON.parse(
+    fs.readFileSync(
+      path.resolve(__dirname + '/../../../examples/run-script-step.json'),
+      'utf-8'
+    )
+  )
+}
+
+let prepareJobResponse: PrepareJobResponse
+
+describe('run-script-step', () => {
+  beforeEach(async () => {
+    testSetup = new TestSetup()
+    testSetup.initialize()
+    const prepareJobOutput = testSetup.createOutputFile(
+      'prepare-job-output.json'
+    )
+    await prepareJob(definitions.prepareJob.args, prepareJobOutput)
+    prepareJobResponse = JSON.parse(fs.readFileSync(prepareJobOutput, 'utf-8'))
+  })
+
+  it('Should run script step without exceptions', async () => {
+    await expect(
+      runScriptStep(definitions.runScriptStep.args, prepareJobResponse.state)
+    ).resolves.not.toThrow()
+  })
+})

View File

@@ -1,11 +1,14 @@
 import * as fs from 'fs'
-import { v4 as uuidv4 } from 'uuid'
-import { env } from 'process'
 import { Mount } from 'hooklib'
+import * as path from 'path'
+import { env } from 'process'
+import { v4 as uuidv4 } from 'uuid'

 export default class TestSetup {
   private testdir: string
   private runnerMockDir: string
+  readonly runnerOutputDir: string

   private runnerMockSubdirs = {
     work: '_work',
     externals: 'externals',
@@ -16,15 +19,16 @@
     githubWorkflow: '_work/_temp/_github_workflow'
   }

-  private readonly projectName = 'example'
+  private readonly projectName = 'repo'

   constructor() {
     this.testdir = `${__dirname}/_temp/${uuidv4()}`
     this.runnerMockDir = `${this.testdir}/runner/_layout`
+    this.runnerOutputDir = `${this.testdir}/outputs`
   }

   private get allTestDirectories() {
-    const resp = [this.testdir, this.runnerMockDir]
+    const resp = [this.testdir, this.runnerMockDir, this.runnerOutputDir]

     for (const [key, value] of Object.entries(this.runnerMockSubdirs)) {
       resp.push(`${this.runnerMockDir}/${value}`)
@@ -38,29 +42,21 @@
   }

   public initialize(): void {
-    for (const dir of this.allTestDirectories) {
-      fs.mkdirSync(dir, { recursive: true })
-    }
+    env['GITHUB_WORKSPACE'] = this.workingDirectory
     env['RUNNER_NAME'] = 'test'
     env[
       'RUNNER_TEMP'
     ] = `${this.runnerMockDir}/${this.runnerMockSubdirs.workTemp}`
+    for (const dir of this.allTestDirectories) {
+      fs.mkdirSync(dir, { recursive: true })
+    }
   }

   public teardown(): void {
     fs.rmdirSync(this.testdir, { recursive: true })
   }

-  public get userMountVolumes(): Mount[] {
-    return [
-      {
-        sourceVolumePath: 'my_docker_volume',
-        targetVolumePath: '/volume_mount',
-        readOnly: false
-      }
-    ]
-  }
-
   public get systemMountVolumes(): Mount[] {
     return [
       {
@@ -106,7 +102,17 @@
     ]
   }

+  public createOutputFile(name: string): string {
+    let filePath = path.join(this.runnerOutputDir, name || `${uuidv4()}.json`)
+    fs.writeFileSync(filePath, '')
+    return filePath
+  }
+
   public get workingDirectory(): string {
+    return `${this.runnerMockDir}/_work/${this.projectName}/${this.projectName}`
+  }
+
+  public get containerWorkingDirectory(): string {
     return `/__w/${this.projectName}/${this.projectName}`
   }
 }
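As a usage note, the refactored specs above all follow the same pattern, sketched here with the helper introduced in this file (the hook call is a placeholder):

// Sketch: how the refactored tests drive TestSetup (see the specs earlier in this commit).
import TestSetup from './test-setup'

const testSetup = new TestSetup()
testSetup.initialize() // sets GITHUB_WORKSPACE, RUNNER_NAME, RUNNER_TEMP and creates the test dirs
const outputFile = testSetup.createOutputFile('prepare-job-output.json') // empty file under runnerOutputDir
// ...invoke a hook such as prepareJob(args, outputFile), then read and parse outputFile...
testSetup.teardown() // removes the per-test _temp/<uuid> tree, including outputFile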

View File

@@ -34,6 +34,7 @@ export interface ContainerInfo {
   createOptions?: string
   environmentVariables?: { [key: string]: string }
   userMountVolumes?: Mount[]
+  systemMountVolumes?: Mount[]
   registry?: Registry
   portMappings?: string[]
 }
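Declaring systemMountVolumes on ContainerInfo itself is what lets the docker hook drop the JobContainerInfo | StepContainerInfo cast shown earlier. A hedged sketch of the resulting shape (the user mount values come from the removed TestSetup getter; the docker.sock entry is only an illustrative example):

// Sketch: mount lists as createContainer now reads them, with no cast needed.
const container = {
  userMountVolumes: [
    { sourceVolumePath: 'my_docker_volume', targetVolumePath: '/volume_mount', readOnly: false }
  ],
  systemMountVolumes: [
    { sourceVolumePath: '/var/run/docker.sock', targetVolumePath: '/var/run/docker.sock', readOnly: false }
  ]
}
// Mirrors the updated createContainer logic shown above.
const mountVolumes = [
  ...(container.userMountVolumes || []),
  ...(container.systemMountVolumes || [])
]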

View File

@@ -1,7 +1,7 @@
-import { prepareJob, cleanupJob, runScriptStep } from '../src/hooks'
-import { TestHelper } from './test-setup'
-import * as path from 'path'
 import * as fs from 'fs'
+import * as path from 'path'
+import { cleanupJob, prepareJob, runScriptStep } from '../src/hooks'
+import { TestHelper } from './test-setup'

 jest.useRealTimers()
@@ -45,7 +45,7 @@ describe('Run script step', () => {
       NODE_ENV: 'development'
     },
     prependPath: ['/foo/bar', 'bar/foo'],
-    workingDirectory: '/__w/thboop-test2/thboop-test2'
+    workingDirectory: '/__w/repo/repo'
   }
   const state = {
     jobPod: prepareJobOutputData.state.jobPod