Mirror of https://github.com/actions/runner-container-hooks.git (synced 2025-12-14 08:36:45 +00:00)

Commit: cleared registry for testing

@@ -5,7 +5,7 @@
   "args": {
     "container": {
       "image": "node:14.16",
-      "workingDirectory": "/__w/thboop-test2/thboop-test2",
+      "workingDirectory": "/__w/repo/repo",
       "createOptions": "--cpus 1",
       "environmentVariables": {
         "NODE_ENV": "development"
@@ -16,7 +16,7 @@
       "echo \"hello world2\""
     ],
     "entryPoint": "bash",
-    "workingDirectory": "/__w/thboop-test2/thboop-test2",
+    "workingDirectory": "/__w/repo/repo",
     "createOptions": "--cpus 1",
     "environmentVariables": {
       "NODE_ENV": "development"
@@ -21,6 +21,6 @@
       "/foo/bar",
       "bar/foo"
     ],
-    "workingDirectory": "/__w/thboop-test2/thboop-test2"
+    "workingDirectory": "/__w/repo/repo"
   }
 }

@@ -2,11 +2,9 @@ import * as core from '@actions/core'
 import * as fs from 'fs'
 import {
   ContainerInfo,
-  JobContainerInfo,
   Registry,
   RunContainerStepArgs,
-  ServiceContainerInfo,
-  StepContainerInfo
+  ServiceContainerInfo
 } from 'hooklib/lib'
 import * as path from 'path'
 import { env } from 'process'
@@ -52,7 +50,7 @@ export async function createContainer(
 
   const mountVolumes = [
     ...(args.userMountVolumes || []),
-    ...((args as JobContainerInfo | StepContainerInfo).systemMountVolumes || [])
+    ...(args.systemMountVolumes || [])
   ]
   for (const mountVolume of mountVolumes) {
     dockerArgs.push(
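
Note: promoting systemMountVolumes onto the base ContainerInfo type (see the hooklib hunk near the end of this diff) is what allows the cast above to be dropped. Below is a minimal sketch of how such a merged mount list typically turns into `docker create -v` arguments; the helper is illustrative and not part of the hook's code, though the Mount fields match the ones used in test-setup.ts further down.

    import { Mount } from 'hooklib'

    // Sketch only: map merged user + system mounts to `-v source:target[:ro]` flags.
    function toVolumeArgs(mountVolumes: Mount[]): string[] {
      const dockerArgs: string[] = []
      for (const mountVolume of mountVolumes) {
        const readOnlySuffix = mountVolume.readOnly ? ':ro' : ''
        dockerArgs.push(
          '-v',
          `${mountVolume.sourceVolumePath}:${mountVolume.targetVolumePath}${readOnlySuffix}`
        )
      }
      return dockerArgs
    }
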
@@ -345,8 +343,9 @@ export async function containerExecStep(
     dockerArgs.push(`"${key}"`)
   }
 
-  // Todo figure out prepend path and update it here
-  // (we need to pass path in as -e Path={fullpath}) where {fullpath is the prepend path added to the current containers path}
+  if (args.prependPath?.length) {
+    dockerArgs.push('-e', `"PATH=${args.prependPath.join(':')}:$PATH"`)
+  }
 
   dockerArgs.push(containerId)
   dockerArgs.push(args.entryPoint)
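
Note: a rough illustration of what the new prependPath branch contributes to the exec arguments, using the '/foo/bar' and 'bar/foo' values from the example payload earlier in this diff. This is a sketch of the resulting argument list, not output captured from the hook.

    // Sketch: with prependPath = ['/foo/bar', 'bar/foo'] and entryPoint = 'bash'
    const prependPath = ['/foo/bar', 'bar/foo']
    const pathArg = `"PATH=${prependPath.join(':')}:$PATH"`
    // dockerArgs ends up roughly as:
    // [ ..., '-e', '"PATH=/foo/bar:bar/foo:$PATH"', containerId, 'bash', ... ]
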

@@ -1,16 +1,7 @@
 import * as fs from 'fs'
-import * as path from 'path'
-import { v4 as uuidv4 } from 'uuid'
 import { cleanupJob, prepareJob } from '../src/hooks'
 import TestSetup from './test-setup'
 
-const prepareJobInputPath = path.resolve(
-  `${__dirname}/../../../examples/prepare-job.json`
-)
-
-let prepareJobOutputPath: string
-let prepareJobDefinition: any
-
 let testSetup: TestSetup
 
 jest.useRealTimers()
@@ -20,28 +11,25 @@ describe('cleanup job', () => {
     testSetup = new TestSetup()
     testSetup.initialize()
 
-    const prepareJobRawData = fs.readFileSync(prepareJobInputPath, 'utf8')
-    prepareJobDefinition = JSON.parse(prepareJobRawData.toString())
+    const prepareJobDefinition = JSON.parse(
+      fs.readFileSync(
+        `${__dirname}/../../../examples/prepare-job.json`,
+        'utf-8'
+      )
+    )
 
-    prepareJobOutputPath = `${
-      testSetup.testDir
-    }/prepare-job-output-${uuidv4()}.json`
-    fs.writeFileSync(prepareJobOutputPath, '')
+    const prepareJobOutput = testSetup.createOutputFile(
+      'prepare-job-output.json'
+    )
 
-    prepareJobDefinition.args.container.userMountVolumes =
-      testSetup.userMountVolumes
-    prepareJobDefinition.args.container.systemMountVolumes =
-      testSetup.systemMountVolumes
-    prepareJobDefinition.args.container.workingDirectory =
-      testSetup.containerWorkingDirectory
     prepareJobDefinition.args.container.registry = null
-    prepareJobDefinition.args.services.forEach(s => (s.registry = null))
-
-    await prepareJob(prepareJobDefinition.args, prepareJobOutputPath)
+    prepareJobDefinition.args.services.forEach(s => {
+      s.registry = null
+    })
+    await prepareJob(prepareJobDefinition.args, prepareJobOutput)
   })
 
   afterEach(() => {
-    fs.rmSync(prepareJobOutputPath, { force: true })
     testSetup.teardown()
   })
 

@@ -1,6 +1,5 @@
 import * as fs from 'fs'
 import * as path from 'path'
-import { v4 as uuidv4 } from 'uuid'
 import {
   cleanupJob,
   prepareJob,
@@ -9,61 +8,41 @@ import {
 } from '../src/hooks'
 import TestSetup from './test-setup'
 
-const prepareJobJson = fs.readFileSync(
-  path.resolve(__dirname + '/../../../examples/prepare-job.json'),
-  'utf8'
-)
-
-const containerStepJson = fs.readFileSync(
-  path.resolve(__dirname + '/../../../examples/run-container-step.json'),
-  'utf8'
-)
-
-let prepareJobDefinition: any
-let scriptStepDefinition: any
-let runContainerStepDefinition: any
-
-let prepareJobOutputFilePath: string
+const definitions = {
+  prepareJob: JSON.parse(
+    fs.readFileSync(
+      path.resolve(__dirname + '/../../../examples/prepare-job.json'),
+      'utf8'
+    )
+  ),
+  runContainerStep: JSON.parse(
+    fs.readFileSync(
+      path.resolve(__dirname + '/../../../examples/run-container-step.json'),
+      'utf8'
+    )
+  ),
+  runScriptStep: JSON.parse(
+    fs.readFileSync(
+      path.resolve(__dirname + '/../../../examples/run-script-step.json'),
+      'utf-8'
+    )
+  )
+}
 
 let testSetup: TestSetup
 
 describe('e2e', () => {
   beforeEach(() => {
-    // init dirs
     testSetup = new TestSetup()
     testSetup.initialize()
-
-    prepareJobDefinition = JSON.parse(prepareJobJson)
-    prepareJobDefinition.args.container.userMountVolumes =
-      testSetup.userMountVolumes
-    prepareJobDefinition.args.container.systemMountVolumes =
+    definitions.prepareJob.args.container.systemMountVolumes =
       testSetup.systemMountVolumes
-    prepareJobDefinition.args.container.workingDirectory =
-      testSetup.containerWorkingDirectory
-    prepareJobDefinition.args.container.registry = null
-    prepareJobDefinition.args.services.forEach(s => (s.registry = null))
-
-    const scriptStepJson = fs.readFileSync(
-      path.resolve(__dirname + '/../../../examples/run-script-step.json'),
-      'utf8'
-    )
-    scriptStepDefinition = JSON.parse(scriptStepJson)
-    scriptStepDefinition.args.workingDirectory =
-      testSetup.containerWorkingDirectory
-    scriptStepDefinition.args.registry = null
-
-    runContainerStepDefinition = JSON.parse(containerStepJson)
-    runContainerStepDefinition.args.workingDirectory =
-      testSetup.containerWorkingDirectory
-    runContainerStepDefinition.args.userMountVolumes =
-      testSetup.userMountVolumes
-    runContainerStepDefinition.args.systemMountVolumes =
-      testSetup.systemMountVolumes
-    runContainerStepDefinition.args.registry = null
-
-    prepareJobOutputFilePath = `${
-      testSetup.testDir
-    }/prepare-job-output-${uuidv4()}.json`
-    fs.writeFileSync(prepareJobOutputFilePath, '')
+    definitions.prepareJob.args.container.registry = null
+    definitions.prepareJob.args.services.forEach(s => {
+      s.registry = null
+    })
   })
 
   afterEach(() => {
@@ -71,31 +50,45 @@ describe('e2e', () => {
   })
 
   it('should prepare job, then run script step, then run container step then cleanup', async () => {
+    const prepareJobOutput = testSetup.createOutputFile(
+      'prepare-job-output.json'
+    )
+
     await expect(
-      prepareJob(prepareJobDefinition.args, prepareJobOutputFilePath)
+      prepareJob(definitions.prepareJob.args, prepareJobOutput)
     ).resolves.not.toThrow()
-    let rawState = fs.readFileSync(prepareJobOutputFilePath, 'utf-8')
+
+    let rawState = fs.readFileSync(prepareJobOutput, 'utf-8')
     let resp = JSON.parse(rawState)
 
     await expect(
-      runScriptStep(scriptStepDefinition.args, resp.state)
+      runScriptStep(definitions.runScriptStep.args, resp.state)
     ).resolves.not.toThrow()
 
     await expect(
-      runContainerStep(runContainerStepDefinition.args, resp.state)
+      runContainerStep(definitions.runContainerStep.args, resp.state)
     ).resolves.not.toThrow()
 
     await expect(cleanupJob()).resolves.not.toThrow()
   })
 
   it('should prepare job, then run script step, then run container step with Dockerfile then cleanup', async () => {
+    const prepareJobOutput = testSetup.createOutputFile(
+      'prepare-job-output.json'
+    )
+
     await expect(
-      prepareJob(prepareJobDefinition.args, prepareJobOutputFilePath)
+      prepareJob(definitions.prepareJob.args, prepareJobOutput)
     ).resolves.not.toThrow()
-    let rawState = fs.readFileSync(prepareJobOutputFilePath, 'utf-8')
-    let resp = JSON.parse(rawState)
-    await expect(
-      runScriptStep(scriptStepDefinition.args, resp.state)
-    ).resolves.not.toThrow()
 
-    const dockerfilePath = `${testSetup.testDir}/Dockerfile`
+    let rawState = fs.readFileSync(prepareJobOutput, 'utf-8')
+    let resp = JSON.parse(rawState)
+
+    await expect(
+      runScriptStep(definitions.runScriptStep.args, resp.state)
+    ).resolves.not.toThrow()
+
+    const dockerfilePath = `${testSetup.workingDirectory}/Dockerfile`
     fs.writeFileSync(
       dockerfilePath,
       `FROM ubuntu:latest
@@ -103,14 +96,17 @@ ENV TEST=test
 ENTRYPOINT [ "tail", "-f", "/dev/null" ]
 `
     )
 
     const containerStepDataCopy = JSON.parse(
-      JSON.stringify(runContainerStepDefinition)
+      JSON.stringify(definitions.runContainerStep)
     )
 
     containerStepDataCopy.args.dockerfile = 'Dockerfile'
-    containerStepDataCopy.args.context = '.'
     await expect(
       runContainerStep(containerStepDataCopy.args, resp.state)
     ).resolves.not.toThrow()
 
     await expect(cleanupJob()).resolves.not.toThrow()
   })
 })

@@ -1,36 +1,28 @@
 import * as fs from 'fs'
-import { v4 as uuidv4 } from 'uuid'
 import { prepareJob } from '../src/hooks'
 import TestSetup from './test-setup'
 
 jest.useRealTimers()
 
-let prepareJobOutputPath: string
-let prepareJobData: any
-const prepareJobInputPath = `${__dirname}/../../../examples/prepare-job.json`
+const prepareJobDefinition = JSON.parse(
+  fs.readFileSync(`${__dirname}/../../../examples/prepare-job.json`, 'utf-8')
+)
 
 let testSetup: TestSetup
 
 describe('prepare job', () => {
-  beforeEach(async () => {
+  beforeEach(() => {
     testSetup = new TestSetup()
     testSetup.initialize()
 
-    let prepareJobRawData = fs.readFileSync(prepareJobInputPath, 'utf8')
-    prepareJobData = JSON.parse(prepareJobRawData.toString())
-
-    prepareJobData.args.container.userMountVolumes = testSetup.userMountVolumes
-    prepareJobData.args.container.systemMountVolumes =
+    prepareJobDefinition.args.container.systemMountVolumes =
       testSetup.systemMountVolumes
-    prepareJobData.args.container.workingDirectory =
-      testSetup.containerWorkingDirectory
-    prepareJobData.args.container.registry = null
-    prepareJobData.args.services.forEach(s => (s.registry = null))
-
-    prepareJobOutputPath = `${
-      testSetup.testDir
-    }/prepare-job-output-${uuidv4()}.json`
-    fs.writeFileSync(prepareJobOutputPath, '')
+    prepareJobDefinition.args.container.workingDirectory =
+      testSetup.workingDirectory
+    prepareJobDefinition.args.container.registry = null
+    prepareJobDefinition.args.services.forEach(s => {
+      s.registry = null
+    })
   })
 
   afterEach(() => {
@@ -38,38 +30,68 @@ describe('prepare job', () => {
   })
 
   it('should not throw', async () => {
+    const prepareJobOutput = testSetup.createOutputFile(
+      'prepare-job-output.json'
+    )
     await expect(
-      prepareJob(prepareJobData.args, prepareJobOutputPath)
+      prepareJob(prepareJobDefinition.args, prepareJobOutput)
     ).resolves.not.toThrow()
 
-    expect(() => fs.readFileSync(prepareJobOutputPath, 'utf-8')).not.toThrow()
+    expect(() => fs.readFileSync(prepareJobOutput, 'utf-8')).not.toThrow()
   })
 
   it('should have JSON output written to a file', async () => {
-    await prepareJob(prepareJobData.args, prepareJobOutputPath)
-    const prepareJobOutputContent = fs.readFileSync(
-      prepareJobOutputPath,
-      'utf-8'
+    const prepareJobOutput = testSetup.createOutputFile(
+      'prepare-job-output.json'
     )
+    await prepareJob(prepareJobDefinition.args, prepareJobOutput)
+    const prepareJobOutputContent = fs.readFileSync(prepareJobOutput, 'utf-8')
     expect(() => JSON.parse(prepareJobOutputContent)).not.toThrow()
   })
 
   it('should have context written to a file', async () => {
-    await prepareJob(prepareJobData.args, prepareJobOutputPath)
-    const prepareJobOutputContent = fs.readFileSync(
-      prepareJobOutputPath,
-      'utf-8'
+    const prepareJobOutput = testSetup.createOutputFile(
+      'prepare-job-output.json'
+    )
+    await prepareJob(prepareJobDefinition.args, prepareJobOutput)
+    const parsedPrepareJobOutput = JSON.parse(
+      fs.readFileSync(prepareJobOutput, 'utf-8')
     )
-    const parsedPrepareJobOutput = JSON.parse(prepareJobOutputContent)
     expect(parsedPrepareJobOutput.context).toBeDefined()
   })
 
-  it('should have container ids written to file', async () => {
-    await prepareJob(prepareJobData.args, prepareJobOutputPath)
-    const prepareJobOutputContent = fs.readFileSync(
-      prepareJobOutputPath,
-      'utf-8'
+  it('should have isAlpine field set correctly', async () => {
+    let prepareJobOutput = testSetup.createOutputFile(
+      'prepare-job-output-alpine.json'
     )
+    const prepareJobArgsClone = JSON.parse(
+      JSON.stringify(prepareJobDefinition.args)
+    )
+    prepareJobArgsClone.container.image = 'alpine:latest'
+    await prepareJob(prepareJobArgsClone, prepareJobOutput)
+
+    let parsedPrepareJobOutput = JSON.parse(
+      fs.readFileSync(prepareJobOutput, 'utf-8')
+    )
+    expect(parsedPrepareJobOutput.isAlpine).toBe(true)
+
+    prepareJobOutput = testSetup.createOutputFile(
+      'prepare-job-output-ubuntu.json'
+    )
+    prepareJobArgsClone.container.image = 'ubuntu:latest'
+    await prepareJob(prepareJobArgsClone, prepareJobOutput)
+    parsedPrepareJobOutput = JSON.parse(
+      fs.readFileSync(prepareJobOutput, 'utf-8')
+    )
+    expect(parsedPrepareJobOutput.isAlpine).toBe(false)
+  })
+
+  it('should have container ids written to file', async () => {
+    const prepareJobOutput = testSetup.createOutputFile(
+      'prepare-job-output.json'
+    )
+    await prepareJob(prepareJobDefinition.args, prepareJobOutput)
+    const prepareJobOutputContent = fs.readFileSync(prepareJobOutput, 'utf-8')
     const parsedPrepareJobOutput = JSON.parse(prepareJobOutputContent)
 
     expect(parsedPrepareJobOutput.context.container.id).toBeDefined()
@@ -78,11 +100,11 @@ describe('prepare job', () => {
   })
 
   it('should have ports for context written in form [containerPort]:[hostPort]', async () => {
-    await prepareJob(prepareJobData.args, prepareJobOutputPath)
-    const prepareJobOutputContent = fs.readFileSync(
-      prepareJobOutputPath,
-      'utf-8'
+    const prepareJobOutput = testSetup.createOutputFile(
+      'prepare-job-output.json'
     )
+    await prepareJob(prepareJobDefinition.args, prepareJobOutput)
+    const prepareJobOutputContent = fs.readFileSync(prepareJobOutput, 'utf-8')
     const parsedPrepareJobOutput = JSON.parse(prepareJobOutputContent)
 
     const mainContainerPorts = parsedPrepareJobOutput.context.container.ports
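
Note: the assertions above imply a rough shape for the prepare-job output file the tests read back. The sketch below is inferred from those expectations, not taken from the hooklib type definitions.

    // Inferred shape, for orientation only; field names come from the assertions above.
    interface PrepareJobOutputSketch {
      state: unknown     // handed to runScriptStep / runContainerStep as resp.state
      isAlpine: boolean  // true for an alpine:latest job container, false for ubuntu:latest
      context: {
        container: {
          id: string     // asserted to be defined
          ports: unknown // asserted to be written as [containerPort]:[hostPort]
        }
      }
    }
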

packages/docker/tests/run-script-step.ts (new file, 51 lines)
@@ -0,0 +1,51 @@
+import * as fs from 'fs'
+import { PrepareJobResponse } from 'hooklib/lib'
+import * as path from 'path'
+import { prepareJob, runScriptStep } from '../src/hooks'
+import TestSetup from './test-setup'
+
+jest.useRealTimers()
+
+let testSetup: TestSetup
+
+const definitions = {
+  prepareJob: JSON.parse(
+    fs.readFileSync(
+      path.resolve(__dirname + '/../../../examples/prepare-job.json'),
+      'utf8'
+    )
+  ),
+
+  runScriptStep: JSON.parse(
+    fs.readFileSync(
+      path.resolve(__dirname + '/../../../examples/run-script-step.json'),
+      'utf-8'
+    )
+  )
+}
+
+let prepareJobResponse: PrepareJobResponse
+
+describe('run-script-step', () => {
+  beforeEach(async () => {
+    testSetup = new TestSetup()
+    testSetup.initialize()
+
+    const prepareJobOutput = testSetup.createOutputFile(
+      'prepare-job-output.json'
+    )
+    definitions.prepareJob.args.container.registry = null
+    definitions.prepareJob.args.services.forEach(s => {
+      s.registry = null
+    })
+    await prepareJob(definitions.prepareJob.args, prepareJobOutput)
+
+    prepareJobResponse = JSON.parse(fs.readFileSync(prepareJobOutput, 'utf-8'))
+  })
+
+  it('Should run script step without exceptions', async () => {
+    await expect(
+      runScriptStep(definitions.runScriptStep.args, prepareJobResponse.state)
+    ).resolves.not.toThrow()
+  })
+})
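
Note: the definitions object in this new test is module scoped and mutated in beforeEach, so values can carry over between cases. The isAlpine test above avoids that by deep cloning before mutating; the same pattern is sketched here for reference (illustrative, not an excerpt from this file).

    // Clone-before-mutate: keeps the shared definitions object pristine for later cases.
    const prepareJobArgsClone = JSON.parse(
      JSON.stringify(definitions.prepareJob.args)
    )
    prepareJobArgsClone.container.registry = null // mutate the clone, not the shared object
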

@@ -1,11 +1,13 @@
 import * as fs from 'fs'
 import { Mount } from 'hooklib'
+import * as path from 'path'
 import { env } from 'process'
 import { v4 as uuidv4 } from 'uuid'
 
 export default class TestSetup {
   private testdir: string
   private runnerMockDir: string
+  readonly runnerOutputDir: string
 
   private runnerMockSubdirs = {
     work: '_work',
@@ -16,54 +18,45 @@ export default class TestSetup {
     githubHome: '_work/_temp/_github_home',
     githubWorkflow: '_work/_temp/_github_workflow'
   }
-  private readonly projectName = 'test'
+
+  private readonly projectName = 'repo'
 
   constructor() {
     this.testdir = `${__dirname}/_temp/${uuidv4()}`
     this.runnerMockDir = `${this.testdir}/runner/_layout`
-  }
-
-  public initialize(): void {
-    for (const dir of this.allTestDirectories) {
-      fs.mkdirSync(dir, { recursive: true })
-    }
-    env.RUNNER_NAME = 'test'
-    env.RUNNER_TEMP = `${this.runnerMockDir}/${this.runnerMockSubdirs.workTemp}`
-    env.GITHUB_WORKSPACE = this.runnerProjectWorkDir
-  }
-
-  public teardown(): void {
-    fs.rmdirSync(this.testdir, { recursive: true })
-  }
-
-  public get userMountVolumes(): Mount[] {
-    return [
-      {
-        sourceVolumePath: 'my_docker_volume',
-        targetVolumePath: '/volume_mount',
-        readOnly: false
-      }
-    ]
-  }
-
-  public get runnerProjectWorkDir() {
-    return `${this.runnerMockDir}/_work/${this.projectName}/${this.projectName}`
-  }
-
-  public get testDir() {
-    return this.testdir
+    this.runnerOutputDir = `${this.testdir}/outputs`
   }
 
   private get allTestDirectories() {
-    const resp = [this.testdir, this.runnerMockDir, this.runnerProjectWorkDir]
+    const resp = [this.testdir, this.runnerMockDir, this.runnerOutputDir]
 
     for (const [key, value] of Object.entries(this.runnerMockSubdirs)) {
       resp.push(`${this.runnerMockDir}/${value}`)
     }
 
+    resp.push(
+      `${this.runnerMockDir}/_work/${this.projectName}/${this.projectName}`
+    )
+
     return resp
   }
 
+  public initialize(): void {
+    env['GITHUB_WORKSPACE'] = this.workingDirectory
+    env['RUNNER_NAME'] = 'test'
+    env[
+      'RUNNER_TEMP'
+    ] = `${this.runnerMockDir}/${this.runnerMockSubdirs.workTemp}`
+
+    for (const dir of this.allTestDirectories) {
+      fs.mkdirSync(dir, { recursive: true })
+    }
+  }
+
+  public teardown(): void {
+    fs.rmdirSync(this.testdir, { recursive: true })
+  }
+
   public get systemMountVolumes(): Mount[] {
     return [
       {
@@ -109,6 +102,16 @@ export default class TestSetup {
     ]
   }
 
+  public createOutputFile(name: string): string {
+    let filePath = path.join(this.runnerOutputDir, name || `${uuidv4()}.json`)
+    fs.writeFileSync(filePath, '')
+    return filePath
+  }
+
+  public get workingDirectory(): string {
+    return `${this.runnerMockDir}/_work/${this.projectName}/${this.projectName}`
+  }
+
   public get containerWorkingDirectory(): string {
     return `/__w/${this.projectName}/${this.projectName}`
   }
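
Note: a short usage sketch for the two members added above, mirroring how the tests in this diff call them; the paths in the comments follow the patterns built by the class, with a random per-test directory standing in for <testdir>.

    // Sketch: typical use from a test's beforeEach.
    const testSetup = new TestSetup()
    testSetup.initialize()

    // Created empty under <testdir>/outputs and returned as a path to that file.
    const outputPath = testSetup.createOutputFile('prepare-job-output.json')

    // Host-side working directory: <testdir>/runner/_layout/_work/repo/repo
    const hostWorkDir = testSetup.workingDirectory
    // Container-side working directory: /__w/repo/repo
    const containerWorkDir = testSetup.containerWorkingDirectory
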

@@ -34,6 +34,7 @@ export interface ContainerInfo {
   createOptions?: string
   environmentVariables?: { [key: string]: string }
   userMountVolumes?: Mount[]
+  systemMountVolumes?: Mount[]
   registry?: Registry
   portMappings?: string[]
 }
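
Note: with systemMountVolumes now on the base interface, a plain ContainerInfo value can carry both mount lists, which is why createContainer (earlier in this diff) no longer needs the JobContainerInfo | StepContainerInfo cast. A partial sketch follows; values are illustrative and only fields visible in this diff are shown.

    import { ContainerInfo } from 'hooklib/lib'

    // Partial on purpose: the real interface may declare more members than this diff shows.
    const container: Partial<ContainerInfo> = {
      createOptions: '--cpus 1',
      environmentVariables: { NODE_ENV: 'development' },
      userMountVolumes: [
        {
          sourceVolumePath: 'my_docker_volume',
          targetVolumePath: '/volume_mount',
          readOnly: false
        }
      ],
      systemMountVolumes: []
    }
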

@@ -1,7 +1,7 @@
-import { prepareJob, cleanupJob, runScriptStep } from '../src/hooks'
-import { TestHelper } from './test-setup'
-import * as path from 'path'
 import * as fs from 'fs'
+import * as path from 'path'
+import { cleanupJob, prepareJob, runScriptStep } from '../src/hooks'
+import { TestHelper } from './test-setup'
 
 jest.useRealTimers()
 
@@ -45,7 +45,7 @@ describe('Run script step', () => {
         NODE_ENV: 'development'
       },
       prependPath: ['/foo/bar', 'bar/foo'],
-      workingDirectory: '/__w/thboop-test2/thboop-test2'
+      workingDirectory: '/__w/repo/repo'
     }
     const state = {
       jobPod: prepareJobOutputData.state.jobPod