mirror of
https://github.com/actions/runner.git
synced 2026-04-09 04:03:17 +08:00
Compare commits
20 Commits
dependabot
...
rentziass/
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
ac65885854 | ||
|
|
21ba579c06 | ||
|
|
580116c18b | ||
|
|
c9a1751d87 | ||
|
|
7711dc53e2 | ||
|
|
df507886cb | ||
|
|
5c6dd47e76 | ||
|
|
7ff994b932 | ||
|
|
b9275b59cf | ||
|
|
f0c228635e | ||
|
|
9728019b24 | ||
|
|
e17e7aabbf | ||
|
|
4259ffb6dc | ||
|
|
4e8e1ff020 | ||
|
|
b6cca8fb99 | ||
|
|
18d0789c74 | ||
|
|
c985a9ff03 | ||
|
|
45ed15ddf3 | ||
|
|
c5dcf59d26 | ||
|
|
c7f6c49ba0 |
@@ -4,7 +4,7 @@
|
||||
"features": {
|
||||
"ghcr.io/devcontainers/features/docker-in-docker:2": {},
|
||||
"ghcr.io/devcontainers/features/dotnet": {
|
||||
"version": "8.0.418"
|
||||
"version": "8.0.419"
|
||||
},
|
||||
"ghcr.io/devcontainers/features/node:1": {
|
||||
"version": "20"
|
||||
|
||||
@@ -5,8 +5,8 @@ ARG TARGETOS
|
||||
ARG TARGETARCH
|
||||
ARG RUNNER_VERSION
|
||||
ARG RUNNER_CONTAINER_HOOKS_VERSION=0.7.0
|
||||
ARG DOCKER_VERSION=29.3.0
|
||||
ARG BUILDX_VERSION=0.32.1
|
||||
ARG DOCKER_VERSION=29.3.1
|
||||
ARG BUILDX_VERSION=0.33.0
|
||||
|
||||
RUN apt update -y && apt install curl unzip -y
|
||||
|
||||
|
||||
@@ -1,35 +1,33 @@
|
||||
## What's Changed
|
||||
* chore: update Node versions by @github-actions[bot] in https://github.com/actions/runner/pull/4200
|
||||
* Update dotnet sdk to latest version @8.0.417 by @github-actions[bot] in https://github.com/actions/runner/pull/4201
|
||||
* Bump System.Formats.Asn1 and System.Security.Cryptography.Pkcs by @dependabot[bot] in https://github.com/actions/runner/pull/4202
|
||||
* Allow empty container options by @ericsciple in https://github.com/actions/runner/pull/4208
|
||||
* Update Docker to v29.1.5 and Buildx to v0.31.0 by @github-actions[bot] in https://github.com/actions/runner/pull/4212
|
||||
* Report job level annotations by @TingluoHuang in https://github.com/actions/runner/pull/4216
|
||||
* Fix local action display name showing `Run /./` instead of `Run ./` by @ericsciple in https://github.com/actions/runner/pull/4218
|
||||
* Update Docker to v29.2.0 and Buildx to v0.31.1 by @github-actions[bot] in https://github.com/actions/runner/pull/4219
|
||||
* Add support for libssl3 and libssl3t64 for newer Debian/Ubuntu versions by @nekketsuuu in https://github.com/actions/runner/pull/4213
|
||||
* Validate work dir during runner start up. by @TingluoHuang in https://github.com/actions/runner/pull/4227
|
||||
* Bump hook to 0.8.1 by @nikola-jokic in https://github.com/actions/runner/pull/4222
|
||||
* Support return job result as exitcode in hosted runner. by @TingluoHuang in https://github.com/actions/runner/pull/4233
|
||||
* Add telemetry tracking for deprecated set-output and save-state commands by @ericsciple in https://github.com/actions/runner/pull/4221
|
||||
* Fix parser comparison mismatches by @ericsciple in https://github.com/actions/runner/pull/4220
|
||||
* Remove unnecessary connection test during some registration flows by @zarenner in https://github.com/actions/runner/pull/4244
|
||||
* chore: update Node versions by @github-actions[bot] in https://github.com/actions/runner/pull/4249
|
||||
* Update dotnet sdk to latest version @8.0.418 by @github-actions[bot] in https://github.com/actions/runner/pull/4250
|
||||
* Fix link to SECURITY.md in README by @TingluoHuang in https://github.com/actions/runner/pull/4253
|
||||
* Try to infer runner is on hosted/ghes when githuburl is empty. by @TingluoHuang in https://github.com/actions/runner/pull/4254
|
||||
* Add Node.js 20 deprecation warning annotation (Phase 1) by @salmanmkc in https://github.com/actions/runner/pull/4242
|
||||
* Update Node.js 20 deprecation date to June 2nd, 2026 by @salmanmkc in https://github.com/actions/runner/pull/4258
|
||||
* Composite Action Step Markers by @ericsciple in https://github.com/actions/runner/pull/4243
|
||||
* Symlink actions cache by @paveliak in https://github.com/actions/runner/pull/4260
|
||||
* Bump minimatch in /src/Misc/expressionFunc/hashFiles by @dependabot[bot] in https://github.com/actions/runner/pull/4261
|
||||
* Bump @stylistic/eslint-plugin from 3.1.0 to 5.9.0 in /src/Misc/expressionFunc/hashFiles by @dependabot[bot] in https://github.com/actions/runner/pull/4257
|
||||
* Log inner exception message. by @TingluoHuang in https://github.com/actions/runner/pull/4265
|
||||
* Fix composite post-step marker display names by @ericsciple in https://github.com/actions/runner/pull/4267
|
||||
* Bump actions/download-artifact from 7 to 8 by @dependabot[bot] in https://github.com/actions/runner/pull/4269
|
||||
* chore: update Node versions by @github-actions[bot] in https://github.com/actions/runner/pull/4272
|
||||
* Avoid throw in SelfUpdaters. by @TingluoHuang in https://github.com/actions/runner/pull/4274
|
||||
* Fix parser comparison mismatches by @ericsciple in https://github.com/actions/runner/pull/4273
|
||||
* Devcontainer: bump base image Ubuntu version by @MaxHorstmann in https://github.com/actions/runner/pull/4277
|
||||
* Support `entrypoint` and `command` for service containers by @ericsciple in https://github.com/actions/runner/pull/4276
|
||||
* Bump actions/upload-artifact from 6 to 7 by @dependabot[bot] in https://github.com/actions/runner/pull/4270
|
||||
* Bump docker/login-action from 3 to 4 by @dependabot[bot] in https://github.com/actions/runner/pull/4278
|
||||
* Fix positional arg bug in ExpressionParser.CreateTree by @ericsciple in https://github.com/actions/runner/pull/4279
|
||||
* Bump docker/build-push-action from 6 to 7 by @dependabot[bot] in https://github.com/actions/runner/pull/4283
|
||||
* Bump docker/setup-buildx-action from 3 to 4 by @dependabot[bot] in https://github.com/actions/runner/pull/4282
|
||||
* Bump actions/attest-build-provenance from 3 to 4 by @dependabot[bot] in https://github.com/actions/runner/pull/4266
|
||||
* Bump @stylistic/eslint-plugin from 5.9.0 to 5.10.0 in /src/Misc/expressionFunc/hashFiles by @dependabot[bot] in https://github.com/actions/runner/pull/4281
|
||||
* Update Docker to v29.3.0 and Buildx to v0.32.1 by @github-actions[bot] in https://github.com/actions/runner/pull/4286
|
||||
* chore: update Node versions by @github-actions[bot] in https://github.com/actions/runner/pull/4287
|
||||
* Fix cancellation token race during parser comparison by @ericsciple in https://github.com/actions/runner/pull/4280
|
||||
* Bump @typescript-eslint/eslint-plugin from 8.47.0 to 8.54.0 in /src/Misc/expressionFunc/hashFiles by @dependabot[bot] in https://github.com/actions/runner/pull/4230
|
||||
* Exit with specified exit code when runner is outdated by @nikola-jokic in https://github.com/actions/runner/pull/4285
|
||||
* Report infra_error for action download failures. by @TingluoHuang in https://github.com/actions/runner/pull/4294
|
||||
* Update dotnet sdk to latest version @8.0.419 by @github-actions[bot] in https://github.com/actions/runner/pull/4301
|
||||
* Node 24 enforcement + Linux ARM32 deprecation support by @salmanmkc in https://github.com/actions/runner/pull/4303
|
||||
* Bump @typescript-eslint/eslint-plugin from 8.54.0 to 8.57.1 in /src/Misc/expressionFunc/hashFiles by @dependabot[bot] in https://github.com/actions/runner/pull/4304
|
||||
|
||||
## New Contributors
|
||||
* @nekketsuuu made their first contribution in https://github.com/actions/runner/pull/4213
|
||||
* @zarenner made their first contribution in https://github.com/actions/runner/pull/4244
|
||||
* @MaxHorstmann made their first contribution in https://github.com/actions/runner/pull/4277
|
||||
|
||||
**Full Changelog**: https://github.com/actions/runner/compare/v2.331.0...v2.332.0
|
||||
**Full Changelog**: https://github.com/actions/runner/compare/v2.332.0...v2.333.0
|
||||
|
||||
_Note: Actions Runner follows a progressive release policy, so the latest release might not be available to your enterprise, organization, or repository yet.
|
||||
To confirm which version of the Actions Runner you should expect, please view the download instructions for your enterprise, organization, or repository.
|
||||
|
||||
555
src/Misc/expressionFunc/hashFiles/package-lock.json
generated
555
src/Misc/expressionFunc/hashFiles/package-lock.json
generated
@@ -14,7 +14,7 @@
|
||||
"devDependencies": {
|
||||
"@stylistic/eslint-plugin": "^5.10.0",
|
||||
"@types/node": "^22.0.0",
|
||||
"@typescript-eslint/eslint-plugin": "^8.0.0",
|
||||
"@typescript-eslint/eslint-plugin": "^8.58.1",
|
||||
"@typescript-eslint/parser": "^8.0.0",
|
||||
"@vercel/ncc": "^0.38.3",
|
||||
"eslint": "^8.47.0",
|
||||
@@ -93,9 +93,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@eslint-community/regexpp": {
|
||||
"version": "4.10.0",
|
||||
"resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.10.0.tgz",
|
||||
"integrity": "sha512-Cu96Sd2By9mCNTx2iyKOmq10v22jUVQv0lQnlGNy16oE9589yE+QADPbrMGCkA51cKZSg3Pu/aTJVTGfL/qjUA==",
|
||||
"version": "4.12.2",
|
||||
"resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.12.2.tgz",
|
||||
"integrity": "sha512-EriSTlt5OC9/7SXkRSCAhfSxxoSUgBm33OH+IkwbdpgoqsSsUg7y3uh+IICI/Qg4BBWr3U2i39RpmycbxMq4ew==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": "^12.0.0 || ^14.0.0 || >=16.0.0"
|
||||
@@ -247,19 +247,6 @@
|
||||
"eslint": "^9.0.0 || ^10.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@stylistic/eslint-plugin/node_modules/@typescript-eslint/types": {
|
||||
"version": "8.56.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.56.0.tgz",
|
||||
"integrity": "sha512-DBsLPs3GsWhX5HylbP9HNG15U0bnwut55Lx12bHB9MpXxQ+R5GC8MwQe+N1UFXxAeQDvEsEDY6ZYwX03K7Z6HQ==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
|
||||
},
|
||||
"funding": {
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/typescript-eslint"
|
||||
}
|
||||
},
|
||||
"node_modules/@stylistic/eslint-plugin/node_modules/eslint-visitor-keys": {
|
||||
"version": "4.2.1",
|
||||
"resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.1.tgz",
|
||||
@@ -321,21 +308,19 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/eslint-plugin": {
|
||||
"version": "8.47.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.47.0.tgz",
|
||||
"integrity": "sha512-fe0rz9WJQ5t2iaLfdbDc9T80GJy0AeO453q8C3YCilnGozvOyCG5t+EZtg7j7D88+c3FipfP/x+wzGnh1xp8ZA==",
|
||||
"version": "8.58.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.58.1.tgz",
|
||||
"integrity": "sha512-eSkwoemjo76bdXl2MYqtxg51HNwUSkWfODUOQ3PaTLZGh9uIWWFZIjyjaJnex7wXDu+TRx+ATsnSxdN9YWfRTQ==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@eslint-community/regexpp": "^4.10.0",
|
||||
"@typescript-eslint/scope-manager": "8.47.0",
|
||||
"@typescript-eslint/type-utils": "8.47.0",
|
||||
"@typescript-eslint/utils": "8.47.0",
|
||||
"@typescript-eslint/visitor-keys": "8.47.0",
|
||||
"graphemer": "^1.4.0",
|
||||
"ignore": "^7.0.0",
|
||||
"@eslint-community/regexpp": "^4.12.2",
|
||||
"@typescript-eslint/scope-manager": "8.58.1",
|
||||
"@typescript-eslint/type-utils": "8.58.1",
|
||||
"@typescript-eslint/utils": "8.58.1",
|
||||
"@typescript-eslint/visitor-keys": "8.58.1",
|
||||
"ignore": "^7.0.5",
|
||||
"natural-compare": "^1.4.0",
|
||||
"ts-api-utils": "^2.1.0"
|
||||
"ts-api-utils": "^2.5.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
|
||||
@@ -345,9 +330,9 @@
|
||||
"url": "https://opencollective.com/typescript-eslint"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@typescript-eslint/parser": "^8.47.0",
|
||||
"eslint": "^8.57.0 || ^9.0.0",
|
||||
"typescript": ">=4.8.4 <6.0.0"
|
||||
"@typescript-eslint/parser": "^8.58.1",
|
||||
"eslint": "^8.57.0 || ^9.0.0 || ^10.0.0",
|
||||
"typescript": ">=4.8.4 <6.1.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/eslint-plugin/node_modules/ignore": {
|
||||
@@ -361,11 +346,10 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/eslint-plugin/node_modules/ts-api-utils": {
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.1.0.tgz",
|
||||
"integrity": "sha512-CUgTZL1irw8u29bzrOD/nH85jqyc74D6SshFgujOIA7osm2Rz7dYH77agkx7H4FBNxDq7Cjf+IjaX/8zwFW+ZQ==",
|
||||
"version": "2.5.0",
|
||||
"resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.5.0.tgz",
|
||||
"integrity": "sha512-OJ/ibxhPlqrMM0UiNHJ/0CKQkoKF243/AEmplt3qpRgkW8VG7IfOS41h7V8TjITqdByHzrjcS/2si+y4lIh8NA==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=18.12"
|
||||
},
|
||||
@@ -374,17 +358,16 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/parser": {
|
||||
"version": "8.47.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.47.0.tgz",
|
||||
"integrity": "sha512-lJi3PfxVmo0AkEY93ecfN+r8SofEqZNGByvHAI3GBLrvt1Cw6H5k1IM02nSzu0RfUafr2EvFSw0wAsZgubNplQ==",
|
||||
"version": "8.58.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.58.1.tgz",
|
||||
"integrity": "sha512-gGkiNMPqerb2cJSVcruigx9eHBlLG14fSdPdqMoOcBfh+vvn4iCq2C8MzUB89PrxOXk0y3GZ1yIWb9aOzL93bw==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@typescript-eslint/scope-manager": "8.47.0",
|
||||
"@typescript-eslint/types": "8.47.0",
|
||||
"@typescript-eslint/typescript-estree": "8.47.0",
|
||||
"@typescript-eslint/visitor-keys": "8.47.0",
|
||||
"debug": "^4.3.4"
|
||||
"@typescript-eslint/scope-manager": "8.58.1",
|
||||
"@typescript-eslint/types": "8.58.1",
|
||||
"@typescript-eslint/typescript-estree": "8.58.1",
|
||||
"@typescript-eslint/visitor-keys": "8.58.1",
|
||||
"debug": "^4.4.3"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
|
||||
@@ -394,20 +377,19 @@
|
||||
"url": "https://opencollective.com/typescript-eslint"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"eslint": "^8.57.0 || ^9.0.0",
|
||||
"typescript": ">=4.8.4 <6.0.0"
|
||||
"eslint": "^8.57.0 || ^9.0.0 || ^10.0.0",
|
||||
"typescript": ">=4.8.4 <6.1.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/project-service": {
|
||||
"version": "8.47.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.47.0.tgz",
|
||||
"integrity": "sha512-2X4BX8hUeB5JcA1TQJ7GjcgulXQ+5UkNb0DL8gHsHUHdFoiCTJoYLTpib3LtSDPZsRET5ygN4qqIWrHyYIKERA==",
|
||||
"version": "8.58.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.58.1.tgz",
|
||||
"integrity": "sha512-gfQ8fk6cxhtptek+/8ZIqw8YrRW5048Gug8Ts5IYcMLCw18iUgrZAEY/D7s4hkI0FxEfGakKuPK/XUMPzPxi5g==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@typescript-eslint/tsconfig-utils": "^8.47.0",
|
||||
"@typescript-eslint/types": "^8.47.0",
|
||||
"debug": "^4.3.4"
|
||||
"@typescript-eslint/tsconfig-utils": "^8.58.1",
|
||||
"@typescript-eslint/types": "^8.58.1",
|
||||
"debug": "^4.4.3"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
|
||||
@@ -417,18 +399,17 @@
|
||||
"url": "https://opencollective.com/typescript-eslint"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"typescript": ">=4.8.4 <6.0.0"
|
||||
"typescript": ">=4.8.4 <6.1.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/scope-manager": {
|
||||
"version": "8.47.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.47.0.tgz",
|
||||
"integrity": "sha512-a0TTJk4HXMkfpFkL9/WaGTNuv7JWfFTQFJd6zS9dVAjKsojmv9HT55xzbEpnZoY+VUb+YXLMp+ihMLz/UlZfDg==",
|
||||
"version": "8.58.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.58.1.tgz",
|
||||
"integrity": "sha512-TPYUEqJK6avLcEjumWsIuTpuYODTTDAtoMdt8ZZa93uWMTX13Nb8L5leSje1NluammvU+oI3QRr5lLXPgihX3w==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@typescript-eslint/types": "8.47.0",
|
||||
"@typescript-eslint/visitor-keys": "8.47.0"
|
||||
"@typescript-eslint/types": "8.58.1",
|
||||
"@typescript-eslint/visitor-keys": "8.58.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
|
||||
@@ -439,11 +420,10 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/tsconfig-utils": {
|
||||
"version": "8.47.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.47.0.tgz",
|
||||
"integrity": "sha512-ybUAvjy4ZCL11uryalkKxuT3w3sXJAuWhOoGS3T/Wu+iUu1tGJmk5ytSY8gbdACNARmcYEB0COksD2j6hfGK2g==",
|
||||
"version": "8.58.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.58.1.tgz",
|
||||
"integrity": "sha512-JAr2hOIct2Q+qk3G+8YFfqkqi7sC86uNryT+2i5HzMa2MPjw4qNFvtjnw1IiA1rP7QhNKVe21mSSLaSjwA1Olw==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
|
||||
},
|
||||
@@ -452,21 +432,20 @@
|
||||
"url": "https://opencollective.com/typescript-eslint"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"typescript": ">=4.8.4 <6.0.0"
|
||||
"typescript": ">=4.8.4 <6.1.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/type-utils": {
|
||||
"version": "8.47.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.47.0.tgz",
|
||||
"integrity": "sha512-QC9RiCmZ2HmIdCEvhd1aJELBlD93ErziOXXlHEZyuBo3tBiAZieya0HLIxp+DoDWlsQqDawyKuNEhORyku+P8A==",
|
||||
"version": "8.58.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.58.1.tgz",
|
||||
"integrity": "sha512-HUFxvTJVroT+0rXVJC7eD5zol6ID+Sn5npVPWoFuHGg9Ncq5Q4EYstqR+UOqaNRFXi5TYkpXXkLhoCHe3G0+7w==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@typescript-eslint/types": "8.47.0",
|
||||
"@typescript-eslint/typescript-estree": "8.47.0",
|
||||
"@typescript-eslint/utils": "8.47.0",
|
||||
"debug": "^4.3.4",
|
||||
"ts-api-utils": "^2.1.0"
|
||||
"@typescript-eslint/types": "8.58.1",
|
||||
"@typescript-eslint/typescript-estree": "8.58.1",
|
||||
"@typescript-eslint/utils": "8.58.1",
|
||||
"debug": "^4.4.3",
|
||||
"ts-api-utils": "^2.5.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
|
||||
@@ -476,16 +455,15 @@
|
||||
"url": "https://opencollective.com/typescript-eslint"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"eslint": "^8.57.0 || ^9.0.0",
|
||||
"typescript": ">=4.8.4 <6.0.0"
|
||||
"eslint": "^8.57.0 || ^9.0.0 || ^10.0.0",
|
||||
"typescript": ">=4.8.4 <6.1.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/type-utils/node_modules/ts-api-utils": {
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.1.0.tgz",
|
||||
"integrity": "sha512-CUgTZL1irw8u29bzrOD/nH85jqyc74D6SshFgujOIA7osm2Rz7dYH77agkx7H4FBNxDq7Cjf+IjaX/8zwFW+ZQ==",
|
||||
"version": "2.5.0",
|
||||
"resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.5.0.tgz",
|
||||
"integrity": "sha512-OJ/ibxhPlqrMM0UiNHJ/0CKQkoKF243/AEmplt3qpRgkW8VG7IfOS41h7V8TjITqdByHzrjcS/2si+y4lIh8NA==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=18.12"
|
||||
},
|
||||
@@ -494,11 +472,10 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/types": {
|
||||
"version": "8.47.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.47.0.tgz",
|
||||
"integrity": "sha512-nHAE6bMKsizhA2uuYZbEbmp5z2UpffNrPEqiKIeN7VsV6UY/roxanWfoRrf6x/k9+Obf+GQdkm0nPU+vnMXo9A==",
|
||||
"version": "8.58.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.58.1.tgz",
|
||||
"integrity": "sha512-io/dV5Aw5ezwzfPBBWLoT+5QfVtP8O7q4Kftjn5azJ88bYyp/ZMCsyW1lpKK46EXJcaYMZ1JtYj+s/7TdzmQMw==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
|
||||
},
|
||||
@@ -508,22 +485,20 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/typescript-estree": {
|
||||
"version": "8.47.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.47.0.tgz",
|
||||
"integrity": "sha512-k6ti9UepJf5NpzCjH31hQNLHQWupTRPhZ+KFF8WtTuTpy7uHPfeg2NM7cP27aCGajoEplxJDFVCEm9TGPYyiVg==",
|
||||
"version": "8.58.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.58.1.tgz",
|
||||
"integrity": "sha512-w4w7WR7GHOjqqPnvAYbazq+Y5oS68b9CzasGtnd6jIeOIeKUzYzupGTB2T4LTPSv4d+WPeccbxuneTFHYgAAWg==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@typescript-eslint/project-service": "8.47.0",
|
||||
"@typescript-eslint/tsconfig-utils": "8.47.0",
|
||||
"@typescript-eslint/types": "8.47.0",
|
||||
"@typescript-eslint/visitor-keys": "8.47.0",
|
||||
"debug": "^4.3.4",
|
||||
"fast-glob": "^3.3.2",
|
||||
"is-glob": "^4.0.3",
|
||||
"minimatch": "^9.0.4",
|
||||
"semver": "^7.6.0",
|
||||
"ts-api-utils": "^2.1.0"
|
||||
"@typescript-eslint/project-service": "8.58.1",
|
||||
"@typescript-eslint/tsconfig-utils": "8.58.1",
|
||||
"@typescript-eslint/types": "8.58.1",
|
||||
"@typescript-eslint/visitor-keys": "8.58.1",
|
||||
"debug": "^4.4.3",
|
||||
"minimatch": "^10.2.2",
|
||||
"semver": "^7.7.3",
|
||||
"tinyglobby": "^0.2.15",
|
||||
"ts-api-utils": "^2.5.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
|
||||
@@ -533,7 +508,7 @@
|
||||
"url": "https://opencollective.com/typescript-eslint"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"typescript": ">=4.8.4 <6.0.0"
|
||||
"typescript": ">=4.8.4 <6.1.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/typescript-estree/node_modules/balanced-match": {
|
||||
@@ -546,9 +521,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/typescript-estree/node_modules/brace-expansion": {
|
||||
"version": "5.0.3",
|
||||
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-5.0.3.tgz",
|
||||
"integrity": "sha512-fy6KJm2RawA5RcHkLa1z/ScpBeA762UF9KmZQxwIbDtRJrgLzM10depAiEQ+CXYcoiqW1/m96OAAoke2nE9EeA==",
|
||||
"version": "5.0.5",
|
||||
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-5.0.5.tgz",
|
||||
"integrity": "sha512-VZznLgtwhn+Mact9tfiwx64fA9erHH/MCXEUfB/0bX/6Fz6ny5EGTXYltMocqg4xFAQZtnO3DHWWXi8RiuN7cQ==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"balanced-match": "^4.0.2"
|
||||
@@ -558,26 +533,25 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/typescript-estree/node_modules/minimatch": {
|
||||
"version": "9.0.7",
|
||||
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.7.tgz",
|
||||
"integrity": "sha512-MOwgjc8tfrpn5QQEvjijjmDVtMw2oL88ugTevzxQnzRLm6l3fVEF2gzU0kYeYYKD8C66+IdGX6peJ4MyUlUnPg==",
|
||||
"version": "10.2.5",
|
||||
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.2.5.tgz",
|
||||
"integrity": "sha512-MULkVLfKGYDFYejP07QOurDLLQpcjk7Fw+7jXS2R2czRQzR56yHRveU5NDJEOviH+hETZKSkIk5c+T23GjFUMg==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"brace-expansion": "^5.0.2"
|
||||
"brace-expansion": "^5.0.5"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=16 || 14 >=14.17"
|
||||
"node": "18 || 20 || >=22"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/isaacs"
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/typescript-estree/node_modules/ts-api-utils": {
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.1.0.tgz",
|
||||
"integrity": "sha512-CUgTZL1irw8u29bzrOD/nH85jqyc74D6SshFgujOIA7osm2Rz7dYH77agkx7H4FBNxDq7Cjf+IjaX/8zwFW+ZQ==",
|
||||
"version": "2.5.0",
|
||||
"resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.5.0.tgz",
|
||||
"integrity": "sha512-OJ/ibxhPlqrMM0UiNHJ/0CKQkoKF243/AEmplt3qpRgkW8VG7IfOS41h7V8TjITqdByHzrjcS/2si+y4lIh8NA==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=18.12"
|
||||
},
|
||||
@@ -586,16 +560,15 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/utils": {
|
||||
"version": "8.47.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.47.0.tgz",
|
||||
"integrity": "sha512-g7XrNf25iL4TJOiPqatNuaChyqt49a/onq5YsJ9+hXeugK+41LVg7AxikMfM02PC6jbNtZLCJj6AUcQXJS/jGQ==",
|
||||
"version": "8.58.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.58.1.tgz",
|
||||
"integrity": "sha512-Ln8R0tmWC7pTtLOzgJzYTXSCjJ9rDNHAqTaVONF4FEi2qwce8mD9iSOxOpLFFvWp/wBFlew0mjM1L1ihYWfBdQ==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@eslint-community/eslint-utils": "^4.7.0",
|
||||
"@typescript-eslint/scope-manager": "8.47.0",
|
||||
"@typescript-eslint/types": "8.47.0",
|
||||
"@typescript-eslint/typescript-estree": "8.47.0"
|
||||
"@eslint-community/eslint-utils": "^4.9.1",
|
||||
"@typescript-eslint/scope-manager": "8.58.1",
|
||||
"@typescript-eslint/types": "8.58.1",
|
||||
"@typescript-eslint/typescript-estree": "8.58.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
|
||||
@@ -605,19 +578,18 @@
|
||||
"url": "https://opencollective.com/typescript-eslint"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"eslint": "^8.57.0 || ^9.0.0",
|
||||
"typescript": ">=4.8.4 <6.0.0"
|
||||
"eslint": "^8.57.0 || ^9.0.0 || ^10.0.0",
|
||||
"typescript": ">=4.8.4 <6.1.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/visitor-keys": {
|
||||
"version": "8.47.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.47.0.tgz",
|
||||
"integrity": "sha512-SIV3/6eftCy1bNzCQoPmbWsRLujS8t5iDIZ4spZOBHqrM+yfX2ogg8Tt3PDTAVKw3sSCiUgg30uOAvK2r9zGjQ==",
|
||||
"version": "8.58.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.58.1.tgz",
|
||||
"integrity": "sha512-y+vH7QE8ycjoa0bWciFg7OpFcipUuem1ujhrdLtq1gByKwfbC7bPeKsiny9e0urg93DqwGcHey+bGRKCnF1nZQ==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@typescript-eslint/types": "8.47.0",
|
||||
"eslint-visitor-keys": "^4.2.1"
|
||||
"@typescript-eslint/types": "8.58.1",
|
||||
"eslint-visitor-keys": "^5.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
|
||||
@@ -628,13 +600,12 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/visitor-keys/node_modules/eslint-visitor-keys": {
|
||||
"version": "4.2.1",
|
||||
"resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.1.tgz",
|
||||
"integrity": "sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==",
|
||||
"version": "5.0.1",
|
||||
"resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-5.0.1.tgz",
|
||||
"integrity": "sha512-tD40eHxA35h0PEIZNeIjkHoDR4YjjJp34biM0mDvplBe//mB+IHCqHDGV7pxF+7MklTvighcCPPZC7ynWyjdTA==",
|
||||
"dev": true,
|
||||
"license": "Apache-2.0",
|
||||
"engines": {
|
||||
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
|
||||
"node": "^20.19.0 || ^22.13.0 || >=24"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://opencollective.com/eslint"
|
||||
@@ -928,10 +899,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/brace-expansion": {
|
||||
"version": "1.1.12",
|
||||
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
|
||||
"integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
|
||||
"license": "MIT",
|
||||
"version": "1.1.13",
|
||||
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.13.tgz",
|
||||
"integrity": "sha512-9ZLprWS6EENmhEOpjCYW2c8VkmOvckIJZfkr7rBW6dObmfgJ/L1GpSYW5Hpo9lDz4D1+n0Ckz8rU7FwHDQiG/w==",
|
||||
"dependencies": {
|
||||
"balanced-match": "^1.0.0",
|
||||
"concat-map": "0.0.1"
|
||||
@@ -1150,11 +1120,10 @@
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/debug": {
|
||||
"version": "4.4.0",
|
||||
"resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz",
|
||||
"integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==",
|
||||
"version": "4.4.3",
|
||||
"resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz",
|
||||
"integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==",
|
||||
"dev": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"ms": "^2.1.3"
|
||||
},
|
||||
@@ -1801,9 +1770,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/eslint-plugin-github/node_modules/brace-expansion": {
|
||||
"version": "5.0.3",
|
||||
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-5.0.3.tgz",
|
||||
"integrity": "sha512-fy6KJm2RawA5RcHkLa1z/ScpBeA762UF9KmZQxwIbDtRJrgLzM10depAiEQ+CXYcoiqW1/m96OAAoke2nE9EeA==",
|
||||
"version": "5.0.5",
|
||||
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-5.0.5.tgz",
|
||||
"integrity": "sha512-VZznLgtwhn+Mact9tfiwx64fA9erHH/MCXEUfB/0bX/6Fz6ny5EGTXYltMocqg4xFAQZtnO3DHWWXi8RiuN7cQ==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"balanced-match": "^4.0.2"
|
||||
@@ -2217,9 +2186,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/flatted": {
|
||||
"version": "3.2.7",
|
||||
"resolved": "https://registry.npmjs.org/flatted/-/flatted-3.2.7.tgz",
|
||||
"integrity": "sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ==",
|
||||
"version": "3.4.2",
|
||||
"resolved": "https://registry.npmjs.org/flatted/-/flatted-3.4.2.tgz",
|
||||
"integrity": "sha512-PjDse7RzhcPkIJwy5t7KPWQSZ9cAbzQXcafsetQoD7sOJRQlGikNbx7yZp2OotDnJyrDcbyRq3Ttb18iYOqkxA==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/for-each": {
|
||||
@@ -4019,9 +3988,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/semver": {
|
||||
"version": "7.6.2",
|
||||
"resolved": "https://registry.npmjs.org/semver/-/semver-7.6.2.tgz",
|
||||
"integrity": "sha512-FNAIBWCx9qcRhoHcgcJ0gvU7SN1lYU2ZXuSfl04bSC5OpvDHFyJCjdNHomPXxjQlCBU67YW64PzY7/VIEH7F2w==",
|
||||
"version": "7.7.4",
|
||||
"resolved": "https://registry.npmjs.org/semver/-/semver-7.7.4.tgz",
|
||||
"integrity": "sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA==",
|
||||
"dev": true,
|
||||
"bin": {
|
||||
"semver": "bin/semver.js"
|
||||
@@ -4319,6 +4288,51 @@
|
||||
"integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/tinyglobby": {
|
||||
"version": "0.2.16",
|
||||
"resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.16.tgz",
|
||||
"integrity": "sha512-pn99VhoACYR8nFHhxqix+uvsbXineAasWm5ojXoN8xEwK5Kd3/TrhNn1wByuD52UxWRLy8pu+kRMniEi6Eq9Zg==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"fdir": "^6.5.0",
|
||||
"picomatch": "^4.0.4"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=12.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/SuperchupuDev"
|
||||
}
|
||||
},
|
||||
"node_modules/tinyglobby/node_modules/fdir": {
|
||||
"version": "6.5.0",
|
||||
"resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz",
|
||||
"integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": ">=12.0.0"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"picomatch": "^3 || ^4"
|
||||
},
|
||||
"peerDependenciesMeta": {
|
||||
"picomatch": {
|
||||
"optional": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"node_modules/tinyglobby/node_modules/picomatch": {
|
||||
"version": "4.0.4",
|
||||
"resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.4.tgz",
|
||||
"integrity": "sha512-QP88BAKvMam/3NxH6vj2o21R6MjxZUAd6nlwAS/pnGvN9IVLocLHxGYIzFhg6fUQ+5th6P4dv4eW9jX3DSIj7A==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/jonschlinkert"
|
||||
}
|
||||
},
|
||||
"node_modules/titleize": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/titleize/-/titleize-3.0.0.tgz",
|
||||
@@ -4756,9 +4770,9 @@
|
||||
}
|
||||
},
|
||||
"@eslint-community/regexpp": {
|
||||
"version": "4.10.0",
|
||||
"resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.10.0.tgz",
|
||||
"integrity": "sha512-Cu96Sd2By9mCNTx2iyKOmq10v22jUVQv0lQnlGNy16oE9589yE+QADPbrMGCkA51cKZSg3Pu/aTJVTGfL/qjUA==",
|
||||
"version": "4.12.2",
|
||||
"resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.12.2.tgz",
|
||||
"integrity": "sha512-EriSTlt5OC9/7SXkRSCAhfSxxoSUgBm33OH+IkwbdpgoqsSsUg7y3uh+IICI/Qg4BBWr3U2i39RpmycbxMq4ew==",
|
||||
"dev": true
|
||||
},
|
||||
"@eslint/eslintrc": {
|
||||
@@ -4867,12 +4881,6 @@
|
||||
"picomatch": "^4.0.3"
|
||||
},
|
||||
"dependencies": {
|
||||
"@typescript-eslint/types": {
|
||||
"version": "8.56.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.56.0.tgz",
|
||||
"integrity": "sha512-DBsLPs3GsWhX5HylbP9HNG15U0bnwut55Lx12bHB9MpXxQ+R5GC8MwQe+N1UFXxAeQDvEsEDY6ZYwX03K7Z6HQ==",
|
||||
"dev": true
|
||||
},
|
||||
"eslint-visitor-keys": {
|
||||
"version": "4.2.1",
|
||||
"resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.1.tgz",
|
||||
@@ -4914,20 +4922,19 @@
|
||||
}
|
||||
},
|
||||
"@typescript-eslint/eslint-plugin": {
|
||||
"version": "8.47.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.47.0.tgz",
|
||||
"integrity": "sha512-fe0rz9WJQ5t2iaLfdbDc9T80GJy0AeO453q8C3YCilnGozvOyCG5t+EZtg7j7D88+c3FipfP/x+wzGnh1xp8ZA==",
|
||||
"version": "8.58.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.58.1.tgz",
|
||||
"integrity": "sha512-eSkwoemjo76bdXl2MYqtxg51HNwUSkWfODUOQ3PaTLZGh9uIWWFZIjyjaJnex7wXDu+TRx+ATsnSxdN9YWfRTQ==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"@eslint-community/regexpp": "^4.10.0",
|
||||
"@typescript-eslint/scope-manager": "8.47.0",
|
||||
"@typescript-eslint/type-utils": "8.47.0",
|
||||
"@typescript-eslint/utils": "8.47.0",
|
||||
"@typescript-eslint/visitor-keys": "8.47.0",
|
||||
"graphemer": "^1.4.0",
|
||||
"ignore": "^7.0.0",
|
||||
"@eslint-community/regexpp": "^4.12.2",
|
||||
"@typescript-eslint/scope-manager": "8.58.1",
|
||||
"@typescript-eslint/type-utils": "8.58.1",
|
||||
"@typescript-eslint/utils": "8.58.1",
|
||||
"@typescript-eslint/visitor-keys": "8.58.1",
|
||||
"ignore": "^7.0.5",
|
||||
"natural-compare": "^1.4.0",
|
||||
"ts-api-utils": "^2.1.0"
|
||||
"ts-api-utils": "^2.5.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"ignore": {
|
||||
@@ -4937,99 +4944,98 @@
|
||||
"dev": true
|
||||
},
|
||||
"ts-api-utils": {
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.1.0.tgz",
|
||||
"integrity": "sha512-CUgTZL1irw8u29bzrOD/nH85jqyc74D6SshFgujOIA7osm2Rz7dYH77agkx7H4FBNxDq7Cjf+IjaX/8zwFW+ZQ==",
|
||||
"version": "2.5.0",
|
||||
"resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.5.0.tgz",
|
||||
"integrity": "sha512-OJ/ibxhPlqrMM0UiNHJ/0CKQkoKF243/AEmplt3qpRgkW8VG7IfOS41h7V8TjITqdByHzrjcS/2si+y4lIh8NA==",
|
||||
"dev": true,
|
||||
"requires": {}
|
||||
}
|
||||
}
|
||||
},
|
||||
"@typescript-eslint/parser": {
|
||||
"version": "8.47.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.47.0.tgz",
|
||||
"integrity": "sha512-lJi3PfxVmo0AkEY93ecfN+r8SofEqZNGByvHAI3GBLrvt1Cw6H5k1IM02nSzu0RfUafr2EvFSw0wAsZgubNplQ==",
|
||||
"version": "8.58.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.58.1.tgz",
|
||||
"integrity": "sha512-gGkiNMPqerb2cJSVcruigx9eHBlLG14fSdPdqMoOcBfh+vvn4iCq2C8MzUB89PrxOXk0y3GZ1yIWb9aOzL93bw==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"@typescript-eslint/scope-manager": "8.47.0",
|
||||
"@typescript-eslint/types": "8.47.0",
|
||||
"@typescript-eslint/typescript-estree": "8.47.0",
|
||||
"@typescript-eslint/visitor-keys": "8.47.0",
|
||||
"debug": "^4.3.4"
|
||||
"@typescript-eslint/scope-manager": "8.58.1",
|
||||
"@typescript-eslint/types": "8.58.1",
|
||||
"@typescript-eslint/typescript-estree": "8.58.1",
|
||||
"@typescript-eslint/visitor-keys": "8.58.1",
|
||||
"debug": "^4.4.3"
|
||||
}
|
||||
},
|
||||
"@typescript-eslint/project-service": {
|
||||
"version": "8.47.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.47.0.tgz",
|
||||
"integrity": "sha512-2X4BX8hUeB5JcA1TQJ7GjcgulXQ+5UkNb0DL8gHsHUHdFoiCTJoYLTpib3LtSDPZsRET5ygN4qqIWrHyYIKERA==",
|
||||
"version": "8.58.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.58.1.tgz",
|
||||
"integrity": "sha512-gfQ8fk6cxhtptek+/8ZIqw8YrRW5048Gug8Ts5IYcMLCw18iUgrZAEY/D7s4hkI0FxEfGakKuPK/XUMPzPxi5g==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"@typescript-eslint/tsconfig-utils": "^8.47.0",
|
||||
"@typescript-eslint/types": "^8.47.0",
|
||||
"debug": "^4.3.4"
|
||||
"@typescript-eslint/tsconfig-utils": "^8.58.1",
|
||||
"@typescript-eslint/types": "^8.58.1",
|
||||
"debug": "^4.4.3"
|
||||
}
|
||||
},
|
||||
"@typescript-eslint/scope-manager": {
|
||||
"version": "8.47.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.47.0.tgz",
|
||||
"integrity": "sha512-a0TTJk4HXMkfpFkL9/WaGTNuv7JWfFTQFJd6zS9dVAjKsojmv9HT55xzbEpnZoY+VUb+YXLMp+ihMLz/UlZfDg==",
|
||||
"version": "8.58.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.58.1.tgz",
|
||||
"integrity": "sha512-TPYUEqJK6avLcEjumWsIuTpuYODTTDAtoMdt8ZZa93uWMTX13Nb8L5leSje1NluammvU+oI3QRr5lLXPgihX3w==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"@typescript-eslint/types": "8.47.0",
|
||||
"@typescript-eslint/visitor-keys": "8.47.0"
|
||||
"@typescript-eslint/types": "8.58.1",
|
||||
"@typescript-eslint/visitor-keys": "8.58.1"
|
||||
}
|
||||
},
|
||||
"@typescript-eslint/tsconfig-utils": {
|
||||
"version": "8.47.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.47.0.tgz",
|
||||
"integrity": "sha512-ybUAvjy4ZCL11uryalkKxuT3w3sXJAuWhOoGS3T/Wu+iUu1tGJmk5ytSY8gbdACNARmcYEB0COksD2j6hfGK2g==",
|
||||
"version": "8.58.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.58.1.tgz",
|
||||
"integrity": "sha512-JAr2hOIct2Q+qk3G+8YFfqkqi7sC86uNryT+2i5HzMa2MPjw4qNFvtjnw1IiA1rP7QhNKVe21mSSLaSjwA1Olw==",
|
||||
"dev": true,
|
||||
"requires": {}
|
||||
},
|
||||
"@typescript-eslint/type-utils": {
|
||||
"version": "8.47.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.47.0.tgz",
|
||||
"integrity": "sha512-QC9RiCmZ2HmIdCEvhd1aJELBlD93ErziOXXlHEZyuBo3tBiAZieya0HLIxp+DoDWlsQqDawyKuNEhORyku+P8A==",
|
||||
"version": "8.58.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.58.1.tgz",
|
||||
"integrity": "sha512-HUFxvTJVroT+0rXVJC7eD5zol6ID+Sn5npVPWoFuHGg9Ncq5Q4EYstqR+UOqaNRFXi5TYkpXXkLhoCHe3G0+7w==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"@typescript-eslint/types": "8.47.0",
|
||||
"@typescript-eslint/typescript-estree": "8.47.0",
|
||||
"@typescript-eslint/utils": "8.47.0",
|
||||
"debug": "^4.3.4",
|
||||
"ts-api-utils": "^2.1.0"
|
||||
"@typescript-eslint/types": "8.58.1",
|
||||
"@typescript-eslint/typescript-estree": "8.58.1",
|
||||
"@typescript-eslint/utils": "8.58.1",
|
||||
"debug": "^4.4.3",
|
||||
"ts-api-utils": "^2.5.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"ts-api-utils": {
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.1.0.tgz",
|
||||
"integrity": "sha512-CUgTZL1irw8u29bzrOD/nH85jqyc74D6SshFgujOIA7osm2Rz7dYH77agkx7H4FBNxDq7Cjf+IjaX/8zwFW+ZQ==",
|
||||
"version": "2.5.0",
|
||||
"resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.5.0.tgz",
|
||||
"integrity": "sha512-OJ/ibxhPlqrMM0UiNHJ/0CKQkoKF243/AEmplt3qpRgkW8VG7IfOS41h7V8TjITqdByHzrjcS/2si+y4lIh8NA==",
|
||||
"dev": true,
|
||||
"requires": {}
|
||||
}
|
||||
}
|
||||
},
|
||||
"@typescript-eslint/types": {
|
||||
"version": "8.47.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.47.0.tgz",
|
||||
"integrity": "sha512-nHAE6bMKsizhA2uuYZbEbmp5z2UpffNrPEqiKIeN7VsV6UY/roxanWfoRrf6x/k9+Obf+GQdkm0nPU+vnMXo9A==",
|
||||
"version": "8.58.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.58.1.tgz",
|
||||
"integrity": "sha512-io/dV5Aw5ezwzfPBBWLoT+5QfVtP8O7q4Kftjn5azJ88bYyp/ZMCsyW1lpKK46EXJcaYMZ1JtYj+s/7TdzmQMw==",
|
||||
"dev": true
|
||||
},
|
||||
"@typescript-eslint/typescript-estree": {
|
||||
"version": "8.47.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.47.0.tgz",
|
||||
"integrity": "sha512-k6ti9UepJf5NpzCjH31hQNLHQWupTRPhZ+KFF8WtTuTpy7uHPfeg2NM7cP27aCGajoEplxJDFVCEm9TGPYyiVg==",
|
||||
"version": "8.58.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.58.1.tgz",
|
||||
"integrity": "sha512-w4w7WR7GHOjqqPnvAYbazq+Y5oS68b9CzasGtnd6jIeOIeKUzYzupGTB2T4LTPSv4d+WPeccbxuneTFHYgAAWg==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"@typescript-eslint/project-service": "8.47.0",
|
||||
"@typescript-eslint/tsconfig-utils": "8.47.0",
|
||||
"@typescript-eslint/types": "8.47.0",
|
||||
"@typescript-eslint/visitor-keys": "8.47.0",
|
||||
"debug": "^4.3.4",
|
||||
"fast-glob": "^3.3.2",
|
||||
"is-glob": "^4.0.3",
|
||||
"minimatch": "^9.0.4",
|
||||
"semver": "^7.6.0",
|
||||
"ts-api-utils": "^2.1.0"
|
||||
"@typescript-eslint/project-service": "8.58.1",
|
||||
"@typescript-eslint/tsconfig-utils": "8.58.1",
|
||||
"@typescript-eslint/types": "8.58.1",
|
||||
"@typescript-eslint/visitor-keys": "8.58.1",
|
||||
"debug": "^4.4.3",
|
||||
"minimatch": "^10.2.2",
|
||||
"semver": "^7.7.3",
|
||||
"tinyglobby": "^0.2.15",
|
||||
"ts-api-utils": "^2.5.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"balanced-match": {
|
||||
@@ -5039,58 +5045,58 @@
|
||||
"dev": true
|
||||
},
|
||||
"brace-expansion": {
|
||||
"version": "5.0.3",
|
||||
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-5.0.3.tgz",
|
||||
"integrity": "sha512-fy6KJm2RawA5RcHkLa1z/ScpBeA762UF9KmZQxwIbDtRJrgLzM10depAiEQ+CXYcoiqW1/m96OAAoke2nE9EeA==",
|
||||
"version": "5.0.5",
|
||||
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-5.0.5.tgz",
|
||||
"integrity": "sha512-VZznLgtwhn+Mact9tfiwx64fA9erHH/MCXEUfB/0bX/6Fz6ny5EGTXYltMocqg4xFAQZtnO3DHWWXi8RiuN7cQ==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"balanced-match": "^4.0.2"
|
||||
}
|
||||
},
|
||||
"minimatch": {
|
||||
"version": "9.0.7",
|
||||
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.7.tgz",
|
||||
"integrity": "sha512-MOwgjc8tfrpn5QQEvjijjmDVtMw2oL88ugTevzxQnzRLm6l3fVEF2gzU0kYeYYKD8C66+IdGX6peJ4MyUlUnPg==",
|
||||
"version": "10.2.5",
|
||||
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.2.5.tgz",
|
||||
"integrity": "sha512-MULkVLfKGYDFYejP07QOurDLLQpcjk7Fw+7jXS2R2czRQzR56yHRveU5NDJEOviH+hETZKSkIk5c+T23GjFUMg==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"brace-expansion": "^5.0.2"
|
||||
"brace-expansion": "^5.0.5"
|
||||
}
|
||||
},
|
||||
"ts-api-utils": {
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.1.0.tgz",
|
||||
"integrity": "sha512-CUgTZL1irw8u29bzrOD/nH85jqyc74D6SshFgujOIA7osm2Rz7dYH77agkx7H4FBNxDq7Cjf+IjaX/8zwFW+ZQ==",
|
||||
"version": "2.5.0",
|
||||
"resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.5.0.tgz",
|
||||
"integrity": "sha512-OJ/ibxhPlqrMM0UiNHJ/0CKQkoKF243/AEmplt3qpRgkW8VG7IfOS41h7V8TjITqdByHzrjcS/2si+y4lIh8NA==",
|
||||
"dev": true,
|
||||
"requires": {}
|
||||
}
|
||||
}
|
||||
},
|
||||
"@typescript-eslint/utils": {
|
||||
"version": "8.47.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.47.0.tgz",
|
||||
"integrity": "sha512-g7XrNf25iL4TJOiPqatNuaChyqt49a/onq5YsJ9+hXeugK+41LVg7AxikMfM02PC6jbNtZLCJj6AUcQXJS/jGQ==",
|
||||
"version": "8.58.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.58.1.tgz",
|
||||
"integrity": "sha512-Ln8R0tmWC7pTtLOzgJzYTXSCjJ9rDNHAqTaVONF4FEi2qwce8mD9iSOxOpLFFvWp/wBFlew0mjM1L1ihYWfBdQ==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"@eslint-community/eslint-utils": "^4.7.0",
|
||||
"@typescript-eslint/scope-manager": "8.47.0",
|
||||
"@typescript-eslint/types": "8.47.0",
|
||||
"@typescript-eslint/typescript-estree": "8.47.0"
|
||||
"@eslint-community/eslint-utils": "^4.9.1",
|
||||
"@typescript-eslint/scope-manager": "8.58.1",
|
||||
"@typescript-eslint/types": "8.58.1",
|
||||
"@typescript-eslint/typescript-estree": "8.58.1"
|
||||
}
|
||||
},
|
||||
"@typescript-eslint/visitor-keys": {
|
||||
"version": "8.47.0",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.47.0.tgz",
|
||||
"integrity": "sha512-SIV3/6eftCy1bNzCQoPmbWsRLujS8t5iDIZ4spZOBHqrM+yfX2ogg8Tt3PDTAVKw3sSCiUgg30uOAvK2r9zGjQ==",
|
||||
"version": "8.58.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.58.1.tgz",
|
||||
"integrity": "sha512-y+vH7QE8ycjoa0bWciFg7OpFcipUuem1ujhrdLtq1gByKwfbC7bPeKsiny9e0urg93DqwGcHey+bGRKCnF1nZQ==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"@typescript-eslint/types": "8.47.0",
|
||||
"eslint-visitor-keys": "^4.2.1"
|
||||
"@typescript-eslint/types": "8.58.1",
|
||||
"eslint-visitor-keys": "^5.0.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"eslint-visitor-keys": {
|
||||
"version": "4.2.1",
|
||||
"resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.1.tgz",
|
||||
"integrity": "sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==",
|
||||
"version": "5.0.1",
|
||||
"resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-5.0.1.tgz",
|
||||
"integrity": "sha512-tD40eHxA35h0PEIZNeIjkHoDR4YjjJp34biM0mDvplBe//mB+IHCqHDGV7pxF+7MklTvighcCPPZC7ynWyjdTA==",
|
||||
"dev": true
|
||||
}
|
||||
}
|
||||
@@ -5299,9 +5305,9 @@
|
||||
}
|
||||
},
|
||||
"brace-expansion": {
|
||||
"version": "1.1.12",
|
||||
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
|
||||
"integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
|
||||
"version": "1.1.13",
|
||||
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.13.tgz",
|
||||
"integrity": "sha512-9ZLprWS6EENmhEOpjCYW2c8VkmOvckIJZfkr7rBW6dObmfgJ/L1GpSYW5Hpo9lDz4D1+n0Ckz8rU7FwHDQiG/w==",
|
||||
"requires": {
|
||||
"balanced-match": "^1.0.0",
|
||||
"concat-map": "0.0.1"
|
||||
@@ -5438,9 +5444,9 @@
|
||||
"dev": true
|
||||
},
|
||||
"debug": {
|
||||
"version": "4.4.0",
|
||||
"resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz",
|
||||
"integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==",
|
||||
"version": "4.4.3",
|
||||
"resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz",
|
||||
"integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"ms": "^2.1.3"
|
||||
@@ -5884,9 +5890,9 @@
|
||||
"dev": true
|
||||
},
|
||||
"brace-expansion": {
|
||||
"version": "5.0.3",
|
||||
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-5.0.3.tgz",
|
||||
"integrity": "sha512-fy6KJm2RawA5RcHkLa1z/ScpBeA762UF9KmZQxwIbDtRJrgLzM10depAiEQ+CXYcoiqW1/m96OAAoke2nE9EeA==",
|
||||
"version": "5.0.5",
|
||||
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-5.0.5.tgz",
|
||||
"integrity": "sha512-VZznLgtwhn+Mact9tfiwx64fA9erHH/MCXEUfB/0bX/6Fz6ny5EGTXYltMocqg4xFAQZtnO3DHWWXi8RiuN7cQ==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"balanced-match": "^4.0.2"
|
||||
@@ -6191,9 +6197,9 @@
|
||||
}
|
||||
},
|
||||
"flatted": {
|
||||
"version": "3.2.7",
|
||||
"resolved": "https://registry.npmjs.org/flatted/-/flatted-3.2.7.tgz",
|
||||
"integrity": "sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ==",
|
||||
"version": "3.4.2",
|
||||
"resolved": "https://registry.npmjs.org/flatted/-/flatted-3.4.2.tgz",
|
||||
"integrity": "sha512-PjDse7RzhcPkIJwy5t7KPWQSZ9cAbzQXcafsetQoD7sOJRQlGikNbx7yZp2OotDnJyrDcbyRq3Ttb18iYOqkxA==",
|
||||
"dev": true
|
||||
},
|
||||
"for-each": {
|
||||
@@ -7389,9 +7395,9 @@
|
||||
}
|
||||
},
|
||||
"semver": {
|
||||
"version": "7.6.2",
|
||||
"resolved": "https://registry.npmjs.org/semver/-/semver-7.6.2.tgz",
|
||||
"integrity": "sha512-FNAIBWCx9qcRhoHcgcJ0gvU7SN1lYU2ZXuSfl04bSC5OpvDHFyJCjdNHomPXxjQlCBU67YW64PzY7/VIEH7F2w==",
|
||||
"version": "7.7.4",
|
||||
"resolved": "https://registry.npmjs.org/semver/-/semver-7.7.4.tgz",
|
||||
"integrity": "sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA==",
|
||||
"dev": true
|
||||
},
|
||||
"shebang-command": {
|
||||
@@ -7587,6 +7593,31 @@
|
||||
"integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==",
|
||||
"dev": true
|
||||
},
|
||||
"tinyglobby": {
|
||||
"version": "0.2.16",
|
||||
"resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.16.tgz",
|
||||
"integrity": "sha512-pn99VhoACYR8nFHhxqix+uvsbXineAasWm5ojXoN8xEwK5Kd3/TrhNn1wByuD52UxWRLy8pu+kRMniEi6Eq9Zg==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"fdir": "^6.5.0",
|
||||
"picomatch": "^4.0.4"
|
||||
},
|
||||
"dependencies": {
|
||||
"fdir": {
|
||||
"version": "6.5.0",
|
||||
"resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz",
|
||||
"integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==",
|
||||
"dev": true,
|
||||
"requires": {}
|
||||
},
|
||||
"picomatch": {
|
||||
"version": "4.0.4",
|
||||
"resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.4.tgz",
|
||||
"integrity": "sha512-QP88BAKvMam/3NxH6vj2o21R6MjxZUAd6nlwAS/pnGvN9IVLocLHxGYIzFhg6fUQ+5th6P4dv4eW9jX3DSIj7A==",
|
||||
"dev": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"titleize": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/titleize/-/titleize-3.0.0.tgz",
|
||||
|
||||
@@ -37,7 +37,7 @@
|
||||
"devDependencies": {
|
||||
"@stylistic/eslint-plugin": "^5.10.0",
|
||||
"@types/node": "^22.0.0",
|
||||
"@typescript-eslint/eslint-plugin": "^8.0.0",
|
||||
"@typescript-eslint/eslint-plugin": "^8.58.1",
|
||||
"@typescript-eslint/parser": "^8.0.0",
|
||||
"@vercel/ncc": "^0.38.3",
|
||||
"eslint": "^8.47.0",
|
||||
|
||||
@@ -6,8 +6,8 @@ NODE_URL=https://nodejs.org/dist
|
||||
NODE_ALPINE_URL=https://github.com/actions/alpine_nodejs/releases/download
|
||||
# When you update Node versions you must also create a new release of alpine_nodejs at that updated version.
|
||||
# Follow the instructions here: https://github.com/actions/alpine_nodejs?tab=readme-ov-file#getting-started
|
||||
NODE20_VERSION="20.20.1"
|
||||
NODE24_VERSION="24.14.0"
|
||||
NODE20_VERSION="20.20.2"
|
||||
NODE24_VERSION="24.14.1"
|
||||
|
||||
get_abs_path() {
|
||||
# exploits the fact that pwd will print abs path when no args
|
||||
|
||||
@@ -10,6 +10,13 @@ if %ERRORLEVEL% EQU 0 (
|
||||
exit /b 0
|
||||
)
|
||||
|
||||
if "%ACTIONS_RUNNER_RETURN_VERSION_DEPRECATED_EXIT_CODE%"=="1" (
|
||||
if %ERRORLEVEL% EQU 7 (
|
||||
echo "Runner listener exit with deprecated version error code: %ERRORLEVEL%."
|
||||
exit /b %ERRORLEVEL%
|
||||
)
|
||||
)
|
||||
|
||||
if %ERRORLEVEL% EQU 1 (
|
||||
echo "Runner listener exit with terminated error, stop the service, no retry needed."
|
||||
exit /b 0
|
||||
|
||||
@@ -34,11 +34,13 @@ fi
|
||||
|
||||
updateFile="update.finished"
|
||||
"$DIR"/bin/Runner.Listener run $*
|
||||
|
||||
returnCode=$?
|
||||
if [[ $returnCode == 0 ]]; then
|
||||
echo "Runner listener exit with 0 return code, stop the service, no retry needed."
|
||||
exit 0
|
||||
elif [[ "$ACTIONS_RUNNER_RETURN_VERSION_DEPRECATED_EXIT_CODE" == "1" && $returnCode -eq 7 ]]; then
|
||||
echo "Runner listener exit with deprecated version exit code: ${returnCode}."
|
||||
exit "$returnCode"
|
||||
elif [[ $returnCode == 1 ]]; then
|
||||
echo "Runner listener exit with terminated error, stop the service, no retry needed."
|
||||
exit 0
|
||||
|
||||
@@ -25,7 +25,14 @@ call "%~dp0run-helper.cmd" %*
|
||||
if %ERRORLEVEL% EQU 1 (
|
||||
echo "Restarting runner..."
|
||||
goto :launch_helper
|
||||
) else (
|
||||
echo "Exiting runner..."
|
||||
exit /b 0
|
||||
)
|
||||
|
||||
if "%ACTIONS_RUNNER_RETURN_VERSION_DEPRECATED_EXIT_CODE%"=="1" (
|
||||
if %ERRORLEVEL% EQU 7 (
|
||||
echo "Exiting runner with deprecated version error code: %ERRORLEVEL%"
|
||||
exit /b %ERRORLEVEL%
|
||||
)
|
||||
)
|
||||
|
||||
echo "Exiting runner..."
|
||||
exit /b 0
|
||||
|
||||
@@ -19,6 +19,9 @@ run() {
|
||||
returnCode=$?
|
||||
if [[ $returnCode -eq 2 ]]; then
|
||||
echo "Restarting runner..."
|
||||
elif [[ "$ACTIONS_RUNNER_RETURN_VERSION_DEPRECATED_EXIT_CODE" == "1" && $returnCode -eq 7 ]]; then
|
||||
echo "Exiting runner..."
|
||||
exit "$returnCode"
|
||||
else
|
||||
echo "Exiting runner..."
|
||||
exit 0
|
||||
@@ -42,6 +45,9 @@ runWithManualTrap() {
|
||||
returnCode=$?
|
||||
if [[ $returnCode -eq 2 ]]; then
|
||||
echo "Restarting runner..."
|
||||
elif [[ "$ACTIONS_RUNNER_RETURN_VERSION_DEPRECATED_EXIT_CODE" == "1" && $returnCode -eq 7 ]]; then
|
||||
echo "Exiting runner..."
|
||||
exit "$returnCode"
|
||||
else
|
||||
echo "Exiting runner..."
|
||||
# Unregister signal handling before exit
|
||||
|
||||
@@ -159,6 +159,7 @@ namespace GitHub.Runner.Common
|
||||
// and the runner should be restarted. This is a temporary code and will be removed in the future after
|
||||
// the runner is migrated to runner admin.
|
||||
public const int RunnerConfigurationRefreshed = 6;
|
||||
public const int RunnerVersionDeprecated = 7;
|
||||
}
|
||||
|
||||
public static class Features
|
||||
@@ -176,6 +177,8 @@ namespace GitHub.Runner.Common
|
||||
public static readonly string SetOrchestrationIdEnvForActions = "actions_set_orchestration_id_env_for_actions";
|
||||
public static readonly string SendJobLevelAnnotations = "actions_send_job_level_annotations";
|
||||
public static readonly string EmitCompositeMarkers = "actions_runner_emit_composite_markers";
|
||||
public static readonly string BatchActionResolution = "actions_batch_action_resolution";
|
||||
public static readonly string UseBearerTokenForCodeload = "actions_use_bearer_token_for_codeload";
|
||||
}
|
||||
|
||||
// Node version migration related constants
|
||||
@@ -194,8 +197,22 @@ namespace GitHub.Runner.Common
|
||||
public static readonly string RequireNode24Flag = "actions.runner.requirenode24";
|
||||
public static readonly string WarnOnNode20Flag = "actions.runner.warnonnode20";
|
||||
|
||||
// Feature flags for Linux ARM32 deprecation
|
||||
public static readonly string DeprecateLinuxArm32Flag = "actions_runner_deprecate_linux_arm32";
|
||||
public static readonly string KillLinuxArm32Flag = "actions_runner_kill_linux_arm32";
|
||||
|
||||
// Blog post URL for Node 20 deprecation
|
||||
public static readonly string Node20DeprecationUrl = "https://github.blog/changelog/2025-09-19-deprecation-of-node-20-on-github-actions-runners/";
|
||||
|
||||
// Node 20 migration dates (hardcoded fallbacks, can be overridden via job variables)
|
||||
public static readonly string Node24DefaultDate = "June 2nd, 2026";
|
||||
public static readonly string Node20RemovalDate = "September 16th, 2026";
|
||||
|
||||
// Variable keys for server-overridable dates
|
||||
public static readonly string Node24DefaultDateVariable = "actions_runner_node24_default_date";
|
||||
public static readonly string Node20RemovalDateVariable = "actions_runner_node20_removal_date";
|
||||
|
||||
public static readonly string LinuxArm32DeprecationMessage = "Linux ARM32 runners are deprecated and will no longer be supported after {0}. Please migrate to a supported platform.";
|
||||
}
|
||||
|
||||
public static readonly string InternalTelemetryIssueDataKey = "_internal_telemetry";
|
||||
@@ -277,6 +294,7 @@ namespace GitHub.Runner.Common
|
||||
public static readonly string AllowUnsupportedCommands = "ACTIONS_ALLOW_UNSECURE_COMMANDS";
|
||||
public static readonly string AllowUnsupportedStopCommandTokens = "ACTIONS_ALLOW_UNSECURE_STOPCOMMAND_TOKENS";
|
||||
public static readonly string RequireJobContainer = "ACTIONS_RUNNER_REQUIRE_JOB_CONTAINER";
|
||||
public static readonly string ReturnVersionDeprecatedExitCode = "ACTIONS_RUNNER_RETURN_VERSION_DEPRECATED_EXIT_CODE";
|
||||
public static readonly string RunnerDebug = "ACTIONS_RUNNER_DEBUG";
|
||||
public static readonly string StepDebug = "ACTIONS_STEP_DEBUG";
|
||||
}
|
||||
|
||||
@@ -12,6 +12,13 @@ namespace GitHub.Runner.Common
|
||||
private ISecretMasker _secretMasker;
|
||||
private TraceSource _traceSource;
|
||||
|
||||
/// <summary>
|
||||
/// The underlying <see cref="System.Diagnostics.TraceSource"/> for this instance.
|
||||
/// Useful when third-party libraries require a <see cref="System.Diagnostics.TraceSource"/>
|
||||
/// to route their diagnostics into the runner's log infrastructure.
|
||||
/// </summary>
|
||||
public TraceSource Source => _traceSource;
|
||||
|
||||
public Tracing(string name, ISecretMasker secretMasker, SourceSwitch sourceSwitch, HostTraceListener traceListener, StdoutTraceListener stdoutTraceListener = null)
|
||||
{
|
||||
ArgUtil.NotNull(secretMasker, nameof(secretMasker));
|
||||
|
||||
@@ -58,7 +58,7 @@ namespace GitHub.Runner.Common.Util
|
||||
{
|
||||
return (Constants.Runner.NodeMigration.Node24, null);
|
||||
}
|
||||
|
||||
|
||||
// Get environment variable details with source information
|
||||
var forceNode24Details = GetEnvironmentVariableDetails(
|
||||
Constants.Runner.NodeMigration.ForceNode24Variable, workflowEnvironment);
|
||||
@@ -108,14 +108,50 @@ namespace GitHub.Runner.Common.Util
|
||||
|
||||
/// <summary>
|
||||
/// Checks if Node24 is requested but running on ARM32 Linux, and determines if fallback is needed.
|
||||
/// Also handles ARM32 deprecation and kill switch phases.
|
||||
/// </summary>
|
||||
/// <param name="preferredVersion">The preferred Node version</param>
|
||||
/// <param name="deprecateArm32">Feature flag indicating ARM32 Linux is deprecated</param>
|
||||
/// <param name="killArm32">Feature flag indicating ARM32 Linux should no longer work</param>
|
||||
/// <returns>A tuple containing the adjusted node version and an optional warning message</returns>
|
||||
public static (string nodeVersion, string warningMessage) CheckNodeVersionForLinuxArm32(string preferredVersion)
|
||||
public static (string nodeVersion, string warningMessage) CheckNodeVersionForLinuxArm32(
|
||||
string preferredVersion,
|
||||
bool deprecateArm32 = false,
|
||||
bool killArm32 = false,
|
||||
string node20RemovalDate = null)
|
||||
{
|
||||
if (string.Equals(preferredVersion, Constants.Runner.NodeMigration.Node24, StringComparison.OrdinalIgnoreCase) &&
|
||||
Constants.Runner.PlatformArchitecture.Equals(Constants.Architecture.Arm) &&
|
||||
Constants.Runner.Platform.Equals(Constants.OSPlatform.Linux))
|
||||
bool isArm32Linux = Constants.Runner.PlatformArchitecture.Equals(Constants.Architecture.Arm) &&
|
||||
Constants.Runner.Platform.Equals(Constants.OSPlatform.Linux);
|
||||
|
||||
if (!isArm32Linux)
|
||||
{
|
||||
return (preferredVersion, null);
|
||||
}
|
||||
|
||||
// ARM32 kill switch: runner should no longer work on this platform
|
||||
if (killArm32)
|
||||
{
|
||||
return (null, "Linux ARM32 runners are no longer supported. Please migrate to a supported platform.");
|
||||
}
|
||||
|
||||
// ARM32 deprecation warning: continue using node20 but warn about upcoming end of support
|
||||
if (deprecateArm32)
|
||||
{
|
||||
string effectiveDate = string.IsNullOrEmpty(node20RemovalDate) ? Constants.Runner.NodeMigration.Node20RemovalDate : node20RemovalDate;
|
||||
string deprecationWarning = string.Format(
|
||||
Constants.Runner.NodeMigration.LinuxArm32DeprecationMessage,
|
||||
effectiveDate);
|
||||
|
||||
if (string.Equals(preferredVersion, Constants.Runner.NodeMigration.Node24, StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
return (Constants.Runner.NodeMigration.Node20, deprecationWarning);
|
||||
}
|
||||
|
||||
return (preferredVersion, deprecationWarning);
|
||||
}
|
||||
|
||||
// Legacy behavior: fall back to node20 if node24 was requested on ARM32
|
||||
if (string.Equals(preferredVersion, Constants.Runner.NodeMigration.Node24, StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
return (Constants.Runner.NodeMigration.Node20, "Node 24 is not supported on Linux ARM32 platforms. Falling back to Node 20.");
|
||||
}
|
||||
|
||||
@@ -141,9 +141,9 @@ namespace GitHub.Runner.Listener
|
||||
}
|
||||
catch (AccessDeniedException e) when (e.ErrorCode == 1)
|
||||
{
|
||||
terminal.WriteError($"An error occured: {e.Message}");
|
||||
terminal.WriteError($"An error occurred: {e.Message}");
|
||||
trace.Error(e);
|
||||
return Constants.Runner.ReturnCode.TerminatedError;
|
||||
return GetRunnerVersionDeprecatedExitCode();
|
||||
}
|
||||
catch (RunnerNotFoundException e)
|
||||
{
|
||||
@@ -159,6 +159,16 @@ namespace GitHub.Runner.Listener
|
||||
}
|
||||
}
|
||||
|
||||
private static int GetRunnerVersionDeprecatedExitCode()
|
||||
{
|
||||
if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable(Constants.Variables.Actions.ReturnVersionDeprecatedExitCode)))
|
||||
{
|
||||
return Constants.Runner.ReturnCode.RunnerVersionDeprecated;
|
||||
}
|
||||
|
||||
return Constants.Runner.ReturnCode.TerminatedError;
|
||||
}
|
||||
|
||||
private static void LoadAndSetEnv()
|
||||
{
|
||||
var binDir = Path.GetDirectoryName(Assembly.GetEntryAssembly().Location);
|
||||
|
||||
@@ -79,6 +79,13 @@ namespace GitHub.Runner.Worker
|
||||
PreStepTracker = new Dictionary<Guid, IActionRunner>()
|
||||
};
|
||||
var containerSetupSteps = new List<JobExtensionRunner>();
|
||||
var batchActionResolution = (executionContext.Global.Variables.GetBoolean(Constants.Runner.Features.BatchActionResolution) ?? false)
|
||||
|| StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("ACTIONS_BATCH_ACTION_RESOLUTION"));
|
||||
// Stack-local cache: same action (owner/repo@ref) is resolved only once,
|
||||
// even if it appears at multiple depths in a composite tree.
|
||||
var resolvedDownloadInfos = batchActionResolution
|
||||
? new Dictionary<string, WebApi.ActionDownloadInfo>(StringComparer.Ordinal)
|
||||
: null;
|
||||
var depth = 0;
|
||||
// We are running at the start of a job
|
||||
if (rootStepId == default(Guid))
|
||||
@@ -105,7 +112,9 @@ namespace GitHub.Runner.Worker
|
||||
PrepareActionsState result = new PrepareActionsState();
|
||||
try
|
||||
{
|
||||
result = await PrepareActionsRecursiveAsync(executionContext, state, actions, depth, rootStepId);
|
||||
result = batchActionResolution
|
||||
? await PrepareActionsRecursiveAsync(executionContext, state, actions, resolvedDownloadInfos, depth, rootStepId)
|
||||
: await PrepareActionsRecursiveLegacyAsync(executionContext, state, actions, depth, rootStepId);
|
||||
}
|
||||
catch (FailedToResolveActionDownloadInfoException ex)
|
||||
{
|
||||
@@ -115,6 +124,14 @@ namespace GitHub.Runner.Worker
|
||||
executionContext.Result = TaskResult.Failed;
|
||||
throw;
|
||||
}
|
||||
catch (FailedToDownloadActionException ex)
|
||||
{
|
||||
// Log the error and fail the PrepareActionsAsync Initialization.
|
||||
Trace.Error($"Caught exception from PrepareActionsAsync Initialization: {ex}");
|
||||
executionContext.InfrastructureError(ex.InnerException?.Message ?? ex.Message, category: "error_download_action");
|
||||
executionContext.Result = TaskResult.Failed;
|
||||
throw;
|
||||
}
|
||||
catch (InvalidActionArchiveException ex)
|
||||
{
|
||||
// Log the error and fail the PrepareActionsAsync Initialization.
|
||||
@@ -161,7 +178,192 @@ namespace GitHub.Runner.Worker
|
||||
return new PrepareResult(containerSetupSteps, result.PreStepTracker);
|
||||
}
|
||||
|
||||
private async Task<PrepareActionsState> PrepareActionsRecursiveAsync(IExecutionContext executionContext, PrepareActionsState state, IEnumerable<Pipelines.ActionStep> actions, Int32 depth = 0, Guid parentStepId = default(Guid))
|
||||
private async Task<PrepareActionsState> PrepareActionsRecursiveAsync(IExecutionContext executionContext, PrepareActionsState state, IEnumerable<Pipelines.ActionStep> actions, Dictionary<string, WebApi.ActionDownloadInfo> resolvedDownloadInfos, Int32 depth = 0, Guid parentStepId = default(Guid))
|
||||
{
|
||||
ArgUtil.NotNull(executionContext, nameof(executionContext));
|
||||
if (depth > Constants.CompositeActionsMaxDepth)
|
||||
{
|
||||
throw new Exception($"Composite action depth exceeded max depth {Constants.CompositeActionsMaxDepth}");
|
||||
}
|
||||
|
||||
var repositoryActions = new List<Pipelines.ActionStep>();
|
||||
|
||||
foreach (var action in actions)
|
||||
{
|
||||
if (action.Reference.Type == Pipelines.ActionSourceType.ContainerRegistry)
|
||||
{
|
||||
ArgUtil.NotNull(action, nameof(action));
|
||||
var containerReference = action.Reference as Pipelines.ContainerRegistryReference;
|
||||
ArgUtil.NotNull(containerReference, nameof(containerReference));
|
||||
ArgUtil.NotNullOrEmpty(containerReference.Image, nameof(containerReference.Image));
|
||||
|
||||
if (!state.ImagesToPull.ContainsKey(containerReference.Image))
|
||||
{
|
||||
state.ImagesToPull[containerReference.Image] = new List<Guid>();
|
||||
}
|
||||
|
||||
Trace.Info($"Action {action.Name} ({action.Id}) needs to pull image '{containerReference.Image}'");
|
||||
state.ImagesToPull[containerReference.Image].Add(action.Id);
|
||||
}
|
||||
else if (action.Reference.Type == Pipelines.ActionSourceType.Repository)
|
||||
{
|
||||
repositoryActions.Add(action);
|
||||
}
|
||||
}
|
||||
|
||||
if (repositoryActions.Count > 0)
|
||||
{
|
||||
// Resolve download info, skipping any actions already cached.
|
||||
await ResolveNewActionsAsync(executionContext, repositoryActions, resolvedDownloadInfos);
|
||||
|
||||
// Download each action.
|
||||
foreach (var action in repositoryActions)
|
||||
{
|
||||
var lookupKey = GetDownloadInfoLookupKey(action);
|
||||
if (string.IsNullOrEmpty(lookupKey))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
if (!resolvedDownloadInfos.TryGetValue(lookupKey, out var downloadInfo))
|
||||
{
|
||||
throw new Exception($"Missing download info for {lookupKey}");
|
||||
}
|
||||
await DownloadRepositoryActionAsync(executionContext, downloadInfo);
|
||||
}
|
||||
|
||||
// Parse action.yml and collect composite sub-actions for batched
|
||||
// resolution below. Pre/post step registration is deferred until
|
||||
// after recursion so that HasPre/HasPost reflect the full subtree.
|
||||
var nextLevel = new List<(Pipelines.ActionStep action, Guid parentId)>();
|
||||
|
||||
foreach (var action in repositoryActions)
|
||||
{
|
||||
var setupInfo = PrepareRepositoryActionAsync(executionContext, action);
|
||||
if (setupInfo != null && setupInfo.Container != null)
|
||||
{
|
||||
if (!string.IsNullOrEmpty(setupInfo.Container.Image))
|
||||
{
|
||||
if (!state.ImagesToPull.ContainsKey(setupInfo.Container.Image))
|
||||
{
|
||||
state.ImagesToPull[setupInfo.Container.Image] = new List<Guid>();
|
||||
}
|
||||
|
||||
Trace.Info($"Action {action.Name} ({action.Id}) from repository '{setupInfo.Container.ActionRepository}' needs to pull image '{setupInfo.Container.Image}'");
|
||||
state.ImagesToPull[setupInfo.Container.Image].Add(action.Id);
|
||||
}
|
||||
else
|
||||
{
|
||||
ArgUtil.NotNullOrEmpty(setupInfo.Container.ActionRepository, nameof(setupInfo.Container.ActionRepository));
|
||||
|
||||
if (!state.ImagesToBuild.ContainsKey(setupInfo.Container.ActionRepository))
|
||||
{
|
||||
state.ImagesToBuild[setupInfo.Container.ActionRepository] = new List<Guid>();
|
||||
}
|
||||
|
||||
Trace.Info($"Action {action.Name} ({action.Id}) from repository '{setupInfo.Container.ActionRepository}' needs to build image '{setupInfo.Container.Dockerfile}'");
|
||||
state.ImagesToBuild[setupInfo.Container.ActionRepository].Add(action.Id);
|
||||
state.ImagesToBuildInfo[setupInfo.Container.ActionRepository] = setupInfo.Container;
|
||||
}
|
||||
}
|
||||
else if (setupInfo != null && setupInfo.Steps != null && setupInfo.Steps.Count > 0)
|
||||
{
|
||||
foreach (var step in setupInfo.Steps)
|
||||
{
|
||||
nextLevel.Add((step, action.Id));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Resolve all next-level sub-actions in one batch API call,
|
||||
// then recurse per parent (which hits the cache, not the API).
|
||||
if (nextLevel.Count > 0)
|
||||
{
|
||||
var nextLevelRepoActions = nextLevel
|
||||
.Where(x => x.action.Reference.Type == Pipelines.ActionSourceType.Repository)
|
||||
.Select(x => x.action)
|
||||
.ToList();
|
||||
await ResolveNewActionsAsync(executionContext, nextLevelRepoActions, resolvedDownloadInfos);
|
||||
|
||||
foreach (var group in nextLevel.GroupBy(x => x.parentId))
|
||||
{
|
||||
var groupActions = group.Select(x => x.action).ToList();
|
||||
state = await PrepareActionsRecursiveAsync(executionContext, state, groupActions, resolvedDownloadInfos, depth + 1, group.Key);
|
||||
}
|
||||
}
|
||||
|
||||
// Register pre/post steps after recursion so that HasPre/HasPost
|
||||
// are correct (they depend on _cachedEmbeddedPreSteps/PostSteps
|
||||
// being populated by the recursive calls above).
|
||||
foreach (var action in repositoryActions)
|
||||
{
|
||||
var repoAction = action.Reference as Pipelines.RepositoryPathReference;
|
||||
if (repoAction.RepositoryType != Pipelines.PipelineConstants.SelfAlias)
|
||||
{
|
||||
var definition = LoadAction(executionContext, action);
|
||||
if (definition.Data.Execution.HasPre)
|
||||
{
|
||||
Trace.Info($"Add 'pre' execution for {action.Id}");
|
||||
// Root Step
|
||||
if (depth < 1)
|
||||
{
|
||||
var actionRunner = HostContext.CreateService<IActionRunner>();
|
||||
actionRunner.Action = action;
|
||||
actionRunner.Stage = ActionRunStage.Pre;
|
||||
actionRunner.Condition = definition.Data.Execution.InitCondition;
|
||||
state.PreStepTracker[action.Id] = actionRunner;
|
||||
}
|
||||
// Embedded Step
|
||||
else
|
||||
{
|
||||
if (!_cachedEmbeddedPreSteps.ContainsKey(parentStepId))
|
||||
{
|
||||
_cachedEmbeddedPreSteps[parentStepId] = new List<Pipelines.ActionStep>();
|
||||
}
|
||||
// Clone action so we can modify the condition without affecting the original
|
||||
var clonedAction = action.Clone() as Pipelines.ActionStep;
|
||||
clonedAction.Condition = definition.Data.Execution.InitCondition;
|
||||
_cachedEmbeddedPreSteps[parentStepId].Add(clonedAction);
|
||||
}
|
||||
}
|
||||
|
||||
if (definition.Data.Execution.HasPost && depth > 0)
|
||||
{
|
||||
if (!_cachedEmbeddedPostSteps.ContainsKey(parentStepId))
|
||||
{
|
||||
// If we haven't done so already, add the parent to the post steps
|
||||
_cachedEmbeddedPostSteps[parentStepId] = new Stack<Pipelines.ActionStep>();
|
||||
}
|
||||
// Clone action so we can modify the condition without affecting the original
|
||||
var clonedAction = action.Clone() as Pipelines.ActionStep;
|
||||
clonedAction.Condition = definition.Data.Execution.CleanupCondition;
|
||||
_cachedEmbeddedPostSteps[parentStepId].Push(clonedAction);
|
||||
}
|
||||
}
|
||||
else if (depth > 0)
|
||||
{
|
||||
// if we're in a composite action and haven't loaded the local action yet
|
||||
// we assume it has a post step
|
||||
if (!_cachedEmbeddedPostSteps.ContainsKey(parentStepId))
|
||||
{
|
||||
// If we haven't done so already, add the parent to the post steps
|
||||
_cachedEmbeddedPostSteps[parentStepId] = new Stack<Pipelines.ActionStep>();
|
||||
}
|
||||
// Clone action so we can modify the condition without affecting the original
|
||||
var clonedAction = action.Clone() as Pipelines.ActionStep;
|
||||
_cachedEmbeddedPostSteps[parentStepId].Push(clonedAction);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return state;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Legacy (non-batched) action resolution. Each composite resolves its
|
||||
/// sub-actions individually, with no cross-depth deduplication.
|
||||
/// Used when the BatchActionResolution feature flag is disabled.
|
||||
/// </summary>
|
||||
private async Task<PrepareActionsState> PrepareActionsRecursiveLegacyAsync(IExecutionContext executionContext, PrepareActionsState state, IEnumerable<Pipelines.ActionStep> actions, Int32 depth = 0, Guid parentStepId = default(Guid))
|
||||
{
|
||||
ArgUtil.NotNull(executionContext, nameof(executionContext));
|
||||
if (depth > Constants.CompositeActionsMaxDepth)
|
||||
@@ -247,7 +449,7 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
else if (setupInfo != null && setupInfo.Steps != null && setupInfo.Steps.Count > 0)
|
||||
{
|
||||
state = await PrepareActionsRecursiveAsync(executionContext, state, setupInfo.Steps, depth + 1, action.Id);
|
||||
state = await PrepareActionsRecursiveLegacyAsync(executionContext, state, setupInfo.Steps, depth + 1, action.Id);
|
||||
}
|
||||
var repoAction = action.Reference as Pipelines.RepositoryPathReference;
|
||||
if (repoAction.RepositoryType != Pipelines.PipelineConstants.SelfAlias)
|
||||
@@ -754,6 +956,33 @@ namespace GitHub.Runner.Worker
|
||||
return actionDownloadInfos.Actions;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Only resolves actions not already in resolvedDownloadInfos.
|
||||
/// Results are cached for reuse at deeper recursion levels.
|
||||
/// </summary>
|
||||
private async Task ResolveNewActionsAsync(IExecutionContext executionContext, List<Pipelines.ActionStep> actions, Dictionary<string, WebApi.ActionDownloadInfo> resolvedDownloadInfos)
|
||||
{
|
||||
var actionsToResolve = new List<Pipelines.ActionStep>();
|
||||
var pendingKeys = new HashSet<string>(StringComparer.Ordinal);
|
||||
foreach (var action in actions)
|
||||
{
|
||||
var lookupKey = GetDownloadInfoLookupKey(action);
|
||||
if (!string.IsNullOrEmpty(lookupKey) && !resolvedDownloadInfos.ContainsKey(lookupKey) && pendingKeys.Add(lookupKey))
|
||||
{
|
||||
actionsToResolve.Add(action);
|
||||
}
|
||||
}
|
||||
|
||||
if (actionsToResolve.Count > 0)
|
||||
{
|
||||
var downloadInfos = await GetDownloadInfoAsync(executionContext, actionsToResolve);
|
||||
foreach (var kvp in downloadInfos)
|
||||
{
|
||||
resolvedDownloadInfos[kvp.Key] = kvp.Value;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private async Task DownloadRepositoryActionAsync(IExecutionContext executionContext, WebApi.ActionDownloadInfo downloadInfo)
|
||||
{
|
||||
Trace.Entering();
|
||||
@@ -1138,16 +1367,29 @@ namespace GitHub.Runner.Worker
|
||||
return $"{repositoryReference.Name}@{repositoryReference.Ref}";
|
||||
}
|
||||
|
||||
private AuthenticationHeaderValue CreateAuthHeader(string token)
|
||||
private AuthenticationHeaderValue CreateAuthHeader(IExecutionContext executionContext, string downloadUrl, string token)
|
||||
{
|
||||
if (string.IsNullOrEmpty(token))
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
var base64EncodingToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"x-access-token:{token}"));
|
||||
HostContext.SecretMasker.AddValue(base64EncodingToken);
|
||||
return new AuthenticationHeaderValue("Basic", base64EncodingToken);
|
||||
if (executionContext.Global.Variables.GetBoolean(Constants.Runner.Features.UseBearerTokenForCodeload) == true &&
|
||||
Uri.TryCreate(downloadUrl, UriKind.Absolute, out var parsedUrl) &&
|
||||
!string.IsNullOrEmpty(parsedUrl?.Host) &&
|
||||
!string.IsNullOrEmpty(parsedUrl?.PathAndQuery) &&
|
||||
(parsedUrl.Host.StartsWith("codeload.", StringComparison.OrdinalIgnoreCase) || parsedUrl.PathAndQuery.StartsWith("/_codeload/", StringComparison.OrdinalIgnoreCase)))
|
||||
{
|
||||
Trace.Info("Using Bearer token for action archive download directly to codeload.");
|
||||
return new AuthenticationHeaderValue("Bearer", token);
|
||||
}
|
||||
else
|
||||
{
|
||||
Trace.Info("Using Basic token for action archive download.");
|
||||
var base64EncodingToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"x-access-token:{token}"));
|
||||
HostContext.SecretMasker.AddValue(base64EncodingToken);
|
||||
return new AuthenticationHeaderValue("Basic", base64EncodingToken);
|
||||
}
|
||||
}
|
||||
|
||||
private async Task DownloadRepositoryArchive(IExecutionContext executionContext, string downloadUrl, string downloadAuthToken, string archiveFile)
|
||||
@@ -1157,93 +1399,102 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
// Allow up to 20 * 60s for any action to be downloaded from github graph.
|
||||
int timeoutSeconds = 20 * 60;
|
||||
while (retryCount < 3)
|
||||
try
|
||||
{
|
||||
string requestId = string.Empty;
|
||||
using (var actionDownloadTimeout = new CancellationTokenSource(TimeSpan.FromSeconds(timeoutSeconds)))
|
||||
using (var actionDownloadCancellation = CancellationTokenSource.CreateLinkedTokenSource(actionDownloadTimeout.Token, executionContext.CancellationToken))
|
||||
while (retryCount < 3)
|
||||
{
|
||||
try
|
||||
string requestId = string.Empty;
|
||||
using (var actionDownloadTimeout = new CancellationTokenSource(TimeSpan.FromSeconds(timeoutSeconds)))
|
||||
using (var actionDownloadCancellation = CancellationTokenSource.CreateLinkedTokenSource(actionDownloadTimeout.Token, executionContext.CancellationToken))
|
||||
{
|
||||
//open zip stream in async mode
|
||||
using (FileStream fs = new(archiveFile, FileMode.Create, FileAccess.Write, FileShare.None, bufferSize: _defaultFileStreamBufferSize, useAsync: true))
|
||||
using (var httpClientHandler = HostContext.CreateHttpClientHandler())
|
||||
using (var httpClient = new HttpClient(httpClientHandler))
|
||||
try
|
||||
{
|
||||
httpClient.DefaultRequestHeaders.Authorization = CreateAuthHeader(downloadAuthToken);
|
||||
|
||||
httpClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents);
|
||||
using (var response = await httpClient.GetAsync(downloadUrl))
|
||||
//open zip stream in async mode
|
||||
using (FileStream fs = new(archiveFile, FileMode.Create, FileAccess.Write, FileShare.None, bufferSize: _defaultFileStreamBufferSize, useAsync: true))
|
||||
using (var httpClientHandler = HostContext.CreateHttpClientHandler())
|
||||
using (var httpClient = new HttpClient(httpClientHandler))
|
||||
{
|
||||
requestId = UrlUtil.GetGitHubRequestId(response.Headers);
|
||||
if (!string.IsNullOrEmpty(requestId))
|
||||
{
|
||||
Trace.Info($"Request URL: {downloadUrl} X-GitHub-Request-Id: {requestId} Http Status: {response.StatusCode}");
|
||||
}
|
||||
httpClient.DefaultRequestHeaders.Authorization = CreateAuthHeader(executionContext, downloadUrl, downloadAuthToken);
|
||||
|
||||
if (response.IsSuccessStatusCode)
|
||||
httpClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents);
|
||||
using (var response = await httpClient.GetAsync(downloadUrl))
|
||||
{
|
||||
using (var result = await response.Content.ReadAsStreamAsync())
|
||||
requestId = UrlUtil.GetGitHubRequestId(response.Headers);
|
||||
if (!string.IsNullOrEmpty(requestId))
|
||||
{
|
||||
await result.CopyToAsync(fs, _defaultCopyBufferSize, actionDownloadCancellation.Token);
|
||||
await fs.FlushAsync(actionDownloadCancellation.Token);
|
||||
|
||||
// download succeed, break out the retry loop.
|
||||
break;
|
||||
Trace.Info($"Request URL: {downloadUrl} X-GitHub-Request-Id: {requestId} Http Status: {response.StatusCode}");
|
||||
}
|
||||
|
||||
if (response.IsSuccessStatusCode)
|
||||
{
|
||||
using (var result = await response.Content.ReadAsStreamAsync())
|
||||
{
|
||||
await result.CopyToAsync(fs, _defaultCopyBufferSize, actionDownloadCancellation.Token);
|
||||
await fs.FlushAsync(actionDownloadCancellation.Token);
|
||||
|
||||
// download succeed, break out the retry loop.
|
||||
break;
|
||||
}
|
||||
}
|
||||
else if (response.StatusCode == HttpStatusCode.NotFound)
|
||||
{
|
||||
// It doesn't make sense to retry in this case, so just stop
|
||||
throw new ActionNotFoundException(new Uri(downloadUrl), requestId);
|
||||
}
|
||||
else
|
||||
{
|
||||
// Something else bad happened, let's go to our retry logic
|
||||
response.EnsureSuccessStatusCode();
|
||||
}
|
||||
}
|
||||
else if (response.StatusCode == HttpStatusCode.NotFound)
|
||||
{
|
||||
// It doesn't make sense to retry in this case, so just stop
|
||||
throw new ActionNotFoundException(new Uri(downloadUrl), requestId);
|
||||
}
|
||||
else
|
||||
{
|
||||
// Something else bad happened, let's go to our retry logic
|
||||
response.EnsureSuccessStatusCode();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (OperationCanceledException) when (executionContext.CancellationToken.IsCancellationRequested)
|
||||
{
|
||||
Trace.Info("Action download has been cancelled.");
|
||||
throw;
|
||||
}
|
||||
catch (OperationCanceledException ex) when (!executionContext.CancellationToken.IsCancellationRequested && retryCount >= 2)
|
||||
{
|
||||
Trace.Info($"Action download final retry timeout after {timeoutSeconds} seconds.");
|
||||
throw new TimeoutException($"Action '{downloadUrl}' download has timed out. Error: {ex.Message} {requestId}");
|
||||
}
|
||||
catch (ActionNotFoundException)
|
||||
{
|
||||
Trace.Info($"The action at '{downloadUrl}' does not exist");
|
||||
throw;
|
||||
}
|
||||
catch (Exception ex) when (retryCount < 2)
|
||||
{
|
||||
retryCount++;
|
||||
Trace.Error($"Fail to download archive '{downloadUrl}' -- Attempt: {retryCount}");
|
||||
Trace.Error(ex);
|
||||
if (actionDownloadTimeout.Token.IsCancellationRequested)
|
||||
catch (OperationCanceledException) when (executionContext.CancellationToken.IsCancellationRequested)
|
||||
{
|
||||
// action download didn't finish within timeout
|
||||
executionContext.Warning($"Action '{downloadUrl}' didn't finish download within {timeoutSeconds} seconds. {requestId}");
|
||||
Trace.Info("Action download has been cancelled.");
|
||||
throw;
|
||||
}
|
||||
else
|
||||
catch (OperationCanceledException ex) when (!executionContext.CancellationToken.IsCancellationRequested && retryCount >= 2)
|
||||
{
|
||||
executionContext.Warning($"Failed to download action '{downloadUrl}'. Error: {ex.Message} {requestId}");
|
||||
Trace.Info($"Action download final retry timeout after {timeoutSeconds} seconds.");
|
||||
throw new TimeoutException($"Action '{downloadUrl}' download has timed out. Error: {ex.Message} {requestId}");
|
||||
}
|
||||
catch (ActionNotFoundException)
|
||||
{
|
||||
Trace.Info($"The action at '{downloadUrl}' does not exist");
|
||||
throw;
|
||||
}
|
||||
catch (Exception ex) when (retryCount < 2)
|
||||
{
|
||||
retryCount++;
|
||||
Trace.Error($"Fail to download archive '{downloadUrl}' -- Attempt: {retryCount}");
|
||||
Trace.Error(ex);
|
||||
if (actionDownloadTimeout.Token.IsCancellationRequested)
|
||||
{
|
||||
// action download didn't finish within timeout
|
||||
executionContext.Warning($"Action '{downloadUrl}' didn't finish download within {timeoutSeconds} seconds. {requestId}");
|
||||
}
|
||||
else
|
||||
{
|
||||
executionContext.Warning($"Failed to download action '{downloadUrl}'. Error: {ex.Message} {requestId}");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (String.IsNullOrEmpty(Environment.GetEnvironmentVariable("_GITHUB_ACTION_DOWNLOAD_NO_BACKOFF")))
|
||||
{
|
||||
var backOff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(10), TimeSpan.FromSeconds(30));
|
||||
executionContext.Warning($"Back off {backOff.TotalSeconds} seconds before retry.");
|
||||
await Task.Delay(backOff);
|
||||
if (String.IsNullOrEmpty(Environment.GetEnvironmentVariable("_GITHUB_ACTION_DOWNLOAD_NO_BACKOFF")))
|
||||
{
|
||||
var backOff = BackoffTimerHelper.GetRandomBackoff(TimeSpan.FromSeconds(10), TimeSpan.FromSeconds(30));
|
||||
executionContext.Warning($"Back off {backOff.TotalSeconds} seconds before retry.");
|
||||
await Task.Delay(backOff);
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (Exception ex) when (!(ex is OperationCanceledException) && !executionContext.CancellationToken.IsCancellationRequested)
|
||||
{
|
||||
Trace.Error($"Failed to download archive '{downloadUrl}' after {retryCount + 1} attempts.");
|
||||
Trace.Error(ex);
|
||||
throw new FailedToDownloadActionException($"Failed to download archive '{downloadUrl}' after {retryCount + 1} attempts.", ex);
|
||||
}
|
||||
|
||||
ArgUtil.NotNullOrEmpty(archiveFile, nameof(archiveFile));
|
||||
executionContext.Debug($"Download '{downloadUrl}' to '{archiveFile}'");
|
||||
|
||||
@@ -316,7 +316,6 @@ namespace GitHub.Runner.Worker
|
||||
Schema = _actionManifestSchema,
|
||||
// TODO: Switch to real tracewriter for cutover
|
||||
TraceWriter = new GitHub.Actions.WorkflowParser.ObjectTemplating.EmptyTraceWriter(),
|
||||
AllowCaseFunction = false,
|
||||
};
|
||||
|
||||
// Expression values from execution context
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
using System;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
@@ -315,7 +315,6 @@ namespace GitHub.Runner.Worker
|
||||
maxBytes: 10 * 1024 * 1024),
|
||||
Schema = _actionManifestSchema,
|
||||
TraceWriter = executionContext.ToTemplateTraceWriter(),
|
||||
AllowCaseFunction = false,
|
||||
};
|
||||
|
||||
// Expression values from execution context
|
||||
|
||||
1435
src/Runner.Worker/Dap/DapDebugger.cs
Normal file
1435
src/Runner.Worker/Dap/DapDebugger.cs
Normal file
File diff suppressed because it is too large
Load Diff
1231
src/Runner.Worker/Dap/DapMessages.cs
Normal file
1231
src/Runner.Worker/Dap/DapMessages.cs
Normal file
File diff suppressed because it is too large
Load Diff
369
src/Runner.Worker/Dap/DapReplExecutor.cs
Normal file
369
src/Runner.Worker/Dap/DapReplExecutor.cs
Normal file
@@ -0,0 +1,369 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.DistributedTask.Pipelines.ContextData;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Common.Util;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Runner.Worker.Handlers;
|
||||
|
||||
namespace GitHub.Runner.Worker.Dap
|
||||
{
|
||||
/// <summary>
|
||||
/// Executes <see cref="RunCommand"/> objects in the job's runtime context.
|
||||
///
|
||||
/// Mirrors the behavior of a normal workflow <c>run:</c> step as closely
|
||||
/// as possible by reusing the runner's existing shell-resolution logic,
|
||||
/// script fixup helpers, and process execution infrastructure.
|
||||
///
|
||||
/// Output is streamed to the debugger via DAP <c>output</c> events with
|
||||
/// secrets masked before emission.
|
||||
/// </summary>
|
||||
internal sealed class DapReplExecutor
|
||||
{
|
||||
private readonly IHostContext _hostContext;
|
||||
private readonly Action<string, string> _sendOutput;
|
||||
private readonly Tracing _trace;
|
||||
|
||||
public DapReplExecutor(IHostContext hostContext, Action<string, string> sendOutput)
|
||||
{
|
||||
_hostContext = hostContext ?? throw new ArgumentNullException(nameof(hostContext));
|
||||
_sendOutput = sendOutput ?? throw new ArgumentNullException(nameof(sendOutput));
|
||||
_trace = hostContext.GetTrace(nameof(DapReplExecutor));
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Executes a <see cref="RunCommand"/> and returns the exit code as a
|
||||
/// formatted <see cref="EvaluateResponseBody"/>.
|
||||
/// </summary>
|
||||
public async Task<EvaluateResponseBody> ExecuteRunCommandAsync(
|
||||
RunCommand command,
|
||||
IExecutionContext context,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
if (context == null)
|
||||
{
|
||||
return ErrorResult("No execution context available. The debugger must be paused at a step to run commands.");
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
return await ExecuteScriptAsync(command, context, cancellationToken);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_trace.Error($"REPL run command failed ({ex.GetType().Name})");
|
||||
var maskedError = _hostContext.SecretMasker.MaskSecrets(ex.Message);
|
||||
return ErrorResult($"Command failed: {maskedError}");
|
||||
}
|
||||
}
|
||||
|
||||
private async Task<EvaluateResponseBody> ExecuteScriptAsync(
|
||||
RunCommand command,
|
||||
IExecutionContext context,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
// 1. Resolve shell — same logic as ScriptHandler
|
||||
string shellCommand;
|
||||
string argFormat;
|
||||
|
||||
if (!string.IsNullOrEmpty(command.Shell))
|
||||
{
|
||||
// Explicit shell from the DSL
|
||||
var parsed = ScriptHandlerHelpers.ParseShellOptionString(command.Shell);
|
||||
shellCommand = parsed.shellCommand;
|
||||
argFormat = string.IsNullOrEmpty(parsed.shellArgs)
|
||||
? ScriptHandlerHelpers.GetScriptArgumentsFormat(shellCommand)
|
||||
: parsed.shellArgs;
|
||||
}
|
||||
else
|
||||
{
|
||||
// Default shell — mirrors ScriptHandler platform defaults
|
||||
shellCommand = ResolveDefaultShell(context);
|
||||
argFormat = ScriptHandlerHelpers.GetScriptArgumentsFormat(shellCommand);
|
||||
}
|
||||
|
||||
_trace.Info("Resolved REPL shell");
|
||||
|
||||
// 2. Expand ${{ }} expressions in the script body, just like
|
||||
// ActionRunner evaluates step inputs before ScriptHandler sees them
|
||||
var contents = ExpandExpressions(command.Script, context);
|
||||
contents = ScriptHandlerHelpers.FixUpScriptContents(shellCommand, contents);
|
||||
|
||||
// Write to a temp file (same pattern as ScriptHandler)
|
||||
var extension = ScriptHandlerHelpers.GetScriptFileExtension(shellCommand);
|
||||
var scriptFilePath = Path.Combine(
|
||||
_hostContext.GetDirectory(WellKnownDirectory.Temp),
|
||||
$"dap_repl_{Guid.NewGuid()}{extension}");
|
||||
|
||||
Encoding encoding = new UTF8Encoding(false);
|
||||
#if OS_WINDOWS
|
||||
contents = contents.Replace("\r\n", "\n").Replace("\n", "\r\n");
|
||||
encoding = Console.InputEncoding.CodePage != 65001
|
||||
? Console.InputEncoding
|
||||
: encoding;
|
||||
#endif
|
||||
File.WriteAllText(scriptFilePath, contents, encoding);
|
||||
|
||||
try
|
||||
{
|
||||
// 3. Format arguments with script path
|
||||
var resolvedPath = scriptFilePath.Replace("\"", "\\\"");
|
||||
if (string.IsNullOrEmpty(argFormat) || !argFormat.Contains("{0}"))
|
||||
{
|
||||
return ErrorResult($"Invalid shell option '{shellCommand}'. Shell must be a valid built-in (bash, sh, cmd, powershell, pwsh) or a format string containing '{{0}}'");
|
||||
}
|
||||
var arguments = string.Format(argFormat, resolvedPath);
|
||||
|
||||
// 4. Resolve shell command path
|
||||
string prependPath = string.Join(
|
||||
Path.PathSeparator.ToString(),
|
||||
Enumerable.Reverse(context.Global.PrependPath));
|
||||
var commandPath = WhichUtil.Which(shellCommand, false, _trace, prependPath)
|
||||
?? shellCommand;
|
||||
|
||||
// 5. Build environment — merge from execution context like a real step
|
||||
var environment = BuildEnvironment(context, command.Env);
|
||||
|
||||
// 6. Resolve working directory
|
||||
var workingDirectory = command.WorkingDirectory;
|
||||
if (string.IsNullOrEmpty(workingDirectory))
|
||||
{
|
||||
var githubContext = context.ExpressionValues.TryGetValue("github", out var gh)
|
||||
? gh as DictionaryContextData
|
||||
: null;
|
||||
var workspace = githubContext?.TryGetValue("workspace", out var ws) == true
|
||||
? (ws as StringContextData)?.Value
|
||||
: null;
|
||||
workingDirectory = workspace ?? _hostContext.GetDirectory(WellKnownDirectory.Work);
|
||||
}
|
||||
|
||||
_trace.Info("Executing REPL command");
|
||||
|
||||
// Stream execution info to debugger
|
||||
SendOutput("console", $"$ {shellCommand} {command.Script.Substring(0, Math.Min(command.Script.Length, 80))}{(command.Script.Length > 80 ? "..." : "")}\n");
|
||||
|
||||
// 7. Execute via IProcessInvoker (same as DefaultStepHost)
|
||||
int exitCode;
|
||||
using (var processInvoker = _hostContext.CreateService<IProcessInvoker>())
|
||||
{
|
||||
processInvoker.OutputDataReceived += (sender, args) =>
|
||||
{
|
||||
if (!string.IsNullOrEmpty(args.Data))
|
||||
{
|
||||
var masked = _hostContext.SecretMasker.MaskSecrets(args.Data);
|
||||
SendOutput("stdout", masked + "\n");
|
||||
}
|
||||
};
|
||||
|
||||
processInvoker.ErrorDataReceived += (sender, args) =>
|
||||
{
|
||||
if (!string.IsNullOrEmpty(args.Data))
|
||||
{
|
||||
var masked = _hostContext.SecretMasker.MaskSecrets(args.Data);
|
||||
SendOutput("stderr", masked + "\n");
|
||||
}
|
||||
};
|
||||
|
||||
exitCode = await processInvoker.ExecuteAsync(
|
||||
workingDirectory: workingDirectory,
|
||||
fileName: commandPath,
|
||||
arguments: arguments,
|
||||
environment: environment,
|
||||
requireExitCodeZero: false,
|
||||
outputEncoding: null,
|
||||
killProcessOnCancel: true,
|
||||
cancellationToken: cancellationToken);
|
||||
}
|
||||
|
||||
_trace.Info($"REPL command exited with code {exitCode}");
|
||||
|
||||
// 8. Return only the exit code summary (output was already streamed)
|
||||
return new EvaluateResponseBody
|
||||
{
|
||||
Result = exitCode == 0 ? $"(exit code: {exitCode})" : $"Process completed with exit code {exitCode}.",
|
||||
Type = exitCode == 0 ? "string" : "error",
|
||||
VariablesReference = 0
|
||||
};
|
||||
}
|
||||
finally
|
||||
{
|
||||
// Clean up temp script file
|
||||
try { File.Delete(scriptFilePath); }
|
||||
catch { /* best effort */ }
|
||||
}
|
||||
}
|
||||
|
||||
        /// <summary>
        /// Expands <c>${{ }}</c> expressions in the input string using the
        /// runner's template evaluator — the same evaluation path that processes
        /// step inputs before <see cref="ScriptHandler"/> runs them.
        ///
        /// Each <c>${{ expr }}</c> occurrence is individually evaluated and
        /// replaced with its masked string result, mirroring the semantics of
        /// expression interpolation in a workflow <c>run:</c> step body.
        /// </summary>
        /// <param name="input">Text possibly containing <c>${{ }}</c> expressions; null/empty yields empty string.</param>
        /// <param name="context">Execution context supplying expression values and functions.</param>
        /// <returns>The input with every well-formed expression replaced by its masked result.
        /// Unterminated or failing expressions are kept as literal text.</returns>
        internal string ExpandExpressions(string input, IExecutionContext context)
        {
            // Fast path: nothing to expand.
            if (string.IsNullOrEmpty(input) || !input.Contains("${{"))
            {
                return input ?? string.Empty;
            }

            var result = new StringBuilder();
            int pos = 0;

            while (pos < input.Length)
            {
                var start = input.IndexOf("${{", pos, StringComparison.Ordinal);
                if (start < 0)
                {
                    // No more expressions — copy the remaining literal tail.
                    result.Append(input, pos, input.Length - pos);
                    break;
                }

                // Append the literal text before the expression
                result.Append(input, pos, start - pos);

                var end = input.IndexOf("}}", start + 3, StringComparison.Ordinal);
                if (end < 0)
                {
                    // Unterminated expression — keep literal
                    result.Append(input, start, input.Length - start);
                    break;
                }

                var expr = input.Substring(start + 3, end - start - 3).Trim();
                end += 2; // skip past "}}"

                // Evaluate the expression
                try
                {
                    var templateEvaluator = context.ToPipelineTemplateEvaluator();
                    var token = new GitHub.DistributedTask.ObjectTemplating.Tokens.BasicExpressionToken(
                        null, null, null, expr);
                    // Reuses the display-name evaluation entry point, which returns the
                    // expression result stringified.
                    var evaluated = templateEvaluator.EvaluateStepDisplayName(
                        token,
                        context.ExpressionValues,
                        context.ExpressionFunctions);
                    // Mask secrets so expansion never reveals more than a normal log line.
                    result.Append(_hostContext.SecretMasker.MaskSecrets(evaluated ?? string.Empty));
                }
                catch (Exception ex)
                {
                    // Only the exception type is traced to avoid leaking secret-bearing messages.
                    _trace.Warning($"Expression expansion failed ({ex.GetType().Name})");
                    // Keep the original expression literal on failure
                    result.Append(input, start, end - start);
                }

                pos = end;
            }

            return result.ToString();
        }
|
||||
|
||||
        /// <summary>
        /// Resolves the default shell the same way <see cref="ScriptHandler"/>
        /// does: check job defaults, then fall back to platform default.
        /// </summary>
        /// <param name="context">Execution context providing job defaults and prepend-path.</param>
        /// <returns>A shell name: the job default if set; otherwise "pwsh"/"powershell"
        /// on Windows (depending on availability) or "sh" elsewhere.</returns>
        internal string ResolveDefaultShell(IExecutionContext context)
        {
            // Check job defaults (workflow-level `defaults.run.shell`).
            if (context.Global?.JobDefaults != null &&
                context.Global.JobDefaults.TryGetValue("run", out var runDefaults) &&
                runDefaults.TryGetValue("shell", out var defaultShell) &&
                !string.IsNullOrEmpty(defaultShell))
            {
                _trace.Info("Using job default shell");
                return defaultShell;
            }

#if OS_WINDOWS
            // PrependPath is reversed so the most recently prepended entry wins,
            // matching how the runner builds PATH for steps.
            string prependPath = string.Join(
                Path.PathSeparator.ToString(),
                context.Global?.PrependPath != null ? Enumerable.Reverse(context.Global.PrependPath) : Array.Empty<string>());
            // Prefer PowerShell Core when it is on the PATH; otherwise Windows PowerShell.
            var pwshPath = WhichUtil.Which("pwsh", false, _trace, prependPath);
            return !string.IsNullOrEmpty(pwshPath) ? "pwsh" : "powershell";
#else
            return "sh";
#endif
        }
|
||||
|
||||
        /// <summary>
        /// Merges the job context environment with any REPL-specific overrides.
        /// </summary>
        /// <param name="context">Execution context providing the `env` expression
        /// values and runtime contexts (github, runner, …).</param>
        /// <param name="replEnv">Optional REPL overrides; values may contain
        /// <c>${{ }}</c> expressions, which are expanded. May be null.</param>
        /// <returns>A merged environment dictionary. Precedence, lowest to highest:
        /// `env` context values, runtime context variables, REPL overrides.</returns>
        internal Dictionary<string, string> BuildEnvironment(
            IExecutionContext context,
            Dictionary<string, string> replEnv)
        {
            // Platform-appropriate key comparison (case-insensitive on Windows).
            var env = new Dictionary<string, string>(VarUtil.EnvironmentVariableKeyComparer);

            // Pull environment from the execution context (same as ActionRunner)
            if (context.ExpressionValues.TryGetValue("env", out var envData))
            {
                if (envData is DictionaryContextData dictEnv)
                {
                    foreach (var pair in dictEnv)
                    {
                        // Only string values are materialized; non-string entries are skipped.
                        if (pair.Value is StringContextData str)
                        {
                            env[pair.Key] = str.Value;
                        }
                    }
                }
                // NOTE(review): if CaseSensitiveDictionaryContextData derives from
                // DictionaryContextData, the branch above already matches it and this
                // else-if is unreachable — confirm against the context-data type hierarchy.
                else if (envData is CaseSensitiveDictionaryContextData csEnv)
                {
                    foreach (var pair in csEnv)
                    {
                        if (pair.Value is StringContextData str)
                        {
                            env[pair.Key] = str.Value;
                        }
                    }
                }
            }

            // Expose runtime context variables to the environment (GITHUB_*, RUNNER_*, etc.)
            foreach (var ctxPair in context.ExpressionValues)
            {
                if (ctxPair.Value is IEnvironmentContextData runtimeContext && runtimeContext != null)
                {
                    foreach (var rtEnv in runtimeContext.GetRuntimeEnvironmentVariables())
                    {
                        env[rtEnv.Key] = rtEnv.Value;
                    }
                }
            }

            // Apply REPL-specific overrides last (so they win),
            // expanding any ${{ }} expressions in the values
            if (replEnv != null)
            {
                foreach (var pair in replEnv)
                {
                    env[pair.Key] = ExpandExpressions(pair.Value, context);
                }
            }

            return env;
        }
|
||||
|
||||
private void SendOutput(string category, string text)
|
||||
{
|
||||
_sendOutput(category, text);
|
||||
}
|
||||
|
||||
private static EvaluateResponseBody ErrorResult(string message)
|
||||
{
|
||||
return new EvaluateResponseBody
|
||||
{
|
||||
Result = message,
|
||||
Type = "error",
|
||||
VariablesReference = 0
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
411
src/Runner.Worker/Dap/DapReplParser.cs
Normal file
411
src/Runner.Worker/Dap/DapReplParser.cs
Normal file
@@ -0,0 +1,411 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Text;
|
||||
|
||||
namespace GitHub.Runner.Worker.Dap
|
||||
{
|
||||
    /// <summary>
    /// Base type for all REPL DSL commands.
    /// </summary>
    internal abstract class DapReplCommand
    {
    }

    /// <summary>
    /// <c>help</c> or <c>help("run")</c>
    /// </summary>
    internal sealed class HelpCommand : DapReplCommand
    {
        // Null for bare "help"; otherwise the quoted topic, e.g. "run".
        public string Topic { get; set; }
    }

    /// <summary>
    /// <c>run("echo hello")</c> or
    /// <c>run("echo hello", shell: "bash", env: { FOO: "bar" }, working_directory: "/tmp")</c>
    /// </summary>
    internal sealed class RunCommand : DapReplCommand
    {
        // The script body to execute (required, first positional argument).
        public string Script { get; set; }
        // Optional shell override; null means use the job/platform default.
        public string Shell { get; set; }
        // Optional extra environment variables; null when no env: block was given.
        public Dictionary<string, string> Env { get; set; }
        // Optional working directory override; null means the step default.
        public string WorkingDirectory { get; set; }
    }
|
||||
|
||||
/// <summary>
|
||||
/// Parses REPL input into typed <see cref="DapReplCommand"/> objects.
|
||||
///
|
||||
/// Grammar (intentionally minimal — extend as the DSL grows):
|
||||
/// <code>
|
||||
/// help → HelpCommand { Topic = null }
|
||||
/// help("run") → HelpCommand { Topic = "run" }
|
||||
/// run("script body") → RunCommand { Script = "script body" }
|
||||
/// run("script", shell: "bash") → RunCommand { Shell = "bash" }
|
||||
/// run("script", env: { K: "V" }) → RunCommand { Env = { K → V } }
|
||||
/// run("script", working_directory: "p")→ RunCommand { WorkingDirectory = "p" }
|
||||
/// </code>
|
||||
///
|
||||
/// Parsing is intentionally hand-rolled rather than regex-based so it can
|
||||
/// handle nested braces, quoted strings with escapes, and grow to support
|
||||
/// future commands without accumulating regex complexity.
|
||||
/// </summary>
|
||||
internal static class DapReplParser
|
||||
{
|
||||
/// <summary>
|
||||
/// Attempts to parse REPL input into a command. Returns null if the
|
||||
/// input does not match any known DSL command (i.e. it should be
|
||||
/// treated as an expression instead).
|
||||
/// </summary>
|
||||
internal static DapReplCommand TryParse(string input, out string error)
|
||||
{
|
||||
error = null;
|
||||
if (string.IsNullOrWhiteSpace(input))
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
var trimmed = input.Trim();
|
||||
|
||||
// help / help("topic")
|
||||
if (trimmed.Equals("help", StringComparison.OrdinalIgnoreCase) ||
|
||||
trimmed.StartsWith("help(", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
return ParseHelp(trimmed, out error);
|
||||
}
|
||||
|
||||
// run("...")
|
||||
if (trimmed.StartsWith("run(", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
return ParseRun(trimmed, out error);
|
||||
}
|
||||
|
||||
// Not a DSL command
|
||||
return null;
|
||||
}
|
||||
|
||||
        // Returns the top-level REPL help text shown for a bare "help" command.
        internal static string GetGeneralHelp()
        {
            return """
                Actions Debug Console

                Commands:
                  help                 Show this help
                  help("run")          Show help for the run command
                  run("script")        Execute a script (like a workflow run step)

                Anything else is evaluated as a GitHub Actions expression.
                  Example: github.repository
                  Example: ${{ github.event_name }}

                """;
        }
|
||||
|
||||
        // Returns the detailed help text for the run command (help("run")).
        internal static string GetRunHelp()
        {
            return """
                run command — execute a script in the job context

                Usage:
                  run("echo hello")
                  run("echo $FOO", shell: "bash")
                  run("echo $FOO", env: { FOO: "bar" })
                  run("ls", working_directory: "/tmp")
                  run("echo $X", shell: "bash", env: { X: "1" }, working_directory: "/tmp")

                Options:
                  shell:               Shell to use (default: job default, e.g. bash)
                  env:                 Extra environment variables as { KEY: "value" }
                  working_directory:   Working directory for the command

                Behavior:
                  - Equivalent to a workflow `run:` step
                  - Expressions in the script body are expanded (${{ ... }})
                  - Output is streamed in real time and secrets are masked

                """;
        }
|
||||
|
||||
#region Parsers
|
||||
|
||||
private static HelpCommand ParseHelp(string input, out string error)
|
||||
{
|
||||
error = null;
|
||||
if (input.Equals("help", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
return new HelpCommand();
|
||||
}
|
||||
|
||||
// help("topic")
|
||||
var inner = ExtractParenthesizedArgs(input, "help", out error);
|
||||
if (error != null) return null;
|
||||
|
||||
var topic = ExtractQuotedString(inner.Trim(), out error);
|
||||
if (error != null) return null;
|
||||
|
||||
return new HelpCommand { Topic = topic };
|
||||
}
|
||||
|
||||
        // Parses run("script", shell: ..., env: {...}, working_directory: ...)
        // into a RunCommand. Returns null with a populated error message on any
        // syntax problem; never throws.
        private static RunCommand ParseRun(string input, out string error)
        {
            error = null;

            var inner = ExtractParenthesizedArgs(input, "run", out error);
            if (error != null) return null;

            // Split into argument list respecting quotes and braces
            var args = SplitArguments(inner, out error);
            if (error != null) return null;
            if (args.Count == 0)
            {
                error = "run() requires a script argument. Example: run(\"echo hello\")";
                return null;
            }

            // First arg must be the script body (a quoted string)
            var script = ExtractQuotedString(args[0].Trim(), out error);
            if (error != null)
            {
                error = $"First argument to run() must be a quoted string. {error}";
                return null;
            }

            var cmd = new RunCommand { Script = script };

            // Parse remaining keyword arguments of the form key: value.
            // The split point is the FIRST colon; colons inside the quoted value
            // are therefore preserved.
            for (int i = 1; i < args.Count; i++)
            {
                var kv = args[i].Trim();
                var colonIdx = kv.IndexOf(':');
                if (colonIdx <= 0)
                {
                    error = $"Expected keyword argument (e.g. shell: \"bash\"), got: {kv}";
                    return null;
                }

                var key = kv.Substring(0, colonIdx).Trim();
                var value = kv.Substring(colonIdx + 1).Trim();

                // Option names are case-insensitive.
                switch (key.ToLowerInvariant())
                {
                    case "shell":
                        cmd.Shell = ExtractQuotedString(value, out error);
                        if (error != null) { error = $"shell: {error}"; return null; }
                        break;

                    case "working_directory":
                        cmd.WorkingDirectory = ExtractQuotedString(value, out error);
                        if (error != null) { error = $"working_directory: {error}"; return null; }
                        break;

                    case "env":
                        cmd.Env = ParseEnvBlock(value, out error);
                        if (error != null) { error = $"env: {error}"; return null; }
                        break;

                    default:
                        error = $"Unknown option: {key}. Valid options: shell, env, working_directory";
                        return null;
                }
            }

            return cmd;
        }
|
||||
|
||||
#endregion
|
||||
|
||||
#region Low-level parsing helpers
|
||||
|
||||
        /// <summary>
        /// Given "cmd(...)" returns the inner content between the outer parens.
        /// </summary>
        /// <remarks>
        /// Only the FIRST character after the prefix and the LAST character of the
        /// input are checked — inner parentheses are not balanced here; the caller
        /// (SplitArguments / ExtractQuotedString) handles nested structure.
        /// </remarks>
        private static string ExtractParenthesizedArgs(string input, string prefix, out string error)
        {
            error = null;
            var start = prefix.Length; // skip "cmd"
            if (start >= input.Length || input[start] != '(')
            {
                error = $"Expected '(' after {prefix}";
                return null;
            }

            if (input[input.Length - 1] != ')')
            {
                error = $"Expected ')' at end of {prefix}(...)";
                return null;
            }

            // Content between the '(' after the prefix and the trailing ')'.
            return input.Substring(start + 1, input.Length - start - 2);
        }
|
||||
|
||||
        /// <summary>
        /// Extracts a double-quoted string value, handling escaped quotes.
        /// </summary>
        /// <remarks>
        /// Escape semantics: a backslash causes the NEXT character to be taken
        /// literally — so \" yields ", \\ yields \, and \n yields the letter n
        /// (no control-character translation). Trailing whitespace after the
        /// closing quote is tolerated; any other trailing content is an error.
        /// </remarks>
        internal static string ExtractQuotedString(string input, out string error)
        {
            error = null;
            if (string.IsNullOrEmpty(input))
            {
                error = "Expected a quoted string, got empty input";
                return null;
            }

            if (input[0] != '"')
            {
                error = $"Expected a quoted string starting with \", got: {Truncate(input, 40)}";
                return null;
            }

            var sb = new StringBuilder();
            for (int i = 1; i < input.Length; i++)
            {
                if (input[i] == '\\' && i + 1 < input.Length)
                {
                    // Escape: append the following character verbatim.
                    sb.Append(input[i + 1]);
                    i++;
                }
                else if (input[i] == '"')
                {
                    // Check nothing meaningful follows the closing quote
                    var rest = input.Substring(i + 1).Trim();
                    if (rest.Length > 0)
                    {
                        error = $"Unexpected content after closing quote: {Truncate(rest, 40)}";
                        return null;
                    }
                    return sb.ToString();
                }
                else
                {
                    sb.Append(input[i]);
                }
            }

            error = "Unterminated string (missing closing \")";
            return null;
        }
|
||||
|
||||
        /// <summary>
        /// Splits a comma-separated argument list, respecting quoted strings
        /// and nested braces so that <c>"a, b", env: { K: "V, W" }</c> is
        /// correctly split into two arguments.
        /// </summary>
        /// <remarks>
        /// Segments are NOT trimmed here — callers trim. Note an asymmetry:
        /// an empty segment before a comma IS emitted (",a" → ["", "a"]) but a
        /// trailing empty segment is dropped ("a," → ["a"]), because the final
        /// segment is only added when non-empty.
        /// </remarks>
        internal static List<string> SplitArguments(string input, out string error)
        {
            error = null;
            var result = new List<string>();
            var current = new StringBuilder();
            int depth = 0;          // brace nesting level (outside quotes only)
            bool inQuote = false;   // inside a double-quoted string

            for (int i = 0; i < input.Length; i++)
            {
                var ch = input[i];

                // Inside a quote, a backslash escapes the next character —
                // keep BOTH characters so ExtractQuotedString can re-decode.
                if (ch == '\\' && inQuote && i + 1 < input.Length)
                {
                    current.Append(ch);
                    current.Append(input[++i]);
                    continue;
                }

                if (ch == '"')
                {
                    inQuote = !inQuote;
                    current.Append(ch);
                    continue;
                }

                if (!inQuote)
                {
                    if (ch == '{')
                    {
                        depth++;
                        current.Append(ch);
                        continue;
                    }
                    if (ch == '}')
                    {
                        depth--;
                        current.Append(ch);
                        continue;
                    }
                    // Only a top-level, unquoted comma separates arguments.
                    if (ch == ',' && depth == 0)
                    {
                        result.Add(current.ToString());
                        current.Clear();
                        continue;
                    }
                }

                current.Append(ch);
            }

            if (inQuote)
            {
                error = "Unterminated string in arguments";
                return null;
            }
            if (depth != 0)
            {
                error = "Unmatched braces in arguments";
                return null;
            }

            if (current.Length > 0)
            {
                result.Add(current.ToString());
            }

            return result;
        }
|
||||
|
||||
        /// <summary>
        /// Parses <c>{ KEY: "value", KEY2: "value2" }</c> into a dictionary.
        /// </summary>
        /// <remarks>
        /// Keys are unquoted and compared case-insensitively (ordinal), so a
        /// duplicate key — in any casing — overwrites the earlier value.
        /// An empty block <c>{}</c> yields an empty dictionary, not an error.
        /// </remarks>
        internal static Dictionary<string, string> ParseEnvBlock(string input, out string error)
        {
            error = null;
            var trimmed = input.Trim();
            if (!trimmed.StartsWith("{") || !trimmed.EndsWith("}"))
            {
                error = "Expected env block in the form { KEY: \"value\" }";
                return null;
            }

            var inner = trimmed.Substring(1, trimmed.Length - 2).Trim();
            if (string.IsNullOrEmpty(inner))
            {
                return new Dictionary<string, string>();
            }

            // Reuse the argument splitter: pairs are comma-separated and values
            // may themselves contain commas inside quotes.
            var pairs = SplitArguments(inner, out error);
            if (error != null) return null;

            var result = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
            foreach (var pair in pairs)
            {
                // Split at the FIRST colon: key before, quoted value after.
                var colonIdx = pair.IndexOf(':');
                if (colonIdx <= 0)
                {
                    error = $"Expected KEY: \"value\" pair, got: {Truncate(pair.Trim(), 40)}";
                    return null;
                }

                var key = pair.Substring(0, colonIdx).Trim();
                var val = ExtractQuotedString(pair.Substring(colonIdx + 1).Trim(), out error);
                if (error != null) return null;

                result[key] = val;
            }

            return result;
        }
|
||||
|
||||
private static string Truncate(string value, int maxLength)
|
||||
{
|
||||
if (value == null) return "(null)";
|
||||
return value.Length <= maxLength ? value : value.Substring(0, maxLength) + "...";
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
}
|
||||
373
src/Runner.Worker/Dap/DapVariableProvider.cs
Normal file
373
src/Runner.Worker/Dap/DapVariableProvider.cs
Normal file
@@ -0,0 +1,373 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Globalization;
|
||||
using GitHub.DistributedTask.Logging;
|
||||
using GitHub.DistributedTask.ObjectTemplating.Tokens;
|
||||
using GitHub.DistributedTask.Pipelines.ContextData;
|
||||
|
||||
namespace GitHub.Runner.Worker.Dap
|
||||
{
|
||||
/// <summary>
|
||||
/// Maps runner execution context data to DAP scopes and variables.
|
||||
///
|
||||
/// This is the single point where runner context values are materialized
|
||||
/// for the debugger. All values pass through the runner's existing
|
||||
/// <see cref="GitHub.DistributedTask.Logging.ISecretMasker"/> so the DAP
|
||||
/// surface never exposes anything beyond what a normal CI log would show.
|
||||
///
|
||||
/// The secrets scope is intentionally opaque: keys are visible but every
|
||||
/// value is replaced with a constant redaction marker.
|
||||
///
|
||||
/// Designed to be reusable by future DAP features (evaluate, hover, REPL)
|
||||
/// so that masking policy is never duplicated.
|
||||
/// </summary>
|
||||
internal sealed class DapVariableProvider
|
||||
{
|
||||
        // Well-known scope names that map to top-level expression contexts.
        // Order matters: the index determines the stable variablesReference ID.
        private static readonly string[] _scopeNames =
        {
            "github", "env", "runner", "job", "steps",
            "secrets", "inputs", "vars", "matrix", "needs"
        };

        // Scope references occupy the range [1, _scopeReferenceMax].
        private const int _scopeReferenceBase = 1;
        private const int _scopeReferenceMax = 100;

        // Dynamic (nested) variable references start above the scope range.
        private const int _dynamicReferenceBase = 101;

        // Constant marker shown instead of any value in the secrets scope.
        private const string _redactedValue = "***";

        // Masks secrets in every value surfaced through DAP; never null.
        private readonly ISecretMasker _secretMasker;

        // Maps dynamic variable reference IDs to the backing data and its
        // dot-separated path (e.g. "github.event.pull_request").
        private readonly Dictionary<int, (PipelineContextData Data, string Path)> _variableReferences = new();
        private int _nextVariableReference = _dynamicReferenceBase;

        /// <summary>Creates a provider that masks all values through <paramref name="secretMasker"/>.</summary>
        /// <exception cref="ArgumentNullException">When <paramref name="secretMasker"/> is null.</exception>
        public DapVariableProvider(ISecretMasker secretMasker)
        {
            _secretMasker = secretMasker ?? throw new ArgumentNullException(nameof(secretMasker));
        }
|
||||
|
||||
/// <summary>
|
||||
/// Clears all dynamic variable references.
|
||||
/// Call this whenever the paused execution context changes (e.g. new step)
|
||||
/// so that stale nested references are not served to the client.
|
||||
/// </summary>
|
||||
public void Reset()
|
||||
{
|
||||
_variableReferences.Clear();
|
||||
_nextVariableReference = _dynamicReferenceBase;
|
||||
}
|
||||
|
||||
        /// <summary>
        /// Returns the list of DAP scopes for the given execution context.
        /// Each scope corresponds to a well-known runner expression context
        /// (github, env, secrets, …) and carries a stable variablesReference
        /// that the client can use to drill into variables.
        /// </summary>
        /// <param name="context">Current execution context; a null context or
        /// null ExpressionValues yields an empty list.</param>
        public List<Scope> GetScopes(IExecutionContext context)
        {
            var scopes = new List<Scope>();

            if (context?.ExpressionValues == null)
            {
                return scopes;
            }

            for (int i = 0; i < _scopeNames.Length; i++)
            {
                var scopeName = _scopeNames[i];
                // Skip contexts that are absent for this job (e.g. no matrix).
                if (!context.ExpressionValues.TryGetValue(scopeName, out var value) || value == null)
                {
                    continue;
                }

                var scope = new Scope
                {
                    Name = scopeName,
                    // Stable ID derived from array position — see _scopeNames.
                    VariablesReference = _scopeReferenceBase + i,
                    Expensive = false,
                    // Hint clients to render secrets distinctly (values are redacted anyway).
                    PresentationHint = scopeName == "secrets" ? "registers" : null
                };

                if (value is DictionaryContextData dict)
                {
                    scope.NamedVariables = dict.Count;
                }
                // NOTE(review): if CaseSensitiveDictionaryContextData derives from
                // DictionaryContextData, the branch above already matches it and
                // this else-if is dead — confirm against the type hierarchy.
                else if (value is CaseSensitiveDictionaryContextData csDict)
                {
                    scope.NamedVariables = csDict.Count;
                }

                scopes.Add(scope);
            }

            return scopes;
        }
|
||||
|
||||
        /// <summary>
        /// Returns the child variables for a given variablesReference.
        /// The reference may point at a top-level scope (1–100) or a
        /// dynamically registered nested container (101+).
        /// </summary>
        /// <param name="context">Current execution context; null yields an empty list.</param>
        /// <param name="variablesReference">Reference previously handed out via
        /// <see cref="GetScopes"/> or a nested Variable.</param>
        /// <returns>Child variables; empty when the reference is unknown or stale.</returns>
        public List<Variable> GetVariables(IExecutionContext context, int variablesReference)
        {
            var variables = new List<Variable>();

            if (context?.ExpressionValues == null)
            {
                return variables;
            }

            PipelineContextData data = null;
            string basePath = null;
            bool isSecretsScope = false;

            // Scope range: resolve directly from the well-known scope names.
            if (variablesReference >= _scopeReferenceBase && variablesReference <= _scopeReferenceMax)
            {
                var scopeIndex = variablesReference - _scopeReferenceBase;
                if (scopeIndex < _scopeNames.Length)
                {
                    var scopeName = _scopeNames[scopeIndex];
                    isSecretsScope = scopeName == "secrets";
                    if (context.ExpressionValues.TryGetValue(scopeName, out data))
                    {
                        basePath = scopeName;
                    }
                }
            }
            // Dynamic range: resolve from the registered nested references.
            else if (_variableReferences.TryGetValue(variablesReference, out var refData))
            {
                data = refData.Data;
                basePath = refData.Path;
                // Anything nested under the secrets scope stays redacted.
                isSecretsScope = basePath?.StartsWith("secrets", StringComparison.OrdinalIgnoreCase) == true;
            }

            if (data == null)
            {
                return variables;
            }

            ConvertToVariables(data, basePath, isSecretsScope, variables);
            return variables;
        }
|
||||
|
||||
        /// <summary>
        /// Evaluates a GitHub Actions expression (e.g. "github.repository",
        /// "${{ github.event_name }}") in the context of the current step and
        /// returns a masked result suitable for the DAP evaluate response.
        ///
        /// Uses the runner's standard <see cref="GitHub.DistributedTask.Pipelines.ObjectTemplating.IPipelineTemplateEvaluator"/>
        /// so the full expression language is available (functions, operators,
        /// context access).
        /// </summary>
        /// <param name="expression">Expression text, with or without the ${{ }} wrapper.</param>
        /// <param name="context">Current execution context; null yields a placeholder response.</param>
        /// <returns>Never throws: evaluation failures are returned as a masked
        /// error-message response rather than propagated.</returns>
        public EvaluateResponseBody EvaluateExpression(string expression, IExecutionContext context)
        {
            if (context?.ExpressionValues == null)
            {
                return new EvaluateResponseBody
                {
                    Result = "(no execution context available)",
                    Type = "string",
                    VariablesReference = 0
                };
            }

            // Strip ${{ }} wrapper if present
            var expr = expression?.Trim() ?? string.Empty;
            if (expr.StartsWith("${{") && expr.EndsWith("}}"))
            {
                expr = expr.Substring(3, expr.Length - 5).Trim();
            }

            if (string.IsNullOrEmpty(expr))
            {
                return new EvaluateResponseBody
                {
                    Result = string.Empty,
                    Type = "string",
                    VariablesReference = 0
                };
            }

            try
            {
                var templateEvaluator = context.ToPipelineTemplateEvaluator();
                var token = new BasicExpressionToken(null, null, null, expr);

                // Reuses the display-name evaluation path, which returns the
                // expression result stringified.
                var result = templateEvaluator.EvaluateStepDisplayName(
                    token,
                    context.ExpressionValues,
                    context.ExpressionFunctions);

                // Mask before returning so DAP never shows more than a CI log would.
                result = _secretMasker.MaskSecrets(result ?? "null");

                return new EvaluateResponseBody
                {
                    Result = result,
                    Type = InferResultType(result),
                    VariablesReference = 0
                };
            }
            catch (Exception ex)
            {
                // Exception messages can embed context values — mask them too.
                var errorMessage = _secretMasker.MaskSecrets($"Evaluation error: {ex.Message}");
                return new EvaluateResponseBody
                {
                    Result = errorMessage,
                    Type = "string",
                    VariablesReference = 0
                };
            }
        }
|
||||
|
||||
/// <summary>
|
||||
/// Infers a simple DAP type hint from the string representation of a result.
|
||||
/// </summary>
|
||||
internal static string InferResultType(string value)
|
||||
{
|
||||
value = value?.ToLower();
|
||||
if (value == null || value == "null")
|
||||
return "null";
|
||||
if (value == "true" || value == "false")
|
||||
return "boolean";
|
||||
if (double.TryParse(value, NumberStyles.Any,
|
||||
CultureInfo.InvariantCulture, out _))
|
||||
return "number";
|
||||
if (value.StartsWith("{") || value.StartsWith("["))
|
||||
return "object";
|
||||
return "string";
|
||||
}
|
||||
|
||||
#region Private helpers
|
||||
|
||||
        // Flattens one container (dictionary or array) into DAP Variable entries,
        // appending to the supplied list. Non-container data types produce nothing.
        private void ConvertToVariables(
            PipelineContextData data,
            string basePath,
            bool isSecretsScope,
            List<Variable> variables)
        {
            switch (data)
            {
                case DictionaryContextData dict:
                    foreach (var pair in dict)
                    {
                        variables.Add(CreateVariable(pair.Key, pair.Value, basePath, isSecretsScope));
                    }
                    break;

                // NOTE(review): if CaseSensitiveDictionaryContextData derives from
                // DictionaryContextData, the case above already matches it and this
                // arm is unreachable — confirm against the type hierarchy.
                case CaseSensitiveDictionaryContextData csDict:
                    foreach (var pair in csDict)
                    {
                        variables.Add(CreateVariable(pair.Key, pair.Value, basePath, isSecretsScope));
                    }
                    break;

                case ArrayContextData array:
                    // Array elements are named "[0]", "[1]", … for DAP display.
                    for (int i = 0; i < array.Count; i++)
                    {
                        var variable = CreateVariable($"[{i}]", array[i], basePath, isSecretsScope);
                        variables.Add(variable);
                    }
                    break;
            }
        }
|
||||
|
||||
        // Materializes a single context value as a DAP Variable:
        //  - secrets scope: value always redacted, never drillable
        //  - scalars: masked string/number/boolean, VariablesReference = 0
        //  - containers: summary text plus a fresh dynamic reference for drill-down
        private Variable CreateVariable(
            string name,
            PipelineContextData value,
            string basePath,
            bool isSecretsScope)
        {
            var childPath = string.IsNullOrEmpty(basePath) ? name : $"{basePath}.{name}";
            var variable = new Variable
            {
                Name = name,
                // EvaluateName lets the client re-evaluate this node as an expression.
                EvaluateName = $"${{{{ {childPath} }}}}"
            };

            // Secrets scope: redact ALL values regardless of underlying type.
            // Keys are visible but values are always replaced with the
            // redaction marker, and nested containers are not drillable.
            if (isSecretsScope)
            {
                variable.Value = _redactedValue;
                variable.Type = "string";
                variable.VariablesReference = 0;
                return variable;
            }

            if (value == null)
            {
                variable.Value = "null";
                variable.Type = "null";
                variable.VariablesReference = 0;
                return variable;
            }

            switch (value)
            {
                case StringContextData str:
                    variable.Value = _secretMasker.MaskSecrets(str.Value);
                    variable.Type = "string";
                    variable.VariablesReference = 0;
                    break;

                case NumberContextData num:
                    // G15 round-trips typical doubles without exponent noise;
                    // invariant culture keeps the decimal separator stable.
                    variable.Value = _secretMasker.MaskSecrets(num.Value.ToString("G15", CultureInfo.InvariantCulture));
                    variable.Type = "number";
                    variable.VariablesReference = 0;
                    break;

                case BooleanContextData boolVal:
                    variable.Value = boolVal.Value ? "true" : "false";
                    variable.Type = "boolean";
                    variable.VariablesReference = 0;
                    break;

                case DictionaryContextData dict:
                    variable.Value = $"Object ({dict.Count} properties)";
                    variable.Type = "object";
                    variable.VariablesReference = RegisterVariableReference(dict, childPath);
                    variable.NamedVariables = dict.Count;
                    break;

                // NOTE(review): possibly unreachable if CaseSensitiveDictionaryContextData
                // derives from DictionaryContextData — confirm.
                case CaseSensitiveDictionaryContextData csDict:
                    variable.Value = $"Object ({csDict.Count} properties)";
                    variable.Type = "object";
                    variable.VariablesReference = RegisterVariableReference(csDict, childPath);
                    variable.NamedVariables = csDict.Count;
                    break;

                case ArrayContextData array:
                    variable.Value = $"Array ({array.Count} items)";
                    variable.Type = "array";
                    variable.VariablesReference = RegisterVariableReference(array, childPath);
                    variable.IndexedVariables = array.Count;
                    break;

                default:
                    // Unknown context-data type: fall back to its JSON form, masked.
                    var rawValue = value.ToJToken()?.ToString() ?? "unknown";
                    variable.Value = _secretMasker.MaskSecrets(rawValue);
                    variable.Type = value.GetType().Name;
                    variable.VariablesReference = 0;
                    break;
            }

            return variable;
        }
|
||||
|
||||
private int RegisterVariableReference(PipelineContextData data, string path)
|
||||
{
|
||||
var reference = _nextVariableReference++;
|
||||
_variableReferences[reference] = (data, path);
|
||||
return reference;
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
}
|
||||
33
src/Runner.Worker/Dap/DebuggerConfig.cs
Normal file
33
src/Runner.Worker/Dap/DebuggerConfig.cs
Normal file
@@ -0,0 +1,33 @@
|
||||
using GitHub.DistributedTask.Pipelines;
|
||||
|
||||
namespace GitHub.Runner.Worker.Dap
|
||||
{
|
||||
    /// <summary>
    /// Consolidated runtime configuration for the job debugger.
    /// Populated once from the acquire response and owned by <see cref="GlobalContext"/>.
    /// </summary>
    public sealed class DebuggerConfig
    {
        /// <param name="enabled">Whether the debugger is enabled for this job.</param>
        /// <param name="tunnel">Dev Tunnel details; may be null when disabled.</param>
        public DebuggerConfig(bool enabled, DebuggerTunnelInfo tunnel)
        {
            Enabled = enabled;
            Tunnel = tunnel;
        }

        /// <summary>Whether the debugger is enabled for this job.</summary>
        public bool Enabled { get; }

        /// <summary>
        /// Dev Tunnel details for remote debugging.
        /// Required when <see cref="Enabled"/> is true.
        /// </summary>
        public DebuggerTunnelInfo Tunnel { get; }

        /// <summary>Whether the tunnel configuration is complete and valid.</summary>
        // Port is restricted to the non-privileged range; ports below 1024 are
        // rejected — presumably to avoid requiring elevated binds (confirm intent).
        public bool HasValidTunnel => Tunnel != null
            && !string.IsNullOrEmpty(Tunnel.TunnelId)
            && !string.IsNullOrEmpty(Tunnel.ClusterId)
            && !string.IsNullOrEmpty(Tunnel.HostToken)
            && Tunnel.Port >= 1024 && Tunnel.Port <= 65535;
    }
|
||||
}
|
||||
26
src/Runner.Worker/Dap/IDapDebugger.cs
Normal file
26
src/Runner.Worker/Dap/IDapDebugger.cs
Normal file
@@ -0,0 +1,26 @@
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.Runner.Common;
|
||||
|
||||
namespace GitHub.Runner.Worker.Dap
|
||||
{
|
||||
    /// <summary>Lifecycle states of a DAP debug session.</summary>
    public enum DapSessionState
    {
        NotStarted,
        WaitingForConnection,
        Initializing,
        Ready,
        Paused,
        Running,
        Terminated
    }

    /// <summary>
    /// Job-scoped DAP debugger service. The worker calls the lifecycle hooks
    /// around job and step execution; the default implementation is
    /// <see cref="DapDebugger"/> (resolved via the service locator).
    /// </summary>
    [ServiceLocator(Default = typeof(DapDebugger))]
    public interface IDapDebugger : IRunnerService
    {
        // Starts the debug session for the given job context.
        Task StartAsync(IExecutionContext jobContext);
        // Completes once the debugger is ready for the job to proceed.
        Task WaitUntilReadyAsync();
        // Invoked before each step runs (may pause execution at a breakpoint).
        Task OnStepStartingAsync(IStep step);
        // Invoked after each step finishes.
        void OnStepCompleted(IStep step);
        // Invoked once when the job finishes, to wind down the session.
        Task OnJobCompletedAsync();
    }
|
||||
}
|
||||
812
src/Runner.Worker/Dap/WebSocketDapBridge.cs
Normal file
812
src/Runner.Worker/Dap/WebSocketDapBridge.cs
Normal file
@@ -0,0 +1,812 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Net;
|
||||
using System.Net.Sockets;
|
||||
using System.Net.WebSockets;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.Runner.Common;
|
||||
|
||||
namespace GitHub.Runner.Worker.Dap
|
||||
{
|
||||
internal sealed class WebSocketDapBridge : IAsyncDisposable
|
||||
{
|
||||
        // Classification of the first bytes read from an incoming connection,
        // used to decide how (or whether) to speak WebSocket to the peer.
        internal enum IncomingStreamPrefixKind
        {
            Unknown,
            HttpWebSocketUpgrade,
            PreUpgradedWebSocket,
            WebSocketReservedBits,
            Http2Preface,
            TlsClientHello,
        }
|
||||
|
||||
        private const int _bufferSize = 32 * 1024;
        private const int _maxHeaderLineLength = 8 * 1024;
        private const int _defaultMaxInboundMessageSize = 10 * 1024 * 1024; // 10 MB
        private static readonly TimeSpan _keepAliveInterval = TimeSpan.FromSeconds(30);
        private static readonly TimeSpan _closeTimeout = TimeSpan.FromSeconds(5);
        // GUID appended to Sec-WebSocket-Key when computing Sec-WebSocket-Accept (RFC 6455).
        private const string _webSocketAcceptMagic = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11";

        private readonly Tracing _trace;
        // Local port the bridge listens on and the loopback port it forwards to.
        private readonly int _listenPort;
        private readonly int _targetPort;

        // Lifetime state owned by Start()/DisposeAsync().
        private TcpListener _listener;
        private CancellationTokenSource _loopCts;
        private Task _acceptLoopTask;

        // Overridable for unit tests to avoid allocating 10 MB payloads.
        internal int MaxInboundMessageSize { get; set; } = _defaultMaxInboundMessageSize;
|
||||
|
||||
/// <summary>
/// Creates a bridge that listens on loopback port <paramref name="listenPort"/>
/// and forwards DAP traffic to 127.0.0.1:<paramref name="targetPort"/>.
/// </summary>
/// <param name="trace">Trace sink for diagnostics; must not be null.</param>
/// <param name="listenPort">Local port to accept tunnel connections on.</param>
/// <param name="targetPort">Local port of the DAP server to forward to.</param>
public WebSocketDapBridge(Tracing trace, int listenPort, int targetPort)
{
    _trace = trace ?? throw new ArgumentNullException(nameof(trace));
    _listenPort = listenPort;
    _targetPort = targetPort;
}
|
||||
|
||||
/// <summary>
/// Starts the loopback TCP listener and the background accept loop.
/// </summary>
/// <exception cref="InvalidOperationException">The bridge is already started.</exception>
public void Start()
{
    if (_listener != null)
    {
        throw new InvalidOperationException("WebSocket DAP bridge already started.");
    }

    // Bind to loopback only so the bridge is not reachable from the network.
    _listener = new TcpListener(IPAddress.Loopback, _listenPort);
    _listener.Start();
    _loopCts = new CancellationTokenSource();
    _acceptLoopTask = AcceptLoopAsync(_loopCts.Token);

    _trace.Info($"WebSocket DAP bridge listening on {_listener.LocalEndpoint} -> 127.0.0.1:{_targetPort}");
}
|
||||
|
||||
/// <summary>
/// Cancels the accept loop, stops the listener, waits for the loop to drain,
/// and resets the bridge fields. Safe to call even when <see cref="Start"/>
/// was never called; cancel/stop failures are swallowed as best effort.
/// </summary>
public async ValueTask DisposeAsync()
{
    try
    {
        _loopCts?.Cancel();
    }
    catch
    {
        // best effort during shutdown
    }

    try
    {
        _listener?.Stop();
    }
    catch
    {
        // best effort during shutdown
    }

    if (_acceptLoopTask != null)
    {
        try
        {
            // Wait for the accept loop to observe cancellation and exit.
            await _acceptLoopTask;
        }
        catch (OperationCanceledException)
        {
            // expected on shutdown
        }
    }

    _loopCts?.Dispose();
    _loopCts = null;
    _listener = null;
    _acceptLoopTask = null;
}
|
||||
|
||||
/// <summary>
/// Accepts tunnel connections until cancelled. Each client is handled to
/// completion before the next accept (one active session at a time).
/// Unexpected per-connection errors are logged and the loop continues.
/// </summary>
private async Task AcceptLoopAsync(CancellationToken cancellationToken)
{
    while (!cancellationToken.IsCancellationRequested)
    {
        TcpClient client = null;
        try
        {
            client = await _listener.AcceptTcpClientAsync(cancellationToken);
            // Disable Nagle so small DAP messages are not delayed.
            client.NoDelay = true;
            await HandleClientAsync(client, cancellationToken);
        }
        catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested)
        {
            break;
        }
        catch (ObjectDisposedException) when (cancellationToken.IsCancellationRequested)
        {
            // Listener was stopped during shutdown.
            break;
        }
        catch (Exception ex)
        {
            client?.Dispose();
            _trace.Warning($"WebSocket DAP bridge connection error ({ex.GetType().Name})");
            _trace.Error(ex);
        }
    }

    _trace.Info("WebSocket DAP bridge accept loop ended");
}
|
||||
|
||||
/// <summary>
/// Handles one tunnel client: establishes (or adopts) the websocket, connects
/// to the local DAP server, then pumps data in both directions until either
/// side disconnects or the bridge is cancelled.
/// </summary>
private async Task HandleClientAsync(TcpClient incomingClient, CancellationToken cancellationToken)
{
    using (incomingClient)
    using (var incomingStream = incomingClient.GetStream())
    {
        _trace.Info($"WebSocket DAP bridge accepted client {incomingClient.Client.RemoteEndPoint}");

        // Null means the stream was not a usable websocket connection (an HTTP
        // error response has already been written where appropriate).
        var webSocket = await AcceptWebSocketAsync(incomingStream, cancellationToken);
        if (webSocket == null)
        {
            return;
        }

        using (webSocket)
        using (var dapClient = new TcpClient())
        {
            dapClient.NoDelay = true;
            await dapClient.ConnectAsync(IPAddress.Loopback, _targetPort, cancellationToken);

            using (var dapStream = dapClient.GetStream())
            using (var sessionCts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken))
            {
                var proxyToken = sessionCts.Token;
                var wsToTcpTask = PumpWebSocketToTcpAsync(webSocket, dapStream, proxyToken);
                var tcpToWsTask = PumpTcpToWebSocketAsync(dapStream, webSocket, proxyToken);

                // When either pump finishes (disconnect or error), cancel the
                // other so the session unwinds promptly.
                var completedTask = await Task.WhenAny(wsToTcpTask, tcpToWsTask);
                sessionCts.Cancel();

                try
                {
                    // Observe the outcome of the pump that finished first.
                    await completedTask;
                }
                catch (OperationCanceledException) when (proxyToken.IsCancellationRequested)
                {
                    // expected during shutdown
                }

                try
                {
                    // Drain the second pump as well.
                    await Task.WhenAll(wsToTcpTask, tcpToWsTask);
                }
                catch (OperationCanceledException) when (proxyToken.IsCancellationRequested)
                {
                    // expected during shutdown
                }
                catch (IOException)
                {
                    // peer disconnected while unwinding
                }
                catch (WebSocketException)
                {
                    // peer disconnected while unwinding
                }
            }

            // Attempt a graceful websocket close before disposing.
            await CloseWebSocketAsync(webSocket);
        }
    }
}
|
||||
|
||||
/// <summary>
/// Establishes the websocket side of the bridge. Sniffs the first bytes of
/// the stream: an already-upgraded websocket stream is adopted directly, an
/// HTTP GET triggers a manual RFC 6455 upgrade handshake, and anything else
/// is rejected. Returns null when no websocket could be established.
/// </summary>
private async Task<WebSocket> AcceptWebSocketAsync(NetworkStream stream, CancellationToken cancellationToken)
{
    var initialBytes = await ReadInitialBytesAsync(stream, cancellationToken);
    if (initialBytes == null || initialBytes.Length == 0)
    {
        return null;
    }

    var prefixKind = ClassifyIncomingStreamPrefix(initialBytes);
    if (prefixKind == IncomingStreamPrefixKind.PreUpgradedWebSocket)
    {
        _trace.Info($"Treating incoming tunnel stream as an already-upgraded websocket connection ({DescribeInitialBytes(initialBytes)})");
        // Wrap the stream so the sniffed bytes are replayed to the websocket.
        return WebSocket.CreateFromStream(
            new ReplayableStream(stream, initialBytes),
            isServer: true,
            subProtocol: null,
            keepAliveInterval: _keepAliveInterval);
    }

    if (prefixKind != IncomingStreamPrefixKind.HttpWebSocketUpgrade)
    {
        _trace.Warning($"Unsupported debugger tunnel stream prefix ({prefixKind}): {DescribeInitialBytes(initialBytes)}");
        return null;
    }

    // HTTP upgrade path: re-parse from the start with the sniffed bytes replayed.
    var handshakeStream = new ReplayableStream(stream, initialBytes);
    var requestLine = await ReadLineAsync(handshakeStream, cancellationToken);
    if (string.IsNullOrEmpty(requestLine))
    {
        return null;
    }

    // Read headers until the blank line that terminates them. Duplicate
    // headers are folded into a comma-separated list.
    var headers = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
    while (true)
    {
        var line = await ReadLineAsync(handshakeStream, cancellationToken);
        if (line == null)
        {
            // Stream ended mid-headers.
            return null;
        }

        if (line.Length == 0)
        {
            break;
        }

        var separatorIndex = line.IndexOf(':');
        if (separatorIndex <= 0)
        {
            await WriteHttpErrorAsync(stream, HttpStatusCode.BadRequest, "Invalid HTTP header.", cancellationToken);
            return null;
        }

        var headerName = line.Substring(0, separatorIndex).Trim();
        var headerValue = line.Substring(separatorIndex + 1).Trim();

        if (headers.TryGetValue(headerName, out var existingValue))
        {
            headers[headerName] = $"{existingValue}, {headerValue}";
        }
        else
        {
            headers[headerName] = headerValue;
        }
    }

    if (!IsValidWebSocketRequest(requestLine, headers))
    {
        _trace.Info($"Rejected non-websocket request: {requestLine}");
        await WriteHttpErrorAsync(stream, HttpStatusCode.BadRequest, "Expected a websocket upgrade request.", cancellationToken);
        return null;
    }

    // Send the 101 Switching Protocols response with the RFC 6455 accept key.
    var webSocketKey = headers["Sec-WebSocket-Key"];
    var acceptValue = ComputeAcceptValue(webSocketKey);
    var responseBytes = Encoding.ASCII.GetBytes(
        "HTTP/1.1 101 Switching Protocols\r\n" +
        "Connection: Upgrade\r\n" +
        "Upgrade: websocket\r\n" +
        $"Sec-WebSocket-Accept: {acceptValue}\r\n" +
        "\r\n");

    await handshakeStream.WriteAsync(responseBytes, 0, responseBytes.Length, cancellationToken);
    await handshakeStream.FlushAsync(cancellationToken);

    _trace.Info("WebSocket DAP bridge completed websocket handshake");
    return WebSocket.CreateFromStream(handshakeStream, isServer: true, subProtocol: null, keepAliveInterval: _keepAliveInterval);
}
|
||||
|
||||
/// <summary>
/// Reads complete websocket messages (text or binary) and forwards each one
/// to the DAP server in DAP wire format: a Content-Length header block
/// followed by the raw payload. Returns when the websocket closes, a message
/// exceeds <see cref="MaxInboundMessageSize"/>, or cancellation is requested.
/// </summary>
private async Task PumpWebSocketToTcpAsync(WebSocket source, NetworkStream destination, CancellationToken cancellationToken)
{
    var buffer = new byte[_bufferSize];

    while (!cancellationToken.IsCancellationRequested)
    {
        using (var messageStream = new MemoryStream())
        {
            // Reassemble one (possibly fragmented) websocket message.
            WebSocketReceiveResult result;
            do
            {
                result = await source.ReceiveAsync(new ArraySegment<byte>(buffer), cancellationToken);
                if (result.MessageType == WebSocketMessageType.Close)
                {
                    return;
                }

                if (result.MessageType != WebSocketMessageType.Binary &&
                    result.MessageType != WebSocketMessageType.Text)
                {
                    break;
                }

                if (result.Count > 0)
                {
                    // Enforce the inbound size cap before buffering more data.
                    if (messageStream.Length + result.Count > MaxInboundMessageSize)
                    {
                        _trace.Warning($"WebSocket message exceeds maximum allowed size of {MaxInboundMessageSize} bytes, closing connection");
                        await source.CloseAsync(
                            WebSocketCloseStatus.MessageTooBig,
                            $"Message exceeds {MaxInboundMessageSize} byte limit",
                            CancellationToken.None);
                        return;
                    }

                    messageStream.Write(buffer, 0, result.Count);
                }
            }
            while (!result.EndOfMessage);

            // Skip whatever non-data frame ended the reassembly loop.
            if (result.MessageType != WebSocketMessageType.Binary &&
                result.MessageType != WebSocketMessageType.Text)
            {
                continue;
            }

            var messageBytes = messageStream.ToArray();
            if (messageBytes.Length == 0)
            {
                continue;
            }

            // DAP framing: "Content-Length: N\r\n\r\n" followed by the body.
            var contentLengthHeader = Encoding.ASCII.GetBytes($"Content-Length: {messageBytes.Length}\r\n\r\n");
            await destination.WriteAsync(contentLengthHeader, 0, contentLengthHeader.Length, cancellationToken);
            await destination.WriteAsync(messageBytes, 0, messageBytes.Length, cancellationToken);
            await destination.FlushAsync(cancellationToken);
        }
    }
}
|
||||
|
||||
/// <summary>
/// Reads the DAP server's byte stream, splits it into Content-Length framed
/// messages, and forwards each message body to the websocket as one text
/// message. Returns when the TCP stream ends or cancellation is requested.
/// </summary>
private static async Task PumpTcpToWebSocketAsync(NetworkStream source, WebSocket destination, CancellationToken cancellationToken)
{
    var readBuffer = new byte[_bufferSize];
    var dapBuffer = new List<byte>();

    while (!cancellationToken.IsCancellationRequested)
    {
        var bytesRead = await source.ReadAsync(readBuffer, 0, readBuffer.Length, cancellationToken);
        if (bytesRead == 0)
        {
            // Remote side closed the connection.
            break;
        }

        for (int i = 0; i < bytesRead; i++)
        {
            dapBuffer.Add(readBuffer[i]);
        }

        // A single read may complete zero, one, or several DAP messages.
        while (TryParseDapMessage(dapBuffer, out var messageBody))
        {
            await destination.SendAsync(
                new ArraySegment<byte>(messageBody),
                WebSocketMessageType.Text,
                endOfMessage: true,
                cancellationToken);
        }
    }
}
|
||||
|
||||
/// <summary>
/// Attempts to extract one complete DAP message ("Content-Length: N" header
/// block, a blank line, then an N-byte body) from the front of
/// <paramref name="buffer"/>. On success the consumed bytes are removed from
/// the buffer and the body is returned via <paramref name="messageBody"/>.
/// </summary>
/// <returns>True when a complete message was extracted.</returns>
private static bool TryParseDapMessage(List<byte> buffer, out byte[] messageBody)
{
    messageBody = null;

    // Locate the end-of-headers marker (CRLF CRLF).
    var headerEndMarker = new byte[] { (byte)'\r', (byte)'\n', (byte)'\r', (byte)'\n' };
    var headerEndIndex = FindSequence(buffer, headerEndMarker);
    if (headerEndIndex == -1)
    {
        return false;
    }

    var headerBytes = buffer.GetRange(0, headerEndIndex).ToArray();
    var headerText = Encoding.ASCII.GetString(headerBytes);

    var contentLength = -1;
    foreach (var line in headerText.Split(new[] { "\r\n" }, StringSplitOptions.RemoveEmptyEntries))
    {
        if (line.StartsWith("Content-Length:", StringComparison.OrdinalIgnoreCase))
        {
            var valueStart = line.IndexOf(':') + 1;
            if (int.TryParse(line.Substring(valueStart).Trim(), out var parsedLength))
            {
                contentLength = parsedLength;
                break;
            }
        }
    }

    if (contentLength < 0)
    {
        // Malformed header block without a usable Content-Length: discard it
        // to resynchronize. NOTE(review): this returns false even if another
        // complete message already sits in the buffer; that message is only
        // parsed after the next read appends more bytes — confirm acceptable.
        buffer.RemoveRange(0, headerEndIndex + 4);
        return false;
    }

    var messageStart = headerEndIndex + 4;
    var messageEnd = messageStart + contentLength;

    if (buffer.Count < messageEnd)
    {
        // Body not fully buffered yet; keep waiting.
        return false;
    }

    messageBody = buffer.GetRange(messageStart, contentLength).ToArray();
    buffer.RemoveRange(0, messageEnd);
    return true;
}
|
||||
|
||||
// Returns the index of the first occurrence of <paramref name="sequence"/>
// inside <paramref name="buffer"/>, or -1 when it is not present.
private static int FindSequence(List<byte> buffer, byte[] sequence)
{
    var lastStart = buffer.Count - sequence.Length;
    for (int start = 0; start <= lastStart; start++)
    {
        int matched = 0;
        while (matched < sequence.Length && buffer[start + matched] == sequence[matched])
        {
            matched++;
        }

        if (matched == sequence.Length)
        {
            return start;
        }
    }

    return -1;
}
|
||||
|
||||
// Validates that the request line and headers form a websocket upgrade
// request: an HTTP GET carrying "Connection: Upgrade", "Upgrade: websocket"
// and a Sec-WebSocket-Key header.
private static bool IsValidWebSocketRequest(string requestLine, IDictionary<string, string> headers)
{
    if (string.IsNullOrWhiteSpace(requestLine))
    {
        return false;
    }

    var parts = requestLine.Split(' ');
    var isGetRequest = parts.Length >= 3 && string.Equals(parts[0], "GET", StringComparison.OrdinalIgnoreCase);
    if (!isGetRequest)
    {
        return false;
    }

    if (!headers.ContainsKey("Sec-WebSocket-Key"))
    {
        return false;
    }

    return HeaderContainsToken(headers, "Connection", "Upgrade")
        && HeaderContainsToken(headers, "Upgrade", "websocket");
}
|
||||
|
||||
// True when the named header exists and its comma-separated value list
// contains <paramref name="expectedToken"/> (trimmed, case-insensitive).
private static bool HeaderContainsToken(IDictionary<string, string> headers, string headerName, string expectedToken)
{
    if (!headers.TryGetValue(headerName, out var headerValue) || string.IsNullOrWhiteSpace(headerValue))
    {
        return false;
    }

    foreach (var rawToken in headerValue.Split(','))
    {
        if (string.Equals(rawToken.Trim(), expectedToken, StringComparison.OrdinalIgnoreCase))
        {
            return true;
        }
    }

    return false;
}
|
||||
|
||||
// Computes the Sec-WebSocket-Accept response value per RFC 6455:
// Base64(SHA1(client key + magic GUID)). SHA-1 is mandated by the protocol
// here; it is not used for security purposes.
private static string ComputeAcceptValue(string webSocketKey)
{
    var input = Encoding.ASCII.GetBytes(webSocketKey + _webSocketAcceptMagic);
    var hash = SHA1.HashData(input);
    return Convert.ToBase64String(hash);
}
|
||||
|
||||
/// <summary>
/// Reads a single CRLF-terminated line from <paramref name="stream"/>, one
/// byte at a time so no bytes beyond the line are consumed. Returns the line
/// without its CRLF, or null when the stream ends before any byte is read.
/// </summary>
/// <exception cref="InvalidDataException">The line exceeds <see cref="_maxHeaderLineLength"/>.</exception>
private static async Task<string> ReadLineAsync(Stream stream, CancellationToken cancellationToken)
{
    var lineBuilder = new StringBuilder();
    var buffer = new byte[1];
    var previousWasCarriageReturn = false;

    while (true)
    {
        var bytesRead = await stream.ReadAsync(buffer, 0, 1, cancellationToken);
        if (bytesRead == 0)
        {
            // EOF: return the partial line if any, otherwise null.
            return lineBuilder.Length > 0 ? lineBuilder.ToString() : null;
        }

        var currentChar = (char)buffer[0];
        if (currentChar == '\n' && previousWasCarriageReturn)
        {
            // Strip the '\r' appended on the previous iteration.
            if (lineBuilder.Length > 0 && lineBuilder[lineBuilder.Length - 1] == '\r')
            {
                lineBuilder.Length--;
            }

            return lineBuilder.ToString();
        }

        previousWasCarriageReturn = currentChar == '\r';
        lineBuilder.Append(currentChar);

        // Guard against unbounded header lines from a misbehaving client.
        if (lineBuilder.Length > _maxHeaderLineLength)
        {
            throw new InvalidDataException($"HTTP header line exceeds maximum length of {_maxHeaderLineLength}");
        }
    }
}
|
||||
|
||||
/// <summary>
/// Reads up to the first 4 bytes of the stream for protocol sniffing.
/// Returns fewer bytes (possibly an empty array) when the stream ends first.
/// </summary>
private static async Task<byte[]> ReadInitialBytesAsync(NetworkStream stream, CancellationToken cancellationToken)
{
    var buffer = new byte[4];
    var totalRead = 0;

    // Loop because a single read may return fewer bytes than requested.
    while (totalRead < buffer.Length)
    {
        var bytesRead = await stream.ReadAsync(buffer, totalRead, buffer.Length - totalRead, cancellationToken);
        if (bytesRead == 0)
        {
            break;
        }

        totalRead += bytesRead;
    }

    if (totalRead == 0)
    {
        return Array.Empty<byte>();
    }

    if (totalRead == buffer.Length)
    {
        return buffer;
    }

    // Trim to the number of bytes actually read.
    var initialBytes = new byte[totalRead];
    Array.Copy(buffer, initialBytes, totalRead);
    return initialBytes;
}
|
||||
|
||||
// Classifies the sniffed prefix bytes of an incoming tunnel stream so the
// bridge can decide how to treat the connection. Check order is preserved
// from the original logic (HTTP, HTTP/2 preface, TLS, raw websocket frame).
internal static IncomingStreamPrefixKind ClassifyIncomingStreamPrefix(byte[] initialBytes)
{
    if (LooksLikeHttpUpgrade(initialBytes))
    {
        return IncomingStreamPrefixKind.HttpWebSocketUpgrade;
    }

    if (LooksLikeHttp2Preface(initialBytes))
    {
        return IncomingStreamPrefixKind.Http2Preface;
    }

    if (LooksLikeTlsClientHello(initialBytes))
    {
        return IncomingStreamPrefixKind.TlsClientHello;
    }

    if (!LooksLikeWebSocketFramePrefix(initialBytes, requireReservedBitsClear: false))
    {
        return IncomingStreamPrefixKind.Unknown;
    }

    // A masked frame with RSV bits set cannot be proxied as-is; report it
    // distinctly so the caller can log a useful diagnostic.
    return HasReservedBitsSet(initialBytes[0])
        ? IncomingStreamPrefixKind.WebSocketReservedBits
        : IncomingStreamPrefixKind.PreUpgradedWebSocket;
}
|
||||
|
||||
// Formats the sniffed prefix bytes for log messages as both hex and a
// printable-ASCII rendering (non-printable bytes rendered as '.').
internal static string DescribeInitialBytes(byte[] initialBytes)
{
    if (initialBytes == null || initialBytes.Length == 0)
    {
        return "no bytes read";
    }

    var asciiChars = new char[initialBytes.Length];
    for (int i = 0; i < initialBytes.Length; i++)
    {
        var value = initialBytes[i];
        asciiChars[i] = value >= 32 && value <= 126 ? (char)value : '.';
    }

    return $"hex={BitConverter.ToString(initialBytes)}, ascii=\"{new string(asciiChars)}\"";
}
|
||||
|
||||
// True when the sniffed bytes start with "GET " — the beginning of an
// HTTP/1.1 request line that may carry a websocket upgrade.
private static bool LooksLikeHttpUpgrade(byte[] initialBytes)
{
    const string prefix = "GET ";
    if (initialBytes == null || initialBytes.Length < prefix.Length)
    {
        return false;
    }

    for (int i = 0; i < prefix.Length; i++)
    {
        if (initialBytes[i] != (byte)prefix[i])
        {
            return false;
        }
    }

    return true;
}
|
||||
|
||||
// True when the sniffed bytes start with "PRI " — the start of the HTTP/2
// connection preface ("PRI * HTTP/2.0...").
private static bool LooksLikeHttp2Preface(byte[] initialBytes)
{
    const string prefix = "PRI ";
    if (initialBytes == null || initialBytes.Length < prefix.Length)
    {
        return false;
    }

    for (int i = 0; i < prefix.Length; i++)
    {
        if (initialBytes[i] != (byte)prefix[i])
        {
            return false;
        }
    }

    return true;
}
|
||||
|
||||
/// <summary>
/// Heuristic check for a TLS record header that would begin a ClientHello:
/// content type 0x16 (handshake), major version byte 0x03, minor version
/// byte 0x00–0x04 (SSL 3.0 through the TLS 1.3 compatibility range).
/// </summary>
private static bool LooksLikeTlsClientHello(byte[] initialBytes)
{
    if (initialBytes == null || initialBytes.Length < 3)
    {
        return false;
    }

    // The original lower-bound check (initialBytes[2] >= 0x00) was vacuously
    // true because bytes are unsigned, so only the upper bound is needed.
    return initialBytes[0] == 0x16 &&
           initialBytes[1] == 0x03 &&
           initialBytes[2] <= 0x04;
}
|
||||
|
||||
// Heuristic: does the prefix look like the start of a masked (client-to-
// server) websocket frame with a recognized opcode? Optionally also require
// the RSV1-3 bits to be clear.
private static bool LooksLikeWebSocketFramePrefix(byte[] initialBytes, bool requireReservedBitsClear)
{
    if (initialBytes == null || initialBytes.Length < 2)
    {
        return false;
    }

    var opcode = initialBytes[0] & 0x0F;
    if (!IsSupportedWebSocketOpcode(opcode))
    {
        return false;
    }

    // Client frames must carry the mask bit (RFC 6455).
    var masked = (initialBytes[1] & 0x80) != 0;
    if (!masked)
    {
        return false;
    }

    return !requireReservedBitsClear || !HasReservedBitsSet(initialBytes[0]);
}
|
||||
|
||||
// True when any of the websocket RSV1-3 bits (mask 0x70 of the first frame
// byte) are set.
private static bool HasReservedBitsSet(byte firstByte)
{
    return (firstByte & 0x70) != 0;
}
|
||||
|
||||
// RFC 6455 opcodes the bridge recognizes: continuation (0x0), text (0x1),
// binary (0x2), close (0x8), ping (0x9), and pong (0xA).
private static bool IsSupportedWebSocketOpcode(int opcode)
{
    return opcode is 0x0 or 0x1 or 0x2 or 0x8 or 0x9 or 0xA;
}
|
||||
|
||||
/// <summary>
/// Writes a plain-text HTTP error response (with Connection: close and a
/// Sec-WebSocket-Version hint) to a client whose upgrade request was
/// rejected.
/// </summary>
private static async Task WriteHttpErrorAsync(
    NetworkStream stream,
    HttpStatusCode statusCode,
    string message,
    CancellationToken cancellationToken)
{
    var bodyBytes = Encoding.UTF8.GetBytes(message);
    var responseBytes = Encoding.ASCII.GetBytes(
        $"HTTP/1.1 {(int)statusCode} {statusCode}\r\n" +
        "Connection: close\r\n" +
        "Content-Type: text/plain; charset=utf-8\r\n" +
        $"Content-Length: {bodyBytes.Length}\r\n" +
        "Sec-WebSocket-Version: 13\r\n" +
        "\r\n");

    await stream.WriteAsync(responseBytes, 0, responseBytes.Length, cancellationToken);
    await stream.WriteAsync(bodyBytes, 0, bodyBytes.Length, cancellationToken);
    await stream.FlushAsync(cancellationToken);
}
|
||||
|
||||
/// <summary>
/// Attempts a graceful websocket close with a timeout; aborts the socket if
/// the close handshake does not complete in time. No-op for a null socket or
/// a socket not in a closable state.
/// </summary>
private static async Task CloseWebSocketAsync(WebSocket webSocket)
{
    if (webSocket == null)
    {
        return;
    }

    // Only Open or CloseReceived states can perform a close handshake.
    if (webSocket.State != WebSocketState.Open &&
        webSocket.State != WebSocketState.CloseReceived)
    {
        return;
    }

    try
    {
        using var cts = new CancellationTokenSource(_closeTimeout);
        await webSocket.CloseAsync(WebSocketCloseStatus.NormalClosure, string.Empty, cts.Token);
    }
    catch (OperationCanceledException)
    {
        // Graceful close timed out, abort the connection.
        webSocket.Abort();
    }
    catch (WebSocketException)
    {
        // Peer already disconnected.
    }
}
|
||||
|
||||
/// <summary>
/// Pass-through stream that first replays a fixed prefix of bytes (the bytes
/// consumed during protocol sniffing) before reading from the inner stream.
/// Writes and flushes go straight to the inner stream; seeking is not
/// supported.
/// </summary>
private sealed class ReplayableStream : Stream
{
    private readonly Stream _innerStream;
    // Bytes served to readers before the inner stream is touched.
    private readonly byte[] _prefixBytes;
    // Number of prefix bytes consumed so far.
    private int _prefixOffset;

    public ReplayableStream(Stream innerStream, byte[] prefixBytes)
    {
        _innerStream = innerStream ?? throw new ArgumentNullException(nameof(innerStream));
        _prefixBytes = prefixBytes ?? Array.Empty<byte>();
    }

    public override bool CanRead => _innerStream.CanRead;
    public override bool CanSeek => false;
    public override bool CanWrite => _innerStream.CanWrite;
    public override long Length => throw new NotSupportedException();

    public override long Position
    {
        get => throw new NotSupportedException();
        set => throw new NotSupportedException();
    }

    public override void Flush() => _innerStream.Flush();

    public override Task FlushAsync(CancellationToken cancellationToken) => _innerStream.FlushAsync(cancellationToken);

    public override int Read(byte[] buffer, int offset, int count)
    {
        // Serve any remaining prefix bytes before the inner stream.
        if (TryReadPrefix(buffer, offset, count, out var bytesRead))
        {
            return bytesRead;
        }

        return _innerStream.Read(buffer, offset, count);
    }

    public override async Task<int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
    {
        if (TryReadPrefix(buffer, offset, count, out var bytesRead))
        {
            return bytesRead;
        }

        return await _innerStream.ReadAsync(buffer, offset, count, cancellationToken);
    }

    public override async ValueTask<int> ReadAsync(Memory<byte> buffer, CancellationToken cancellationToken = default)
    {
        // Memory<byte> overload mirrors TryReadPrefix inline.
        if (_prefixOffset < _prefixBytes.Length)
        {
            var bytesToCopy = Math.Min(buffer.Length, _prefixBytes.Length - _prefixOffset);
            new ReadOnlySpan<byte>(_prefixBytes, _prefixOffset, bytesToCopy).CopyTo(buffer.Span);
            _prefixOffset += bytesToCopy;
            return bytesToCopy;
        }

        return await _innerStream.ReadAsync(buffer, cancellationToken);
    }

    public override long Seek(long offset, SeekOrigin origin) => throw new NotSupportedException();

    public override void SetLength(long value) => throw new NotSupportedException();

    public override void Write(byte[] buffer, int offset, int count) => _innerStream.Write(buffer, offset, count);

    public override Task WriteAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken) =>
        _innerStream.WriteAsync(buffer, offset, count, cancellationToken);

    public override ValueTask WriteAsync(ReadOnlyMemory<byte> buffer, CancellationToken cancellationToken = default) =>
        _innerStream.WriteAsync(buffer, cancellationToken);

    // Copies up to count unread prefix bytes into the caller's buffer.
    // Returns false once the prefix has been fully consumed.
    private bool TryReadPrefix(byte[] buffer, int offset, int count, out int bytesRead)
    {
        if (_prefixOffset >= _prefixBytes.Length)
        {
            bytesRead = 0;
            return false;
        }

        bytesRead = Math.Min(count, _prefixBytes.Length - _prefixOffset);
        Array.Copy(_prefixBytes, _prefixOffset, buffer, offset, bytesRead);
        _prefixOffset += bytesRead;
        return true;
    }
}
|
||||
}
|
||||
}
|
||||
@@ -854,6 +854,12 @@ namespace GitHub.Runner.Worker
|
||||
// Track Node.js 20 actions for deprecation warning
|
||||
Global.DeprecatedNode20Actions = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
|
||||
|
||||
// Track actions upgraded from Node.js 20 to Node.js 24
|
||||
Global.UpgradedToNode24Actions = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
|
||||
|
||||
// Track actions stuck on Node.js 20 due to ARM32 (separate from general deprecation)
|
||||
Global.Arm32Node20Actions = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
|
||||
|
||||
// Job Outputs
|
||||
JobOutputs = new Dictionary<string, VariableValue>(StringComparer.OrdinalIgnoreCase);
|
||||
|
||||
@@ -963,6 +969,9 @@ namespace GitHub.Runner.Worker
|
||||
// Verbosity (from GitHub.Step_Debug).
|
||||
Global.WriteDebug = Global.Variables.Step_Debug ?? false;
|
||||
|
||||
// Debugger enabled flag (from acquire response).
|
||||
Global.Debugger = new Dap.DebuggerConfig(message.EnableDebugger, message.DebuggerTunnel);
|
||||
|
||||
// Hook up JobServerQueueThrottling event, we will log warning on server tarpit.
|
||||
_jobServerQueue.JobServerQueueThrottling += JobServerQueueThrottling_EventReceived;
|
||||
}
|
||||
|
||||
@@ -4,6 +4,7 @@ using GitHub.Actions.RunService.WebApi;
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Common.Util;
|
||||
using GitHub.Runner.Worker.Container;
|
||||
using GitHub.Runner.Worker.Dap;
|
||||
using Newtonsoft.Json.Linq;
|
||||
using Sdk.RSWebApi.Contracts;
|
||||
|
||||
@@ -27,6 +28,7 @@ namespace GitHub.Runner.Worker
|
||||
public StepsContext StepsContext { get; set; }
|
||||
public Variables Variables { get; set; }
|
||||
public bool WriteDebug { get; set; }
|
||||
public DebuggerConfig Debugger { get; set; }
|
||||
public string InfrastructureFailureCategory { get; set; }
|
||||
public JObject ContainerHookState { get; set; }
|
||||
public bool HasTemplateEvaluatorMismatch { get; set; }
|
||||
@@ -34,5 +36,7 @@ namespace GitHub.Runner.Worker
|
||||
public bool HasDeprecatedSetOutput { get; set; }
|
||||
public bool HasDeprecatedSaveState { get; set; }
|
||||
public HashSet<string> DeprecatedNode20Actions { get; set; }
|
||||
public HashSet<string> UpgradedToNode24Actions { get; set; }
|
||||
public HashSet<string> Arm32Node20Actions { get; set; }
|
||||
}
|
||||
}
|
||||
|
||||
@@ -25,6 +25,14 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
|
||||
public sealed class HandlerFactory : RunnerService, IHandlerFactory
|
||||
{
|
||||
internal static bool ShouldTrackAsArm32Node20(bool deprecateArm32, string preferredNodeVersion, string finalNodeVersion, string platformWarningMessage)
|
||||
{
|
||||
return deprecateArm32 &&
|
||||
!string.IsNullOrEmpty(platformWarningMessage) &&
|
||||
string.Equals(preferredNodeVersion, Constants.Runner.NodeMigration.Node24, StringComparison.OrdinalIgnoreCase) &&
|
||||
string.Equals(finalNodeVersion, Constants.Runner.NodeMigration.Node20, StringComparison.OrdinalIgnoreCase);
|
||||
}
|
||||
|
||||
public IHandler Create(
|
||||
IExecutionContext executionContext,
|
||||
Pipelines.ActionStepDefinitionReference action,
|
||||
@@ -65,19 +73,12 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
nodeData.NodeVersion = Common.Constants.Runner.NodeMigration.Node20;
|
||||
}
|
||||
|
||||
// Track Node.js 20 actions for deprecation annotation
|
||||
if (string.Equals(nodeData.NodeVersion, Constants.Runner.NodeMigration.Node20, StringComparison.InvariantCultureIgnoreCase))
|
||||
{
|
||||
bool warnOnNode20 = executionContext.Global.Variables?.GetBoolean(Constants.Runner.NodeMigration.WarnOnNode20Flag) ?? false;
|
||||
if (warnOnNode20)
|
||||
{
|
||||
string actionName = GetActionName(action);
|
||||
if (!string.IsNullOrEmpty(actionName))
|
||||
{
|
||||
executionContext.Global.DeprecatedNode20Actions?.Add(actionName);
|
||||
}
|
||||
}
|
||||
}
|
||||
// Read flags early; actionName is also resolved up front for tracking after version is determined
|
||||
bool warnOnNode20 = executionContext.Global.Variables?.GetBoolean(Constants.Runner.NodeMigration.WarnOnNode20Flag) ?? false;
|
||||
bool deprecateArm32 = executionContext.Global.Variables?.GetBoolean(Constants.Runner.NodeMigration.DeprecateLinuxArm32Flag) ?? false;
|
||||
bool killArm32 = executionContext.Global.Variables?.GetBoolean(Constants.Runner.NodeMigration.KillLinuxArm32Flag) ?? false;
|
||||
string node20RemovalDate = executionContext.Global.Variables?.Get(Constants.Runner.NodeMigration.Node20RemovalDateVariable);
|
||||
string actionName = GetActionName(action);
|
||||
|
||||
// Check if node20 was explicitly specified in the action
|
||||
// We don't modify if node24 was explicitly specified
|
||||
@@ -87,7 +88,15 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
bool requireNode24 = executionContext.Global.Variables?.GetBoolean(Constants.Runner.NodeMigration.RequireNode24Flag) ?? false;
|
||||
|
||||
var (nodeVersion, configWarningMessage) = NodeUtil.DetermineActionsNodeVersion(environment, useNode24ByDefault, requireNode24);
|
||||
var (finalNodeVersion, platformWarningMessage) = NodeUtil.CheckNodeVersionForLinuxArm32(nodeVersion);
|
||||
var (finalNodeVersion, platformWarningMessage) = NodeUtil.CheckNodeVersionForLinuxArm32(nodeVersion, deprecateArm32, killArm32, node20RemovalDate);
|
||||
|
||||
// ARM32 kill switch: fail the step
|
||||
if (finalNodeVersion == null)
|
||||
{
|
||||
executionContext.Error(platformWarningMessage);
|
||||
throw new InvalidOperationException(platformWarningMessage);
|
||||
}
|
||||
|
||||
nodeData.NodeVersion = finalNodeVersion;
|
||||
|
||||
if (!string.IsNullOrEmpty(configWarningMessage))
|
||||
@@ -100,6 +109,26 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
executionContext.Warning(platformWarningMessage);
|
||||
}
|
||||
|
||||
// Track actions based on their final node version
|
||||
if (!string.IsNullOrEmpty(actionName))
|
||||
{
|
||||
if (string.Equals(finalNodeVersion, Constants.Runner.NodeMigration.Node24, StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
// Action was upgraded from node20 to node24
|
||||
executionContext.Global.UpgradedToNode24Actions?.Add(actionName);
|
||||
}
|
||||
else if (ShouldTrackAsArm32Node20(deprecateArm32, nodeVersion, finalNodeVersion, platformWarningMessage))
|
||||
{
|
||||
// Action is on node20 because ARM32 can't run node24
|
||||
executionContext.Global.Arm32Node20Actions?.Add(actionName);
|
||||
}
|
||||
else if (warnOnNode20)
|
||||
{
|
||||
// Action is still running on node20 (general case)
|
||||
executionContext.Global.DeprecatedNode20Actions?.Add(actionName);
|
||||
}
|
||||
}
|
||||
|
||||
// Show information about Node 24 migration in Phase 2
|
||||
if (useNode24ByDefault && !requireNode24 && string.Equals(finalNodeVersion, Constants.Runner.NodeMigration.Node24, StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
@@ -109,6 +138,30 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
executionContext.Output(infoMessage);
|
||||
}
|
||||
}
|
||||
else if (string.Equals(nodeData.NodeVersion, Constants.Runner.NodeMigration.Node24, StringComparison.InvariantCultureIgnoreCase))
|
||||
{
|
||||
var (finalNodeVersion, platformWarningMessage) = NodeUtil.CheckNodeVersionForLinuxArm32(nodeData.NodeVersion, deprecateArm32, killArm32, node20RemovalDate);
|
||||
|
||||
// ARM32 kill switch: fail the step
|
||||
if (finalNodeVersion == null)
|
||||
{
|
||||
executionContext.Error(platformWarningMessage);
|
||||
throw new InvalidOperationException(platformWarningMessage);
|
||||
}
|
||||
|
||||
var preferredVersion = nodeData.NodeVersion;
|
||||
nodeData.NodeVersion = finalNodeVersion;
|
||||
|
||||
if (!string.IsNullOrEmpty(platformWarningMessage))
|
||||
{
|
||||
executionContext.Warning(platformWarningMessage);
|
||||
}
|
||||
|
||||
if (!string.IsNullOrEmpty(actionName) && ShouldTrackAsArm32Node20(deprecateArm32, preferredVersion, finalNodeVersion, platformWarningMessage))
|
||||
{
|
||||
executionContext.Global.Arm32Node20Actions?.Add(actionName);
|
||||
}
|
||||
}
|
||||
|
||||
(handler as INodeScriptActionHandler).Data = nodeData;
|
||||
}
|
||||
|
||||
@@ -58,13 +58,23 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
|
||||
public Task<string> DetermineNodeRuntimeVersion(IExecutionContext executionContext, string preferredVersion)
|
||||
{
|
||||
// Use NodeUtil to check if Node24 is requested but we're on ARM32 Linux
|
||||
var (nodeVersion, warningMessage) = Common.Util.NodeUtil.CheckNodeVersionForLinuxArm32(preferredVersion);
|
||||
bool deprecateArm32 = executionContext.Global.Variables?.GetBoolean(Constants.Runner.NodeMigration.DeprecateLinuxArm32Flag) ?? false;
|
||||
bool killArm32 = executionContext.Global.Variables?.GetBoolean(Constants.Runner.NodeMigration.KillLinuxArm32Flag) ?? false;
|
||||
string node20RemovalDate = executionContext.Global.Variables?.Get(Constants.Runner.NodeMigration.Node20RemovalDateVariable);
|
||||
|
||||
var (nodeVersion, warningMessage) = Common.Util.NodeUtil.CheckNodeVersionForLinuxArm32(preferredVersion, deprecateArm32, killArm32, node20RemovalDate);
|
||||
|
||||
if (nodeVersion == null)
|
||||
{
|
||||
executionContext.Error(warningMessage);
|
||||
throw new InvalidOperationException(warningMessage);
|
||||
}
|
||||
|
||||
if (!string.IsNullOrEmpty(warningMessage))
|
||||
{
|
||||
executionContext.Warning(warningMessage);
|
||||
}
|
||||
|
||||
|
||||
return Task.FromResult(nodeVersion);
|
||||
}
|
||||
|
||||
@@ -142,8 +152,18 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
|
||||
public async Task<string> DetermineNodeRuntimeVersion(IExecutionContext executionContext, string preferredVersion)
|
||||
{
|
||||
// Use NodeUtil to check if Node24 is requested but we're on ARM32 Linux
|
||||
var (nodeExternal, warningMessage) = Common.Util.NodeUtil.CheckNodeVersionForLinuxArm32(preferredVersion);
|
||||
bool deprecateArm32 = executionContext.Global.Variables?.GetBoolean(Constants.Runner.NodeMigration.DeprecateLinuxArm32Flag) ?? false;
|
||||
bool killArm32 = executionContext.Global.Variables?.GetBoolean(Constants.Runner.NodeMigration.KillLinuxArm32Flag) ?? false;
|
||||
string node20RemovalDate = executionContext.Global.Variables?.Get(Constants.Runner.NodeMigration.Node20RemovalDateVariable);
|
||||
|
||||
var (nodeExternal, warningMessage) = Common.Util.NodeUtil.CheckNodeVersionForLinuxArm32(preferredVersion, deprecateArm32, killArm32, node20RemovalDate);
|
||||
|
||||
if (nodeExternal == null)
|
||||
{
|
||||
executionContext.Error(warningMessage);
|
||||
throw new InvalidOperationException(warningMessage);
|
||||
}
|
||||
|
||||
if (!string.IsNullOrEmpty(warningMessage))
|
||||
{
|
||||
executionContext.Warning(warningMessage);
|
||||
@@ -273,8 +293,18 @@ namespace GitHub.Runner.Worker.Handlers
|
||||
|
||||
private string CheckPlatformForAlpineContainer(IExecutionContext executionContext, string preferredVersion)
|
||||
{
|
||||
// Use NodeUtil to check if Node24 is requested but we're on ARM32 Linux
|
||||
var (nodeExternal, warningMessage) = Common.Util.NodeUtil.CheckNodeVersionForLinuxArm32(preferredVersion);
|
||||
bool deprecateArm32 = executionContext.Global.Variables?.GetBoolean(Constants.Runner.NodeMigration.DeprecateLinuxArm32Flag) ?? false;
|
||||
bool killArm32 = executionContext.Global.Variables?.GetBoolean(Constants.Runner.NodeMigration.KillLinuxArm32Flag) ?? false;
|
||||
string node20RemovalDate = executionContext.Global.Variables?.Get(Constants.Runner.NodeMigration.Node20RemovalDateVariable);
|
||||
|
||||
var (nodeExternal, warningMessage) = Common.Util.NodeUtil.CheckNodeVersionForLinuxArm32(preferredVersion, deprecateArm32, killArm32, node20RemovalDate);
|
||||
|
||||
if (nodeExternal == null)
|
||||
{
|
||||
executionContext.Error(warningMessage);
|
||||
throw new InvalidOperationException(warningMessage);
|
||||
}
|
||||
|
||||
if (!string.IsNullOrEmpty(warningMessage))
|
||||
{
|
||||
executionContext.Warning(warningMessage);
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
using System.Runtime.CompilerServices;
|
||||
|
||||
[assembly: InternalsVisibleTo("Test")]
|
||||
[assembly: InternalsVisibleTo("DynamicProxyGenAssembly2")]
|
||||
|
||||
@@ -736,14 +736,38 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
}
|
||||
|
||||
// Add deprecation warning annotation for Node.js 20 actions
|
||||
// Read dates from server variables with hardcoded fallbacks
|
||||
var node24DefaultDateRaw = context.Global.Variables?.Get(Constants.Runner.NodeMigration.Node24DefaultDateVariable);
|
||||
var node24DefaultDate = string.IsNullOrEmpty(node24DefaultDateRaw) ? Constants.Runner.NodeMigration.Node24DefaultDate : node24DefaultDateRaw;
|
||||
var node20RemovalDateRaw = context.Global.Variables?.Get(Constants.Runner.NodeMigration.Node20RemovalDateVariable);
|
||||
var node20RemovalDate = string.IsNullOrEmpty(node20RemovalDateRaw) ? Constants.Runner.NodeMigration.Node20RemovalDate : node20RemovalDateRaw;
|
||||
|
||||
// Add deprecation warning annotation for Node.js 20 actions (Phase 1 - actions still running on node20)
|
||||
if (context.Global.DeprecatedNode20Actions?.Count > 0)
|
||||
{
|
||||
var sortedActions = context.Global.DeprecatedNode20Actions.OrderBy(a => a, StringComparer.OrdinalIgnoreCase);
|
||||
var actionsList = string.Join(", ", sortedActions);
|
||||
var deprecationMessage = $"Node.js 20 actions are deprecated. The following actions are running on Node.js 20 and may not work as expected: {actionsList}. Actions will be forced to run with Node.js 24 by default starting June 2nd, 2026. Please check if updated versions of these actions are available that support Node.js 24. To opt into Node.js 24 now, set the FORCE_JAVASCRIPT_ACTIONS_TO_NODE24=true environment variable on the runner or in your workflow file. Once Node.js 24 becomes the default, you can temporarily opt out by setting ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION=true. For more information see: {Constants.Runner.NodeMigration.Node20DeprecationUrl}";
|
||||
var deprecationMessage = $"Node.js 20 actions are deprecated. The following actions are running on Node.js 20 and may not work as expected: {actionsList}. Actions will be forced to run with Node.js 24 by default starting {node24DefaultDate}. Node.js 20 will be removed from the runner on {node20RemovalDate}. Please check if updated versions of these actions are available that support Node.js 24. To opt into Node.js 24 now, set the FORCE_JAVASCRIPT_ACTIONS_TO_NODE24=true environment variable on the runner or in your workflow file. Once Node.js 24 becomes the default, you can temporarily opt out by setting ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION=true. For more information see: {Constants.Runner.NodeMigration.Node20DeprecationUrl}";
|
||||
context.Warning(deprecationMessage);
|
||||
}
|
||||
|
||||
// Add annotation for actions upgraded from Node.js 20 to Node.js 24 (Phase 2/3)
|
||||
if (context.Global.UpgradedToNode24Actions?.Count > 0)
|
||||
{
|
||||
var sortedActions = context.Global.UpgradedToNode24Actions.OrderBy(a => a, StringComparer.OrdinalIgnoreCase);
|
||||
var actionsList = string.Join(", ", sortedActions);
|
||||
var upgradeMessage = $"Node.js 20 is deprecated. The following actions target Node.js 20 but are being forced to run on Node.js 24: {actionsList}. For more information see: {Constants.Runner.NodeMigration.Node20DeprecationUrl}";
|
||||
context.Warning(upgradeMessage);
|
||||
}
|
||||
|
||||
// Add annotation for ARM32 actions stuck on Node.js 20 (ARM32 can't run node24)
|
||||
if (context.Global.Arm32Node20Actions?.Count > 0)
|
||||
{
|
||||
var sortedActions = context.Global.Arm32Node20Actions.OrderBy(a => a, StringComparer.OrdinalIgnoreCase);
|
||||
var actionsList = string.Join(", ", sortedActions);
|
||||
var arm32Message = $"The following actions are running on Node.js 20 because Node.js 24 is not available on Linux ARM32: {actionsList}. Linux ARM32 runners are deprecated and will no longer be supported after {node20RemovalDate}. Please migrate to a supported platform. For more information see: {Constants.Runner.NodeMigration.Node20DeprecationUrl}";
|
||||
context.Warning(arm32Message);
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
|
||||
@@ -13,6 +13,7 @@ using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Common.Util;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Runner.Worker.Dap;
|
||||
using GitHub.Services.Common;
|
||||
using GitHub.Services.WebApi;
|
||||
using Sdk.RSWebApi.Contracts;
|
||||
@@ -28,6 +29,7 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
public sealed class JobRunner : RunnerService, IJobRunner
|
||||
{
|
||||
private const string DebuggerConnectionTelemetryPrefix = "DebuggerConnectionResult";
|
||||
private IJobServerQueue _jobServerQueue;
|
||||
private RunnerSettings _runnerSettings;
|
||||
private ITempDirectoryManager _tempDirectoryManager;
|
||||
@@ -112,6 +114,7 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
IExecutionContext jobContext = null;
|
||||
CancellationTokenRegistration? runnerShutdownRegistration = null;
|
||||
IDapDebugger dapDebugger = null;
|
||||
try
|
||||
{
|
||||
// Create the job execution context.
|
||||
@@ -178,6 +181,26 @@ namespace GitHub.Runner.Worker
|
||||
_tempDirectoryManager = HostContext.GetService<ITempDirectoryManager>();
|
||||
_tempDirectoryManager.InitializeTempDirectory(jobContext);
|
||||
|
||||
// Setup the debugger
|
||||
if (jobContext.Global.Debugger?.Enabled == true)
|
||||
{
|
||||
Trace.Info("Debugger enabled for this job run");
|
||||
|
||||
try
|
||||
{
|
||||
dapDebugger = HostContext.GetService<IDapDebugger>();
|
||||
await dapDebugger.StartAsync(jobContext);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Error($"Failed to start DAP debugger: {ex.Message}");
|
||||
AddDebuggerConnectionTelemetry(jobContext, $"Failed: {ex.Message}");
|
||||
jobContext.Error("Failed to start debugger.");
|
||||
return await CompleteJobAsync(server, jobContext, message, TaskResult.Failed);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// Get the job extension.
|
||||
Trace.Info("Getting job extension.");
|
||||
IJobExtension jobExtension = HostContext.CreateService<IJobExtension>();
|
||||
@@ -219,6 +242,33 @@ namespace GitHub.Runner.Worker
|
||||
await Task.WhenAny(_jobServerQueue.JobRecordUpdated.Task, Task.Delay(1000));
|
||||
}
|
||||
|
||||
// Wait for DAP debugger client connection and handshake after "Set up job"
|
||||
// so the job page shows the setup step before we block on the debugger
|
||||
if (dapDebugger != null)
|
||||
{
|
||||
try
|
||||
{
|
||||
await dapDebugger.WaitUntilReadyAsync();
|
||||
AddDebuggerConnectionTelemetry(jobContext, "Connected");
|
||||
}
|
||||
catch (OperationCanceledException) when (jobRequestCancellationToken.IsCancellationRequested)
|
||||
{
|
||||
Trace.Info("Job was cancelled before debugger client connected.");
|
||||
AddDebuggerConnectionTelemetry(jobContext, "Canceled");
|
||||
jobContext.Error("Job was cancelled before debugger client connected.");
|
||||
return await CompleteJobAsync(server, jobContext, message, TaskResult.Canceled);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Error($"DAP debugger failed to become ready: {ex.Message}");
|
||||
AddDebuggerConnectionTelemetry(jobContext, $"Failed: {ex.Message}");
|
||||
|
||||
// If debugging was requested but the debugger is not available, fail the job
|
||||
jobContext.Error("The debugger failed to start or no debugger client connected in time.");
|
||||
return await CompleteJobAsync(server, jobContext, message, TaskResult.Failed);
|
||||
}
|
||||
}
|
||||
|
||||
// Run all job steps
|
||||
Trace.Info("Run all job steps.");
|
||||
var stepsRunner = HostContext.GetService<IStepsRunner>();
|
||||
@@ -259,6 +309,11 @@ namespace GitHub.Runner.Worker
|
||||
runnerShutdownRegistration = null;
|
||||
}
|
||||
|
||||
if (dapDebugger != null)
|
||||
{
|
||||
await dapDebugger.OnJobCompletedAsync();
|
||||
}
|
||||
|
||||
await ShutdownQueue(throwOnFailure: false);
|
||||
}
|
||||
}
|
||||
@@ -440,6 +495,15 @@ namespace GitHub.Runner.Worker
|
||||
throw new AggregateException(exceptions);
|
||||
}
|
||||
|
||||
private static void AddDebuggerConnectionTelemetry(IExecutionContext jobContext, string result)
|
||||
{
|
||||
jobContext.Global.JobTelemetry.Add(new JobTelemetry
|
||||
{
|
||||
Type = JobTelemetryType.General,
|
||||
Message = $"{DebuggerConnectionTelemetryPrefix}: {result}"
|
||||
});
|
||||
}
|
||||
|
||||
private void MaskTelemetrySecrets(List<JobTelemetry> jobTelemetry)
|
||||
{
|
||||
foreach (var telemetryItem in jobTelemetry)
|
||||
|
||||
@@ -23,6 +23,7 @@
|
||||
<PackageReference Include="System.ServiceProcess.ServiceController" Version="8.0.1" />
|
||||
<PackageReference Include="System.Threading.Channels" Version="8.0.0" />
|
||||
<PackageReference Include="YamlDotNet.Signed" Version="5.3.0" />
|
||||
<PackageReference Include="Microsoft.DevTunnels.Connections" Version="1.3.16" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
|
||||
@@ -10,6 +10,7 @@ using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Common.Util;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Runner.Worker.Dap;
|
||||
using GitHub.Runner.Worker.Expressions;
|
||||
|
||||
namespace GitHub.Runner.Worker
|
||||
@@ -50,6 +51,7 @@ namespace GitHub.Runner.Worker
|
||||
jobContext.JobContext.Status = (jobContext.Result ?? TaskResult.Succeeded).ToActionResult();
|
||||
var scopeInputs = new Dictionary<string, PipelineContextData>(StringComparer.OrdinalIgnoreCase);
|
||||
bool checkPostJobActions = false;
|
||||
var dapDebugger = HostContext.GetService<IDapDebugger>();
|
||||
while (jobContext.JobSteps.Count > 0 || !checkPostJobActions)
|
||||
{
|
||||
if (jobContext.JobSteps.Count == 0 && !checkPostJobActions)
|
||||
@@ -226,9 +228,14 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
else
|
||||
{
|
||||
// Pause for DAP debugger before step execution
|
||||
await dapDebugger?.OnStepStartingAsync(step);
|
||||
|
||||
// Run the step
|
||||
await RunStepAsync(step, jobContext.CancellationToken);
|
||||
CompleteStep(step);
|
||||
|
||||
dapDebugger?.OnStepCompleted(step);
|
||||
}
|
||||
}
|
||||
finally
|
||||
@@ -255,6 +262,7 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
Trace.Info($"Current state: job state = '{jobContext.Result}'");
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private async Task RunStepAsync(IStep step, CancellationToken jobCancellationToken)
|
||||
|
||||
@@ -17,10 +17,9 @@ namespace GitHub.DistributedTask.Expressions2
|
||||
String expression,
|
||||
ITraceWriter trace,
|
||||
IEnumerable<INamedValueInfo> namedValues,
|
||||
IEnumerable<IFunctionInfo> functions,
|
||||
Boolean allowCaseFunction = true)
|
||||
IEnumerable<IFunctionInfo> functions)
|
||||
{
|
||||
var context = new ParseContext(expression, trace, namedValues, functions, allowCaseFunction: allowCaseFunction);
|
||||
var context = new ParseContext(expression, trace, namedValues, functions);
|
||||
context.Trace.Info($"Parsing expression: <{expression}>");
|
||||
return CreateTree(context);
|
||||
}
|
||||
@@ -416,12 +415,6 @@ namespace GitHub.DistributedTask.Expressions2
|
||||
String name,
|
||||
out IFunctionInfo functionInfo)
|
||||
{
|
||||
if (String.Equals(name, "case", StringComparison.OrdinalIgnoreCase) && !context.AllowCaseFunction)
|
||||
{
|
||||
functionInfo = null;
|
||||
return false;
|
||||
}
|
||||
|
||||
return ExpressionConstants.WellKnownFunctions.TryGetValue(name, out functionInfo) ||
|
||||
context.ExtensionFunctions.TryGetValue(name, out functionInfo);
|
||||
}
|
||||
@@ -429,7 +422,6 @@ namespace GitHub.DistributedTask.Expressions2
|
||||
private sealed class ParseContext
|
||||
{
|
||||
public Boolean AllowUnknownKeywords;
|
||||
public Boolean AllowCaseFunction;
|
||||
public readonly String Expression;
|
||||
public readonly Dictionary<String, IFunctionInfo> ExtensionFunctions = new Dictionary<String, IFunctionInfo>(StringComparer.OrdinalIgnoreCase);
|
||||
public readonly Dictionary<String, INamedValueInfo> ExtensionNamedValues = new Dictionary<String, INamedValueInfo>(StringComparer.OrdinalIgnoreCase);
|
||||
@@ -445,8 +437,7 @@ namespace GitHub.DistributedTask.Expressions2
|
||||
ITraceWriter trace,
|
||||
IEnumerable<INamedValueInfo> namedValues,
|
||||
IEnumerable<IFunctionInfo> functions,
|
||||
Boolean allowUnknownKeywords = false,
|
||||
Boolean allowCaseFunction = true)
|
||||
Boolean allowUnknownKeywords = false)
|
||||
{
|
||||
Expression = expression ?? String.Empty;
|
||||
if (Expression.Length > ExpressionConstants.MaxLength)
|
||||
@@ -467,7 +458,6 @@ namespace GitHub.DistributedTask.Expressions2
|
||||
|
||||
LexicalAnalyzer = new LexicalAnalyzer(Expression);
|
||||
AllowUnknownKeywords = allowUnknownKeywords;
|
||||
AllowCaseFunction = allowCaseFunction;
|
||||
}
|
||||
|
||||
private class NoOperationTraceWriter : ITraceWriter
|
||||
|
||||
@@ -86,12 +86,6 @@ namespace GitHub.DistributedTask.ObjectTemplating
|
||||
|
||||
internal ITraceWriter TraceWriter { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Gets or sets a value indicating whether the case expression function is allowed.
|
||||
/// Defaults to true. Set to false to disable the case function.
|
||||
/// </summary>
|
||||
internal Boolean AllowCaseFunction { get; set; } = true;
|
||||
|
||||
private IDictionary<String, Int32> FileIds
|
||||
{
|
||||
get
|
||||
|
||||
@@ -57,7 +57,7 @@ namespace GitHub.DistributedTask.ObjectTemplating.Tokens
|
||||
var originalBytes = context.Memory.CurrentBytes;
|
||||
try
|
||||
{
|
||||
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions, allowCaseFunction: context.AllowCaseFunction);
|
||||
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions);
|
||||
var options = new EvaluationOptions
|
||||
{
|
||||
MaxMemory = context.Memory.MaxBytes,
|
||||
@@ -94,7 +94,7 @@ namespace GitHub.DistributedTask.ObjectTemplating.Tokens
|
||||
var originalBytes = context.Memory.CurrentBytes;
|
||||
try
|
||||
{
|
||||
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions, allowCaseFunction: context.AllowCaseFunction);
|
||||
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions);
|
||||
var options = new EvaluationOptions
|
||||
{
|
||||
MaxMemory = context.Memory.MaxBytes,
|
||||
@@ -123,7 +123,7 @@ namespace GitHub.DistributedTask.ObjectTemplating.Tokens
|
||||
var originalBytes = context.Memory.CurrentBytes;
|
||||
try
|
||||
{
|
||||
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions, allowCaseFunction: context.AllowCaseFunction);
|
||||
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions);
|
||||
var options = new EvaluationOptions
|
||||
{
|
||||
MaxMemory = context.Memory.MaxBytes,
|
||||
@@ -152,7 +152,7 @@ namespace GitHub.DistributedTask.ObjectTemplating.Tokens
|
||||
var originalBytes = context.Memory.CurrentBytes;
|
||||
try
|
||||
{
|
||||
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions, allowCaseFunction: context.AllowCaseFunction);
|
||||
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions);
|
||||
var options = new EvaluationOptions
|
||||
{
|
||||
MaxMemory = context.Memory.MaxBytes,
|
||||
|
||||
@@ -253,6 +253,20 @@ namespace GitHub.DistributedTask.Pipelines
|
||||
set;
|
||||
}
|
||||
|
||||
[DataMember(EmitDefaultValue = false)]
|
||||
public bool EnableDebugger
|
||||
{
|
||||
get;
|
||||
set;
|
||||
}
|
||||
|
||||
[DataMember(EmitDefaultValue = false)]
|
||||
public DebuggerTunnelInfo DebuggerTunnel
|
||||
{
|
||||
get;
|
||||
set;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets the collection of variables associated with the current context.
|
||||
/// </summary>
|
||||
|
||||
24
src/Sdk/DTPipelines/Pipelines/DebuggerTunnelInfo.cs
Normal file
24
src/Sdk/DTPipelines/Pipelines/DebuggerTunnelInfo.cs
Normal file
@@ -0,0 +1,24 @@
|
||||
using System.Runtime.Serialization;
|
||||
|
||||
namespace GitHub.DistributedTask.Pipelines
|
||||
{
|
||||
/// <summary>
|
||||
/// Dev Tunnel information the runner needs to host the debugger tunnel.
|
||||
/// Matches the run-service <c>DebuggerTunnel</c> contract.
|
||||
/// </summary>
|
||||
[DataContract]
|
||||
public sealed class DebuggerTunnelInfo
|
||||
{
|
||||
[DataMember(EmitDefaultValue = false)]
|
||||
public string TunnelId { get; set; }
|
||||
|
||||
[DataMember(EmitDefaultValue = false)]
|
||||
public string ClusterId { get; set; }
|
||||
|
||||
[DataMember(EmitDefaultValue = false)]
|
||||
public string HostToken { get; set; }
|
||||
|
||||
[DataMember(EmitDefaultValue = false)]
|
||||
public ushort Port { get; set; }
|
||||
}
|
||||
}
|
||||
@@ -681,7 +681,7 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
|
||||
var node = default(ExpressionNode);
|
||||
try
|
||||
{
|
||||
node = expressionParser.CreateTree(condition, null, namedValues, functions, allowCaseFunction: context.AllowCaseFunction) as ExpressionNode;
|
||||
node = expressionParser.CreateTree(condition, null, namedValues, functions) as ExpressionNode;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
|
||||
@@ -2556,6 +2556,25 @@ namespace GitHub.DistributedTask.WebApi
|
||||
}
|
||||
}
|
||||
|
||||
[Serializable]
|
||||
public sealed class FailedToDownloadActionException : DistributedTaskException
|
||||
{
|
||||
public FailedToDownloadActionException(String message)
|
||||
: base(message)
|
||||
{
|
||||
}
|
||||
|
||||
public FailedToDownloadActionException(String message, Exception innerException)
|
||||
: base(message, innerException)
|
||||
{
|
||||
}
|
||||
|
||||
private FailedToDownloadActionException(SerializationInfo info, StreamingContext context)
|
||||
: base(info, context)
|
||||
{
|
||||
}
|
||||
}
|
||||
|
||||
[Serializable]
|
||||
public sealed class InvalidActionArchiveException : DistributedTaskException
|
||||
{
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
|
||||
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
|
||||
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
@@ -17,10 +17,9 @@ namespace GitHub.Actions.Expressions
|
||||
String expression,
|
||||
ITraceWriter trace,
|
||||
IEnumerable<INamedValueInfo> namedValues,
|
||||
IEnumerable<IFunctionInfo> functions,
|
||||
Boolean allowCaseFunction = true)
|
||||
IEnumerable<IFunctionInfo> functions)
|
||||
{
|
||||
var context = new ParseContext(expression, trace, namedValues, functions, allowCaseFunction: allowCaseFunction);
|
||||
var context = new ParseContext(expression, trace, namedValues, functions);
|
||||
context.Trace.Info($"Parsing expression: <{expression}>");
|
||||
return CreateTree(context);
|
||||
}
|
||||
@@ -322,7 +321,7 @@ namespace GitHub.Actions.Expressions
|
||||
context.Operators.Pop();
|
||||
}
|
||||
var functionOperands = PopOperands(context, parameterCount);
|
||||
|
||||
|
||||
// Node already exists on the operand stack
|
||||
function = (Function)context.Operands.Peek();
|
||||
|
||||
@@ -416,12 +415,6 @@ namespace GitHub.Actions.Expressions
|
||||
String name,
|
||||
out IFunctionInfo functionInfo)
|
||||
{
|
||||
if (String.Equals(name, "case", StringComparison.OrdinalIgnoreCase) && !context.AllowCaseFunction)
|
||||
{
|
||||
functionInfo = null;
|
||||
return false;
|
||||
}
|
||||
|
||||
return ExpressionConstants.WellKnownFunctions.TryGetValue(name, out functionInfo) ||
|
||||
context.ExtensionFunctions.TryGetValue(name, out functionInfo);
|
||||
}
|
||||
@@ -429,7 +422,6 @@ namespace GitHub.Actions.Expressions
|
||||
private sealed class ParseContext
|
||||
{
|
||||
public Boolean AllowUnknownKeywords;
|
||||
public Boolean AllowCaseFunction;
|
||||
public readonly String Expression;
|
||||
public readonly Dictionary<String, IFunctionInfo> ExtensionFunctions = new Dictionary<String, IFunctionInfo>(StringComparer.OrdinalIgnoreCase);
|
||||
public readonly Dictionary<String, INamedValueInfo> ExtensionNamedValues = new Dictionary<String, INamedValueInfo>(StringComparer.OrdinalIgnoreCase);
|
||||
@@ -445,8 +437,7 @@ namespace GitHub.Actions.Expressions
|
||||
ITraceWriter trace,
|
||||
IEnumerable<INamedValueInfo> namedValues,
|
||||
IEnumerable<IFunctionInfo> functions,
|
||||
Boolean allowUnknownKeywords = false,
|
||||
Boolean allowCaseFunction = true)
|
||||
Boolean allowUnknownKeywords = false)
|
||||
{
|
||||
Expression = expression ?? String.Empty;
|
||||
if (Expression.Length > ExpressionConstants.MaxLength)
|
||||
@@ -467,7 +458,6 @@ namespace GitHub.Actions.Expressions
|
||||
|
||||
LexicalAnalyzer = new LexicalAnalyzer(Expression);
|
||||
AllowUnknownKeywords = allowUnknownKeywords;
|
||||
AllowCaseFunction = allowCaseFunction;
|
||||
}
|
||||
|
||||
private class NoOperationTraceWriter : ITraceWriter
|
||||
|
||||
@@ -1828,7 +1828,7 @@ namespace GitHub.Actions.WorkflowParser.Conversion
|
||||
var node = default(ExpressionNode);
|
||||
try
|
||||
{
|
||||
node = expressionParser.CreateTree(condition, null, namedValues, functions, allowCaseFunction: context.AllowCaseFunction) as ExpressionNode;
|
||||
node = expressionParser.CreateTree(condition, null, namedValues, functions) as ExpressionNode;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
|
||||
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
|
||||
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
@@ -113,12 +113,6 @@ namespace GitHub.Actions.WorkflowParser.ObjectTemplating
|
||||
/// </summary>
|
||||
internal Boolean StrictJsonParsing { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Gets or sets a value indicating whether the case expression function is allowed.
|
||||
/// Defaults to true. Set to false to disable the case function.
|
||||
/// </summary>
|
||||
internal Boolean AllowCaseFunction { get; set; } = true;
|
||||
|
||||
internal ITraceWriter TraceWriter { get; set; }
|
||||
|
||||
private IDictionary<String, Int32> FileIds
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
|
||||
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
|
||||
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
@@ -55,7 +55,7 @@ namespace GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens
|
||||
var originalBytes = context.Memory.CurrentBytes;
|
||||
try
|
||||
{
|
||||
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions, allowCaseFunction: context.AllowCaseFunction);
|
||||
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions);
|
||||
var options = new EvaluationOptions
|
||||
{
|
||||
MaxMemory = context.Memory.MaxBytes,
|
||||
@@ -93,7 +93,7 @@ namespace GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens
|
||||
var originalBytes = context.Memory.CurrentBytes;
|
||||
try
|
||||
{
|
||||
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions, allowCaseFunction: context.AllowCaseFunction);
|
||||
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions);
|
||||
var options = new EvaluationOptions
|
||||
{
|
||||
MaxMemory = context.Memory.MaxBytes,
|
||||
@@ -123,7 +123,7 @@ namespace GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens
|
||||
var originalBytes = context.Memory.CurrentBytes;
|
||||
try
|
||||
{
|
||||
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions, allowCaseFunction: context.AllowCaseFunction);
|
||||
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions);
|
||||
var options = new EvaluationOptions
|
||||
{
|
||||
MaxMemory = context.Memory.MaxBytes,
|
||||
@@ -153,7 +153,7 @@ namespace GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens
|
||||
var originalBytes = context.Memory.CurrentBytes;
|
||||
try
|
||||
{
|
||||
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions, allowCaseFunction: context.AllowCaseFunction);
|
||||
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions);
|
||||
var options = new EvaluationOptions
|
||||
{
|
||||
MaxMemory = context.Memory.MaxBytes,
|
||||
@@ -289,4 +289,4 @@ namespace GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens
|
||||
return result;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
using GitHub.DistributedTask.Expressions2;
|
||||
using GitHub.DistributedTask.Expressions2;
|
||||
using GitHub.DistributedTask.Expressions2.Sdk;
|
||||
using GitHub.DistributedTask.ObjectTemplating;
|
||||
using System;
|
||||
@@ -9,7 +9,7 @@ namespace GitHub.Runner.Common.Tests.Sdk
|
||||
{
|
||||
/// <summary>
|
||||
/// Regression tests for ExpressionParser.CreateTree to verify that
|
||||
/// allowCaseFunction does not accidentally set allowUnknownKeywords.
|
||||
/// the case function does not accidentally set allowUnknownKeywords.
|
||||
/// </summary>
|
||||
public sealed class ExpressionParserL0
|
||||
{
|
||||
@@ -18,7 +18,7 @@ namespace GitHub.Runner.Common.Tests.Sdk
|
||||
[Trait("Category", "Sdk")]
|
||||
public void CreateTree_RejectsUnrecognizedNamedValue()
|
||||
{
|
||||
// Regression: allowCaseFunction was passed positionally into
|
||||
// Regression: the case function parameter was passed positionally into
|
||||
// the allowUnknownKeywords parameter, causing all named values
|
||||
// to be silently accepted.
|
||||
var parser = new ExpressionParser();
|
||||
@@ -52,7 +52,7 @@ namespace GitHub.Runner.Common.Tests.Sdk
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Sdk")]
|
||||
public void CreateTree_CaseFunctionWorks_WhenAllowed()
|
||||
public void CreateTree_CaseFunctionWorks()
|
||||
{
|
||||
var parser = new ExpressionParser();
|
||||
var namedValues = new List<INamedValueInfo>
|
||||
@@ -60,35 +60,17 @@ namespace GitHub.Runner.Common.Tests.Sdk
|
||||
new NamedValueInfo<ContextValueNode>("github"),
|
||||
};
|
||||
|
||||
var node = parser.CreateTree("case(github.event_name, 'push', 'Push Event')", null, namedValues, null, allowCaseFunction: true);
|
||||
var node = parser.CreateTree("case(github.event_name, 'push', 'Push Event')", null, namedValues, null);
|
||||
|
||||
Assert.NotNull(node);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Sdk")]
|
||||
public void CreateTree_CaseFunctionRejected_WhenDisallowed()
|
||||
{
|
||||
var parser = new ExpressionParser();
|
||||
var namedValues = new List<INamedValueInfo>
|
||||
{
|
||||
new NamedValueInfo<ContextValueNode>("github"),
|
||||
};
|
||||
|
||||
var ex = Assert.Throws<ParseException>(() =>
|
||||
parser.CreateTree("case(github.event_name, 'push', 'Push Event')", null, namedValues, null, allowCaseFunction: false));
|
||||
|
||||
Assert.Contains("Unrecognized function", ex.Message);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Sdk")]
|
||||
public void CreateTree_CaseFunctionDoesNotAffectUnknownKeywords()
|
||||
{
|
||||
// The key regression test: with allowCaseFunction=true (default),
|
||||
// unrecognized named values must still be rejected.
|
||||
// The key regression test: unrecognized named values must still be rejected.
|
||||
var parser = new ExpressionParser();
|
||||
var namedValues = new List<INamedValueInfo>
|
||||
{
|
||||
@@ -96,7 +78,7 @@ namespace GitHub.Runner.Common.Tests.Sdk
|
||||
};
|
||||
|
||||
var ex = Assert.Throws<ParseException>(() =>
|
||||
parser.CreateTree("github.ref", null, namedValues, null, allowCaseFunction: true));
|
||||
parser.CreateTree("github.ref", null, namedValues, null));
|
||||
|
||||
Assert.Contains("Unrecognized named-value", ex.Message);
|
||||
}
|
||||
|
||||
126
src/Test/L0/Sdk/RSWebApi/AgentJobRequestMessageL0.cs
Normal file
126
src/Test/L0/Sdk/RSWebApi/AgentJobRequestMessageL0.cs
Normal file
@@ -0,0 +1,126 @@
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Runtime.Serialization.Json;
|
||||
using System.Text;
|
||||
using Xunit;
|
||||
using GitHub.DistributedTask.Pipelines;
|
||||
|
||||
namespace GitHub.Actions.RunService.WebApi.Tests;
|
||||
|
||||
public sealed class AgentJobRequestMessageL0
|
||||
{
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Common")]
|
||||
public void VerifyEnableDebuggerDeserialization_WithTrue()
|
||||
{
|
||||
// Arrange
|
||||
var serializer = new DataContractJsonSerializer(typeof(AgentJobRequestMessage));
|
||||
string jsonWithEnabledDebugger = DoubleQuotify("{'EnableDebugger': true}");
|
||||
|
||||
// Act
|
||||
using var stream = new MemoryStream();
|
||||
stream.Write(Encoding.UTF8.GetBytes(jsonWithEnabledDebugger));
|
||||
stream.Position = 0;
|
||||
var recoveredMessage = serializer.ReadObject(stream) as AgentJobRequestMessage;
|
||||
|
||||
// Assert
|
||||
Assert.NotNull(recoveredMessage);
|
||||
Assert.True(recoveredMessage.EnableDebugger, "EnableDebugger should be true when JSON contains 'EnableDebugger': true");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Common")]
|
||||
public void VerifyEnableDebuggerDeserialization_DefaultToFalse()
|
||||
{
|
||||
// Arrange
|
||||
var serializer = new DataContractJsonSerializer(typeof(AgentJobRequestMessage));
|
||||
string jsonWithoutDebugger = DoubleQuotify("{'messageType': 'PipelineAgentJobRequest'}");
|
||||
|
||||
// Act
|
||||
using var stream = new MemoryStream();
|
||||
stream.Write(Encoding.UTF8.GetBytes(jsonWithoutDebugger));
|
||||
stream.Position = 0;
|
||||
var recoveredMessage = serializer.ReadObject(stream) as AgentJobRequestMessage;
|
||||
|
||||
// Assert
|
||||
Assert.NotNull(recoveredMessage);
|
||||
Assert.False(recoveredMessage.EnableDebugger, "EnableDebugger should default to false when JSON field is absent");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Common")]
|
||||
public void VerifyEnableDebuggerDeserialization_WithFalse()
|
||||
{
|
||||
// Arrange
|
||||
var serializer = new DataContractJsonSerializer(typeof(AgentJobRequestMessage));
|
||||
string jsonWithDisabledDebugger = DoubleQuotify("{'EnableDebugger': false}");
|
||||
|
||||
// Act
|
||||
using var stream = new MemoryStream();
|
||||
stream.Write(Encoding.UTF8.GetBytes(jsonWithDisabledDebugger));
|
||||
stream.Position = 0;
|
||||
var recoveredMessage = serializer.ReadObject(stream) as AgentJobRequestMessage;
|
||||
|
||||
// Assert
|
||||
Assert.NotNull(recoveredMessage);
|
||||
Assert.False(recoveredMessage.EnableDebugger, "EnableDebugger should be false when JSON contains 'EnableDebugger': false");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Common")]
|
||||
public void VerifyDebuggerTunnelDeserialization_WithTunnel()
|
||||
{
|
||||
// Arrange
|
||||
var serializer = new DataContractJsonSerializer(typeof(AgentJobRequestMessage), new DataContractJsonSerializerSettings
|
||||
{
|
||||
KnownTypes = new[] { typeof(DebuggerTunnelInfo) }
|
||||
});
|
||||
string json = DoubleQuotify(
|
||||
"{'EnableDebugger': true, 'DebuggerTunnel': {'TunnelId': 'tun-123', 'ClusterId': 'use2', 'HostToken': 'tok-abc', 'Port': 4711}}");
|
||||
|
||||
// Act
|
||||
using var stream = new MemoryStream();
|
||||
stream.Write(Encoding.UTF8.GetBytes(json));
|
||||
stream.Position = 0;
|
||||
var recoveredMessage = serializer.ReadObject(stream) as AgentJobRequestMessage;
|
||||
|
||||
// Assert
|
||||
Assert.NotNull(recoveredMessage);
|
||||
Assert.True(recoveredMessage.EnableDebugger);
|
||||
Assert.NotNull(recoveredMessage.DebuggerTunnel);
|
||||
Assert.Equal("tun-123", recoveredMessage.DebuggerTunnel.TunnelId);
|
||||
Assert.Equal("use2", recoveredMessage.DebuggerTunnel.ClusterId);
|
||||
Assert.Equal("tok-abc", recoveredMessage.DebuggerTunnel.HostToken);
|
||||
Assert.Equal(4711, recoveredMessage.DebuggerTunnel.Port);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Common")]
|
||||
public void VerifyDebuggerTunnelDeserialization_WithoutTunnel()
|
||||
{
|
||||
// Arrange
|
||||
var serializer = new DataContractJsonSerializer(typeof(AgentJobRequestMessage));
|
||||
string json = DoubleQuotify("{'EnableDebugger': true}");
|
||||
|
||||
// Act
|
||||
using var stream = new MemoryStream();
|
||||
stream.Write(Encoding.UTF8.GetBytes(json));
|
||||
stream.Position = 0;
|
||||
var recoveredMessage = serializer.ReadObject(stream) as AgentJobRequestMessage;
|
||||
|
||||
// Assert
|
||||
Assert.NotNull(recoveredMessage);
|
||||
Assert.True(recoveredMessage.EnableDebugger);
|
||||
Assert.Null(recoveredMessage.DebuggerTunnel);
|
||||
}
|
||||
|
||||
private static string DoubleQuotify(string text)
|
||||
{
|
||||
return text.Replace('\'', '"');
|
||||
}
|
||||
}
|
||||
@@ -2,6 +2,7 @@
|
||||
using GitHub.Runner.Listener.Check;
|
||||
using GitHub.Runner.Listener.Configuration;
|
||||
using GitHub.Runner.Worker;
|
||||
using GitHub.Runner.Worker.Dap;
|
||||
using GitHub.Runner.Worker.Container.ContainerHooks;
|
||||
using GitHub.Runner.Worker.Handlers;
|
||||
using System;
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.IO;
|
||||
using System.IO.Compression;
|
||||
using System.Net;
|
||||
@@ -198,7 +199,8 @@ namespace GitHub.Runner.Common.Tests.Worker
|
||||
Func<Task> action = async () => await _actionManager.PrepareActionsAsync(_ec.Object, actions);
|
||||
|
||||
//Assert
|
||||
await Assert.ThrowsAsync<ActionNotFoundException>(action);
|
||||
var ex = await Assert.ThrowsAsync<FailedToDownloadActionException>(action);
|
||||
Assert.IsType<ActionNotFoundException>(ex.InnerException);
|
||||
|
||||
var watermarkFile = Path.Combine(_hc.GetDirectory(WellKnownDirectory.Actions), ActionName, "main.completed");
|
||||
Assert.False(File.Exists(watermarkFile));
|
||||
@@ -1253,6 +1255,659 @@ runs:
|
||||
}
|
||||
#endif
|
||||
|
||||
// =================================================================
|
||||
// Tests for batched action resolution optimization
|
||||
// =================================================================
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public async void PrepareActions_BatchesResolutionAcrossCompositeActions()
|
||||
{
|
||||
// Verifies that when multiple composite actions at the same depth
|
||||
// reference sub-actions, those sub-actions are resolved in a single
|
||||
// batched API call rather than one call per composite.
|
||||
//
|
||||
// Action tree:
|
||||
// CompositePrestep (composite) → [Node action, CompositePrestep2 (composite)]
|
||||
// CompositePrestep2 (composite) → [Node action, Docker action]
|
||||
//
|
||||
// Without batching: 3 API calls (depth 0, depth 1 for CompositePrestep, depth 2 for CompositePrestep2)
|
||||
// With batching: still 3 calls at most, but the key is that depth-1
|
||||
// sub-actions from all composites at depth 0 are batched into 1 call.
|
||||
// And the same action appearing at multiple depths triggers only 1 resolve.
|
||||
Environment.SetEnvironmentVariable("ACTIONS_BATCH_ACTION_RESOLUTION", "true");
|
||||
try
|
||||
{
|
||||
//Arrange
|
||||
Setup();
|
||||
_hc.EnqueueInstance<IActionRunner>(new Mock<IActionRunner>().Object);
|
||||
_hc.EnqueueInstance<IActionRunner>(new Mock<IActionRunner>().Object);
|
||||
_hc.EnqueueInstance<IActionRunner>(new Mock<IActionRunner>().Object);
|
||||
|
||||
var resolveCallCount = 0;
|
||||
var resolvedActions = new List<ActionReferenceList>();
|
||||
_jobServer.Setup(x => x.ResolveActionDownloadInfoAsync(It.IsAny<Guid>(), It.IsAny<string>(), It.IsAny<Guid>(), It.IsAny<Guid>(), It.IsAny<ActionReferenceList>(), It.IsAny<CancellationToken>()))
|
||||
.Returns((Guid scopeIdentifier, string hubName, Guid planId, Guid jobId, ActionReferenceList actions, CancellationToken cancellationToken) =>
|
||||
{
|
||||
resolveCallCount++;
|
||||
resolvedActions.Add(actions);
|
||||
var result = new ActionDownloadInfoCollection { Actions = new Dictionary<string, ActionDownloadInfo>() };
|
||||
foreach (var action in actions.Actions)
|
||||
{
|
||||
var key = $"{action.NameWithOwner}@{action.Ref}";
|
||||
result.Actions[key] = new ActionDownloadInfo
|
||||
{
|
||||
NameWithOwner = action.NameWithOwner,
|
||||
Ref = action.Ref,
|
||||
ResolvedNameWithOwner = action.NameWithOwner,
|
||||
ResolvedSha = $"{action.Ref}-sha",
|
||||
TarballUrl = $"https://api.github.com/repos/{action.NameWithOwner}/tarball/{action.Ref}",
|
||||
ZipballUrl = $"https://api.github.com/repos/{action.NameWithOwner}/zipball/{action.Ref}",
|
||||
};
|
||||
}
|
||||
return Task.FromResult(result);
|
||||
});
|
||||
|
||||
var actionId = Guid.NewGuid();
|
||||
var actions = new List<Pipelines.ActionStep>
|
||||
{
|
||||
new Pipelines.ActionStep()
|
||||
{
|
||||
Name = "action",
|
||||
Id = actionId,
|
||||
Reference = new Pipelines.RepositoryPathReference()
|
||||
{
|
||||
Name = "TingluoHuang/runner_L0",
|
||||
Ref = "CompositePrestep",
|
||||
RepositoryType = "GitHub"
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
//Act
|
||||
var result = await _actionManager.PrepareActionsAsync(_ec.Object, actions);
|
||||
|
||||
//Assert
|
||||
// The composite tree is:
|
||||
// depth 0: CompositePrestep
|
||||
// depth 1: Node@RepositoryActionWithWrapperActionfile_Node + CompositePrestep2
|
||||
// depth 2: Node@RepositoryActionWithWrapperActionfile_Node + Docker@RepositoryActionWithWrapperActionfile_Docker
|
||||
//
|
||||
// With batching:
|
||||
// Call 1 (depth 0, resolve): CompositePrestep
|
||||
// Call 2 (depth 0→1, pre-resolve): Node + CompositePrestep2 in one batch
|
||||
// Call 3 (depth 1→2, pre-resolve): Docker only (Node already cached from call 2)
|
||||
Assert.Equal(3, resolveCallCount);
|
||||
|
||||
// Call 1: depth 0 resolve — just the top-level composite
|
||||
var call1Keys = resolvedActions[0].Actions.Select(a => $"{a.NameWithOwner}@{a.Ref}").OrderBy(k => k).ToList();
|
||||
Assert.Equal(new[] { "TingluoHuang/runner_L0@CompositePrestep" }, call1Keys);
|
||||
|
||||
// Call 2: depth 0→1 pre-resolve — batch both children of CompositePrestep
|
||||
var call2Keys = resolvedActions[1].Actions.Select(a => $"{a.NameWithOwner}@{a.Ref}").OrderBy(k => k).ToList();
|
||||
Assert.Equal(new[] { "TingluoHuang/runner_L0@CompositePrestep2", "TingluoHuang/runner_L0@RepositoryActionWithWrapperActionfile_Node" }, call2Keys);
|
||||
|
||||
// Call 3: depth 1→2 pre-resolve — only Docker (Node was cached in call 2)
|
||||
var call3Keys = resolvedActions[2].Actions.Select(a => $"{a.NameWithOwner}@{a.Ref}").OrderBy(k => k).ToList();
|
||||
Assert.Equal(new[] { "TingluoHuang/runner_L0@RepositoryActionWithWrapperActionfile_Docker" }, call3Keys);
|
||||
|
||||
// Verify all actions were downloaded
|
||||
Assert.True(File.Exists(Path.Combine(_hc.GetDirectory(WellKnownDirectory.Actions), "TingluoHuang/runner_L0", "CompositePrestep.completed")));
|
||||
Assert.True(File.Exists(Path.Combine(_hc.GetDirectory(WellKnownDirectory.Actions), "TingluoHuang/runner_L0", "RepositoryActionWithWrapperActionfile_Node.completed")));
|
||||
Assert.True(File.Exists(Path.Combine(_hc.GetDirectory(WellKnownDirectory.Actions), "TingluoHuang/runner_L0", "CompositePrestep2.completed")));
|
||||
Assert.True(File.Exists(Path.Combine(_hc.GetDirectory(WellKnownDirectory.Actions), "TingluoHuang/runner_L0", "RepositoryActionWithWrapperActionfile_Docker.completed")));
|
||||
|
||||
// Verify pre-step tracking still works correctly
|
||||
Assert.Equal(1, result.PreStepTracker.Count);
|
||||
}
|
||||
finally
|
||||
{
|
||||
Environment.SetEnvironmentVariable("ACTIONS_BATCH_ACTION_RESOLUTION", null);
|
||||
Teardown();
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public async void PrepareActions_DeduplicatesResolutionAcrossDepthLevels()
|
||||
{
|
||||
// Verifies that an action appearing at multiple depths in the
|
||||
// composite tree is only resolved once (not re-resolved at each level).
|
||||
//
|
||||
// CompositePrestep uses Node action at depth 1.
|
||||
// CompositePrestep2 (also at depth 1) uses the SAME Node action at depth 2.
|
||||
// The Node action should only be resolved once total.
|
||||
Environment.SetEnvironmentVariable("ACTIONS_BATCH_ACTION_RESOLUTION", "true");
|
||||
try
|
||||
{
|
||||
//Arrange
|
||||
Setup();
|
||||
_hc.EnqueueInstance<IActionRunner>(new Mock<IActionRunner>().Object);
|
||||
_hc.EnqueueInstance<IActionRunner>(new Mock<IActionRunner>().Object);
|
||||
_hc.EnqueueInstance<IActionRunner>(new Mock<IActionRunner>().Object);
|
||||
|
||||
var allResolvedKeys = new List<string>();
|
||||
_jobServer.Setup(x => x.ResolveActionDownloadInfoAsync(It.IsAny<Guid>(), It.IsAny<string>(), It.IsAny<Guid>(), It.IsAny<Guid>(), It.IsAny<ActionReferenceList>(), It.IsAny<CancellationToken>()))
|
||||
.Returns((Guid scopeIdentifier, string hubName, Guid planId, Guid jobId, ActionReferenceList actions, CancellationToken cancellationToken) =>
|
||||
{
|
||||
var result = new ActionDownloadInfoCollection { Actions = new Dictionary<string, ActionDownloadInfo>() };
|
||||
foreach (var action in actions.Actions)
|
||||
{
|
||||
var key = $"{action.NameWithOwner}@{action.Ref}";
|
||||
allResolvedKeys.Add(key);
|
||||
result.Actions[key] = new ActionDownloadInfo
|
||||
{
|
||||
NameWithOwner = action.NameWithOwner,
|
||||
Ref = action.Ref,
|
||||
ResolvedNameWithOwner = action.NameWithOwner,
|
||||
ResolvedSha = $"{action.Ref}-sha",
|
||||
TarballUrl = $"https://api.github.com/repos/{action.NameWithOwner}/tarball/{action.Ref}",
|
||||
ZipballUrl = $"https://api.github.com/repos/{action.NameWithOwner}/zipball/{action.Ref}",
|
||||
};
|
||||
}
|
||||
return Task.FromResult(result);
|
||||
});
|
||||
|
||||
var actionId = Guid.NewGuid();
|
||||
var actions = new List<Pipelines.ActionStep>
|
||||
{
|
||||
new Pipelines.ActionStep()
|
||||
{
|
||||
Name = "action",
|
||||
Id = actionId,
|
||||
Reference = new Pipelines.RepositoryPathReference()
|
||||
{
|
||||
Name = "TingluoHuang/runner_L0",
|
||||
Ref = "CompositePrestep",
|
||||
RepositoryType = "GitHub"
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
//Act
|
||||
await _actionManager.PrepareActionsAsync(_ec.Object, actions);
|
||||
|
||||
//Assert
|
||||
// TingluoHuang/runner_L0@RepositoryActionWithWrapperActionfile_Node appears
|
||||
// at both depth 1 (sub-step of CompositePrestep) and depth 2 (sub-step of
|
||||
// CompositePrestep2). With deduplication it should only be resolved once.
|
||||
var nodeActionKey = "TingluoHuang/runner_L0@RepositoryActionWithWrapperActionfile_Node";
|
||||
var nodeResolveCount = allResolvedKeys.FindAll(k => k == nodeActionKey).Count;
|
||||
Assert.Equal(1, nodeResolveCount);
|
||||
|
||||
// Verify the total number of unique actions resolved matches the tree
|
||||
var uniqueKeys = new HashSet<string>(allResolvedKeys);
|
||||
// Expected unique actions: CompositePrestep, Node, CompositePrestep2, Docker = 4
|
||||
Assert.Equal(4, uniqueKeys.Count);
|
||||
}
|
||||
finally
|
||||
{
|
||||
Environment.SetEnvironmentVariable("ACTIONS_BATCH_ACTION_RESOLUTION", null);
|
||||
Teardown();
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public async void PrepareActions_MultipleTopLevelActions_BatchesResolution()
|
||||
{
|
||||
// Verifies that multiple independent actions at depth 0 are
|
||||
// resolved in a single API call.
|
||||
Environment.SetEnvironmentVariable("ACTIONS_BATCH_ACTION_RESOLUTION", "true");
|
||||
try
|
||||
{
|
||||
//Arrange
|
||||
Setup();
|
||||
// Node action has pre+post, needs IActionRunner instances
|
||||
_hc.EnqueueInstance<IActionRunner>(new Mock<IActionRunner>().Object);
|
||||
_hc.EnqueueInstance<IActionRunner>(new Mock<IActionRunner>().Object);
|
||||
|
||||
var resolveCallCount = 0;
|
||||
var firstCallActionCount = 0;
|
||||
_jobServer.Setup(x => x.ResolveActionDownloadInfoAsync(It.IsAny<Guid>(), It.IsAny<string>(), It.IsAny<Guid>(), It.IsAny<Guid>(), It.IsAny<ActionReferenceList>(), It.IsAny<CancellationToken>()))
|
||||
.Returns((Guid scopeIdentifier, string hubName, Guid planId, Guid jobId, ActionReferenceList actions, CancellationToken cancellationToken) =>
|
||||
{
|
||||
resolveCallCount++;
|
||||
if (resolveCallCount == 1)
|
||||
{
|
||||
firstCallActionCount = actions.Actions.Count;
|
||||
}
|
||||
var result = new ActionDownloadInfoCollection { Actions = new Dictionary<string, ActionDownloadInfo>() };
|
||||
foreach (var action in actions.Actions)
|
||||
{
|
||||
var key = $"{action.NameWithOwner}@{action.Ref}";
|
||||
result.Actions[key] = new ActionDownloadInfo
|
||||
{
|
||||
NameWithOwner = action.NameWithOwner,
|
||||
Ref = action.Ref,
|
||||
ResolvedNameWithOwner = action.NameWithOwner,
|
||||
ResolvedSha = $"{action.Ref}-sha",
|
||||
TarballUrl = $"https://api.github.com/repos/{action.NameWithOwner}/tarball/{action.Ref}",
|
||||
ZipballUrl = $"https://api.github.com/repos/{action.NameWithOwner}/zipball/{action.Ref}",
|
||||
};
|
||||
}
|
||||
return Task.FromResult(result);
|
||||
});
|
||||
|
||||
var actions = new List<Pipelines.ActionStep>
|
||||
{
|
||||
new Pipelines.ActionStep()
|
||||
{
|
||||
Name = "action1",
|
||||
Id = Guid.NewGuid(),
|
||||
Reference = new Pipelines.RepositoryPathReference()
|
||||
{
|
||||
Name = "TingluoHuang/runner_L0",
|
||||
Ref = "RepositoryActionWithWrapperActionfile_Node",
|
||||
RepositoryType = "GitHub"
|
||||
}
|
||||
},
|
||||
new Pipelines.ActionStep()
|
||||
{
|
||||
Name = "action2",
|
||||
Id = Guid.NewGuid(),
|
||||
Reference = new Pipelines.RepositoryPathReference()
|
||||
{
|
||||
Name = "TingluoHuang/runner_L0",
|
||||
Ref = "RepositoryActionWithWrapperActionfile_Docker",
|
||||
RepositoryType = "GitHub"
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
//Act
|
||||
await _actionManager.PrepareActionsAsync(_ec.Object, actions);
|
||||
|
||||
//Assert
|
||||
// Both actions are at depth 0 — should be resolved in a single batch call
|
||||
Assert.Equal(1, resolveCallCount);
|
||||
Assert.Equal(2, firstCallActionCount);
|
||||
|
||||
// Verify both were downloaded
|
||||
Assert.True(File.Exists(Path.Combine(_hc.GetDirectory(WellKnownDirectory.Actions), "TingluoHuang/runner_L0", "RepositoryActionWithWrapperActionfile_Node.completed")));
|
||||
Assert.True(File.Exists(Path.Combine(_hc.GetDirectory(WellKnownDirectory.Actions), "TingluoHuang/runner_L0", "RepositoryActionWithWrapperActionfile_Docker.completed")));
|
||||
}
|
||||
finally
|
||||
{
|
||||
Environment.SetEnvironmentVariable("ACTIONS_BATCH_ACTION_RESOLUTION", null);
|
||||
Teardown();
|
||||
}
|
||||
}
|
||||
|
||||
#if OS_LINUX
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public async void PrepareActions_NestedCompositeContainers_BatchedResolution()
|
||||
{
|
||||
// Verifies batching with nested composite actions that reference
|
||||
// container actions (Linux-only since containers require Linux).
|
||||
//
|
||||
// CompositeContainerNested (composite):
|
||||
// → repositoryactionwithdockerfile (Dockerfile)
|
||||
// → CompositeContainerNested2 (composite):
|
||||
// → repositoryactionwithdockerfile (Dockerfile, same as above)
|
||||
// → notpullorbuildimagesmultipletimes1 (Dockerfile)
|
||||
Environment.SetEnvironmentVariable("ACTIONS_BATCH_ACTION_RESOLUTION", "true");
|
||||
try
|
||||
{
|
||||
//Arrange
|
||||
Setup();
|
||||
|
||||
var resolveCallCount = 0;
|
||||
_jobServer.Setup(x => x.ResolveActionDownloadInfoAsync(It.IsAny<Guid>(), It.IsAny<string>(), It.IsAny<Guid>(), It.IsAny<Guid>(), It.IsAny<ActionReferenceList>(), It.IsAny<CancellationToken>()))
|
||||
.Returns((Guid scopeIdentifier, string hubName, Guid planId, Guid jobId, ActionReferenceList actions, CancellationToken cancellationToken) =>
|
||||
{
|
||||
resolveCallCount++;
|
||||
var result = new ActionDownloadInfoCollection { Actions = new Dictionary<string, ActionDownloadInfo>() };
|
||||
foreach (var action in actions.Actions)
|
||||
{
|
||||
var key = $"{action.NameWithOwner}@{action.Ref}";
|
||||
result.Actions[key] = new ActionDownloadInfo
|
||||
{
|
||||
NameWithOwner = action.NameWithOwner,
|
||||
Ref = action.Ref,
|
||||
ResolvedNameWithOwner = action.NameWithOwner,
|
||||
ResolvedSha = $"{action.Ref}-sha",
|
||||
TarballUrl = $"https://api.github.com/repos/{action.NameWithOwner}/tarball/{action.Ref}",
|
||||
ZipballUrl = $"https://api.github.com/repos/{action.NameWithOwner}/zipball/{action.Ref}",
|
||||
};
|
||||
}
|
||||
return Task.FromResult(result);
|
||||
});
|
||||
|
||||
var actionId = Guid.NewGuid();
|
||||
var actions = new List<Pipelines.ActionStep>
|
||||
{
|
||||
new Pipelines.ActionStep()
|
||||
{
|
||||
Name = "action",
|
||||
Id = actionId,
|
||||
Reference = new Pipelines.RepositoryPathReference()
|
||||
{
|
||||
Name = "TingluoHuang/runner_L0",
|
||||
Ref = "CompositeContainerNested",
|
||||
RepositoryType = "GitHub"
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
//Act
|
||||
var result = await _actionManager.PrepareActionsAsync(_ec.Object, actions);
|
||||
|
||||
//Assert
|
||||
// Tree has 3 depth levels with 5 unique actions.
|
||||
// With batching, should need at most 3 resolve calls (one per depth level).
|
||||
Assert.True(resolveCallCount <= 3, $"Expected at most 3 resolve calls but got {resolveCallCount}");
|
||||
|
||||
// repositoryactionwithdockerfile appears at both depth 1 and depth 2.
|
||||
// Container setup should still work correctly — 2 unique Docker images.
|
||||
Assert.Equal(2, result.ContainerSetupSteps.Count);
|
||||
}
|
||||
finally
|
||||
{
|
||||
Environment.SetEnvironmentVariable("ACTIONS_BATCH_ACTION_RESOLUTION", null);
|
||||
Teardown();
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public async void PrepareActions_ParallelDownloads_MultipleUniqueActions()
|
||||
{
|
||||
// Verifies that multiple unique top-level actions are downloaded via
|
||||
// DownloadActionsInParallelAsync (the parallel code path), and that
|
||||
// all actions are correctly resolved and downloaded.
|
||||
Environment.SetEnvironmentVariable("ACTIONS_BATCH_ACTION_RESOLUTION", "true");
|
||||
try
|
||||
{
|
||||
//Arrange
|
||||
Setup();
|
||||
// Node action has pre step, and CompositePrestep recurses into
|
||||
// sub-actions that also need IActionRunner instances
|
||||
_hc.EnqueueInstance<IActionRunner>(new Mock<IActionRunner>().Object);
|
||||
_hc.EnqueueInstance<IActionRunner>(new Mock<IActionRunner>().Object);
|
||||
_hc.EnqueueInstance<IActionRunner>(new Mock<IActionRunner>().Object);
|
||||
_hc.EnqueueInstance<IActionRunner>(new Mock<IActionRunner>().Object);
|
||||
_hc.EnqueueInstance<IActionRunner>(new Mock<IActionRunner>().Object);
|
||||
|
||||
var resolveCallCount = 0;
|
||||
_jobServer.Setup(x => x.ResolveActionDownloadInfoAsync(It.IsAny<Guid>(), It.IsAny<string>(), It.IsAny<Guid>(), It.IsAny<Guid>(), It.IsAny<ActionReferenceList>(), It.IsAny<CancellationToken>()))
|
||||
.Returns((Guid scopeIdentifier, string hubName, Guid planId, Guid jobId, ActionReferenceList actions, CancellationToken cancellationToken) =>
|
||||
{
|
||||
Interlocked.Increment(ref resolveCallCount);
|
||||
var result = new ActionDownloadInfoCollection { Actions = new Dictionary<string, ActionDownloadInfo>() };
|
||||
foreach (var action in actions.Actions)
|
||||
{
|
||||
var key = $"{action.NameWithOwner}@{action.Ref}";
|
||||
result.Actions[key] = new ActionDownloadInfo
|
||||
{
|
||||
NameWithOwner = action.NameWithOwner,
|
||||
Ref = action.Ref,
|
||||
ResolvedNameWithOwner = action.NameWithOwner,
|
||||
ResolvedSha = $"{action.Ref}-sha",
|
||||
TarballUrl = $"https://api.github.com/repos/{action.NameWithOwner}/tarball/{action.Ref}",
|
||||
ZipballUrl = $"https://api.github.com/repos/{action.NameWithOwner}/zipball/{action.Ref}",
|
||||
};
|
||||
}
|
||||
return Task.FromResult(result);
|
||||
});
|
||||
|
||||
var actions = new List<Pipelines.ActionStep>
|
||||
{
|
||||
new Pipelines.ActionStep()
|
||||
{
|
||||
Name = "action1",
|
||||
Id = Guid.NewGuid(),
|
||||
Reference = new Pipelines.RepositoryPathReference()
|
||||
{
|
||||
Name = "TingluoHuang/runner_L0",
|
||||
Ref = "RepositoryActionWithWrapperActionfile_Node",
|
||||
RepositoryType = "GitHub"
|
||||
}
|
||||
},
|
||||
new Pipelines.ActionStep()
|
||||
{
|
||||
Name = "action2",
|
||||
Id = Guid.NewGuid(),
|
||||
Reference = new Pipelines.RepositoryPathReference()
|
||||
{
|
||||
Name = "TingluoHuang/runner_L0",
|
||||
Ref = "RepositoryActionWithWrapperActionfile_Docker",
|
||||
RepositoryType = "GitHub"
|
||||
}
|
||||
},
|
||||
new Pipelines.ActionStep()
|
||||
{
|
||||
Name = "action3",
|
||||
Id = Guid.NewGuid(),
|
||||
Reference = new Pipelines.RepositoryPathReference()
|
||||
{
|
||||
Name = "TingluoHuang/runner_L0",
|
||||
Ref = "CompositePrestep",
|
||||
RepositoryType = "GitHub"
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
//Act
|
||||
await _actionManager.PrepareActionsAsync(_ec.Object, actions);
|
||||
|
||||
//Assert
|
||||
// 3 unique actions at depth 0 → triggers DownloadActionsInParallelAsync
|
||||
// (parallel path used when uniqueDownloads.Count > 1)
|
||||
var nodeCompleted = Path.Combine(_hc.GetDirectory(WellKnownDirectory.Actions), "TingluoHuang/runner_L0", "RepositoryActionWithWrapperActionfile_Node.completed");
|
||||
var dockerCompleted = Path.Combine(_hc.GetDirectory(WellKnownDirectory.Actions), "TingluoHuang/runner_L0", "RepositoryActionWithWrapperActionfile_Docker.completed");
|
||||
var compositeCompleted = Path.Combine(_hc.GetDirectory(WellKnownDirectory.Actions), "TingluoHuang/runner_L0", "CompositePrestep.completed");
|
||||
|
||||
Assert.True(File.Exists(nodeCompleted), $"Expected watermark at {nodeCompleted}");
|
||||
Assert.True(File.Exists(dockerCompleted), $"Expected watermark at {dockerCompleted}");
|
||||
Assert.True(File.Exists(compositeCompleted), $"Expected watermark at {compositeCompleted}");
|
||||
|
||||
// All depth-0 actions resolved in a single batch call.
|
||||
// Composite sub-actions may add 1-2 more calls.
|
||||
Assert.True(resolveCallCount >= 1, "Expected at least 1 resolve call");
|
||||
}
|
||||
finally
|
||||
{
|
||||
Environment.SetEnvironmentVariable("ACTIONS_BATCH_ACTION_RESOLUTION", null);
|
||||
Teardown();
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public async void PrepareActions_DownloadsNextLevelActionsBeforeRecursing()
|
||||
{
|
||||
// Verifies that depth-1 actions are downloaded before the depth-2
|
||||
// pre-resolve fires. We detect this by snapshotting watermark state
|
||||
// inside the 3rd ResolveActionDownloadInfoAsync callback (which is
|
||||
// the depth-2 pre-resolve). If pre-download works, depth-1 watermarks
|
||||
// already exist at that point.
|
||||
//
|
||||
// Action tree:
|
||||
// CompositePrestep (composite) → [Node, CompositePrestep2 (composite)]
|
||||
// CompositePrestep2 (composite) → [Node, Docker]
|
||||
//
|
||||
// Without pre-download: downloads happen during recursion (serial per depth)
|
||||
// With pre-download: depth 1 actions (Node + CompositePrestep2) are
|
||||
// downloaded in parallel before recursing, so recursion is a no-op
|
||||
// for downloads.
|
||||
Environment.SetEnvironmentVariable("ACTIONS_BATCH_ACTION_RESOLUTION", "true");
|
||||
try
|
||||
{
|
||||
//Arrange
|
||||
Setup();
|
||||
_hc.EnqueueInstance<IActionRunner>(new Mock<IActionRunner>().Object);
|
||||
_hc.EnqueueInstance<IActionRunner>(new Mock<IActionRunner>().Object);
|
||||
_hc.EnqueueInstance<IActionRunner>(new Mock<IActionRunner>().Object);
|
||||
|
||||
// Track watermark state at the time of each resolve call.
|
||||
// If pre-download works, when the 3rd resolve fires (depth 2
|
||||
// pre-resolve for Docker), the depth-1 actions (Node +
|
||||
// CompositePrestep2) should already have watermarks on disk.
|
||||
var resolveCallCount = 0;
|
||||
var watermarksAtResolve3 = new Dictionary<string, bool>();
|
||||
_jobServer.Setup(x => x.ResolveActionDownloadInfoAsync(It.IsAny<Guid>(), It.IsAny<string>(), It.IsAny<Guid>(), It.IsAny<Guid>(), It.IsAny<ActionReferenceList>(), It.IsAny<CancellationToken>()))
|
||||
.Returns((Guid scopeIdentifier, string hubName, Guid planId, Guid jobId, ActionReferenceList actions, CancellationToken cancellationToken) =>
|
||||
{
|
||||
resolveCallCount++;
|
||||
if (resolveCallCount == 3)
|
||||
{
|
||||
// At the time of the 3rd resolve, check if depth-1 actions
|
||||
// are already downloaded (pre-download should have done this)
|
||||
var actionsDir2 = _hc.GetDirectory(WellKnownDirectory.Actions);
|
||||
watermarksAtResolve3["Node"] = File.Exists(Path.Combine(actionsDir2, "TingluoHuang/runner_L0", "RepositoryActionWithWrapperActionfile_Node.completed"));
|
||||
watermarksAtResolve3["CompositePrestep2"] = File.Exists(Path.Combine(actionsDir2, "TingluoHuang/runner_L0", "CompositePrestep2.completed"));
|
||||
}
|
||||
var result = new ActionDownloadInfoCollection { Actions = new Dictionary<string, ActionDownloadInfo>() };
|
||||
foreach (var action in actions.Actions)
|
||||
{
|
||||
var key = $"{action.NameWithOwner}@{action.Ref}";
|
||||
result.Actions[key] = new ActionDownloadInfo
|
||||
{
|
||||
NameWithOwner = action.NameWithOwner,
|
||||
Ref = action.Ref,
|
||||
ResolvedNameWithOwner = action.NameWithOwner,
|
||||
ResolvedSha = $"{action.Ref}-sha",
|
||||
TarballUrl = $"https://api.github.com/repos/{action.NameWithOwner}/tarball/{action.Ref}",
|
||||
ZipballUrl = $"https://api.github.com/repos/{action.NameWithOwner}/zipball/{action.Ref}",
|
||||
};
|
||||
}
|
||||
return Task.FromResult(result);
|
||||
});
|
||||
|
||||
var actionId = Guid.NewGuid();
|
||||
var actions = new List<Pipelines.ActionStep>
|
||||
{
|
||||
new Pipelines.ActionStep()
|
||||
{
|
||||
Name = "action",
|
||||
Id = actionId,
|
||||
Reference = new Pipelines.RepositoryPathReference()
|
||||
{
|
||||
Name = "TingluoHuang/runner_L0",
|
||||
Ref = "CompositePrestep",
|
||||
RepositoryType = "GitHub"
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
//Act
|
||||
var result = await _actionManager.PrepareActionsAsync(_ec.Object, actions);
|
||||
|
||||
//Assert
|
||||
// All actions should be downloaded (watermarks exist)
|
||||
var actionsDir = _hc.GetDirectory(WellKnownDirectory.Actions);
|
||||
Assert.True(File.Exists(Path.Combine(actionsDir, "TingluoHuang/runner_L0", "CompositePrestep.completed")));
|
||||
Assert.True(File.Exists(Path.Combine(actionsDir, "TingluoHuang/runner_L0", "RepositoryActionWithWrapperActionfile_Node.completed")));
|
||||
Assert.True(File.Exists(Path.Combine(actionsDir, "TingluoHuang/runner_L0", "CompositePrestep2.completed")));
|
||||
Assert.True(File.Exists(Path.Combine(actionsDir, "TingluoHuang/runner_L0", "RepositoryActionWithWrapperActionfile_Docker.completed")));
|
||||
|
||||
// 3 resolve calls total
|
||||
Assert.Equal(3, resolveCallCount);
|
||||
|
||||
// The key assertion: at the time of the 3rd resolve call
|
||||
// (pre-resolve for depth 2), the depth-1 actions should
|
||||
// ALREADY be downloaded thanks to pre-download.
|
||||
// Without pre-download, these watermarks wouldn't exist yet
|
||||
// because depth-1 downloads would only happen during recursion.
|
||||
Assert.True(watermarksAtResolve3["Node"],
|
||||
"Node action should be pre-downloaded before depth 2 pre-resolve");
|
||||
Assert.True(watermarksAtResolve3["CompositePrestep2"],
|
||||
"CompositePrestep2 should be pre-downloaded before depth 2 pre-resolve");
|
||||
}
|
||||
finally
|
||||
{
|
||||
Environment.SetEnvironmentVariable("ACTIONS_BATCH_ACTION_RESOLUTION", null);
|
||||
Teardown();
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public async void PrepareActions_ParallelDownloadsAtSameDepth()
|
||||
{
|
||||
// Verifies that multiple unique actions at the same depth are
|
||||
// downloaded concurrently (Task.WhenAll) rather than sequentially.
|
||||
// We detect this by checking that all watermarks exist after a
|
||||
// single PrepareActionsAsync call with multiple top-level actions.
|
||||
Environment.SetEnvironmentVariable("ACTIONS_BATCH_ACTION_RESOLUTION", "true");
|
||||
try
|
||||
{
|
||||
//Arrange
|
||||
Setup();
|
||||
_hc.EnqueueInstance<IActionRunner>(new Mock<IActionRunner>().Object);
|
||||
_hc.EnqueueInstance<IActionRunner>(new Mock<IActionRunner>().Object);
|
||||
|
||||
_jobServer.Setup(x => x.ResolveActionDownloadInfoAsync(It.IsAny<Guid>(), It.IsAny<string>(), It.IsAny<Guid>(), It.IsAny<Guid>(), It.IsAny<ActionReferenceList>(), It.IsAny<CancellationToken>()))
|
||||
.Returns((Guid scopeIdentifier, string hubName, Guid planId, Guid jobId, ActionReferenceList actions, CancellationToken cancellationToken) =>
|
||||
{
|
||||
var result = new ActionDownloadInfoCollection { Actions = new Dictionary<string, ActionDownloadInfo>() };
|
||||
foreach (var action in actions.Actions)
|
||||
{
|
||||
var key = $"{action.NameWithOwner}@{action.Ref}";
|
||||
result.Actions[key] = new ActionDownloadInfo
|
||||
{
|
||||
NameWithOwner = action.NameWithOwner,
|
||||
Ref = action.Ref,
|
||||
ResolvedNameWithOwner = action.NameWithOwner,
|
||||
ResolvedSha = $"{action.Ref}-sha",
|
||||
TarballUrl = $"https://api.github.com/repos/{action.NameWithOwner}/tarball/{action.Ref}",
|
||||
ZipballUrl = $"https://api.github.com/repos/{action.NameWithOwner}/zipball/{action.Ref}",
|
||||
};
|
||||
}
|
||||
return Task.FromResult(result);
|
||||
});
|
||||
|
||||
var actions = new List<Pipelines.ActionStep>
|
||||
{
|
||||
new Pipelines.ActionStep()
|
||||
{
|
||||
Name = "action1",
|
||||
Id = Guid.NewGuid(),
|
||||
Reference = new Pipelines.RepositoryPathReference()
|
||||
{
|
||||
Name = "TingluoHuang/runner_L0",
|
||||
Ref = "RepositoryActionWithWrapperActionfile_Node",
|
||||
RepositoryType = "GitHub"
|
||||
}
|
||||
},
|
||||
new Pipelines.ActionStep()
|
||||
{
|
||||
Name = "action2",
|
||||
Id = Guid.NewGuid(),
|
||||
Reference = new Pipelines.RepositoryPathReference()
|
||||
{
|
||||
Name = "TingluoHuang/runner_L0",
|
||||
Ref = "RepositoryActionWithWrapperActionfile_Docker",
|
||||
RepositoryType = "GitHub"
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
//Act
|
||||
await _actionManager.PrepareActionsAsync(_ec.Object, actions);
|
||||
|
||||
//Assert - both downloaded (parallel path used when > 1 unique download)
|
||||
var actionsDir = _hc.GetDirectory(WellKnownDirectory.Actions);
|
||||
Assert.True(File.Exists(Path.Combine(actionsDir, "TingluoHuang/runner_L0", "RepositoryActionWithWrapperActionfile_Node.completed")));
|
||||
Assert.True(File.Exists(Path.Combine(actionsDir, "TingluoHuang/runner_L0", "RepositoryActionWithWrapperActionfile_Docker.completed")));
|
||||
}
|
||||
finally
|
||||
{
|
||||
Environment.SetEnvironmentVariable("ACTIONS_BATCH_ACTION_RESOLUTION", null);
|
||||
Teardown();
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
|
||||
@@ -504,7 +504,7 @@ namespace GitHub.Runner.Common.Tests.Worker
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void Load_Node24Action()
|
||||
@@ -1006,6 +1006,45 @@ namespace GitHub.Runner.Common.Tests.Worker
|
||||
_ec.Setup(x => x.AddIssue(It.IsAny<Issue>(), It.IsAny<ExecutionContextLogOptions>())).Callback((Issue issue, ExecutionContextLogOptions logOptions) => { _hc.GetTrace().Info($"[{issue.Type}]{logOptions.LogMessageOverride ?? issue.Message}"); });
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void Evaluate_Default_Input_Case_Function()
|
||||
{
|
||||
try
|
||||
{
|
||||
//Arrange
|
||||
Setup();
|
||||
|
||||
var actionManifest = new ActionManifestManager();
|
||||
actionManifest.Initialize(_hc);
|
||||
|
||||
_ec.Object.ExpressionValues["github"] = new LegacyContextData.DictionaryContextData
|
||||
{
|
||||
{ "ref", new LegacyContextData.StringContextData("refs/heads/main") },
|
||||
};
|
||||
_ec.Object.ExpressionValues["strategy"] = new LegacyContextData.DictionaryContextData();
|
||||
_ec.Object.ExpressionValues["matrix"] = new LegacyContextData.DictionaryContextData();
|
||||
_ec.Object.ExpressionValues["steps"] = new LegacyContextData.DictionaryContextData();
|
||||
_ec.Object.ExpressionValues["job"] = new LegacyContextData.DictionaryContextData();
|
||||
_ec.Object.ExpressionValues["runner"] = new LegacyContextData.DictionaryContextData();
|
||||
_ec.Object.ExpressionValues["env"] = new LegacyContextData.DictionaryContextData();
|
||||
_ec.Object.ExpressionFunctions.Add(new LegacyExpressions.FunctionInfo<GitHub.Runner.Worker.Expressions.HashFilesFunction>("hashFiles", 1, 255));
|
||||
|
||||
// Act — evaluate a case() expression as a default input value.
|
||||
// The feature flag is set, so this should succeed.
|
||||
var token = new BasicExpressionToken(null, null, null, "case(true, 'matched', 'default')");
|
||||
var result = actionManifest.EvaluateDefaultInput(_ec.Object, "testInput", token);
|
||||
|
||||
// Assert — case() should evaluate successfully
|
||||
Assert.Equal("matched", result);
|
||||
}
|
||||
finally
|
||||
{
|
||||
Teardown();
|
||||
}
|
||||
}
|
||||
|
||||
private void Teardown()
|
||||
{
|
||||
_hc?.Dispose();
|
||||
|
||||
779
src/Test/L0/Worker/DapDebuggerL0.cs
Normal file
779
src/Test/L0/Worker/DapDebuggerL0.cs
Normal file
@@ -0,0 +1,779 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Net;
|
||||
using System.Net.Sockets;
|
||||
using System.Net.WebSockets;
|
||||
using System.Runtime.CompilerServices;
|
||||
using System.Text;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using Moq;
|
||||
using GitHub.Runner.Worker;
|
||||
using GitHub.Runner.Worker.Dap;
|
||||
using Newtonsoft.Json;
|
||||
using Xunit;
|
||||
|
||||
namespace GitHub.Runner.Common.Tests.Worker
|
||||
{
|
||||
public sealed class DapDebuggerL0
|
||||
{
|
||||
private const string TimeoutEnvironmentVariable = "ACTIONS_RUNNER_DAP_CONNECTION_TIMEOUT";
|
||||
private const string TunnelConnectTimeoutVariable = "ACTIONS_RUNNER_DAP_TUNNEL_CONNECT_TIMEOUT_SECONDS";
|
||||
private DapDebugger _debugger;
|
||||
|
||||
private TestHostContext CreateTestContext(bool enableWebSocketBridge = false, [CallerMemberName] string testName = "")
|
||||
{
|
||||
var hc = new TestHostContext(this, testName);
|
||||
_debugger = new DapDebugger();
|
||||
_debugger.Initialize(hc);
|
||||
_debugger.SkipTunnelRelay = true;
|
||||
_debugger.SkipWebSocketBridge = !enableWebSocketBridge;
|
||||
return hc;
|
||||
}
|
||||
|
||||
private static async Task WithEnvironmentVariableAsync(string name, string value, Func<Task> action)
|
||||
{
|
||||
var originalValue = Environment.GetEnvironmentVariable(name);
|
||||
Environment.SetEnvironmentVariable(name, value);
|
||||
try
|
||||
{
|
||||
await action();
|
||||
}
|
||||
finally
|
||||
{
|
||||
Environment.SetEnvironmentVariable(name, originalValue);
|
||||
}
|
||||
}
|
||||
|
||||
private static void WithEnvironmentVariable(string name, string value, Action action)
|
||||
{
|
||||
var originalValue = Environment.GetEnvironmentVariable(name);
|
||||
Environment.SetEnvironmentVariable(name, value);
|
||||
try
|
||||
{
|
||||
action();
|
||||
}
|
||||
finally
|
||||
{
|
||||
Environment.SetEnvironmentVariable(name, originalValue);
|
||||
}
|
||||
}
|
||||
|
||||
private static ushort GetFreePort()
|
||||
{
|
||||
using var listener = new TcpListener(IPAddress.Loopback, 0);
|
||||
listener.Start();
|
||||
return (ushort)((IPEndPoint)listener.LocalEndpoint).Port;
|
||||
}
|
||||
|
||||
private static async Task<TcpClient> ConnectClientAsync(int port)
|
||||
{
|
||||
var client = new TcpClient();
|
||||
await client.ConnectAsync(IPAddress.Loopback, port);
|
||||
return client;
|
||||
}
|
||||
|
||||
private static async Task<ClientWebSocket> ConnectWebSocketClientAsync(int port)
|
||||
{
|
||||
var client = new ClientWebSocket();
|
||||
await client.ConnectAsync(new Uri($"ws://127.0.0.1:{port}/"), CancellationToken.None);
|
||||
return client;
|
||||
}
|
||||
|
||||
private static async Task SendRequestAsync(NetworkStream stream, Request request)
|
||||
{
|
||||
var json = JsonConvert.SerializeObject(request);
|
||||
var body = Encoding.UTF8.GetBytes(json);
|
||||
var header = $"Content-Length: {body.Length}\r\n\r\n";
|
||||
var headerBytes = Encoding.ASCII.GetBytes(header);
|
||||
|
||||
await stream.WriteAsync(headerBytes, 0, headerBytes.Length);
|
||||
await stream.WriteAsync(body, 0, body.Length);
|
||||
await stream.FlushAsync();
|
||||
}
|
||||
|
||||
private static async Task SendRequestAsync(WebSocket client, Request request)
|
||||
{
|
||||
var json = JsonConvert.SerializeObject(request);
|
||||
var body = Encoding.UTF8.GetBytes(json);
|
||||
|
||||
await client.SendAsync(new ArraySegment<byte>(body), WebSocketMessageType.Text, endOfMessage: true, CancellationToken.None);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Reads a single DAP-framed message from a stream with a timeout.
|
||||
/// Parses the Content-Length header, reads exactly that many bytes,
|
||||
/// and returns the JSON body. Fails with a clear error on timeout.
|
||||
/// </summary>
|
||||
private static async Task<string> ReadDapMessageAsync(NetworkStream stream, TimeSpan timeout)
|
||||
{
|
||||
using var cts = new CancellationTokenSource(timeout);
|
||||
var token = cts.Token;
|
||||
|
||||
var headerBuilder = new StringBuilder();
|
||||
var buffer = new byte[1];
|
||||
var contentLength = -1;
|
||||
|
||||
while (true)
|
||||
{
|
||||
var readTask = stream.ReadAsync(buffer, 0, 1, token);
|
||||
var bytesRead = await readTask;
|
||||
if (bytesRead == 0)
|
||||
{
|
||||
throw new EndOfStreamException("Connection closed while reading DAP headers");
|
||||
}
|
||||
|
||||
headerBuilder.Append((char)buffer[0]);
|
||||
var headers = headerBuilder.ToString();
|
||||
if (headers.EndsWith("\r\n\r\n", StringComparison.Ordinal))
|
||||
{
|
||||
foreach (var line in headers.Split(new[] { "\r\n" }, StringSplitOptions.RemoveEmptyEntries))
|
||||
{
|
||||
if (line.StartsWith("Content-Length: ", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
contentLength = int.Parse(line.Substring("Content-Length: ".Length).Trim());
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (contentLength < 0)
|
||||
{
|
||||
throw new InvalidOperationException("No Content-Length header found in DAP message");
|
||||
}
|
||||
|
||||
var body = new byte[contentLength];
|
||||
var totalRead = 0;
|
||||
while (totalRead < contentLength)
|
||||
{
|
||||
var bytesRead = await stream.ReadAsync(body, totalRead, contentLength - totalRead, token);
|
||||
if (bytesRead == 0)
|
||||
{
|
||||
throw new EndOfStreamException("Connection closed while reading DAP body");
|
||||
}
|
||||
totalRead += bytesRead;
|
||||
}
|
||||
|
||||
return Encoding.UTF8.GetString(body);
|
||||
}
|
||||
|
||||
private static async Task<string> ReadWebSocketDataUntilAsync(WebSocket client, TimeSpan timeout, params string[] expectedFragments)
|
||||
{
|
||||
using var cts = new CancellationTokenSource(timeout);
|
||||
var buffer = new byte[4096];
|
||||
var allMessages = new StringBuilder();
|
||||
|
||||
while (true)
|
||||
{
|
||||
using var messageStream = new MemoryStream();
|
||||
WebSocketReceiveResult result;
|
||||
do
|
||||
{
|
||||
result = await client.ReceiveAsync(new ArraySegment<byte>(buffer), cts.Token);
|
||||
if (result.MessageType == WebSocketMessageType.Close)
|
||||
{
|
||||
throw new EndOfStreamException("WebSocket closed before expected DAP messages were received.");
|
||||
}
|
||||
|
||||
if (result.Count > 0)
|
||||
{
|
||||
messageStream.Write(buffer, 0, result.Count);
|
||||
}
|
||||
}
|
||||
while (!result.EndOfMessage);
|
||||
|
||||
var messageText = Encoding.UTF8.GetString(messageStream.ToArray());
|
||||
allMessages.Append(messageText);
|
||||
|
||||
var text = allMessages.ToString();
|
||||
var containsAllFragments = true;
|
||||
foreach (var fragment in expectedFragments)
|
||||
{
|
||||
if (!text.Contains(fragment, StringComparison.Ordinal))
|
||||
{
|
||||
containsAllFragments = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (containsAllFragments)
|
||||
{
|
||||
return text;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static Mock<IExecutionContext> CreateJobContextWithTunnel(CancellationToken cancellationToken, ushort port, string jobName = null)
|
||||
{
|
||||
var tunnel = new GitHub.DistributedTask.Pipelines.DebuggerTunnelInfo
|
||||
{
|
||||
TunnelId = "test-tunnel",
|
||||
ClusterId = "test-cluster",
|
||||
HostToken = "test-token",
|
||||
Port = port
|
||||
};
|
||||
var debuggerConfig = new DebuggerConfig(true, tunnel);
|
||||
var jobContext = new Mock<IExecutionContext>();
|
||||
jobContext.Setup(x => x.CancellationToken).Returns(cancellationToken);
|
||||
jobContext.Setup(x => x.Global).Returns(new GlobalContext { Debugger = debuggerConfig });
|
||||
jobContext
|
||||
.Setup(x => x.GetGitHubContext(It.IsAny<string>()))
|
||||
.Returns((string contextName) => string.Equals(contextName, "job", StringComparison.Ordinal) ? jobName : null);
|
||||
return jobContext;
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void InitializeSucceeds()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
Assert.NotNull(_debugger);
|
||||
Assert.False(_debugger.IsActive);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public async Task StartAsyncFailsWithoutValidTunnelConfig()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));
|
||||
var jobContext = new Mock<IExecutionContext>();
|
||||
jobContext.Setup(x => x.CancellationToken).Returns(cts.Token);
|
||||
jobContext.Setup(x => x.Global).Returns(new GlobalContext
|
||||
{
|
||||
Debugger = new DebuggerConfig(true, null)
|
||||
});
|
||||
|
||||
await Assert.ThrowsAsync<ArgumentException>(() => _debugger.StartAsync(jobContext.Object));
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public async Task StartAsyncUsesPortFromTunnelConfig()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
var port = GetFreePort();
|
||||
using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));
|
||||
var jobContext = CreateJobContextWithTunnel(cts.Token, port);
|
||||
await _debugger.StartAsync(jobContext.Object);
|
||||
using var client = await ConnectClientAsync(port);
|
||||
Assert.True(client.Connected);
|
||||
await _debugger.StopAsync();
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public async Task StartAsyncWithWebSocketBridgeAcceptsInitializeOverWebSocket()
|
||||
{
|
||||
using (CreateTestContext(enableWebSocketBridge: true))
|
||||
{
|
||||
var port = GetFreePort();
|
||||
using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));
|
||||
var jobContext = CreateJobContextWithTunnel(cts.Token, port);
|
||||
await _debugger.StartAsync(jobContext.Object);
|
||||
|
||||
Assert.NotEqual(0, _debugger.InternalDapPort);
|
||||
Assert.NotEqual(port, _debugger.InternalDapPort);
|
||||
|
||||
using var client = await ConnectWebSocketClientAsync(port);
|
||||
await SendRequestAsync(client, new Request
|
||||
{
|
||||
Seq = 1,
|
||||
Type = "request",
|
||||
Command = "initialize"
|
||||
});
|
||||
|
||||
var response = await ReadWebSocketDataUntilAsync(
|
||||
client,
|
||||
TimeSpan.FromSeconds(5),
|
||||
"\"type\":\"response\"",
|
||||
"\"command\":\"initialize\"",
|
||||
"\"event\":\"initialized\"");
|
||||
|
||||
Assert.Contains("\"success\":true", response);
|
||||
await _debugger.StopAsync();
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public async Task StartAsyncWithWebSocketBridgeAcceptsPreUpgradedWebSocketStream()
|
||||
{
|
||||
using (CreateTestContext(enableWebSocketBridge: true))
|
||||
{
|
||||
var port = GetFreePort();
|
||||
using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));
|
||||
var jobContext = CreateJobContextWithTunnel(cts.Token, port);
|
||||
await _debugger.StartAsync(jobContext.Object);
|
||||
|
||||
Assert.NotEqual(0, _debugger.InternalDapPort);
|
||||
Assert.NotEqual(port, _debugger.InternalDapPort);
|
||||
|
||||
using var tcpClient = await ConnectClientAsync(port);
|
||||
using var webSocket = WebSocket.CreateFromStream(
|
||||
tcpClient.GetStream(),
|
||||
isServer: false,
|
||||
subProtocol: null,
|
||||
keepAliveInterval: TimeSpan.FromSeconds(30));
|
||||
|
||||
await SendRequestAsync(webSocket, new Request
|
||||
{
|
||||
Seq = 1,
|
||||
Type = "request",
|
||||
Command = "initialize"
|
||||
});
|
||||
|
||||
var response = await ReadWebSocketDataUntilAsync(
|
||||
webSocket,
|
||||
TimeSpan.FromSeconds(5),
|
||||
"\"type\":\"response\"",
|
||||
"\"command\":\"initialize\"",
|
||||
"\"event\":\"initialized\"");
|
||||
|
||||
Assert.Contains("\"success\":true", response);
|
||||
await _debugger.StopAsync();
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void ResolveTimeoutUsesCustomTimeoutFromEnvironment()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
WithEnvironmentVariable(TimeoutEnvironmentVariable, "30", () =>
|
||||
{
|
||||
Assert.Equal(30, _debugger.ResolveTimeout());
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void ResolveTimeoutIgnoresInvalidTimeoutFromEnvironment()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
WithEnvironmentVariable(TimeoutEnvironmentVariable, "not-a-number", () =>
|
||||
{
|
||||
Assert.Equal(15, _debugger.ResolveTimeout());
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void ResolveTimeoutIgnoresZeroTimeoutFromEnvironment()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
WithEnvironmentVariable(TimeoutEnvironmentVariable, "0", () =>
|
||||
{
|
||||
Assert.Equal(15, _debugger.ResolveTimeout());
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public async Task StartAndStopLifecycle()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
var port = GetFreePort();
|
||||
using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));
|
||||
var jobContext = CreateJobContextWithTunnel(cts.Token, port);
|
||||
await _debugger.StartAsync(jobContext.Object);
|
||||
using var client = await ConnectClientAsync(port);
|
||||
Assert.True(client.Connected);
|
||||
await _debugger.StopAsync();
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public async Task StartAndStopMultipleTimesDoesNotThrow()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
foreach (var port in new[] { GetFreePort(), GetFreePort() })
|
||||
{
|
||||
using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));
|
||||
var jobContext = CreateJobContextWithTunnel(cts.Token, port);
|
||||
await _debugger.StartAsync(jobContext.Object);
|
||||
await _debugger.StopAsync();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public async Task WaitUntilReadyCompletesAfterClientConnectionAndConfigurationDone()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
var port = GetFreePort();
|
||||
using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));
|
||||
var jobContext = CreateJobContextWithTunnel(cts.Token, port);
|
||||
await _debugger.StartAsync(jobContext.Object);
|
||||
|
||||
var waitTask = _debugger.WaitUntilReadyAsync();
|
||||
using var client = await ConnectClientAsync(port);
|
||||
await SendRequestAsync(client.GetStream(), new Request
|
||||
{
|
||||
Seq = 1,
|
||||
Type = "request",
|
||||
Command = "configurationDone"
|
||||
});
|
||||
|
||||
await waitTask;
|
||||
Assert.Equal(DapSessionState.Ready, _debugger.State);
|
||||
await _debugger.StopAsync();
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public async Task StartStoresJobContextForThreadsRequest()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
var port = GetFreePort();
|
||||
using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));
|
||||
var jobContext = CreateJobContextWithTunnel(cts.Token, port, "ci-job");
|
||||
await _debugger.StartAsync(jobContext.Object);
|
||||
using var client = await ConnectClientAsync(port);
|
||||
var stream = client.GetStream();
|
||||
await SendRequestAsync(client.GetStream(), new Request
|
||||
{
|
||||
Seq = 1,
|
||||
Type = "request",
|
||||
Command = "threads"
|
||||
});
|
||||
|
||||
var response = await ReadDapMessageAsync(stream, TimeSpan.FromSeconds(5));
|
||||
Assert.Contains("\"command\":\"threads\"", response);
|
||||
Assert.Contains("\"name\":\"Job: ci-job\"", response);
|
||||
await _debugger.StopAsync();
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public async Task CancellationUnblocksAndOnJobCompletedTerminates()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
var port = GetFreePort();
|
||||
using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));
|
||||
var jobContext = CreateJobContextWithTunnel(cts.Token, port);
|
||||
await _debugger.StartAsync(jobContext.Object);
|
||||
|
||||
var waitTask = _debugger.WaitUntilReadyAsync();
|
||||
using var client = await ConnectClientAsync(port);
|
||||
await SendRequestAsync(client.GetStream(), new Request
|
||||
{
|
||||
Seq = 1,
|
||||
Type = "request",
|
||||
Command = "configurationDone"
|
||||
});
|
||||
|
||||
await waitTask;
|
||||
cts.Cancel();
|
||||
|
||||
// In the real runner, JobRunner always calls OnJobCompletedAsync
|
||||
// from a finally block. The cancellation callback only unblocks
|
||||
// pending waits; OnJobCompletedAsync handles state + cleanup.
|
||||
await _debugger.OnJobCompletedAsync();
|
||||
Assert.Equal(DapSessionState.Terminated, _debugger.State);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public async Task StopWithoutStartDoesNotThrow()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
await _debugger.StopAsync();
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public async Task OnJobCompletedTerminatesSession()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
var port = GetFreePort();
|
||||
using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));
|
||||
var jobContext = CreateJobContextWithTunnel(cts.Token, port);
|
||||
await _debugger.StartAsync(jobContext.Object);
|
||||
|
||||
var waitTask = _debugger.WaitUntilReadyAsync();
|
||||
using var client = await ConnectClientAsync(port);
|
||||
await SendRequestAsync(client.GetStream(), new Request
|
||||
{
|
||||
Seq = 1,
|
||||
Type = "request",
|
||||
Command = "configurationDone"
|
||||
});
|
||||
|
||||
await waitTask;
|
||||
await _debugger.OnJobCompletedAsync();
|
||||
Assert.Equal(DapSessionState.Terminated, _debugger.State);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public async Task WaitUntilReadyBeforeStartIsNoOp()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
await _debugger.WaitUntilReadyAsync();
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public async Task WaitUntilReadyJobCancellationPropagatesAsOperationCancelledException()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
var port = GetFreePort();
|
||||
using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));
|
||||
var jobContext = CreateJobContextWithTunnel(cts.Token, port);
|
||||
await _debugger.StartAsync(jobContext.Object);
|
||||
|
||||
var waitTask = _debugger.WaitUntilReadyAsync();
|
||||
await Task.Delay(50);
|
||||
cts.Cancel();
|
||||
|
||||
var ex = await Assert.ThrowsAnyAsync<OperationCanceledException>(() => waitTask);
|
||||
Assert.IsNotType<TimeoutException>(ex);
|
||||
await _debugger.StopAsync();
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public async Task InitializeRequestOverSocketPreservesProtocolMetadataWhenSecretsCollide()
|
||||
{
|
||||
using (var hc = CreateTestContext())
|
||||
{
|
||||
hc.SecretMasker.AddValue("response");
|
||||
hc.SecretMasker.AddValue("initialize");
|
||||
hc.SecretMasker.AddValue("event");
|
||||
hc.SecretMasker.AddValue("initialized");
|
||||
|
||||
var port = GetFreePort();
|
||||
using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));
|
||||
var jobContext = CreateJobContextWithTunnel(cts.Token, port);
|
||||
await _debugger.StartAsync(jobContext.Object);
|
||||
using var client = await ConnectClientAsync(port);
|
||||
var stream = client.GetStream();
|
||||
|
||||
await SendRequestAsync(stream, new Request
|
||||
{
|
||||
Seq = 1,
|
||||
Type = "request",
|
||||
Command = "initialize"
|
||||
});
|
||||
|
||||
var response = await ReadDapMessageAsync(stream, TimeSpan.FromSeconds(5));
|
||||
Assert.Contains("\"type\":\"response\"", response);
|
||||
Assert.Contains("\"command\":\"initialize\"", response);
|
||||
Assert.Contains("\"success\":true", response);
|
||||
|
||||
var initializedEvent = await ReadDapMessageAsync(stream, TimeSpan.FromSeconds(5));
|
||||
Assert.Contains("\"type\":\"event\"", initializedEvent);
|
||||
Assert.Contains("\"event\":\"initialized\"", initializedEvent);
|
||||
|
||||
await _debugger.StopAsync();
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public async Task CancellationDuringStepPauseReleasesWait()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
var port = GetFreePort();
|
||||
using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));
|
||||
var jobContext = CreateJobContextWithTunnel(cts.Token, port);
|
||||
await _debugger.StartAsync(jobContext.Object);
|
||||
|
||||
// Complete handshake so session is ready
|
||||
var waitTask = _debugger.WaitUntilReadyAsync();
|
||||
using var client = await ConnectClientAsync(port);
|
||||
var stream = client.GetStream();
|
||||
await SendRequestAsync(stream, new Request
|
||||
{
|
||||
Seq = 1,
|
||||
Type = "request",
|
||||
Command = "configurationDone"
|
||||
});
|
||||
await waitTask;
|
||||
|
||||
// Simulate a step starting (which pauses)
|
||||
var step = new Mock<IStep>();
|
||||
step.Setup(s => s.DisplayName).Returns("Test Step");
|
||||
step.Setup(s => s.ExecutionContext).Returns((IExecutionContext)null);
|
||||
var stepTask = _debugger.OnStepStartingAsync(step.Object);
|
||||
|
||||
// Give the step time to pause
|
||||
await Task.Delay(50);
|
||||
|
||||
// Cancel the job — should release the step pause
|
||||
cts.Cancel();
|
||||
await stepTask;
|
||||
|
||||
// In the real runner, OnJobCompletedAsync always follows.
|
||||
await _debugger.OnJobCompletedAsync();
|
||||
Assert.Equal(DapSessionState.Terminated, _debugger.State);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public async Task StopAsyncSafeAtAnyLifecyclePoint()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
// StopAsync before start
|
||||
await _debugger.StopAsync();
|
||||
|
||||
// Start then immediate stop (no connection, no WaitUntilReady)
|
||||
var port = GetFreePort();
|
||||
using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));
|
||||
var jobContext = CreateJobContextWithTunnel(cts.Token, port);
|
||||
await _debugger.StartAsync(jobContext.Object);
|
||||
await _debugger.StopAsync();
|
||||
|
||||
// StopAsync after already stopped
|
||||
await _debugger.StopAsync();
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
// End-to-end over a real socket: connect a DAP client, complete the
// handshake via configurationDone, then verify OnJobCompletedAsync emits
// both the "terminated" and "exited" events to the connected client.
public async Task OnJobCompletedSendsTerminatedAndExitedEvents()
{
    using (CreateTestContext())
    {
        var port = GetFreePort();
        using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));
        var jobContext = CreateJobContextWithTunnel(cts.Token, port);
        await _debugger.StartAsync(jobContext.Object);

        // Kick off readiness wait before connecting so it observes the
        // handshake completing below.
        var waitTask = _debugger.WaitUntilReadyAsync();
        using var client = await ConnectClientAsync(port);
        var stream = client.GetStream();
        await SendRequestAsync(stream, new Request
        {
            Seq = 1,
            Type = "request",
            Command = "configurationDone"
        });

        // Read the configurationDone response
        await ReadDapMessageAsync(stream, TimeSpan.FromSeconds(5));
        await waitTask;

        // Complete the job — events are sent via OnJobCompletedAsync
        await _debugger.OnJobCompletedAsync();

        var msg1 = await ReadDapMessageAsync(stream, TimeSpan.FromSeconds(5));
        var msg2 = await ReadDapMessageAsync(stream, TimeSpan.FromSeconds(5));

        // Both events should arrive (order may vary)
        var combined = msg1 + msg2;
        Assert.Contains("\"event\":\"terminated\"", combined);
        Assert.Contains("\"event\":\"exited\"", combined);
    }
}
|
||||
|
||||
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void ResolveTunnelConnectTimeoutReturnsDefaultWhenNoVariable()
{
    using (CreateTestContext())
    {
        // With no override variable present, the 30-second default applies.
        var timeout = _debugger.ResolveTunnelConnectTimeout();
        Assert.Equal(30, timeout);
    }
}
|
||||
|
||||
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void ResolveTunnelConnectTimeoutUsesCustomValue()
{
    using (CreateTestContext())
    {
        // A valid positive integer in the variable overrides the default.
        WithEnvironmentVariable(TunnelConnectTimeoutVariable, "60", () =>
        {
            var timeout = _debugger.ResolveTunnelConnectTimeout();
            Assert.Equal(60, timeout);
        });
    }
}
|
||||
|
||||
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void ResolveTunnelConnectTimeoutIgnoresInvalidValue()
{
    using (CreateTestContext())
    {
        // A non-numeric value is ignored and the default is kept.
        WithEnvironmentVariable(TunnelConnectTimeoutVariable, "not-a-number", () =>
        {
            var timeout = _debugger.ResolveTunnelConnectTimeout();
            Assert.Equal(30, timeout);
        });
    }
}
|
||||
|
||||
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void ResolveTunnelConnectTimeoutIgnoresZeroValue()
{
    using (CreateTestContext())
    {
        // Zero is not a usable timeout, so the default is kept.
        WithEnvironmentVariable(TunnelConnectTimeoutVariable, "0", () =>
        {
            var timeout = _debugger.ResolveTunnelConnectTimeout();
            Assert.Equal(30, timeout);
        });
    }
}
|
||||
}
|
||||
}
|
||||
233
src/Test/L0/Worker/DapMessagesL0.cs
Normal file
233
src/Test/L0/Worker/DapMessagesL0.cs
Normal file
@@ -0,0 +1,233 @@
|
||||
using System.Collections.Generic;
|
||||
using Newtonsoft.Json;
|
||||
using Newtonsoft.Json.Linq;
|
||||
using Xunit;
|
||||
using GitHub.Runner.Worker.Dap;
|
||||
|
||||
namespace GitHub.Runner.Common.Tests.Worker
|
||||
{
|
||||
public sealed class DapMessagesL0
{
    // Wire-format tests for the DAP (Debug Adapter Protocol) message types.
    // Serialization uses Newtonsoft.Json, so the exact lowercase property
    // names asserted below ("seq", "type", "event", ...) reflect the JSON
    // attributes on the message classes — do not "fix" the casing here.

    [Fact]
    [Trait("Level", "L0")]
    [Trait("Category", "Worker")]
    // Request round-trips through JSON, including the free-form Arguments JObject.
    public void RequestSerializesCorrectly()
    {
        var request = new Request
        {
            Seq = 1,
            Type = "request",
            Command = "initialize",
            Arguments = JObject.FromObject(new { clientID = "test-client" })
        };

        var json = JsonConvert.SerializeObject(request);
        var deserialized = JsonConvert.DeserializeObject<Request>(json);

        Assert.Equal(1, deserialized.Seq);
        Assert.Equal("request", deserialized.Type);
        Assert.Equal("initialize", deserialized.Command);
        Assert.Equal("test-client", deserialized.Arguments["clientID"].ToString());
    }

    [Fact]
    [Trait("Level", "L0")]
    [Trait("Category", "Worker")]
    // Response round-trips, preserving the RequestSeq correlation field.
    public void ResponseSerializesCorrectly()
    {
        var response = new Response
        {
            Seq = 2,
            Type = "response",
            RequestSeq = 1,
            Success = true,
            Command = "initialize",
            Body = new Capabilities { SupportsConfigurationDoneRequest = true }
        };

        var json = JsonConvert.SerializeObject(response);
        var deserialized = JsonConvert.DeserializeObject<Response>(json);

        Assert.Equal(2, deserialized.Seq);
        Assert.Equal("response", deserialized.Type);
        Assert.Equal(1, deserialized.RequestSeq);
        Assert.True(deserialized.Success);
        Assert.Equal("initialize", deserialized.Command);
    }

    [Fact]
    [Trait("Level", "L0")]
    [Trait("Category", "Worker")]
    // Event.Type is fixed to "event" and EventType maps to the "event" JSON field.
    public void EventSerializesWithCorrectType()
    {
        var evt = new Event
        {
            EventType = "stopped",
            Body = new StoppedEventBody
            {
                Reason = "entry",
                Description = "Stopped at entry",
                ThreadId = 1,
                AllThreadsStopped = true
            }
        };

        Assert.Equal("event", evt.Type);

        var json = JsonConvert.SerializeObject(evt);
        Assert.Contains("\"type\":\"event\"", json);
        Assert.Contains("\"event\":\"stopped\"", json);
    }

    [Fact]
    [Trait("Level", "L0")]
    [Trait("Category", "Worker")]
    // Unset optional fields must be omitted from the JSON, not serialized as null.
    public void StoppedEventBodyOmitsNullFields()
    {
        var body = new StoppedEventBody
        {
            Reason = "step"
        };

        var json = JsonConvert.SerializeObject(body);
        Assert.Contains("\"reason\":\"step\"", json);
        Assert.DoesNotContain("\"threadId\"", json);
        Assert.DoesNotContain("\"allThreadsStopped\"", json);
        Assert.DoesNotContain("\"description\"", json);
    }

    [Fact]
    [Trait("Level", "L0")]
    [Trait("Category", "Worker")]
    // The MVP capability set round-trips: only configurationDone is supported.
    public void CapabilitiesMvpDefaults()
    {
        var caps = new Capabilities
        {
            SupportsConfigurationDoneRequest = true,
            SupportsFunctionBreakpoints = false,
            SupportsStepBack = false
        };

        var json = JsonConvert.SerializeObject(caps);
        var deserialized = JsonConvert.DeserializeObject<Capabilities>(json);

        Assert.True(deserialized.SupportsConfigurationDoneRequest);
        Assert.False(deserialized.SupportsFunctionBreakpoints);
        Assert.False(deserialized.SupportsStepBack);
    }

    [Fact]
    [Trait("Level", "L0")]
    [Trait("Category", "Worker")]
    public void ContinueResponseBodySerialization()
    {
        var body = new ContinueResponseBody { AllThreadsContinued = true };
        var json = JsonConvert.SerializeObject(body);
        var deserialized = JsonConvert.DeserializeObject<ContinueResponseBody>(json);

        Assert.True(deserialized.AllThreadsContinued);
    }

    [Fact]
    [Trait("Level", "L0")]
    [Trait("Category", "Worker")]
    // Threads list round-trips with id/name intact.
    public void ThreadsResponseBodySerialization()
    {
        var body = new ThreadsResponseBody
        {
            Threads = new List<Thread>
            {
                new Thread { Id = 1, Name = "Job Thread" }
            }
        };

        var json = JsonConvert.SerializeObject(body);
        var deserialized = JsonConvert.DeserializeObject<ThreadsResponseBody>(json);

        Assert.Single(deserialized.Threads);
        Assert.Equal(1, deserialized.Threads[0].Id);
        Assert.Equal("Job Thread", deserialized.Threads[0].Name);
    }

    [Fact]
    [Trait("Level", "L0")]
    [Trait("Category", "Worker")]
    public void StackFrameSerialization()
    {
        var frame = new StackFrame
        {
            Id = 1,
            Name = "Step: Checkout",
            Line = 1,
            Column = 1,
            PresentationHint = "normal"
        };

        var json = JsonConvert.SerializeObject(frame);
        var deserialized = JsonConvert.DeserializeObject<StackFrame>(json);

        Assert.Equal(1, deserialized.Id);
        Assert.Equal("Step: Checkout", deserialized.Name);
        Assert.Equal("normal", deserialized.PresentationHint);
    }

    [Fact]
    [Trait("Level", "L0")]
    [Trait("Category", "Worker")]
    public void ExitedEventBodySerialization()
    {
        // 130 = conventional exit code for SIGINT termination.
        var body = new ExitedEventBody { ExitCode = 130 };
        var json = JsonConvert.SerializeObject(body);
        var deserialized = JsonConvert.DeserializeObject<ExitedEventBody>(json);

        Assert.Equal(130, deserialized.ExitCode);
    }

    [Fact]
    [Trait("Level", "L0")]
    [Trait("Category", "Worker")]
    // Pins the numeric values of DapCommand members that other code may
    // rely on (e.g. switch tables); reordering the enum would break this.
    public void DapCommandEnumValues()
    {
        Assert.Equal(0, (int)DapCommand.Continue);
        Assert.Equal(1, (int)DapCommand.Next);
        Assert.Equal(4, (int)DapCommand.Disconnect);
    }

    [Fact]
    [Trait("Level", "L0")]
    [Trait("Category", "Worker")]
    // Deserializes a raw client payload exactly as it would arrive on the wire.
    public void RequestDeserializesFromRawJson()
    {
        var json = @"{""seq"":5,""type"":""request"",""command"":""continue"",""arguments"":{""threadId"":1}}";
        var request = JsonConvert.DeserializeObject<Request>(json);

        Assert.Equal(5, request.Seq);
        Assert.Equal("request", request.Type);
        Assert.Equal("continue", request.Command);
        Assert.Equal(1, request.Arguments["threadId"].Value<int>());
    }

    [Fact]
    [Trait("Level", "L0")]
    [Trait("Category", "Worker")]
    public void ErrorResponseBodySerialization()
    {
        var body = new ErrorResponseBody
        {
            Error = new Message
            {
                Id = 1,
                Format = "Something went wrong",
                ShowUser = true
            }
        };

        var json = JsonConvert.SerializeObject(body);
        var deserialized = JsonConvert.DeserializeObject<ErrorResponseBody>(json);

        Assert.Equal(1, deserialized.Error.Id);
        Assert.Equal("Something went wrong", deserialized.Error.Format);
        Assert.True(deserialized.Error.ShowUser);
    }
}
|
||||
}
|
||||
237
src/Test/L0/Worker/DapReplExecutorL0.cs
Normal file
237
src/Test/L0/Worker/DapReplExecutorL0.cs
Normal file
@@ -0,0 +1,237 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Runtime.CompilerServices;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.DistributedTask.Expressions2;
|
||||
using GitHub.DistributedTask.Pipelines.ContextData;
|
||||
using GitHub.Runner.Common.Tests;
|
||||
using GitHub.Runner.Worker;
|
||||
using GitHub.Runner.Worker.Dap;
|
||||
using Moq;
|
||||
using Xunit;
|
||||
|
||||
namespace GitHub.Runner.Common.Tests.Worker
|
||||
{
|
||||
public sealed class DapReplExecutorL0
{
    // Unit tests for DapReplExecutor: expression expansion, shell
    // resolution, and environment merging for the debug REPL.

    private TestHostContext _hc;
    private DapReplExecutor _executor;
    // Captures every output event the executor emits via its send callback.
    private List<Event> _sentEvents;

    // Builds a fresh host context and an executor whose output callback
    // records events into _sentEvents instead of writing to a real client.
    private TestHostContext CreateTestContext([CallerMemberName] string testName = "")
    {
        _hc = new TestHostContext(this, testName);
        _sentEvents = new List<Event>();
        _executor = new DapReplExecutor(_hc, (category, text) =>
        {
            _sentEvents.Add(new Event
            {
                EventType = "output",
                Body = new OutputEventBody
                {
                    Category = category,
                    Output = text
                }
            });
        });
        return _hc;
    }

    // Mocks an IExecutionContext with the given expression values and job
    // defaults (both optional; empty defaults are used when omitted).
    private Mock<IExecutionContext> CreateMockContext(
        DictionaryContextData exprValues = null,
        IDictionary<string, IDictionary<string, string>> jobDefaults = null)
    {
        var mock = new Mock<IExecutionContext>();
        mock.Setup(x => x.ExpressionValues).Returns(exprValues ?? new DictionaryContextData());
        mock.Setup(x => x.ExpressionFunctions).Returns(new List<IFunctionInfo>());

        var global = new GlobalContext
        {
            PrependPath = new List<string>(),
            JobDefaults = jobDefaults
                ?? new Dictionary<string, IDictionary<string, string>>(StringComparer.OrdinalIgnoreCase),
        };
        mock.Setup(x => x.Global).Returns(global);

        return mock;
    }

    [Fact]
    [Trait("Level", "L0")]
    [Trait("Category", "Worker")]
    // Running without an execution context fails gracefully with an error result.
    public async Task ExecuteRunCommand_NullContext_ReturnsError()
    {
        using (CreateTestContext())
        {
            var command = new RunCommand { Script = "echo hello" };
            var result = await _executor.ExecuteRunCommandAsync(command, null, CancellationToken.None);

            Assert.Equal("error", result.Type);
            Assert.Contains("No execution context available", result.Result);
        }
    }

    [Fact]
    [Trait("Level", "L0")]
    [Trait("Category", "Worker")]
    // Input with no ${{ }} markers passes through unchanged.
    public void ExpandExpressions_NoExpressions_ReturnsInput()
    {
        using (CreateTestContext())
        {
            var context = CreateMockContext();
            var result = _executor.ExpandExpressions("echo hello", context.Object);

            Assert.Equal("echo hello", result);
        }
    }

    [Fact]
    [Trait("Level", "L0")]
    [Trait("Category", "Worker")]
    public void ExpandExpressions_NullInput_ReturnsEmpty()
    {
        using (CreateTestContext())
        {
            var context = CreateMockContext();
            var result = _executor.ExpandExpressions(null, context.Object);

            Assert.Equal(string.Empty, result);
        }
    }

    [Fact]
    [Trait("Level", "L0")]
    [Trait("Category", "Worker")]
    public void ExpandExpressions_EmptyInput_ReturnsEmpty()
    {
        using (CreateTestContext())
        {
            var context = CreateMockContext();
            var result = _executor.ExpandExpressions("", context.Object);

            Assert.Equal(string.Empty, result);
        }
    }

    [Fact]
    [Trait("Level", "L0")]
    [Trait("Category", "Worker")]
    // A "${{" with no closing "}}" is treated as literal text, not an error.
    public void ExpandExpressions_UnterminatedExpression_KeepsLiteral()
    {
        using (CreateTestContext())
        {
            var context = CreateMockContext();
            var result = _executor.ExpandExpressions("echo ${{ github.repo", context.Object);

            Assert.Equal("echo ${{ github.repo", result);
        }
    }

    [Fact]
    [Trait("Level", "L0")]
    [Trait("Category", "Worker")]
    // Without job-level defaults the platform shell is used; Windows may
    // resolve to either pwsh or powershell depending on availability.
    public void ResolveDefaultShell_NoJobDefaults_ReturnsPlatformDefault()
    {
        using (CreateTestContext())
        {
            var context = CreateMockContext();
            var result = _executor.ResolveDefaultShell(context.Object);

#if OS_WINDOWS
            Assert.True(result == "pwsh" || result == "powershell");
#else
            Assert.Equal("sh", result);
#endif
        }
    }

    [Fact]
    [Trait("Level", "L0")]
    [Trait("Category", "Worker")]
    // A job-level `defaults.run.shell` wins over the platform default.
    public void ResolveDefaultShell_WithJobDefault_ReturnsJobDefault()
    {
        using (CreateTestContext())
        {
            var jobDefaults = new Dictionary<string, IDictionary<string, string>>(StringComparer.OrdinalIgnoreCase)
            {
                ["run"] = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
                {
                    ["shell"] = "bash"
                }
            };
            var context = CreateMockContext(jobDefaults: jobDefaults);
            var result = _executor.ResolveDefaultShell(context.Object);

            Assert.Equal("bash", result);
        }
    }

    [Fact]
    [Trait("Level", "L0")]
    [Trait("Category", "Worker")]
    // The built environment contains both the env context and the REPL's
    // per-command env additions.
    public void BuildEnvironment_MergesEnvContextAndReplOverrides()
    {
        using (CreateTestContext())
        {
            var exprValues = new DictionaryContextData();
            var envData = new DictionaryContextData
            {
                ["FOO"] = new StringContextData("bar"),
            };
            exprValues["env"] = envData;

            var context = CreateMockContext(exprValues);
            var replEnv = new Dictionary<string, string> { { "BAZ", "qux" } };
            var result = _executor.BuildEnvironment(context.Object, replEnv);

            Assert.Equal("bar", result["FOO"]);
            Assert.Equal("qux", result["BAZ"]);
        }
    }

    [Fact]
    [Trait("Level", "L0")]
    [Trait("Category", "Worker")]
    // On key collision the REPL-supplied value overrides the env context value.
    public void BuildEnvironment_ReplOverridesWin()
    {
        using (CreateTestContext())
        {
            var exprValues = new DictionaryContextData();
            var envData = new DictionaryContextData
            {
                ["FOO"] = new StringContextData("original"),
            };
            exprValues["env"] = envData;

            var context = CreateMockContext(exprValues);
            var replEnv = new Dictionary<string, string> { { "FOO", "override" } };
            var result = _executor.BuildEnvironment(context.Object, replEnv);

            Assert.Equal("override", result["FOO"]);
        }
    }

    [Fact]
    [Trait("Level", "L0")]
    [Trait("Category", "Worker")]
    // A null REPL env is tolerated: only the context env appears in the result.
    public void BuildEnvironment_NullReplEnv_ReturnsContextEnvOnly()
    {
        using (CreateTestContext())
        {
            var exprValues = new DictionaryContextData();
            var envData = new DictionaryContextData
            {
                ["FOO"] = new StringContextData("bar"),
            };
            exprValues["env"] = envData;

            var context = CreateMockContext(exprValues);
            var result = _executor.BuildEnvironment(context.Object, null);

            Assert.Equal("bar", result["FOO"]);
            Assert.False(result.ContainsKey("BAZ"));
        }
    }
}
|
||||
}
|
||||
314
src/Test/L0/Worker/DapReplParserL0.cs
Normal file
314
src/Test/L0/Worker/DapReplParserL0.cs
Normal file
@@ -0,0 +1,314 @@
|
||||
using System.Collections.Generic;
|
||||
using System.Runtime.CompilerServices;
|
||||
using GitHub.Runner.Common.Tests;
|
||||
using GitHub.Runner.Worker.Dap;
|
||||
using Xunit;
|
||||
|
||||
namespace GitHub.Runner.Common.Tests.Worker
|
||||
{
|
||||
public sealed class DapReplParserL0
{
    // Tests for the REPL command DSL parser. TryParse has a three-way
    // contract: a command object (cmd != null, error == null) for DSL input,
    // an error string (cmd == null, error != null) for malformed DSL, and
    // null/null for anything that should fall through to expression
    // evaluation. The escape-sequence expectations below are character-exact.

    #region help command

    [Fact]
    [Trait("Level", "L0")]
    [Trait("Category", "Worker")]
    // Bare "help" parses to a HelpCommand with no topic.
    public void Parse_HelpReturnsHelpCommand()
    {
        var cmd = DapReplParser.TryParse("help", out var error);

        Assert.Null(error);
        var help = Assert.IsType<HelpCommand>(cmd);
        Assert.Null(help.Topic);
    }

    [Fact]
    [Trait("Level", "L0")]
    [Trait("Category", "Worker")]
    public void Parse_HelpCaseInsensitive()
    {
        var cmd = DapReplParser.TryParse("Help", out var error);
        Assert.Null(error);
        Assert.IsType<HelpCommand>(cmd);
    }

    [Fact]
    [Trait("Level", "L0")]
    [Trait("Category", "Worker")]
    // help("run") carries the topic through to the command.
    public void Parse_HelpWithTopic()
    {
        var cmd = DapReplParser.TryParse("help(\"run\")", out var error);

        Assert.Null(error);
        var help = Assert.IsType<HelpCommand>(cmd);
        Assert.Equal("run", help.Topic);
    }

    #endregion

    #region run command — basic

    [Fact]
    [Trait("Level", "L0")]
    [Trait("Category", "Worker")]
    // run with only a script: all optional fields remain null.
    public void Parse_RunSimpleScript()
    {
        var cmd = DapReplParser.TryParse("run(\"echo hello\")", out var error);

        Assert.Null(error);
        var run = Assert.IsType<RunCommand>(cmd);
        Assert.Equal("echo hello", run.Script);
        Assert.Null(run.Shell);
        Assert.Null(run.Env);
        Assert.Null(run.WorkingDirectory);
    }

    [Fact]
    [Trait("Level", "L0")]
    [Trait("Category", "Worker")]
    public void Parse_RunWithShell()
    {
        var cmd = DapReplParser.TryParse("run(\"echo hello\", shell: \"bash\")", out var error);

        Assert.Null(error);
        var run = Assert.IsType<RunCommand>(cmd);
        Assert.Equal("echo hello", run.Script);
        Assert.Equal("bash", run.Shell);
    }

    [Fact]
    [Trait("Level", "L0")]
    [Trait("Category", "Worker")]
    public void Parse_RunWithWorkingDirectory()
    {
        var cmd = DapReplParser.TryParse("run(\"ls\", working_directory: \"/tmp\")", out var error);

        Assert.Null(error);
        var run = Assert.IsType<RunCommand>(cmd);
        Assert.Equal("ls", run.Script);
        Assert.Equal("/tmp", run.WorkingDirectory);
    }

    [Fact]
    [Trait("Level", "L0")]
    [Trait("Category", "Worker")]
    // env: { KEY: "value" } blocks become a populated Env dictionary.
    public void Parse_RunWithEnv()
    {
        var cmd = DapReplParser.TryParse("run(\"echo $FOO\", env: { FOO: \"bar\" })", out var error);

        Assert.Null(error);
        var run = Assert.IsType<RunCommand>(cmd);
        Assert.Equal("echo $FOO", run.Script);
        Assert.NotNull(run.Env);
        Assert.Equal("bar", run.Env["FOO"]);
    }

    [Fact]
    [Trait("Level", "L0")]
    [Trait("Category", "Worker")]
    public void Parse_RunWithMultipleEnvVars()
    {
        var cmd = DapReplParser.TryParse("run(\"echo\", env: { A: \"1\", B: \"2\" })", out var error);

        Assert.Null(error);
        var run = Assert.IsType<RunCommand>(cmd);
        Assert.Equal(2, run.Env.Count);
        Assert.Equal("1", run.Env["A"]);
        Assert.Equal("2", run.Env["B"]);
    }

    [Fact]
    [Trait("Level", "L0")]
    [Trait("Category", "Worker")]
    // All three options together: shell, env, and working_directory.
    public void Parse_RunWithAllOptions()
    {
        var input = "run(\"echo $X\", shell: \"zsh\", env: { X: \"1\" }, working_directory: \"/tmp\")";
        var cmd = DapReplParser.TryParse(input, out var error);

        Assert.Null(error);
        var run = Assert.IsType<RunCommand>(cmd);
        Assert.Equal("echo $X", run.Script);
        Assert.Equal("zsh", run.Shell);
        Assert.Equal("1", run.Env["X"]);
        Assert.Equal("/tmp", run.WorkingDirectory);
    }

    #endregion

    #region run command — edge cases

    [Fact]
    [Trait("Level", "L0")]
    [Trait("Category", "Worker")]
    // Backslash-escaped quotes inside the script argument are unescaped.
    public void Parse_RunWithEscapedQuotes()
    {
        var cmd = DapReplParser.TryParse("run(\"echo \\\"hello\\\"\")", out var error);

        Assert.Null(error);
        var run = Assert.IsType<RunCommand>(cmd);
        Assert.Equal("echo \"hello\"", run.Script);
    }

    [Fact]
    [Trait("Level", "L0")]
    [Trait("Category", "Worker")]
    // Commas inside a quoted env value must not split the argument list.
    public void Parse_RunWithCommaInEnvValue()
    {
        var cmd = DapReplParser.TryParse("run(\"echo\", env: { CSV: \"a,b,c\" })", out var error);

        Assert.Null(error);
        var run = Assert.IsType<RunCommand>(cmd);
        Assert.Equal("a,b,c", run.Env["CSV"]);
    }

    #endregion

    #region error cases

    [Fact]
    [Trait("Level", "L0")]
    [Trait("Category", "Worker")]
    public void Parse_RunEmptyArgsReturnsError()
    {
        var cmd = DapReplParser.TryParse("run()", out var error);

        Assert.NotNull(error);
        Assert.Null(cmd);
        Assert.Contains("requires a script argument", error);
    }

    [Fact]
    [Trait("Level", "L0")]
    [Trait("Category", "Worker")]
    public void Parse_RunUnquotedArgReturnsError()
    {
        var cmd = DapReplParser.TryParse("run(echo hello)", out var error);

        Assert.NotNull(error);
        Assert.Null(cmd);
        Assert.Contains("quoted string", error);
    }

    [Fact]
    [Trait("Level", "L0")]
    [Trait("Category", "Worker")]
    public void Parse_RunUnknownOptionReturnsError()
    {
        var cmd = DapReplParser.TryParse("run(\"echo\", timeout: \"10\")", out var error);

        Assert.NotNull(error);
        Assert.Null(cmd);
        Assert.Contains("Unknown option", error);
    }

    [Fact]
    [Trait("Level", "L0")]
    [Trait("Category", "Worker")]
    public void Parse_RunMissingClosingParenReturnsError()
    {
        var cmd = DapReplParser.TryParse("run(\"echo\"", out var error);

        Assert.NotNull(error);
        Assert.Null(cmd);
    }

    #endregion

    #region non-DSL input falls through

    [Fact]
    [Trait("Level", "L0")]
    [Trait("Category", "Worker")]
    // A bare context expression is not DSL: null/null means "fall through".
    public void Parse_ExpressionReturnsNull()
    {
        var cmd = DapReplParser.TryParse("github.repository", out var error);

        Assert.Null(error);
        Assert.Null(cmd);
    }

    [Fact]
    [Trait("Level", "L0")]
    [Trait("Category", "Worker")]
    public void Parse_WrappedExpressionReturnsNull()
    {
        var cmd = DapReplParser.TryParse("${{ github.event_name }}", out var error);

        Assert.Null(error);
        Assert.Null(cmd);
    }

    [Fact]
    [Trait("Level", "L0")]
    [Trait("Category", "Worker")]
    // Empty and null input both fall through without error.
    public void Parse_EmptyInputReturnsNull()
    {
        var cmd = DapReplParser.TryParse("", out var error);
        Assert.Null(error);
        Assert.Null(cmd);

        cmd = DapReplParser.TryParse(null, out error);
        Assert.Null(error);
        Assert.Null(cmd);
    }

    #endregion

    #region help text

    [Fact]
    [Trait("Level", "L0")]
    [Trait("Category", "Worker")]
    public void GetGeneralHelp_ContainsCommands()
    {
        var help = DapReplParser.GetGeneralHelp();

        Assert.Contains("help", help);
        Assert.Contains("run", help);
        Assert.Contains("expression", help, System.StringComparison.OrdinalIgnoreCase);
    }

    [Fact]
    [Trait("Level", "L0")]
    [Trait("Category", "Worker")]
    public void GetRunHelp_ContainsOptions()
    {
        var help = DapReplParser.GetRunHelp();

        Assert.Contains("shell", help);
        Assert.Contains("env", help);
        Assert.Contains("working_directory", help);
    }

    #endregion

    #region internal parser helpers

    [Fact]
    [Trait("Level", "L0")]
    [Trait("Category", "Worker")]
    // Argument splitting respects brace nesting: the env block stays intact.
    public void SplitArguments_HandlesNestedBraces()
    {
        var args = DapReplParser.SplitArguments("\"hello\", env: { A: \"1\", B: \"2\" }", out var error);

        Assert.Null(error);
        Assert.Equal(2, args.Count);
        Assert.Equal("\"hello\"", args[0].Trim());
        Assert.Contains("A:", args[1]);
    }

    [Fact]
    [Trait("Level", "L0")]
    [Trait("Category", "Worker")]
    // "{ }" parses to an empty (non-null) dictionary, not an error.
    public void ParseEnvBlock_HandlesEmptyBlock()
    {
        var result = DapReplParser.ParseEnvBlock("{ }", out var error);

        Assert.Null(error);
        Assert.NotNull(result);
        Assert.Empty(result);
    }

    #endregion
}
|
||||
}
|
||||
728
src/Test/L0/Worker/DapVariableProviderL0.cs
Normal file
728
src/Test/L0/Worker/DapVariableProviderL0.cs
Normal file
@@ -0,0 +1,728 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Runtime.CompilerServices;
|
||||
using GitHub.DistributedTask.Pipelines.ContextData;
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Common.Tests;
|
||||
using GitHub.Runner.Worker;
|
||||
using GitHub.Runner.Worker.Dap;
|
||||
using Xunit;
|
||||
|
||||
namespace GitHub.Runner.Common.Tests.Worker
|
||||
{
|
||||
public sealed class DapVariableProviderL0
|
||||
{
|
||||
private TestHostContext _hc;
|
||||
private DapVariableProvider _provider;
|
||||
|
||||
// Builds a fresh host context and a variable provider wired to its secret
// masker. Note: _hc must be assigned before _provider, which reads
// _hc.SecretMasker.
private TestHostContext CreateTestContext([CallerMemberName] string testName = "")
{
    _hc = new TestHostContext(this, testName);
    _provider = new DapVariableProvider(_hc.SecretMasker);
    return _hc;
}
|
||||
|
||||
// Mocks an execution context that exposes only the given expression values.
private Moq.Mock<GitHub.Runner.Worker.IExecutionContext> CreateMockContext(DictionaryContextData expressionValues)
{
    var context = new Moq.Mock<GitHub.Runner.Worker.IExecutionContext>();
    context.Setup(x => x.ExpressionValues).Returns(expressionValues);
    return context;
}
|
||||
|
||||
#region GetScopes tests
|
||||
|
||||
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void GetScopes_ReturnsEmptyWhenContextIsNull()
{
    using (CreateTestContext())
    {
        // A null execution context yields no scopes rather than throwing.
        Assert.Empty(_provider.GetScopes(null));
    }
}
|
||||
|
||||
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
// Only scopes that actually have data in ExpressionValues are returned,
// and they come back in the provider's fixed scope order (github before env).
public void GetScopes_ReturnsOnlyPopulatedScopes()
{
    using (CreateTestContext())
    {
        var exprValues = new DictionaryContextData();
        exprValues["github"] = new DictionaryContextData
        {
            { "repository", new StringContextData("owner/repo") }
        };
        exprValues["env"] = new DictionaryContextData
        {
            { "CI", new StringContextData("true") },
            { "HOME", new StringContextData("/home/runner") }
        };
        // "runner" is not set — should not appear in scopes

        var ctx = CreateMockContext(exprValues);
        var scopes = _provider.GetScopes(ctx.Object);

        Assert.Equal(2, scopes.Count);
        Assert.Equal("github", scopes[0].Name);
        Assert.Equal("env", scopes[1].Name);
    }
}
|
||||
|
||||
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
// NamedVariables on a scope reflects the number of entries in its dictionary.
public void GetScopes_ReportsNamedVariableCount()
{
    using (CreateTestContext())
    {
        var exprValues = new DictionaryContextData();
        exprValues["env"] = new DictionaryContextData
        {
            { "A", new StringContextData("1") },
            { "B", new StringContextData("2") },
            { "C", new StringContextData("3") }
        };

        var ctx = CreateMockContext(exprValues);
        var scopes = _provider.GetScopes(ctx.Object);

        Assert.Single(scopes);
        Assert.Equal(3, scopes[0].NamedVariables);
    }
}
|
||||
|
||||
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
// The "secrets" scope is flagged with the DAP "registers" presentation
// hint so clients can render it distinctly; ordinary scopes get no hint.
public void GetScopes_SecretsGetSpecialPresentationHint()
{
    using (CreateTestContext())
    {
        var exprValues = new DictionaryContextData();
        exprValues["secrets"] = new DictionaryContextData
        {
            { "MY_SECRET", new StringContextData("super-secret") }
        };
        exprValues["env"] = new DictionaryContextData
        {
            { "CI", new StringContextData("true") }
        };

        var ctx = CreateMockContext(exprValues);
        var scopes = _provider.GetScopes(ctx.Object);

        var envScope = scopes.Find(s => s.Name == "env");
        var secretsScope = scopes.Find(s => s.Name == "secrets");

        Assert.NotNull(envScope);
        Assert.Null(envScope.PresentationHint);

        Assert.NotNull(secretsScope);
        Assert.Equal("registers", secretsScope.PresentationHint);
    }
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region GetVariables — basic types
|
||||
|
||||
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void GetVariables_ReturnsEmptyWhenContextIsNull()
{
    using (CreateTestContext())
    {
        // A null execution context yields no variables rather than throwing.
        Assert.Empty(_provider.GetVariables(null, 1));
    }
}
|
||||
|
||||
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
// String values surface with Type "string" and no child reference
// (VariablesReference == 0 means the variable is a leaf, per DAP).
public void GetVariables_ReturnsStringVariables()
{
    using (CreateTestContext())
    {
        var exprValues = new DictionaryContextData();
        exprValues["env"] = new DictionaryContextData
        {
            { "CI", new StringContextData("true") },
            { "HOME", new StringContextData("/home/runner") }
        };

        var ctx = CreateMockContext(exprValues);
        // "env" is at ScopeNames index 1 → variablesReference = 2
        var variables = _provider.GetVariables(ctx.Object, 2);

        Assert.Equal(2, variables.Count);

        var ciVar = variables.Find(v => v.Name == "CI");
        Assert.NotNull(ciVar);
        Assert.Equal("true", ciVar.Value);
        Assert.Equal("string", ciVar.Type);
        Assert.Equal(0, ciVar.VariablesReference);
    }
}
|
||||
|
||||
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
// A BooleanContextData value surfaces with Type "boolean", the string
// value "true", and no child reference (it is a leaf per DAP). The extra
// "github" scope verifies an unrelated populated scope does not shift the
// env scope's variablesReference.
public void GetVariables_ReturnsBooleanVariables()
{
    using (CreateTestContext())
    {
        var exprValues = new DictionaryContextData();
        exprValues["github"] = new DictionaryContextData
        {
            { "event_name", new StringContextData("push") },
        };
        // BooleanContextData is a valid PipelineContextData type; env is a
        // convenient scope to host it for this test.
        exprValues["env"] = new DictionaryContextData
        {
            { "flag", new BooleanContextData(true) }
        };

        var ctx = CreateMockContext(exprValues);
        // "env" is at index 1 → ref 2
        var variables = _provider.GetVariables(ctx.Object, 2);

        var flagVar = variables.Find(v => v.Name == "flag");
        Assert.NotNull(flagVar);
        Assert.Equal("true", flagVar.Value);
        Assert.Equal("boolean", flagVar.Type);
        Assert.Equal(0, flagVar.VariablesReference);
    }
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void GetVariables_ReturnsNumberVariables()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
var exprValues = new DictionaryContextData();
|
||||
exprValues["env"] = new DictionaryContextData
|
||||
{
|
||||
{ "count", new NumberContextData(42) }
|
||||
};
|
||||
|
||||
var ctx = CreateMockContext(exprValues);
|
||||
var variables = _provider.GetVariables(ctx.Object, 2);
|
||||
|
||||
var countVar = variables.Find(v => v.Name == "count");
|
||||
Assert.NotNull(countVar);
|
||||
Assert.Equal("42", countVar.Value);
|
||||
Assert.Equal("number", countVar.Type);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void GetVariables_HandlesNullValues()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
var exprValues = new DictionaryContextData();
|
||||
var dict = new DictionaryContextData();
|
||||
dict["present"] = new StringContextData("yes");
|
||||
dict["missing"] = null;
|
||||
exprValues["env"] = dict;
|
||||
|
||||
var ctx = CreateMockContext(exprValues);
|
||||
var variables = _provider.GetVariables(ctx.Object, 2);
|
||||
|
||||
var nullVar = variables.Find(v => v.Name == "missing");
|
||||
Assert.NotNull(nullVar);
|
||||
Assert.Equal("null", nullVar.Value);
|
||||
Assert.Equal("null", nullVar.Type);
|
||||
Assert.Equal(0, nullVar.VariablesReference);
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region GetVariables — nested expansion
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void GetVariables_NestedDictionaryIsExpandable()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
var innerDict = new DictionaryContextData
|
||||
{
|
||||
{ "name", new StringContextData("push") },
|
||||
{ "ref", new StringContextData("refs/heads/main") }
|
||||
};
|
||||
var exprValues = new DictionaryContextData();
|
||||
exprValues["github"] = new DictionaryContextData
|
||||
{
|
||||
{ "event", innerDict }
|
||||
};
|
||||
|
||||
var ctx = CreateMockContext(exprValues);
|
||||
// "github" is at index 0 → ref 1
|
||||
var variables = _provider.GetVariables(ctx.Object, 1);
|
||||
|
||||
var eventVar = variables.Find(v => v.Name == "event");
|
||||
Assert.NotNull(eventVar);
|
||||
Assert.Equal("object", eventVar.Type);
|
||||
Assert.True(eventVar.VariablesReference > 0, "Nested dict should have a non-zero variablesReference");
|
||||
Assert.Equal(2, eventVar.NamedVariables);
|
||||
|
||||
// Now expand it
|
||||
var children = _provider.GetVariables(ctx.Object, eventVar.VariablesReference);
|
||||
Assert.Equal(2, children.Count);
|
||||
|
||||
var nameVar = children.Find(v => v.Name == "name");
|
||||
Assert.NotNull(nameVar);
|
||||
Assert.Equal("push", nameVar.Value);
|
||||
Assert.Equal("${{ github.event.name }}", nameVar.EvaluateName);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void GetVariables_NestedArrayIsExpandable()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
var array = new ArrayContextData();
|
||||
array.Add(new StringContextData("item0"));
|
||||
array.Add(new StringContextData("item1"));
|
||||
|
||||
var exprValues = new DictionaryContextData();
|
||||
exprValues["env"] = new DictionaryContextData
|
||||
{
|
||||
{ "list", array }
|
||||
};
|
||||
|
||||
var ctx = CreateMockContext(exprValues);
|
||||
var variables = _provider.GetVariables(ctx.Object, 2);
|
||||
|
||||
var listVar = variables.Find(v => v.Name == "list");
|
||||
Assert.NotNull(listVar);
|
||||
Assert.Equal("array", listVar.Type);
|
||||
Assert.True(listVar.VariablesReference > 0);
|
||||
Assert.Equal(2, listVar.IndexedVariables);
|
||||
|
||||
// Expand the array
|
||||
var items = _provider.GetVariables(ctx.Object, listVar.VariablesReference);
|
||||
Assert.Equal(2, items.Count);
|
||||
Assert.Equal("[0]", items[0].Name);
|
||||
Assert.Equal("item0", items[0].Value);
|
||||
Assert.Equal("[1]", items[1].Name);
|
||||
Assert.Equal("item1", items[1].Value);
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Secret masking
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void GetVariables_SecretsScopeValuesAreRedacted()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
var exprValues = new DictionaryContextData();
|
||||
exprValues["secrets"] = new DictionaryContextData
|
||||
{
|
||||
{ "MY_TOKEN", new StringContextData("ghp_abc123secret") },
|
||||
{ "DB_PASSWORD", new StringContextData("p@ssword!") }
|
||||
};
|
||||
|
||||
var ctx = CreateMockContext(exprValues);
|
||||
// "secrets" is at index 5 → ref 6
|
||||
var variables = _provider.GetVariables(ctx.Object, 6);
|
||||
|
||||
Assert.Equal(2, variables.Count);
|
||||
foreach (var v in variables)
|
||||
{
|
||||
Assert.Equal("***", v.Value);
|
||||
Assert.Equal("string", v.Type);
|
||||
}
|
||||
|
||||
// Keys should still be visible
|
||||
Assert.Contains(variables, v => v.Name == "MY_TOKEN");
|
||||
Assert.Contains(variables, v => v.Name == "DB_PASSWORD");
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void GetVariables_NonSecretScopeValuesMaskedBySecretMasker()
|
||||
{
|
||||
using (var hc = CreateTestContext())
|
||||
{
|
||||
// Register a known secret value with the masker
|
||||
hc.SecretMasker.AddValue("super-secret-token");
|
||||
|
||||
var exprValues = new DictionaryContextData();
|
||||
exprValues["env"] = new DictionaryContextData
|
||||
{
|
||||
{ "SAFE", new StringContextData("hello world") },
|
||||
{ "LEAKED", new StringContextData("prefix-super-secret-token-suffix") }
|
||||
};
|
||||
|
||||
var ctx = CreateMockContext(exprValues);
|
||||
var variables = _provider.GetVariables(ctx.Object, 2);
|
||||
|
||||
var safeVar = variables.Find(v => v.Name == "SAFE");
|
||||
Assert.NotNull(safeVar);
|
||||
Assert.Equal("hello world", safeVar.Value);
|
||||
|
||||
var leakedVar = variables.Find(v => v.Name == "LEAKED");
|
||||
Assert.NotNull(leakedVar);
|
||||
Assert.DoesNotContain("super-secret-token", leakedVar.Value);
|
||||
Assert.Contains("***", leakedVar.Value);
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Reset
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void Reset_InvalidatesNestedReferences()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
var innerDict = new DictionaryContextData
|
||||
{
|
||||
{ "name", new StringContextData("push") }
|
||||
};
|
||||
var exprValues = new DictionaryContextData();
|
||||
exprValues["github"] = new DictionaryContextData
|
||||
{
|
||||
{ "event", innerDict }
|
||||
};
|
||||
|
||||
var ctx = CreateMockContext(exprValues);
|
||||
var variables = _provider.GetVariables(ctx.Object, 1);
|
||||
var eventVar = variables.Find(v => v.Name == "event");
|
||||
Assert.True(eventVar.VariablesReference > 0);
|
||||
|
||||
var savedRef = eventVar.VariablesReference;
|
||||
|
||||
// Reset should clear all dynamic references
|
||||
_provider.Reset();
|
||||
|
||||
var children = _provider.GetVariables(ctx.Object, savedRef);
|
||||
Assert.Empty(children);
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region EvaluateName
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void GetVariables_SetsEvaluateNameWithDotPath()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
var exprValues = new DictionaryContextData();
|
||||
exprValues["github"] = new DictionaryContextData
|
||||
{
|
||||
{ "repository", new StringContextData("owner/repo") }
|
||||
};
|
||||
|
||||
var ctx = CreateMockContext(exprValues);
|
||||
var variables = _provider.GetVariables(ctx.Object, 1);
|
||||
|
||||
var repoVar = variables.Find(v => v.Name == "repository");
|
||||
Assert.NotNull(repoVar);
|
||||
Assert.Equal("${{ github.repository }}", repoVar.EvaluateName);
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region EvaluateExpression
|
||||
|
||||
/// <summary>
|
||||
/// Creates a mock execution context with Global set up so that
|
||||
/// ToPipelineTemplateEvaluator() works for real expression evaluation.
|
||||
/// </summary>
|
||||
private Moq.Mock<IExecutionContext> CreateEvaluatableContext(
|
||||
TestHostContext hc,
|
||||
DictionaryContextData expressionValues)
|
||||
{
|
||||
var mock = new Moq.Mock<IExecutionContext>();
|
||||
mock.Setup(x => x.ExpressionValues).Returns(expressionValues);
|
||||
mock.Setup(x => x.ExpressionFunctions)
|
||||
.Returns(new List<GitHub.DistributedTask.Expressions2.IFunctionInfo>());
|
||||
mock.Setup(x => x.Global).Returns(new GlobalContext
|
||||
{
|
||||
FileTable = new List<string>(),
|
||||
Variables = new Variables(hc, new Dictionary<string, VariableValue>()),
|
||||
});
|
||||
// ToPipelineTemplateEvaluator uses ToTemplateTraceWriter which calls
|
||||
// context.Write — provide a no-op so it doesn't NRE.
|
||||
mock.Setup(x => x.Write(Moq.It.IsAny<string>(), Moq.It.IsAny<string>()));
|
||||
return mock;
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void EvaluateExpression_ReturnsValueForSimpleExpression()
|
||||
{
|
||||
using (var hc = CreateTestContext())
|
||||
{
|
||||
var exprValues = new DictionaryContextData();
|
||||
exprValues["github"] = new DictionaryContextData
|
||||
{
|
||||
{ "repository", new StringContextData("owner/repo") }
|
||||
};
|
||||
|
||||
var ctx = CreateEvaluatableContext(hc, exprValues);
|
||||
var result = _provider.EvaluateExpression("github.repository", ctx.Object);
|
||||
|
||||
Assert.Equal("owner/repo", result.Result);
|
||||
Assert.Equal("string", result.Type);
|
||||
Assert.Equal(0, result.VariablesReference);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void EvaluateExpression_StripsWrapperSyntax()
|
||||
{
|
||||
using (var hc = CreateTestContext())
|
||||
{
|
||||
var exprValues = new DictionaryContextData();
|
||||
exprValues["github"] = new DictionaryContextData
|
||||
{
|
||||
{ "event_name", new StringContextData("push") }
|
||||
};
|
||||
|
||||
var ctx = CreateEvaluatableContext(hc, exprValues);
|
||||
var result = _provider.EvaluateExpression("${{ github.event_name }}", ctx.Object);
|
||||
|
||||
Assert.Equal("push", result.Result);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void EvaluateExpression_MasksSecretInResult()
|
||||
{
|
||||
using (var hc = CreateTestContext())
|
||||
{
|
||||
hc.SecretMasker.AddValue("super-secret");
|
||||
|
||||
var exprValues = new DictionaryContextData();
|
||||
exprValues["env"] = new DictionaryContextData
|
||||
{
|
||||
{ "TOKEN", new StringContextData("super-secret") }
|
||||
};
|
||||
|
||||
var ctx = CreateEvaluatableContext(hc, exprValues);
|
||||
var result = _provider.EvaluateExpression("env.TOKEN", ctx.Object);
|
||||
|
||||
Assert.DoesNotContain("super-secret", result.Result);
|
||||
Assert.Contains("***", result.Result);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void EvaluateExpression_ReturnsErrorForInvalidExpression()
|
||||
{
|
||||
using (var hc = CreateTestContext())
|
||||
{
|
||||
var exprValues = new DictionaryContextData();
|
||||
exprValues["github"] = new DictionaryContextData();
|
||||
|
||||
var ctx = CreateEvaluatableContext(hc, exprValues);
|
||||
// An invalid expression syntax should not throw — it should
|
||||
// return an error result.
|
||||
var result = _provider.EvaluateExpression("!!!invalid[[", ctx.Object);
|
||||
|
||||
Assert.Contains("error", result.Result, StringComparison.OrdinalIgnoreCase);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void EvaluateExpression_ReturnsMessageWhenNoContext()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
var result = _provider.EvaluateExpression("github.repository", null);
|
||||
|
||||
Assert.Contains("no execution context", result.Result, StringComparison.OrdinalIgnoreCase);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void EvaluateExpression_ReturnsEmptyForEmptyExpression()
|
||||
{
|
||||
using (var hc = CreateTestContext())
|
||||
{
|
||||
var exprValues = new DictionaryContextData();
|
||||
var ctx = CreateEvaluatableContext(hc, exprValues);
|
||||
var result = _provider.EvaluateExpression("", ctx.Object);
|
||||
|
||||
Assert.Equal(string.Empty, result.Result);
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region InferResultType
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void InferResultType_ClassifiesCorrectly()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
Assert.Equal("null", DapVariableProvider.InferResultType(null));
|
||||
Assert.Equal("null", DapVariableProvider.InferResultType("null"));
|
||||
Assert.Equal("boolean", DapVariableProvider.InferResultType("true"));
|
||||
Assert.Equal("boolean", DapVariableProvider.InferResultType("false"));
|
||||
Assert.Equal("number", DapVariableProvider.InferResultType("42"));
|
||||
Assert.Equal("number", DapVariableProvider.InferResultType("3.14"));
|
||||
Assert.Equal("object", DapVariableProvider.InferResultType("{\"key\":\"val\"}"));
|
||||
Assert.Equal("object", DapVariableProvider.InferResultType("[1,2,3]"));
|
||||
Assert.Equal("string", DapVariableProvider.InferResultType("hello world"));
|
||||
Assert.Equal("string", DapVariableProvider.InferResultType("owner/repo"));
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Non-string secret type redaction
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void GetVariables_SecretsScopeRedactsNumberContextData()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
var exprValues = new DictionaryContextData();
|
||||
exprValues["secrets"] = new DictionaryContextData
|
||||
{
|
||||
{ "NUMERIC_SECRET", new NumberContextData(12345) }
|
||||
};
|
||||
|
||||
var ctx = CreateMockContext(exprValues);
|
||||
var variables = _provider.GetVariables(ctx.Object, 6);
|
||||
|
||||
Assert.Single(variables);
|
||||
Assert.Equal("NUMERIC_SECRET", variables[0].Name);
|
||||
Assert.Equal("***", variables[0].Value);
|
||||
Assert.Equal("string", variables[0].Type);
|
||||
Assert.Equal(0, variables[0].VariablesReference);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void GetVariables_SecretsScopeRedactsBooleanContextData()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
var exprValues = new DictionaryContextData();
|
||||
exprValues["secrets"] = new DictionaryContextData
|
||||
{
|
||||
{ "BOOL_SECRET", new BooleanContextData(true) }
|
||||
};
|
||||
|
||||
var ctx = CreateMockContext(exprValues);
|
||||
var variables = _provider.GetVariables(ctx.Object, 6);
|
||||
|
||||
Assert.Single(variables);
|
||||
Assert.Equal("BOOL_SECRET", variables[0].Name);
|
||||
Assert.Equal("***", variables[0].Value);
|
||||
Assert.Equal("string", variables[0].Type);
|
||||
Assert.Equal(0, variables[0].VariablesReference);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void GetVariables_SecretsScopeRedactsNestedDictionary()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
var exprValues = new DictionaryContextData();
|
||||
exprValues["secrets"] = new DictionaryContextData
|
||||
{
|
||||
{ "NESTED_SECRET", new DictionaryContextData
|
||||
{
|
||||
{ "inner_key", new StringContextData("inner_value") }
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
var ctx = CreateMockContext(exprValues);
|
||||
var variables = _provider.GetVariables(ctx.Object, 6);
|
||||
|
||||
Assert.Single(variables);
|
||||
Assert.Equal("NESTED_SECRET", variables[0].Name);
|
||||
Assert.Equal("***", variables[0].Value);
|
||||
Assert.Equal("string", variables[0].Type);
|
||||
// Nested container should NOT be drillable under secrets
|
||||
Assert.Equal(0, variables[0].VariablesReference);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void GetVariables_SecretsScopeRedactsNullValue()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
var exprValues = new DictionaryContextData();
|
||||
var secrets = new DictionaryContextData();
|
||||
secrets["NULL_SECRET"] = null;
|
||||
exprValues["secrets"] = secrets;
|
||||
|
||||
var ctx = CreateMockContext(exprValues);
|
||||
var variables = _provider.GetVariables(ctx.Object, 6);
|
||||
|
||||
Assert.Single(variables);
|
||||
Assert.Equal("NULL_SECRET", variables[0].Name);
|
||||
Assert.Equal("***", variables[0].Value);
|
||||
Assert.Equal(0, variables[0].VariablesReference);
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
}
|
||||
@@ -370,5 +370,504 @@ namespace GitHub.Runner.Common.Tests.Worker
|
||||
Assert.Contains("./.github/actions/my-action", deprecatedActions);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void Node20Action_TrackedAsUpgradedWhenUseNode24ByDefaultEnabled()
|
||||
{
|
||||
using (TestHostContext hc = CreateTestContext())
|
||||
{
|
||||
// Arrange.
|
||||
var hf = new HandlerFactory();
|
||||
hf.Initialize(hc);
|
||||
|
||||
var variables = new Dictionary<string, VariableValue>
|
||||
{
|
||||
{ Constants.Runner.NodeMigration.WarnOnNode20Flag, new VariableValue("true") },
|
||||
{ Constants.Runner.NodeMigration.UseNode24ByDefaultFlag, new VariableValue("true") }
|
||||
};
|
||||
Variables serverVariables = new(hc, variables);
|
||||
var deprecatedActions = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
|
||||
var upgradedActions = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
|
||||
|
||||
_ec.Setup(x => x.Global).Returns(new GlobalContext()
|
||||
{
|
||||
Variables = serverVariables,
|
||||
EnvironmentVariables = new Dictionary<string, string>(),
|
||||
DeprecatedNode20Actions = deprecatedActions,
|
||||
UpgradedToNode24Actions = upgradedActions
|
||||
});
|
||||
|
||||
var actionRef = new RepositoryPathReference
|
||||
{
|
||||
Name = "actions/checkout",
|
||||
Ref = "v4"
|
||||
};
|
||||
|
||||
// Act.
|
||||
var data = new NodeJSActionExecutionData();
|
||||
data.NodeVersion = "node20";
|
||||
var handler = hf.Create(
|
||||
_ec.Object,
|
||||
actionRef,
|
||||
new Mock<IStepHost>().Object,
|
||||
data,
|
||||
new Dictionary<string, string>(),
|
||||
new Dictionary<string, string>(),
|
||||
new Variables(hc, new Dictionary<string, VariableValue>()),
|
||||
"",
|
||||
new List<JobExtensionRunner>()
|
||||
) as INodeScriptActionHandler;
|
||||
|
||||
// On non-ARM32 platforms, action should be upgraded to node24
|
||||
// and tracked in UpgradedToNode24Actions, NOT in DeprecatedNode20Actions
|
||||
bool isArm32Linux = System.Runtime.InteropServices.RuntimeInformation.ProcessArchitecture == System.Runtime.InteropServices.Architecture.Arm &&
|
||||
System.Runtime.InteropServices.RuntimeInformation.IsOSPlatform(System.Runtime.InteropServices.OSPlatform.Linux);
|
||||
|
||||
if (!isArm32Linux)
|
||||
{
|
||||
Assert.Equal("node24", handler.Data.NodeVersion);
|
||||
Assert.Contains("actions/checkout@v4", upgradedActions);
|
||||
Assert.DoesNotContain("actions/checkout@v4", deprecatedActions);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void Node20Action_NotUpgradedWhenPhase1Only()
|
||||
{
|
||||
using (TestHostContext hc = CreateTestContext())
|
||||
{
|
||||
// Arrange.
|
||||
var hf = new HandlerFactory();
|
||||
hf.Initialize(hc);
|
||||
|
||||
var variables = new Dictionary<string, VariableValue>
|
||||
{
|
||||
{ Constants.Runner.NodeMigration.WarnOnNode20Flag, new VariableValue("true") }
|
||||
};
|
||||
Variables serverVariables = new(hc, variables);
|
||||
var deprecatedActions = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
|
||||
var upgradedActions = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
|
||||
|
||||
_ec.Setup(x => x.Global).Returns(new GlobalContext()
|
||||
{
|
||||
Variables = serverVariables,
|
||||
EnvironmentVariables = new Dictionary<string, string>(),
|
||||
DeprecatedNode20Actions = deprecatedActions,
|
||||
UpgradedToNode24Actions = upgradedActions
|
||||
});
|
||||
|
||||
var actionRef = new RepositoryPathReference
|
||||
{
|
||||
Name = "actions/checkout",
|
||||
Ref = "v4"
|
||||
};
|
||||
|
||||
// Act.
|
||||
var data = new NodeJSActionExecutionData();
|
||||
data.NodeVersion = "node20";
|
||||
var handler = hf.Create(
|
||||
_ec.Object,
|
||||
actionRef,
|
||||
new Mock<IStepHost>().Object,
|
||||
data,
|
||||
new Dictionary<string, string>(),
|
||||
new Dictionary<string, string>(),
|
||||
new Variables(hc, new Dictionary<string, VariableValue>()),
|
||||
"",
|
||||
new List<JobExtensionRunner>()
|
||||
) as INodeScriptActionHandler;
|
||||
|
||||
// In Phase 1 (no UseNode24ByDefault), action stays on node20
|
||||
// and should be in DeprecatedNode20Actions
|
||||
Assert.Equal("node20", handler.Data.NodeVersion);
|
||||
Assert.Contains("actions/checkout@v4", deprecatedActions);
|
||||
Assert.Empty(upgradedActions);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void ExplicitNode24Action_KillArm32Flag_ThrowsOnArm32()
|
||||
{
|
||||
using (TestHostContext hc = CreateTestContext())
|
||||
{
|
||||
// Arrange.
|
||||
var hf = new HandlerFactory();
|
||||
hf.Initialize(hc);
|
||||
|
||||
var variables = new Dictionary<string, VariableValue>
|
||||
{
|
||||
{ Constants.Runner.NodeMigration.KillLinuxArm32Flag, new VariableValue("true") }
|
||||
};
|
||||
Variables serverVariables = new(hc, variables);
|
||||
|
||||
_ec.Setup(x => x.Global).Returns(new GlobalContext()
|
||||
{
|
||||
Variables = serverVariables,
|
||||
EnvironmentVariables = new Dictionary<string, string>()
|
||||
});
|
||||
|
||||
var actionRef = new RepositoryPathReference
|
||||
{
|
||||
Name = "actions/checkout",
|
||||
Ref = "v5"
|
||||
};
|
||||
|
||||
// Act - action explicitly declares node24
|
||||
var data = new NodeJSActionExecutionData();
|
||||
data.NodeVersion = "node24";
|
||||
|
||||
bool isArm32Linux = System.Runtime.InteropServices.RuntimeInformation.ProcessArchitecture == System.Runtime.InteropServices.Architecture.Arm &&
|
||||
System.Runtime.InteropServices.RuntimeInformation.IsOSPlatform(System.Runtime.InteropServices.OSPlatform.Linux);
|
||||
|
||||
if (isArm32Linux)
|
||||
{
|
||||
// On ARM32 Linux, kill flag should cause the handler to throw
|
||||
Assert.Throws<InvalidOperationException>(() => hf.Create(
|
||||
_ec.Object,
|
||||
actionRef,
|
||||
new Mock<IStepHost>().Object,
|
||||
data,
|
||||
new Dictionary<string, string>(),
|
||||
new Dictionary<string, string>(),
|
||||
new Variables(hc, new Dictionary<string, VariableValue>()),
|
||||
"",
|
||||
new List<JobExtensionRunner>()
|
||||
));
|
||||
}
|
||||
else
|
||||
{
|
||||
// On other platforms, should proceed normally
|
||||
var handler = hf.Create(
|
||||
_ec.Object,
|
||||
actionRef,
|
||||
new Mock<IStepHost>().Object,
|
||||
data,
|
||||
new Dictionary<string, string>(),
|
||||
new Dictionary<string, string>(),
|
||||
new Variables(hc, new Dictionary<string, VariableValue>()),
|
||||
"",
|
||||
new List<JobExtensionRunner>()
|
||||
) as INodeScriptActionHandler;
|
||||
|
||||
Assert.Equal("node24", handler.Data.NodeVersion);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void ExplicitNode24Action_DeprecateArm32Flag_DowngradesToNode20OnArm32()
|
||||
{
|
||||
using (TestHostContext hc = CreateTestContext())
|
||||
{
|
||||
// Arrange.
|
||||
var hf = new HandlerFactory();
|
||||
hf.Initialize(hc);
|
||||
|
||||
var variables = new Dictionary<string, VariableValue>
|
||||
{
|
||||
{ Constants.Runner.NodeMigration.DeprecateLinuxArm32Flag, new VariableValue("true") }
|
||||
};
|
||||
Variables serverVariables = new(hc, variables);
|
||||
var arm32Actions = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
|
||||
|
||||
_ec.Setup(x => x.Global).Returns(new GlobalContext()
|
||||
{
|
||||
Variables = serverVariables,
|
||||
EnvironmentVariables = new Dictionary<string, string>(),
|
||||
Arm32Node20Actions = arm32Actions
|
||||
});
|
||||
|
||||
var actionRef = new RepositoryPathReference
|
||||
{
|
||||
Name = "actions/checkout",
|
||||
Ref = "v5"
|
||||
};
|
||||
|
||||
// Act - action explicitly declares node24
|
||||
var data = new NodeJSActionExecutionData();
|
||||
data.NodeVersion = "node24";
|
||||
var handler = hf.Create(
|
||||
_ec.Object,
|
||||
actionRef,
|
||||
new Mock<IStepHost>().Object,
|
||||
data,
|
||||
new Dictionary<string, string>(),
|
||||
new Dictionary<string, string>(),
|
||||
new Variables(hc, new Dictionary<string, VariableValue>()),
|
||||
"",
|
||||
new List<JobExtensionRunner>()
|
||||
) as INodeScriptActionHandler;
|
||||
|
||||
bool isArm32Linux = System.Runtime.InteropServices.RuntimeInformation.ProcessArchitecture == System.Runtime.InteropServices.Architecture.Arm &&
|
||||
System.Runtime.InteropServices.RuntimeInformation.IsOSPlatform(System.Runtime.InteropServices.OSPlatform.Linux);
|
||||
|
||||
if (isArm32Linux)
|
||||
{
|
||||
// On ARM32 Linux, should downgrade to node20 and track
|
||||
Assert.Equal("node20", handler.Data.NodeVersion);
|
||||
Assert.Contains("actions/checkout@v5", arm32Actions);
|
||||
}
|
||||
else
|
||||
{
|
||||
// On other platforms, should remain node24
|
||||
Assert.Equal("node24", handler.Data.NodeVersion);
|
||||
Assert.Empty(arm32Actions);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void ExplicitNode24Action_NoArm32Flags_StaysNode24()
|
||||
{
|
||||
using (TestHostContext hc = CreateTestContext())
|
||||
{
|
||||
// Arrange.
|
||||
var hf = new HandlerFactory();
|
||||
hf.Initialize(hc);
|
||||
|
||||
var variables = new Dictionary<string, VariableValue>();
|
||||
Variables serverVariables = new(hc, variables);
|
||||
var arm32Actions = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
|
||||
var deprecatedActions = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
|
||||
|
||||
_ec.Setup(x => x.Global).Returns(new GlobalContext()
|
||||
{
|
||||
Variables = serverVariables,
|
||||
EnvironmentVariables = new Dictionary<string, string>(),
|
||||
Arm32Node20Actions = arm32Actions,
|
||||
DeprecatedNode20Actions = deprecatedActions
|
||||
});
|
||||
|
||||
var actionRef = new RepositoryPathReference
|
||||
{
|
||||
Name = "actions/checkout",
|
||||
Ref = "v5"
|
||||
};
|
||||
|
||||
// Act - action explicitly declares node24, no ARM32 flags
|
||||
var data = new NodeJSActionExecutionData();
|
||||
data.NodeVersion = "node24";
|
||||
var handler = hf.Create(
|
||||
_ec.Object,
|
||||
actionRef,
|
||||
new Mock<IStepHost>().Object,
|
||||
data,
|
||||
new Dictionary<string, string>(),
|
||||
new Dictionary<string, string>(),
|
||||
new Variables(hc, new Dictionary<string, VariableValue>()),
|
||||
"",
|
||||
new List<JobExtensionRunner>()
|
||||
) as INodeScriptActionHandler;
|
||||
|
||||
// On non-ARM32 platforms, should stay node24 and not be tracked in any list
|
||||
bool isArm32Linux = System.Runtime.InteropServices.RuntimeInformation.ProcessArchitecture == System.Runtime.InteropServices.Architecture.Arm &&
|
||||
System.Runtime.InteropServices.RuntimeInformation.IsOSPlatform(System.Runtime.InteropServices.OSPlatform.Linux);
|
||||
|
||||
if (!isArm32Linux)
|
||||
{
|
||||
Assert.Equal("node24", handler.Data.NodeVersion);
|
||||
Assert.Empty(arm32Actions);
|
||||
Assert.Empty(deprecatedActions);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void Node20Action_RequireNode24_ForcesNode24()
|
||||
{
|
||||
using (TestHostContext hc = CreateTestContext())
|
||||
{
|
||||
// Arrange.
|
||||
var hf = new HandlerFactory();
|
||||
hf.Initialize(hc);
|
||||
|
||||
var variables = new Dictionary<string, VariableValue>
|
||||
{
|
||||
{ Constants.Runner.NodeMigration.RequireNode24Flag, new VariableValue("true") },
|
||||
{ Constants.Runner.NodeMigration.WarnOnNode20Flag, new VariableValue("true") }
|
||||
};
|
||||
Variables serverVariables = new(hc, variables);
|
||||
var upgradedActions = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
|
||||
var deprecatedActions = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
|
||||
|
||||
_ec.Setup(x => x.Global).Returns(new GlobalContext()
|
||||
{
|
||||
Variables = serverVariables,
|
||||
EnvironmentVariables = new Dictionary<string, string>(),
|
||||
UpgradedToNode24Actions = upgradedActions,
|
||||
DeprecatedNode20Actions = deprecatedActions
|
||||
});
|
||||
|
||||
var actionRef = new RepositoryPathReference
|
||||
{
|
||||
Name = "actions/checkout",
|
||||
Ref = "v4"
|
||||
};
|
||||
|
||||
// Act.
|
||||
var data = new NodeJSActionExecutionData();
|
||||
data.NodeVersion = "node20";
|
||||
|
||||
bool isArm32Linux = System.Runtime.InteropServices.RuntimeInformation.ProcessArchitecture == System.Runtime.InteropServices.Architecture.Arm &&
|
||||
System.Runtime.InteropServices.RuntimeInformation.IsOSPlatform(System.Runtime.InteropServices.OSPlatform.Linux);
|
||||
|
||||
if (!isArm32Linux)
|
||||
{
|
||||
var handler = hf.Create(
|
||||
_ec.Object,
|
||||
actionRef,
|
||||
new Mock<IStepHost>().Object,
|
||||
data,
|
||||
new Dictionary<string, string>(),
|
||||
new Dictionary<string, string>(),
|
||||
new Variables(hc, new Dictionary<string, VariableValue>()),
|
||||
"",
|
||||
new List<JobExtensionRunner>()
|
||||
) as INodeScriptActionHandler;
|
||||
|
||||
// Phase 3: RequireNode24 forces node24, ignoring env vars
|
||||
Assert.Equal("node24", handler.Data.NodeVersion);
|
||||
Assert.Contains("actions/checkout@v4", upgradedActions);
|
||||
Assert.Empty(deprecatedActions);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void Node20Action_KillArm32Flag_ThrowsOnArm32()
|
||||
{
|
||||
using (TestHostContext hc = CreateTestContext())
|
||||
{
|
||||
// Arrange.
|
||||
var hf = new HandlerFactory();
|
||||
hf.Initialize(hc);
|
||||
|
||||
var variables = new Dictionary<string, VariableValue>
|
||||
{
|
||||
{ Constants.Runner.NodeMigration.KillLinuxArm32Flag, new VariableValue("true") }
|
||||
};
|
||||
Variables serverVariables = new(hc, variables);
|
||||
|
||||
_ec.Setup(x => x.Global).Returns(new GlobalContext()
|
||||
{
|
||||
Variables = serverVariables,
|
||||
EnvironmentVariables = new Dictionary<string, string>()
|
||||
});
|
||||
|
||||
var actionRef = new RepositoryPathReference
|
||||
{
|
||||
Name = "actions/checkout",
|
||||
Ref = "v4"
|
||||
};
|
||||
|
||||
var data = new NodeJSActionExecutionData();
|
||||
data.NodeVersion = "node20";
|
||||
|
||||
bool isArm32Linux = System.Runtime.InteropServices.RuntimeInformation.ProcessArchitecture == System.Runtime.InteropServices.Architecture.Arm &&
|
||||
System.Runtime.InteropServices.RuntimeInformation.IsOSPlatform(System.Runtime.InteropServices.OSPlatform.Linux);
|
||||
|
||||
if (isArm32Linux)
|
||||
{
|
||||
Assert.Throws<InvalidOperationException>(() => hf.Create(
|
||||
_ec.Object,
|
||||
actionRef,
|
||||
new Mock<IStepHost>().Object,
|
||||
data,
|
||||
new Dictionary<string, string>(),
|
||||
new Dictionary<string, string>(),
|
||||
new Variables(hc, new Dictionary<string, VariableValue>()),
|
||||
"",
|
||||
new List<JobExtensionRunner>()
|
||||
));
|
||||
}
|
||||
else
|
||||
{
|
||||
// On non-ARM32, should proceed normally (node20 stays)
|
||||
var handler = hf.Create(
|
||||
_ec.Object,
|
||||
actionRef,
|
||||
new Mock<IStepHost>().Object,
|
||||
data,
|
||||
new Dictionary<string, string>(),
|
||||
new Dictionary<string, string>(),
|
||||
new Variables(hc, new Dictionary<string, VariableValue>()),
|
||||
"",
|
||||
new List<JobExtensionRunner>()
|
||||
) as INodeScriptActionHandler;
|
||||
|
||||
Assert.Equal("node20", handler.Data.NodeVersion);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void ExplicitNode24Action_DeprecateArm32_UsesOriginalVersionForTracking()
|
||||
{
|
||||
// Regression test: verifies that when an action explicitly declares node24
|
||||
// and ARM32 deprecation downgrades it to node20, the tracking call uses
|
||||
// the original preferred version ("node24"), not the already-overwritten
|
||||
// nodeData.NodeVersion ("node20"). Without this fix, ShouldTrackAsArm32Node20
|
||||
// would receive (preferred="node20", final="node20") and never return true.
|
||||
string originalPreferred = "node24";
|
||||
string finalAfterArm32Downgrade = "node20";
|
||||
string deprecationWarning = "Linux ARM32 runners are deprecated and will no longer be supported after September 16th, 2026. Please migrate to a supported platform.";
|
||||
|
||||
// Correct: use the original preferred version before assignment
|
||||
bool correctTracking = HandlerFactory.ShouldTrackAsArm32Node20(
|
||||
deprecateArm32: true,
|
||||
preferredNodeVersion: originalPreferred,
|
||||
finalNodeVersion: finalAfterArm32Downgrade,
|
||||
platformWarningMessage: deprecationWarning);
|
||||
Assert.True(correctTracking);
|
||||
|
||||
// Bug scenario: if nodeData.NodeVersion was already overwritten to finalNodeVersion
|
||||
bool buggyTracking = HandlerFactory.ShouldTrackAsArm32Node20(
|
||||
deprecateArm32: true,
|
||||
preferredNodeVersion: finalAfterArm32Downgrade,
|
||||
finalNodeVersion: finalAfterArm32Downgrade,
|
||||
platformWarningMessage: deprecationWarning);
|
||||
Assert.False(buggyTracking);
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
[InlineData(true, "node24", "node20", "Linux ARM32 runners are deprecated", true)]
|
||||
[InlineData(true, "node20", "node20", "Linux ARM32 runners are deprecated", false)]
|
||||
[InlineData(true, "node24", "node24", "Linux ARM32 runners are deprecated", false)]
|
||||
[InlineData(true, "node24", "node20", null, false)]
|
||||
[InlineData(false, "node24", "node20", "Linux ARM32 runners are deprecated", false)]
|
||||
public void ShouldTrackAsArm32Node20_ClassifiesOnlyPlatformDowngrades(
|
||||
bool deprecateArm32,
|
||||
string preferredNodeVersion,
|
||||
string finalNodeVersion,
|
||||
string platformWarningMessage,
|
||||
bool expected)
|
||||
{
|
||||
bool actual = HandlerFactory.ShouldTrackAsArm32Node20(
|
||||
deprecateArm32,
|
||||
preferredNodeVersion,
|
||||
finalNodeVersion,
|
||||
platformWarningMessage);
|
||||
|
||||
Assert.Equal(expected, actual);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -8,6 +8,7 @@ using GitHub.DistributedTask.ObjectTemplating.Tokens;
|
||||
using GitHub.DistributedTask.Pipelines.ObjectTemplating;
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Worker;
|
||||
using GitHub.Runner.Worker.Dap;
|
||||
using Moq;
|
||||
using Xunit;
|
||||
using Pipelines = GitHub.DistributedTask.Pipelines;
|
||||
@@ -547,6 +548,10 @@ namespace GitHub.Runner.Common.Tests.Worker
|
||||
|
||||
var _stepsRunner = new StepsRunner();
|
||||
_stepsRunner.Initialize(hc);
|
||||
|
||||
var mockDapDebugger = new Mock<IDapDebugger>();
|
||||
hc.SetSingleton(mockDapDebugger.Object);
|
||||
|
||||
await _stepsRunner.RunAsync(_jobEc);
|
||||
|
||||
Assert.Equal("Create custom image", snapshotStep.DisplayName);
|
||||
|
||||
@@ -59,5 +59,161 @@ namespace GitHub.Runner.Common.Tests.Worker
|
||||
Assert.Equal("node20", nodeVersion);
|
||||
Assert.Null(warningMessage);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void CheckNodeVersionForArm32_DeprecationFlagShowsWarning()
|
||||
{
|
||||
string preferredVersion = "node24";
|
||||
var (nodeVersion, warningMessage) = Common.Util.NodeUtil.CheckNodeVersionForLinuxArm32(preferredVersion, deprecateArm32: true);
|
||||
|
||||
bool isArm32 = RuntimeInformation.ProcessArchitecture == Architecture.Arm ||
|
||||
Environment.GetEnvironmentVariable("PROCESSOR_ARCHITECTURE")?.Contains("ARM") == true;
|
||||
bool isLinux = RuntimeInformation.IsOSPlatform(OSPlatform.Linux);
|
||||
|
||||
if (isArm32 && isLinux)
|
||||
{
|
||||
Assert.Equal("node20", nodeVersion);
|
||||
Assert.NotNull(warningMessage);
|
||||
Assert.Contains("deprecated", warningMessage);
|
||||
Assert.Contains("no longer be supported", warningMessage);
|
||||
}
|
||||
else
|
||||
{
|
||||
Assert.Equal("node24", nodeVersion);
|
||||
Assert.Null(warningMessage);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void CheckNodeVersionForArm32_DeprecationFlagWithNode20PassesThrough()
|
||||
{
|
||||
// Even with deprecation flag, node20 should pass through (not downgraded further)
|
||||
string preferredVersion = "node20";
|
||||
var (nodeVersion, warningMessage) = Common.Util.NodeUtil.CheckNodeVersionForLinuxArm32(preferredVersion, deprecateArm32: true);
|
||||
|
||||
bool isArm32 = RuntimeInformation.ProcessArchitecture == Architecture.Arm ||
|
||||
Environment.GetEnvironmentVariable("PROCESSOR_ARCHITECTURE")?.Contains("ARM") == true;
|
||||
bool isLinux = RuntimeInformation.IsOSPlatform(OSPlatform.Linux);
|
||||
|
||||
if (isArm32 && isLinux)
|
||||
{
|
||||
Assert.Equal("node20", nodeVersion);
|
||||
Assert.NotNull(warningMessage);
|
||||
Assert.Contains("deprecated", warningMessage);
|
||||
}
|
||||
else
|
||||
{
|
||||
Assert.Equal("node20", nodeVersion);
|
||||
Assert.Null(warningMessage);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void CheckNodeVersionForArm32_KillFlagReturnsNull()
|
||||
{
|
||||
string preferredVersion = "node24";
|
||||
var (nodeVersion, warningMessage) = Common.Util.NodeUtil.CheckNodeVersionForLinuxArm32(preferredVersion, killArm32: true);
|
||||
|
||||
bool isArm32 = RuntimeInformation.ProcessArchitecture == Architecture.Arm ||
|
||||
Environment.GetEnvironmentVariable("PROCESSOR_ARCHITECTURE")?.Contains("ARM") == true;
|
||||
bool isLinux = RuntimeInformation.IsOSPlatform(OSPlatform.Linux);
|
||||
|
||||
if (isArm32 && isLinux)
|
||||
{
|
||||
Assert.Null(nodeVersion);
|
||||
Assert.NotNull(warningMessage);
|
||||
Assert.Contains("no longer supported", warningMessage);
|
||||
}
|
||||
else
|
||||
{
|
||||
Assert.Equal("node24", nodeVersion);
|
||||
Assert.Null(warningMessage);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void CheckNodeVersionForArm32_KillTakesPrecedenceOverDeprecation()
|
||||
{
|
||||
string preferredVersion = "node20";
|
||||
var (nodeVersion, warningMessage) = Common.Util.NodeUtil.CheckNodeVersionForLinuxArm32(preferredVersion, deprecateArm32: true, killArm32: true);
|
||||
|
||||
bool isArm32 = RuntimeInformation.ProcessArchitecture == Architecture.Arm ||
|
||||
Environment.GetEnvironmentVariable("PROCESSOR_ARCHITECTURE")?.Contains("ARM") == true;
|
||||
bool isLinux = RuntimeInformation.IsOSPlatform(OSPlatform.Linux);
|
||||
|
||||
if (isArm32 && isLinux)
|
||||
{
|
||||
Assert.Null(nodeVersion);
|
||||
Assert.NotNull(warningMessage);
|
||||
Assert.Contains("no longer supported", warningMessage);
|
||||
}
|
||||
else
|
||||
{
|
||||
Assert.Equal("node20", nodeVersion);
|
||||
Assert.Null(warningMessage);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void CheckNodeVersionForArm32_ServerOverridableDateUsedInDeprecationWarning()
|
||||
{
|
||||
string preferredVersion = "node24";
|
||||
string customDate = "December 1st, 2027";
|
||||
var (nodeVersion, warningMessage) = Common.Util.NodeUtil.CheckNodeVersionForLinuxArm32(
|
||||
preferredVersion, deprecateArm32: true, node20RemovalDate: customDate);
|
||||
|
||||
bool isArm32 = RuntimeInformation.ProcessArchitecture == Architecture.Arm ||
|
||||
Environment.GetEnvironmentVariable("PROCESSOR_ARCHITECTURE")?.Contains("ARM") == true;
|
||||
bool isLinux = RuntimeInformation.IsOSPlatform(OSPlatform.Linux);
|
||||
|
||||
if (isArm32 && isLinux)
|
||||
{
|
||||
Assert.Equal("node20", nodeVersion);
|
||||
Assert.NotNull(warningMessage);
|
||||
Assert.Contains(customDate, warningMessage);
|
||||
Assert.DoesNotContain(Constants.Runner.NodeMigration.Node20RemovalDate, warningMessage);
|
||||
}
|
||||
else
|
||||
{
|
||||
Assert.Equal("node24", nodeVersion);
|
||||
Assert.Null(warningMessage);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void CheckNodeVersionForArm32_FallbackDateUsedWhenNoOverride()
|
||||
{
|
||||
string preferredVersion = "node24";
|
||||
var (nodeVersion, warningMessage) = Common.Util.NodeUtil.CheckNodeVersionForLinuxArm32(
|
||||
preferredVersion, deprecateArm32: true);
|
||||
|
||||
bool isArm32 = RuntimeInformation.ProcessArchitecture == Architecture.Arm ||
|
||||
Environment.GetEnvironmentVariable("PROCESSOR_ARCHITECTURE")?.Contains("ARM") == true;
|
||||
bool isLinux = RuntimeInformation.IsOSPlatform(OSPlatform.Linux);
|
||||
|
||||
if (isArm32 && isLinux)
|
||||
{
|
||||
Assert.Equal("node20", nodeVersion);
|
||||
Assert.NotNull(warningMessage);
|
||||
Assert.Contains(Constants.Runner.NodeMigration.Node20RemovalDate, warningMessage);
|
||||
}
|
||||
else
|
||||
{
|
||||
Assert.Equal("node24", nodeVersion);
|
||||
Assert.Null(warningMessage);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -12,6 +12,7 @@ using GitHub.DistributedTask.ObjectTemplating.Tokens;
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Common.Util;
|
||||
using GitHub.Runner.Worker;
|
||||
using GitHub.Runner.Worker.Dap;
|
||||
|
||||
namespace GitHub.Runner.Common.Tests.Worker
|
||||
{
|
||||
@@ -61,6 +62,10 @@ namespace GitHub.Runner.Common.Tests.Worker
|
||||
|
||||
_stepsRunner = new StepsRunner();
|
||||
_stepsRunner.Initialize(hc);
|
||||
|
||||
var mockDapDebugger = new Mock<IDapDebugger>();
|
||||
hc.SetSingleton(mockDapDebugger.Object);
|
||||
|
||||
return hc;
|
||||
}
|
||||
|
||||
|
||||
245
src/Test/L0/Worker/WebSocketDapBridgeL0.cs
Normal file
245
src/Test/L0/Worker/WebSocketDapBridgeL0.cs
Normal file
@@ -0,0 +1,245 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Net;
|
||||
using System.Net.Sockets;
|
||||
using System.Net.WebSockets;
|
||||
using System.Runtime.CompilerServices;
|
||||
using System.Text;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Worker.Dap;
|
||||
using Xunit;
|
||||
|
||||
namespace GitHub.Runner.Common.Tests.Worker
|
||||
{
|
||||
public sealed class WebSocketDapBridgeL0
|
||||
{
|
||||
private TestHostContext CreateTestContext([CallerMemberName] string testName = "")
|
||||
{
|
||||
return new TestHostContext(this, testName);
|
||||
}
|
||||
|
||||
private static ushort GetFreePort()
|
||||
{
|
||||
using var listener = new TcpListener(IPAddress.Loopback, 0);
|
||||
listener.Start();
|
||||
return (ushort)((IPEndPoint)listener.LocalEndpoint).Port;
|
||||
}
|
||||
|
||||
private static async Task<byte[]> ReadWebSocketMessageAsync(ClientWebSocket client, TimeSpan timeout)
|
||||
{
|
||||
using var cts = new CancellationTokenSource(timeout);
|
||||
using var buffer = new MemoryStream();
|
||||
var receiveBuffer = new byte[1024];
|
||||
|
||||
while (true)
|
||||
{
|
||||
var result = await client.ReceiveAsync(new ArraySegment<byte>(receiveBuffer), cts.Token);
|
||||
if (result.MessageType == WebSocketMessageType.Close)
|
||||
{
|
||||
throw new EndOfStreamException("WebSocket closed unexpectedly.");
|
||||
}
|
||||
|
||||
if (result.Count > 0)
|
||||
{
|
||||
buffer.Write(receiveBuffer, 0, result.Count);
|
||||
}
|
||||
|
||||
if (result.EndOfMessage)
|
||||
{
|
||||
return buffer.ToArray();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public async Task BridgeForwardsWebSocketFramesToTcpAndBack()
|
||||
{
|
||||
using var hc = CreateTestContext();
|
||||
using var targetListener = new TcpListener(IPAddress.Loopback, 0);
|
||||
targetListener.Start();
|
||||
|
||||
var targetPort = ((IPEndPoint)targetListener.LocalEndpoint).Port;
|
||||
var bridgePort = GetFreePort();
|
||||
|
||||
await using var bridge = new WebSocketDapBridge(hc.GetTrace("DapWebSocketBridge"), bridgePort, targetPort);
|
||||
bridge.Start();
|
||||
|
||||
var echoTask = Task.Run(async () =>
|
||||
{
|
||||
using var targetClient = await targetListener.AcceptTcpClientAsync();
|
||||
using var stream = targetClient.GetStream();
|
||||
|
||||
var headerBuilder = new StringBuilder();
|
||||
var buffer = new byte[1];
|
||||
var contentLength = -1;
|
||||
|
||||
while (true)
|
||||
{
|
||||
var bytesRead = await stream.ReadAsync(buffer, 0, 1);
|
||||
if (bytesRead == 0) break;
|
||||
|
||||
headerBuilder.Append((char)buffer[0]);
|
||||
var headers = headerBuilder.ToString();
|
||||
if (headers.EndsWith("\r\n\r\n", StringComparison.Ordinal))
|
||||
{
|
||||
foreach (var line in headers.Split(new[] { "\r\n" }, StringSplitOptions.RemoveEmptyEntries))
|
||||
{
|
||||
if (line.StartsWith("Content-Length: ", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
contentLength = int.Parse(line.Substring("Content-Length: ".Length).Trim());
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
var body = new byte[contentLength];
|
||||
var totalRead = 0;
|
||||
while (totalRead < contentLength)
|
||||
{
|
||||
var bytesRead = await stream.ReadAsync(body, totalRead, contentLength - totalRead);
|
||||
if (bytesRead == 0) break;
|
||||
totalRead += bytesRead;
|
||||
}
|
||||
|
||||
var header = $"Content-Length: {body.Length}\r\n\r\n";
|
||||
var headerBytes = Encoding.ASCII.GetBytes(header);
|
||||
await stream.WriteAsync(headerBytes, 0, headerBytes.Length);
|
||||
await stream.WriteAsync(body, 0, body.Length);
|
||||
await stream.FlushAsync();
|
||||
});
|
||||
|
||||
using var client = new ClientWebSocket();
|
||||
await client.ConnectAsync(new Uri($"ws://127.0.0.1:{bridgePort}/"), CancellationToken.None);
|
||||
|
||||
var dapMessage = "{\"type\":\"request\",\"seq\":1,\"command\":\"initialize\"}";
|
||||
var payload = Encoding.UTF8.GetBytes(dapMessage);
|
||||
await client.SendAsync(new ArraySegment<byte>(payload), WebSocketMessageType.Text, endOfMessage: true, CancellationToken.None);
|
||||
|
||||
var echoed = await ReadWebSocketMessageAsync(client, TimeSpan.FromSeconds(5));
|
||||
Assert.Equal(payload, echoed);
|
||||
|
||||
await echoTask;
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public async Task BridgeRejectsNonWebSocketRequests()
|
||||
{
|
||||
using var hc = CreateTestContext();
|
||||
var bridgePort = GetFreePort();
|
||||
|
||||
await using var bridge = new WebSocketDapBridge(hc.GetTrace("DapWebSocketBridge"), bridgePort, GetFreePort());
|
||||
bridge.Start();
|
||||
|
||||
using var client = new TcpClient();
|
||||
await client.ConnectAsync(IPAddress.Loopback, bridgePort);
|
||||
using var stream = client.GetStream();
|
||||
|
||||
var request = Encoding.ASCII.GetBytes(
|
||||
"GET / HTTP/1.1\r\n" +
|
||||
"Host: localhost\r\n" +
|
||||
"\r\n");
|
||||
await stream.WriteAsync(request, 0, request.Length);
|
||||
await stream.FlushAsync();
|
||||
|
||||
// Read until the server closes the connection (Connection: close).
|
||||
// A single ReadAsync may return a partial response on some platforms.
|
||||
using var ms = new MemoryStream();
|
||||
var responseBuffer = new byte[1024];
|
||||
int bytesRead;
|
||||
while ((bytesRead = await stream.ReadAsync(responseBuffer, 0, responseBuffer.Length)) > 0)
|
||||
{
|
||||
ms.Write(responseBuffer, 0, bytesRead);
|
||||
}
|
||||
|
||||
var response = Encoding.ASCII.GetString(ms.ToArray());
|
||||
|
||||
Assert.Contains("400 BadRequest", response);
|
||||
Assert.Contains("Expected a websocket upgrade request.", response);
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
[InlineData(new byte[] { (byte)'G', (byte)'E', (byte)'T', (byte)' ' }, 1)]
|
||||
[InlineData(new byte[] { 0x81, 0x85, 0x00, 0x00 }, 2)]
|
||||
[InlineData(new byte[] { 0xC1, 0x85, 0x00, 0x00 }, 3)]
|
||||
[InlineData(new byte[] { (byte)'P', (byte)'R', (byte)'I', (byte)' ' }, 4)]
|
||||
[InlineData(new byte[] { 0x16, 0x03, 0x03, 0x01 }, 5)]
|
||||
[InlineData(new byte[] { (byte)'B', (byte)'A', (byte)'D', (byte)'!' }, 0)]
|
||||
public void ClassifyIncomingStreamPrefixDetectsExpectedProtocols(byte[] initialBytes, int expectedKind)
|
||||
{
|
||||
var actualKind = WebSocketDapBridge.ClassifyIncomingStreamPrefix(initialBytes);
|
||||
Assert.Equal((WebSocketDapBridge.IncomingStreamPrefixKind)expectedKind, actualKind);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public async Task BridgeRejectsOversizedWebSocketMessage()
|
||||
{
|
||||
using var hc = CreateTestContext();
|
||||
using var targetListener = new TcpListener(IPAddress.Loopback, 0);
|
||||
targetListener.Start();
|
||||
|
||||
var targetPort = ((IPEndPoint)targetListener.LocalEndpoint).Port;
|
||||
var bridgePort = GetFreePort();
|
||||
|
||||
await using var bridge = new WebSocketDapBridge(hc.GetTrace("DapWebSocketBridge"), bridgePort, targetPort);
|
||||
bridge.MaxInboundMessageSize = 64; // artificially small limit for testing
|
||||
bridge.Start();
|
||||
|
||||
using var client = new ClientWebSocket();
|
||||
await client.ConnectAsync(new Uri($"ws://127.0.0.1:{bridgePort}/"), CancellationToken.None);
|
||||
|
||||
// Send a message that exceeds the 64-byte limit
|
||||
var oversizedPayload = new byte[128];
|
||||
Array.Fill(oversizedPayload, (byte)'X');
|
||||
await client.SendAsync(
|
||||
new ArraySegment<byte>(oversizedPayload),
|
||||
WebSocketMessageType.Text,
|
||||
endOfMessage: true,
|
||||
CancellationToken.None);
|
||||
|
||||
// The bridge should close the connection with MessageTooBig
|
||||
var receiveBuffer = new byte[256];
|
||||
var result = await client.ReceiveAsync(
|
||||
new ArraySegment<byte>(receiveBuffer),
|
||||
new CancellationTokenSource(TimeSpan.FromSeconds(5)).Token);
|
||||
|
||||
Assert.Equal(WebSocketMessageType.Close, result.MessageType);
|
||||
Assert.Equal(WebSocketCloseStatus.MessageTooBig, client.CloseStatus);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public async Task BridgeDisposeCompletesWhenPeerDoesNotCloseGracefully()
|
||||
{
|
||||
using var hc = CreateTestContext();
|
||||
using var targetListener = new TcpListener(IPAddress.Loopback, 0);
|
||||
targetListener.Start();
|
||||
|
||||
var targetPort = ((IPEndPoint)targetListener.LocalEndpoint).Port;
|
||||
var bridgePort = GetFreePort();
|
||||
|
||||
var bridge = new WebSocketDapBridge(hc.GetTrace("DapWebSocketBridge"), bridgePort, targetPort);
|
||||
bridge.Start();
|
||||
|
||||
// Connect a raw TCP client but never perform WebSocket close handshake
|
||||
using var rawClient = new TcpClient();
|
||||
await rawClient.ConnectAsync(IPAddress.Loopback, bridgePort);
|
||||
|
||||
// Dispose should complete within a bounded time, not hang
|
||||
var disposeTask = bridge.DisposeAsync().AsTask();
|
||||
var completed = await Task.WhenAny(disposeTask, Task.Delay(TimeSpan.FromSeconds(15)));
|
||||
Assert.True(completed == disposeTask, "Bridge dispose should complete within the timeout, not hang on a non-cooperative peer");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -17,7 +17,7 @@ LAYOUT_DIR="$SCRIPT_DIR/../_layout"
|
||||
DOWNLOAD_DIR="$SCRIPT_DIR/../_downloads/netcore2x"
|
||||
PACKAGE_DIR="$SCRIPT_DIR/../_package"
|
||||
DOTNETSDK_ROOT="$SCRIPT_DIR/../_dotnetsdk"
|
||||
DOTNETSDK_VERSION="8.0.418"
|
||||
DOTNETSDK_VERSION="8.0.419"
|
||||
DOTNETSDK_INSTALLDIR="$DOTNETSDK_ROOT/$DOTNETSDK_VERSION"
|
||||
RUNNER_VERSION=$(cat runnerversion)
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
{
|
||||
"sdk": {
|
||||
"version": "8.0.418"
|
||||
"version": "8.0.419"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1 +1 @@
|
||||
2.332.0
|
||||
2.333.0
|
||||
|
||||
Reference in New Issue
Block a user