mirror of
https://github.com/actions/runner.git
synced 2026-04-07 11:03:54 +08:00
Compare commits
8 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
df507886cb | ||
|
|
5c6dd47e76 | ||
|
|
7ff994b932 | ||
|
|
b9275b59cf | ||
|
|
f0c228635e | ||
|
|
9728019b24 | ||
|
|
e17e7aabbf | ||
|
|
4259ffb6dc |
301
src/Misc/expressionFunc/hashFiles/package-lock.json
generated
301
src/Misc/expressionFunc/hashFiles/package-lock.json
generated
@@ -14,7 +14,7 @@
|
||||
"devDependencies": {
|
||||
"@stylistic/eslint-plugin": "^5.10.0",
|
||||
"@types/node": "^22.0.0",
|
||||
"@typescript-eslint/eslint-plugin": "^8.57.1",
|
||||
"@typescript-eslint/eslint-plugin": "^8.57.2",
|
||||
"@typescript-eslint/parser": "^8.0.0",
|
||||
"@vercel/ncc": "^0.38.3",
|
||||
"eslint": "^8.47.0",
|
||||
@@ -308,16 +308,16 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/eslint-plugin": {
|
||||
"version": "8.57.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.57.1.tgz",
|
||||
"integrity": "sha512-Gn3aqnvNl4NGc6x3/Bqk1AOn0thyTU9bqDRhiRnUWezgvr2OnhYCWCgC8zXXRVqBsIL1pSDt7T9nJUe0oM0kDQ==",
|
||||
"version": "8.57.2",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.57.2.tgz",
|
||||
"integrity": "sha512-NZZgp0Fm2IkD+La5PR81sd+g+8oS6JwJje+aRWsDocxHkjyRw0J5L5ZTlN3LI1LlOcGL7ph3eaIUmTXMIjLk0w==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@eslint-community/regexpp": "^4.12.2",
|
||||
"@typescript-eslint/scope-manager": "8.57.1",
|
||||
"@typescript-eslint/type-utils": "8.57.1",
|
||||
"@typescript-eslint/utils": "8.57.1",
|
||||
"@typescript-eslint/visitor-keys": "8.57.1",
|
||||
"@typescript-eslint/scope-manager": "8.57.2",
|
||||
"@typescript-eslint/type-utils": "8.57.2",
|
||||
"@typescript-eslint/utils": "8.57.2",
|
||||
"@typescript-eslint/visitor-keys": "8.57.2",
|
||||
"ignore": "^7.0.5",
|
||||
"natural-compare": "^1.4.0",
|
||||
"ts-api-utils": "^2.4.0"
|
||||
@@ -330,7 +330,7 @@
|
||||
"url": "https://opencollective.com/typescript-eslint"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@typescript-eslint/parser": "^8.57.1",
|
||||
"@typescript-eslint/parser": "^8.57.2",
|
||||
"eslint": "^8.57.0 || ^9.0.0 || ^10.0.0",
|
||||
"typescript": ">=4.8.4 <6.0.0"
|
||||
}
|
||||
@@ -358,15 +358,15 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/parser": {
|
||||
"version": "8.57.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.57.1.tgz",
|
||||
"integrity": "sha512-k4eNDan0EIMTT/dUKc/g+rsJ6wcHYhNPdY19VoX/EOtaAG8DLtKCykhrUnuHPYvinn5jhAPgD2Qw9hXBwrahsw==",
|
||||
"version": "8.57.2",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.57.2.tgz",
|
||||
"integrity": "sha512-30ScMRHIAD33JJQkgfGW1t8CURZtjc2JpTrq5n2HFhOefbAhb7ucc7xJwdWcrEtqUIYJ73Nybpsggii6GtAHjA==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@typescript-eslint/scope-manager": "8.57.1",
|
||||
"@typescript-eslint/types": "8.57.1",
|
||||
"@typescript-eslint/typescript-estree": "8.57.1",
|
||||
"@typescript-eslint/visitor-keys": "8.57.1",
|
||||
"@typescript-eslint/scope-manager": "8.57.2",
|
||||
"@typescript-eslint/types": "8.57.2",
|
||||
"@typescript-eslint/typescript-estree": "8.57.2",
|
||||
"@typescript-eslint/visitor-keys": "8.57.2",
|
||||
"debug": "^4.4.3"
|
||||
},
|
||||
"engines": {
|
||||
@@ -382,13 +382,13 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/project-service": {
|
||||
"version": "8.57.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.57.1.tgz",
|
||||
"integrity": "sha512-vx1F37BRO1OftsYlmG9xay1TqnjNVlqALymwWVuYTdo18XuKxtBpCj1QlzNIEHlvlB27osvXFWptYiEWsVdYsg==",
|
||||
"version": "8.57.2",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.57.2.tgz",
|
||||
"integrity": "sha512-FuH0wipFywXRTHf+bTTjNyuNQQsQC3qh/dYzaM4I4W0jrCqjCVuUh99+xd9KamUfmCGPvbO8NDngo/vsnNVqgw==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@typescript-eslint/tsconfig-utils": "^8.57.1",
|
||||
"@typescript-eslint/types": "^8.57.1",
|
||||
"@typescript-eslint/tsconfig-utils": "^8.57.2",
|
||||
"@typescript-eslint/types": "^8.57.2",
|
||||
"debug": "^4.4.3"
|
||||
},
|
||||
"engines": {
|
||||
@@ -403,13 +403,13 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/scope-manager": {
|
||||
"version": "8.57.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.57.1.tgz",
|
||||
"integrity": "sha512-hs/QcpCwlwT2L5S+3fT6gp0PabyGk4Q0Rv2doJXA0435/OpnSR3VRgvrp8Xdoc3UAYSg9cyUjTeFXZEPg/3OKg==",
|
||||
"version": "8.57.2",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.57.2.tgz",
|
||||
"integrity": "sha512-snZKH+W4WbWkrBqj4gUNRIGb/jipDW3qMqVJ4C9rzdFc+wLwruxk+2a5D+uoFcKPAqyqEnSb4l2ULuZf95eSkw==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@typescript-eslint/types": "8.57.1",
|
||||
"@typescript-eslint/visitor-keys": "8.57.1"
|
||||
"@typescript-eslint/types": "8.57.2",
|
||||
"@typescript-eslint/visitor-keys": "8.57.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
|
||||
@@ -420,9 +420,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/tsconfig-utils": {
|
||||
"version": "8.57.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.57.1.tgz",
|
||||
"integrity": "sha512-0lgOZB8cl19fHO4eI46YUx2EceQqhgkPSuCGLlGi79L2jwYY1cxeYc1Nae8Aw1xjgW3PKVDLlr3YJ6Bxx8HkWg==",
|
||||
"version": "8.57.2",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.57.2.tgz",
|
||||
"integrity": "sha512-3Lm5DSM+DCowsUOJC+YqHHnKEfFh5CoGkj5Z31NQSNF4l5wdOwqGn99wmwN/LImhfY3KJnmordBq/4+VDe2eKw==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
|
||||
@@ -436,14 +436,14 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/type-utils": {
|
||||
"version": "8.57.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.57.1.tgz",
|
||||
"integrity": "sha512-+Bwwm0ScukFdyoJsh2u6pp4S9ktegF98pYUU0hkphOOqdMB+1sNQhIz8y5E9+4pOioZijrkfNO/HUJVAFFfPKA==",
|
||||
"version": "8.57.2",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.57.2.tgz",
|
||||
"integrity": "sha512-Co6ZCShm6kIbAM/s+oYVpKFfW7LBc6FXoPXjTRQ449PPNBY8U0KZXuevz5IFuuUj2H9ss40atTaf9dlGLzbWZg==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@typescript-eslint/types": "8.57.1",
|
||||
"@typescript-eslint/typescript-estree": "8.57.1",
|
||||
"@typescript-eslint/utils": "8.57.1",
|
||||
"@typescript-eslint/types": "8.57.2",
|
||||
"@typescript-eslint/typescript-estree": "8.57.2",
|
||||
"@typescript-eslint/utils": "8.57.2",
|
||||
"debug": "^4.4.3",
|
||||
"ts-api-utils": "^2.4.0"
|
||||
},
|
||||
@@ -460,9 +460,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/type-utils/node_modules/ts-api-utils": {
|
||||
"version": "2.4.0",
|
||||
"resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.4.0.tgz",
|
||||
"integrity": "sha512-3TaVTaAv2gTiMB35i3FiGJaRfwb3Pyn/j3m/bfAvGe8FB7CF6u+LMYqYlDh7reQf7UNvoTvdfAqHGmPGOSsPmA==",
|
||||
"version": "2.5.0",
|
||||
"resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.5.0.tgz",
|
||||
"integrity": "sha512-OJ/ibxhPlqrMM0UiNHJ/0CKQkoKF243/AEmplt3qpRgkW8VG7IfOS41h7V8TjITqdByHzrjcS/2si+y4lIh8NA==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": ">=18.12"
|
||||
@@ -472,9 +472,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/types": {
|
||||
"version": "8.57.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.57.1.tgz",
|
||||
"integrity": "sha512-S29BOBPJSFUiblEl6RzPPjJt6w25A6XsBqRVDt53tA/tlL8q7ceQNZHTjPeONt/3S7KRI4quk+yP9jK2WjBiPQ==",
|
||||
"version": "8.57.2",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.57.2.tgz",
|
||||
"integrity": "sha512-/iZM6FnM4tnx9csuTxspMW4BOSegshwX5oBDznJ7S4WggL7Vczz5d2W11ecc4vRrQMQHXRSxzrCsyG5EsPPTbA==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
|
||||
@@ -485,15 +485,15 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/typescript-estree": {
|
||||
"version": "8.57.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.57.1.tgz",
|
||||
"integrity": "sha512-ybe2hS9G6pXpqGtPli9Gx9quNV0TWLOmh58ADlmZe9DguLq0tiAKVjirSbtM1szG6+QH6rVXyU6GTLQbWnMY+g==",
|
||||
"version": "8.57.2",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.57.2.tgz",
|
||||
"integrity": "sha512-2MKM+I6g8tJxfSmFKOnHv2t8Sk3T6rF20A1Puk0svLK+uVapDZB/4pfAeB7nE83uAZrU6OxW+HmOd5wHVdXwXA==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@typescript-eslint/project-service": "8.57.1",
|
||||
"@typescript-eslint/tsconfig-utils": "8.57.1",
|
||||
"@typescript-eslint/types": "8.57.1",
|
||||
"@typescript-eslint/visitor-keys": "8.57.1",
|
||||
"@typescript-eslint/project-service": "8.57.2",
|
||||
"@typescript-eslint/tsconfig-utils": "8.57.2",
|
||||
"@typescript-eslint/types": "8.57.2",
|
||||
"@typescript-eslint/visitor-keys": "8.57.2",
|
||||
"debug": "^4.4.3",
|
||||
"minimatch": "^10.2.2",
|
||||
"semver": "^7.7.3",
|
||||
@@ -521,9 +521,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/typescript-estree/node_modules/brace-expansion": {
|
||||
"version": "5.0.4",
|
||||
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-5.0.4.tgz",
|
||||
"integrity": "sha512-h+DEnpVvxmfVefa4jFbCf5HdH5YMDXRsmKflpf1pILZWRFlTbJpxeU55nJl4Smt5HQaGzg1o6RHFPJaOqnmBDg==",
|
||||
"version": "5.0.5",
|
||||
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-5.0.5.tgz",
|
||||
"integrity": "sha512-VZznLgtwhn+Mact9tfiwx64fA9erHH/MCXEUfB/0bX/6Fz6ny5EGTXYltMocqg4xFAQZtnO3DHWWXi8RiuN7cQ==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"balanced-match": "^4.0.2"
|
||||
@@ -548,9 +548,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/typescript-estree/node_modules/ts-api-utils": {
|
||||
"version": "2.4.0",
|
||||
"resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.4.0.tgz",
|
||||
"integrity": "sha512-3TaVTaAv2gTiMB35i3FiGJaRfwb3Pyn/j3m/bfAvGe8FB7CF6u+LMYqYlDh7reQf7UNvoTvdfAqHGmPGOSsPmA==",
|
||||
"version": "2.5.0",
|
||||
"resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.5.0.tgz",
|
||||
"integrity": "sha512-OJ/ibxhPlqrMM0UiNHJ/0CKQkoKF243/AEmplt3qpRgkW8VG7IfOS41h7V8TjITqdByHzrjcS/2si+y4lIh8NA==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": ">=18.12"
|
||||
@@ -560,15 +560,15 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/utils": {
|
||||
"version": "8.57.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.57.1.tgz",
|
||||
"integrity": "sha512-XUNSJ/lEVFttPMMoDVA2r2bwrl8/oPx8cURtczkSEswY5T3AeLmCy+EKWQNdL4u0MmAHOjcWrqJp2cdvgjn8dQ==",
|
||||
"version": "8.57.2",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.57.2.tgz",
|
||||
"integrity": "sha512-krRIbvPK1ju1WBKIefiX+bngPs+odIQUtR7kymzPfo1POVw3jlF+nLkmexdSSd4UCbDcQn+wMBATOOmpBbqgKg==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@eslint-community/eslint-utils": "^4.9.1",
|
||||
"@typescript-eslint/scope-manager": "8.57.1",
|
||||
"@typescript-eslint/types": "8.57.1",
|
||||
"@typescript-eslint/typescript-estree": "8.57.1"
|
||||
"@typescript-eslint/scope-manager": "8.57.2",
|
||||
"@typescript-eslint/types": "8.57.2",
|
||||
"@typescript-eslint/typescript-estree": "8.57.2"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
|
||||
@@ -583,12 +583,12 @@
|
||||
}
|
||||
},
|
||||
"node_modules/@typescript-eslint/visitor-keys": {
|
||||
"version": "8.57.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.57.1.tgz",
|
||||
"integrity": "sha512-YWnmJkXbofiz9KbnbbwuA2rpGkFPLbAIetcCNO6mJ8gdhdZ/v7WDXsoGFAJuM6ikUFKTlSQnjWnVO4ux+UzS6A==",
|
||||
"version": "8.57.2",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.57.2.tgz",
|
||||
"integrity": "sha512-zhahknjobV2FiD6Ee9iLbS7OV9zi10rG26odsQdfBO/hjSzUQbkIYgda+iNKK1zNiW2ey+Lf8MU5btN17V3dUw==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"@typescript-eslint/types": "8.57.1",
|
||||
"@typescript-eslint/types": "8.57.2",
|
||||
"eslint-visitor-keys": "^5.0.0"
|
||||
},
|
||||
"engines": {
|
||||
@@ -899,10 +899,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/brace-expansion": {
|
||||
"version": "1.1.12",
|
||||
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
|
||||
"integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
|
||||
"license": "MIT",
|
||||
"version": "1.1.13",
|
||||
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.13.tgz",
|
||||
"integrity": "sha512-9ZLprWS6EENmhEOpjCYW2c8VkmOvckIJZfkr7rBW6dObmfgJ/L1GpSYW5Hpo9lDz4D1+n0Ckz8rU7FwHDQiG/w==",
|
||||
"dependencies": {
|
||||
"balanced-match": "^1.0.0",
|
||||
"concat-map": "0.0.1"
|
||||
@@ -1771,9 +1770,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/eslint-plugin-github/node_modules/brace-expansion": {
|
||||
"version": "5.0.3",
|
||||
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-5.0.3.tgz",
|
||||
"integrity": "sha512-fy6KJm2RawA5RcHkLa1z/ScpBeA762UF9KmZQxwIbDtRJrgLzM10depAiEQ+CXYcoiqW1/m96OAAoke2nE9EeA==",
|
||||
"version": "5.0.5",
|
||||
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-5.0.5.tgz",
|
||||
"integrity": "sha512-VZznLgtwhn+Mact9tfiwx64fA9erHH/MCXEUfB/0bX/6Fz6ny5EGTXYltMocqg4xFAQZtnO3DHWWXi8RiuN7cQ==",
|
||||
"dev": true,
|
||||
"dependencies": {
|
||||
"balanced-match": "^4.0.2"
|
||||
@@ -2187,9 +2186,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/flatted": {
|
||||
"version": "3.2.7",
|
||||
"resolved": "https://registry.npmjs.org/flatted/-/flatted-3.2.7.tgz",
|
||||
"integrity": "sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ==",
|
||||
"version": "3.4.2",
|
||||
"resolved": "https://registry.npmjs.org/flatted/-/flatted-3.4.2.tgz",
|
||||
"integrity": "sha512-PjDse7RzhcPkIJwy5t7KPWQSZ9cAbzQXcafsetQoD7sOJRQlGikNbx7yZp2OotDnJyrDcbyRq3Ttb18iYOqkxA==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/for-each": {
|
||||
@@ -4323,9 +4322,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/tinyglobby/node_modules/picomatch": {
|
||||
"version": "4.0.3",
|
||||
"resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz",
|
||||
"integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==",
|
||||
"version": "4.0.4",
|
||||
"resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.4.tgz",
|
||||
"integrity": "sha512-QP88BAKvMam/3NxH6vj2o21R6MjxZUAd6nlwAS/pnGvN9IVLocLHxGYIzFhg6fUQ+5th6P4dv4eW9jX3DSIj7A==",
|
||||
"dev": true,
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
@@ -4923,16 +4922,16 @@
|
||||
}
|
||||
},
|
||||
"@typescript-eslint/eslint-plugin": {
|
||||
"version": "8.57.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.57.1.tgz",
|
||||
"integrity": "sha512-Gn3aqnvNl4NGc6x3/Bqk1AOn0thyTU9bqDRhiRnUWezgvr2OnhYCWCgC8zXXRVqBsIL1pSDt7T9nJUe0oM0kDQ==",
|
||||
"version": "8.57.2",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.57.2.tgz",
|
||||
"integrity": "sha512-NZZgp0Fm2IkD+La5PR81sd+g+8oS6JwJje+aRWsDocxHkjyRw0J5L5ZTlN3LI1LlOcGL7ph3eaIUmTXMIjLk0w==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"@eslint-community/regexpp": "^4.12.2",
|
||||
"@typescript-eslint/scope-manager": "8.57.1",
|
||||
"@typescript-eslint/type-utils": "8.57.1",
|
||||
"@typescript-eslint/utils": "8.57.1",
|
||||
"@typescript-eslint/visitor-keys": "8.57.1",
|
||||
"@typescript-eslint/scope-manager": "8.57.2",
|
||||
"@typescript-eslint/type-utils": "8.57.2",
|
||||
"@typescript-eslint/utils": "8.57.2",
|
||||
"@typescript-eslint/visitor-keys": "8.57.2",
|
||||
"ignore": "^7.0.5",
|
||||
"natural-compare": "^1.4.0",
|
||||
"ts-api-utils": "^2.4.0"
|
||||
@@ -4954,84 +4953,84 @@
|
||||
}
|
||||
},
|
||||
"@typescript-eslint/parser": {
|
||||
"version": "8.57.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.57.1.tgz",
|
||||
"integrity": "sha512-k4eNDan0EIMTT/dUKc/g+rsJ6wcHYhNPdY19VoX/EOtaAG8DLtKCykhrUnuHPYvinn5jhAPgD2Qw9hXBwrahsw==",
|
||||
"version": "8.57.2",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.57.2.tgz",
|
||||
"integrity": "sha512-30ScMRHIAD33JJQkgfGW1t8CURZtjc2JpTrq5n2HFhOefbAhb7ucc7xJwdWcrEtqUIYJ73Nybpsggii6GtAHjA==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"@typescript-eslint/scope-manager": "8.57.1",
|
||||
"@typescript-eslint/types": "8.57.1",
|
||||
"@typescript-eslint/typescript-estree": "8.57.1",
|
||||
"@typescript-eslint/visitor-keys": "8.57.1",
|
||||
"@typescript-eslint/scope-manager": "8.57.2",
|
||||
"@typescript-eslint/types": "8.57.2",
|
||||
"@typescript-eslint/typescript-estree": "8.57.2",
|
||||
"@typescript-eslint/visitor-keys": "8.57.2",
|
||||
"debug": "^4.4.3"
|
||||
}
|
||||
},
|
||||
"@typescript-eslint/project-service": {
|
||||
"version": "8.57.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.57.1.tgz",
|
||||
"integrity": "sha512-vx1F37BRO1OftsYlmG9xay1TqnjNVlqALymwWVuYTdo18XuKxtBpCj1QlzNIEHlvlB27osvXFWptYiEWsVdYsg==",
|
||||
"version": "8.57.2",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.57.2.tgz",
|
||||
"integrity": "sha512-FuH0wipFywXRTHf+bTTjNyuNQQsQC3qh/dYzaM4I4W0jrCqjCVuUh99+xd9KamUfmCGPvbO8NDngo/vsnNVqgw==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"@typescript-eslint/tsconfig-utils": "^8.57.1",
|
||||
"@typescript-eslint/types": "^8.57.1",
|
||||
"@typescript-eslint/tsconfig-utils": "^8.57.2",
|
||||
"@typescript-eslint/types": "^8.57.2",
|
||||
"debug": "^4.4.3"
|
||||
}
|
||||
},
|
||||
"@typescript-eslint/scope-manager": {
|
||||
"version": "8.57.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.57.1.tgz",
|
||||
"integrity": "sha512-hs/QcpCwlwT2L5S+3fT6gp0PabyGk4Q0Rv2doJXA0435/OpnSR3VRgvrp8Xdoc3UAYSg9cyUjTeFXZEPg/3OKg==",
|
||||
"version": "8.57.2",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.57.2.tgz",
|
||||
"integrity": "sha512-snZKH+W4WbWkrBqj4gUNRIGb/jipDW3qMqVJ4C9rzdFc+wLwruxk+2a5D+uoFcKPAqyqEnSb4l2ULuZf95eSkw==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"@typescript-eslint/types": "8.57.1",
|
||||
"@typescript-eslint/visitor-keys": "8.57.1"
|
||||
"@typescript-eslint/types": "8.57.2",
|
||||
"@typescript-eslint/visitor-keys": "8.57.2"
|
||||
}
|
||||
},
|
||||
"@typescript-eslint/tsconfig-utils": {
|
||||
"version": "8.57.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.57.1.tgz",
|
||||
"integrity": "sha512-0lgOZB8cl19fHO4eI46YUx2EceQqhgkPSuCGLlGi79L2jwYY1cxeYc1Nae8Aw1xjgW3PKVDLlr3YJ6Bxx8HkWg==",
|
||||
"version": "8.57.2",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.57.2.tgz",
|
||||
"integrity": "sha512-3Lm5DSM+DCowsUOJC+YqHHnKEfFh5CoGkj5Z31NQSNF4l5wdOwqGn99wmwN/LImhfY3KJnmordBq/4+VDe2eKw==",
|
||||
"dev": true,
|
||||
"requires": {}
|
||||
},
|
||||
"@typescript-eslint/type-utils": {
|
||||
"version": "8.57.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.57.1.tgz",
|
||||
"integrity": "sha512-+Bwwm0ScukFdyoJsh2u6pp4S9ktegF98pYUU0hkphOOqdMB+1sNQhIz8y5E9+4pOioZijrkfNO/HUJVAFFfPKA==",
|
||||
"version": "8.57.2",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.57.2.tgz",
|
||||
"integrity": "sha512-Co6ZCShm6kIbAM/s+oYVpKFfW7LBc6FXoPXjTRQ449PPNBY8U0KZXuevz5IFuuUj2H9ss40atTaf9dlGLzbWZg==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"@typescript-eslint/types": "8.57.1",
|
||||
"@typescript-eslint/typescript-estree": "8.57.1",
|
||||
"@typescript-eslint/utils": "8.57.1",
|
||||
"@typescript-eslint/types": "8.57.2",
|
||||
"@typescript-eslint/typescript-estree": "8.57.2",
|
||||
"@typescript-eslint/utils": "8.57.2",
|
||||
"debug": "^4.4.3",
|
||||
"ts-api-utils": "^2.4.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"ts-api-utils": {
|
||||
"version": "2.4.0",
|
||||
"resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.4.0.tgz",
|
||||
"integrity": "sha512-3TaVTaAv2gTiMB35i3FiGJaRfwb3Pyn/j3m/bfAvGe8FB7CF6u+LMYqYlDh7reQf7UNvoTvdfAqHGmPGOSsPmA==",
|
||||
"version": "2.5.0",
|
||||
"resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.5.0.tgz",
|
||||
"integrity": "sha512-OJ/ibxhPlqrMM0UiNHJ/0CKQkoKF243/AEmplt3qpRgkW8VG7IfOS41h7V8TjITqdByHzrjcS/2si+y4lIh8NA==",
|
||||
"dev": true,
|
||||
"requires": {}
|
||||
}
|
||||
}
|
||||
},
|
||||
"@typescript-eslint/types": {
|
||||
"version": "8.57.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.57.1.tgz",
|
||||
"integrity": "sha512-S29BOBPJSFUiblEl6RzPPjJt6w25A6XsBqRVDt53tA/tlL8q7ceQNZHTjPeONt/3S7KRI4quk+yP9jK2WjBiPQ==",
|
||||
"version": "8.57.2",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.57.2.tgz",
|
||||
"integrity": "sha512-/iZM6FnM4tnx9csuTxspMW4BOSegshwX5oBDznJ7S4WggL7Vczz5d2W11ecc4vRrQMQHXRSxzrCsyG5EsPPTbA==",
|
||||
"dev": true
|
||||
},
|
||||
"@typescript-eslint/typescript-estree": {
|
||||
"version": "8.57.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.57.1.tgz",
|
||||
"integrity": "sha512-ybe2hS9G6pXpqGtPli9Gx9quNV0TWLOmh58ADlmZe9DguLq0tiAKVjirSbtM1szG6+QH6rVXyU6GTLQbWnMY+g==",
|
||||
"version": "8.57.2",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.57.2.tgz",
|
||||
"integrity": "sha512-2MKM+I6g8tJxfSmFKOnHv2t8Sk3T6rF20A1Puk0svLK+uVapDZB/4pfAeB7nE83uAZrU6OxW+HmOd5wHVdXwXA==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"@typescript-eslint/project-service": "8.57.1",
|
||||
"@typescript-eslint/tsconfig-utils": "8.57.1",
|
||||
"@typescript-eslint/types": "8.57.1",
|
||||
"@typescript-eslint/visitor-keys": "8.57.1",
|
||||
"@typescript-eslint/project-service": "8.57.2",
|
||||
"@typescript-eslint/tsconfig-utils": "8.57.2",
|
||||
"@typescript-eslint/types": "8.57.2",
|
||||
"@typescript-eslint/visitor-keys": "8.57.2",
|
||||
"debug": "^4.4.3",
|
||||
"minimatch": "^10.2.2",
|
||||
"semver": "^7.7.3",
|
||||
@@ -5046,9 +5045,9 @@
|
||||
"dev": true
|
||||
},
|
||||
"brace-expansion": {
|
||||
"version": "5.0.4",
|
||||
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-5.0.4.tgz",
|
||||
"integrity": "sha512-h+DEnpVvxmfVefa4jFbCf5HdH5YMDXRsmKflpf1pILZWRFlTbJpxeU55nJl4Smt5HQaGzg1o6RHFPJaOqnmBDg==",
|
||||
"version": "5.0.5",
|
||||
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-5.0.5.tgz",
|
||||
"integrity": "sha512-VZznLgtwhn+Mact9tfiwx64fA9erHH/MCXEUfB/0bX/6Fz6ny5EGTXYltMocqg4xFAQZtnO3DHWWXi8RiuN7cQ==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"balanced-match": "^4.0.2"
|
||||
@@ -5064,33 +5063,33 @@
|
||||
}
|
||||
},
|
||||
"ts-api-utils": {
|
||||
"version": "2.4.0",
|
||||
"resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.4.0.tgz",
|
||||
"integrity": "sha512-3TaVTaAv2gTiMB35i3FiGJaRfwb3Pyn/j3m/bfAvGe8FB7CF6u+LMYqYlDh7reQf7UNvoTvdfAqHGmPGOSsPmA==",
|
||||
"version": "2.5.0",
|
||||
"resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.5.0.tgz",
|
||||
"integrity": "sha512-OJ/ibxhPlqrMM0UiNHJ/0CKQkoKF243/AEmplt3qpRgkW8VG7IfOS41h7V8TjITqdByHzrjcS/2si+y4lIh8NA==",
|
||||
"dev": true,
|
||||
"requires": {}
|
||||
}
|
||||
}
|
||||
},
|
||||
"@typescript-eslint/utils": {
|
||||
"version": "8.57.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.57.1.tgz",
|
||||
"integrity": "sha512-XUNSJ/lEVFttPMMoDVA2r2bwrl8/oPx8cURtczkSEswY5T3AeLmCy+EKWQNdL4u0MmAHOjcWrqJp2cdvgjn8dQ==",
|
||||
"version": "8.57.2",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.57.2.tgz",
|
||||
"integrity": "sha512-krRIbvPK1ju1WBKIefiX+bngPs+odIQUtR7kymzPfo1POVw3jlF+nLkmexdSSd4UCbDcQn+wMBATOOmpBbqgKg==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"@eslint-community/eslint-utils": "^4.9.1",
|
||||
"@typescript-eslint/scope-manager": "8.57.1",
|
||||
"@typescript-eslint/types": "8.57.1",
|
||||
"@typescript-eslint/typescript-estree": "8.57.1"
|
||||
"@typescript-eslint/scope-manager": "8.57.2",
|
||||
"@typescript-eslint/types": "8.57.2",
|
||||
"@typescript-eslint/typescript-estree": "8.57.2"
|
||||
}
|
||||
},
|
||||
"@typescript-eslint/visitor-keys": {
|
||||
"version": "8.57.1",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.57.1.tgz",
|
||||
"integrity": "sha512-YWnmJkXbofiz9KbnbbwuA2rpGkFPLbAIetcCNO6mJ8gdhdZ/v7WDXsoGFAJuM6ikUFKTlSQnjWnVO4ux+UzS6A==",
|
||||
"version": "8.57.2",
|
||||
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.57.2.tgz",
|
||||
"integrity": "sha512-zhahknjobV2FiD6Ee9iLbS7OV9zi10rG26odsQdfBO/hjSzUQbkIYgda+iNKK1zNiW2ey+Lf8MU5btN17V3dUw==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"@typescript-eslint/types": "8.57.1",
|
||||
"@typescript-eslint/types": "8.57.2",
|
||||
"eslint-visitor-keys": "^5.0.0"
|
||||
},
|
||||
"dependencies": {
|
||||
@@ -5306,9 +5305,9 @@
|
||||
}
|
||||
},
|
||||
"brace-expansion": {
|
||||
"version": "1.1.12",
|
||||
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
|
||||
"integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
|
||||
"version": "1.1.13",
|
||||
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.13.tgz",
|
||||
"integrity": "sha512-9ZLprWS6EENmhEOpjCYW2c8VkmOvckIJZfkr7rBW6dObmfgJ/L1GpSYW5Hpo9lDz4D1+n0Ckz8rU7FwHDQiG/w==",
|
||||
"requires": {
|
||||
"balanced-match": "^1.0.0",
|
||||
"concat-map": "0.0.1"
|
||||
@@ -5891,9 +5890,9 @@
|
||||
"dev": true
|
||||
},
|
||||
"brace-expansion": {
|
||||
"version": "5.0.3",
|
||||
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-5.0.3.tgz",
|
||||
"integrity": "sha512-fy6KJm2RawA5RcHkLa1z/ScpBeA762UF9KmZQxwIbDtRJrgLzM10depAiEQ+CXYcoiqW1/m96OAAoke2nE9EeA==",
|
||||
"version": "5.0.5",
|
||||
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-5.0.5.tgz",
|
||||
"integrity": "sha512-VZznLgtwhn+Mact9tfiwx64fA9erHH/MCXEUfB/0bX/6Fz6ny5EGTXYltMocqg4xFAQZtnO3DHWWXi8RiuN7cQ==",
|
||||
"dev": true,
|
||||
"requires": {
|
||||
"balanced-match": "^4.0.2"
|
||||
@@ -6198,9 +6197,9 @@
|
||||
}
|
||||
},
|
||||
"flatted": {
|
||||
"version": "3.2.7",
|
||||
"resolved": "https://registry.npmjs.org/flatted/-/flatted-3.2.7.tgz",
|
||||
"integrity": "sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ==",
|
||||
"version": "3.4.2",
|
||||
"resolved": "https://registry.npmjs.org/flatted/-/flatted-3.4.2.tgz",
|
||||
"integrity": "sha512-PjDse7RzhcPkIJwy5t7KPWQSZ9cAbzQXcafsetQoD7sOJRQlGikNbx7yZp2OotDnJyrDcbyRq3Ttb18iYOqkxA==",
|
||||
"dev": true
|
||||
},
|
||||
"for-each": {
|
||||
@@ -7612,9 +7611,9 @@
|
||||
"requires": {}
|
||||
},
|
||||
"picomatch": {
|
||||
"version": "4.0.3",
|
||||
"resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz",
|
||||
"integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==",
|
||||
"version": "4.0.4",
|
||||
"resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.4.tgz",
|
||||
"integrity": "sha512-QP88BAKvMam/3NxH6vj2o21R6MjxZUAd6nlwAS/pnGvN9IVLocLHxGYIzFhg6fUQ+5th6P4dv4eW9jX3DSIj7A==",
|
||||
"dev": true
|
||||
}
|
||||
}
|
||||
|
||||
@@ -37,7 +37,7 @@
|
||||
"devDependencies": {
|
||||
"@stylistic/eslint-plugin": "^5.10.0",
|
||||
"@types/node": "^22.0.0",
|
||||
"@typescript-eslint/eslint-plugin": "^8.57.1",
|
||||
"@typescript-eslint/eslint-plugin": "^8.57.2",
|
||||
"@typescript-eslint/parser": "^8.0.0",
|
||||
"@vercel/ncc": "^0.38.3",
|
||||
"eslint": "^8.47.0",
|
||||
|
||||
@@ -6,8 +6,8 @@ NODE_URL=https://nodejs.org/dist
|
||||
NODE_ALPINE_URL=https://github.com/actions/alpine_nodejs/releases/download
|
||||
# When you update Node versions you must also create a new release of alpine_nodejs at that updated version.
|
||||
# Follow the instructions here: https://github.com/actions/alpine_nodejs?tab=readme-ov-file#getting-started
|
||||
NODE20_VERSION="20.20.1"
|
||||
NODE24_VERSION="24.14.0"
|
||||
NODE20_VERSION="20.20.2"
|
||||
NODE24_VERSION="24.14.1"
|
||||
|
||||
get_abs_path() {
|
||||
# exploits the fact that pwd will print abs path when no args
|
||||
|
||||
@@ -177,6 +177,8 @@ namespace GitHub.Runner.Common
|
||||
public static readonly string SetOrchestrationIdEnvForActions = "actions_set_orchestration_id_env_for_actions";
|
||||
public static readonly string SendJobLevelAnnotations = "actions_send_job_level_annotations";
|
||||
public static readonly string EmitCompositeMarkers = "actions_runner_emit_composite_markers";
|
||||
public static readonly string BatchActionResolution = "actions_batch_action_resolution";
|
||||
public static readonly string UseBearerTokenForCodeload = "actions_use_bearer_token_for_codeload";
|
||||
}
|
||||
|
||||
// Node version migration related constants
|
||||
|
||||
@@ -79,6 +79,13 @@ namespace GitHub.Runner.Worker
|
||||
PreStepTracker = new Dictionary<Guid, IActionRunner>()
|
||||
};
|
||||
var containerSetupSteps = new List<JobExtensionRunner>();
|
||||
var batchActionResolution = (executionContext.Global.Variables.GetBoolean(Constants.Runner.Features.BatchActionResolution) ?? false)
|
||||
|| StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("ACTIONS_BATCH_ACTION_RESOLUTION"));
|
||||
// Stack-local cache: same action (owner/repo@ref) is resolved only once,
|
||||
// even if it appears at multiple depths in a composite tree.
|
||||
var resolvedDownloadInfos = batchActionResolution
|
||||
? new Dictionary<string, WebApi.ActionDownloadInfo>(StringComparer.Ordinal)
|
||||
: null;
|
||||
var depth = 0;
|
||||
// We are running at the start of a job
|
||||
if (rootStepId == default(Guid))
|
||||
@@ -105,7 +112,9 @@ namespace GitHub.Runner.Worker
|
||||
PrepareActionsState result = new PrepareActionsState();
|
||||
try
|
||||
{
|
||||
result = await PrepareActionsRecursiveAsync(executionContext, state, actions, depth, rootStepId);
|
||||
result = batchActionResolution
|
||||
? await PrepareActionsRecursiveAsync(executionContext, state, actions, resolvedDownloadInfos, depth, rootStepId)
|
||||
: await PrepareActionsRecursiveLegacyAsync(executionContext, state, actions, depth, rootStepId);
|
||||
}
|
||||
catch (FailedToResolveActionDownloadInfoException ex)
|
||||
{
|
||||
@@ -169,7 +178,192 @@ namespace GitHub.Runner.Worker
|
||||
return new PrepareResult(containerSetupSteps, result.PreStepTracker);
|
||||
}
|
||||
|
||||
private async Task<PrepareActionsState> PrepareActionsRecursiveAsync(IExecutionContext executionContext, PrepareActionsState state, IEnumerable<Pipelines.ActionStep> actions, Int32 depth = 0, Guid parentStepId = default(Guid))
|
||||
private async Task<PrepareActionsState> PrepareActionsRecursiveAsync(IExecutionContext executionContext, PrepareActionsState state, IEnumerable<Pipelines.ActionStep> actions, Dictionary<string, WebApi.ActionDownloadInfo> resolvedDownloadInfos, Int32 depth = 0, Guid parentStepId = default(Guid))
|
||||
{
|
||||
ArgUtil.NotNull(executionContext, nameof(executionContext));
|
||||
if (depth > Constants.CompositeActionsMaxDepth)
|
||||
{
|
||||
throw new Exception($"Composite action depth exceeded max depth {Constants.CompositeActionsMaxDepth}");
|
||||
}
|
||||
|
||||
var repositoryActions = new List<Pipelines.ActionStep>();
|
||||
|
||||
foreach (var action in actions)
|
||||
{
|
||||
if (action.Reference.Type == Pipelines.ActionSourceType.ContainerRegistry)
|
||||
{
|
||||
ArgUtil.NotNull(action, nameof(action));
|
||||
var containerReference = action.Reference as Pipelines.ContainerRegistryReference;
|
||||
ArgUtil.NotNull(containerReference, nameof(containerReference));
|
||||
ArgUtil.NotNullOrEmpty(containerReference.Image, nameof(containerReference.Image));
|
||||
|
||||
if (!state.ImagesToPull.ContainsKey(containerReference.Image))
|
||||
{
|
||||
state.ImagesToPull[containerReference.Image] = new List<Guid>();
|
||||
}
|
||||
|
||||
Trace.Info($"Action {action.Name} ({action.Id}) needs to pull image '{containerReference.Image}'");
|
||||
state.ImagesToPull[containerReference.Image].Add(action.Id);
|
||||
}
|
||||
else if (action.Reference.Type == Pipelines.ActionSourceType.Repository)
|
||||
{
|
||||
repositoryActions.Add(action);
|
||||
}
|
||||
}
|
||||
|
||||
if (repositoryActions.Count > 0)
|
||||
{
|
||||
// Resolve download info, skipping any actions already cached.
|
||||
await ResolveNewActionsAsync(executionContext, repositoryActions, resolvedDownloadInfos);
|
||||
|
||||
// Download each action.
|
||||
foreach (var action in repositoryActions)
|
||||
{
|
||||
var lookupKey = GetDownloadInfoLookupKey(action);
|
||||
if (string.IsNullOrEmpty(lookupKey))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
if (!resolvedDownloadInfos.TryGetValue(lookupKey, out var downloadInfo))
|
||||
{
|
||||
throw new Exception($"Missing download info for {lookupKey}");
|
||||
}
|
||||
await DownloadRepositoryActionAsync(executionContext, downloadInfo);
|
||||
}
|
||||
|
||||
// Parse action.yml and collect composite sub-actions for batched
|
||||
// resolution below. Pre/post step registration is deferred until
|
||||
// after recursion so that HasPre/HasPost reflect the full subtree.
|
||||
var nextLevel = new List<(Pipelines.ActionStep action, Guid parentId)>();
|
||||
|
||||
foreach (var action in repositoryActions)
|
||||
{
|
||||
var setupInfo = PrepareRepositoryActionAsync(executionContext, action);
|
||||
if (setupInfo != null && setupInfo.Container != null)
|
||||
{
|
||||
if (!string.IsNullOrEmpty(setupInfo.Container.Image))
|
||||
{
|
||||
if (!state.ImagesToPull.ContainsKey(setupInfo.Container.Image))
|
||||
{
|
||||
state.ImagesToPull[setupInfo.Container.Image] = new List<Guid>();
|
||||
}
|
||||
|
||||
Trace.Info($"Action {action.Name} ({action.Id}) from repository '{setupInfo.Container.ActionRepository}' needs to pull image '{setupInfo.Container.Image}'");
|
||||
state.ImagesToPull[setupInfo.Container.Image].Add(action.Id);
|
||||
}
|
||||
else
|
||||
{
|
||||
ArgUtil.NotNullOrEmpty(setupInfo.Container.ActionRepository, nameof(setupInfo.Container.ActionRepository));
|
||||
|
||||
if (!state.ImagesToBuild.ContainsKey(setupInfo.Container.ActionRepository))
|
||||
{
|
||||
state.ImagesToBuild[setupInfo.Container.ActionRepository] = new List<Guid>();
|
||||
}
|
||||
|
||||
Trace.Info($"Action {action.Name} ({action.Id}) from repository '{setupInfo.Container.ActionRepository}' needs to build image '{setupInfo.Container.Dockerfile}'");
|
||||
state.ImagesToBuild[setupInfo.Container.ActionRepository].Add(action.Id);
|
||||
state.ImagesToBuildInfo[setupInfo.Container.ActionRepository] = setupInfo.Container;
|
||||
}
|
||||
}
|
||||
else if (setupInfo != null && setupInfo.Steps != null && setupInfo.Steps.Count > 0)
|
||||
{
|
||||
foreach (var step in setupInfo.Steps)
|
||||
{
|
||||
nextLevel.Add((step, action.Id));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Resolve all next-level sub-actions in one batch API call,
|
||||
// then recurse per parent (which hits the cache, not the API).
|
||||
if (nextLevel.Count > 0)
|
||||
{
|
||||
var nextLevelRepoActions = nextLevel
|
||||
.Where(x => x.action.Reference.Type == Pipelines.ActionSourceType.Repository)
|
||||
.Select(x => x.action)
|
||||
.ToList();
|
||||
await ResolveNewActionsAsync(executionContext, nextLevelRepoActions, resolvedDownloadInfos);
|
||||
|
||||
foreach (var group in nextLevel.GroupBy(x => x.parentId))
|
||||
{
|
||||
var groupActions = group.Select(x => x.action).ToList();
|
||||
state = await PrepareActionsRecursiveAsync(executionContext, state, groupActions, resolvedDownloadInfos, depth + 1, group.Key);
|
||||
}
|
||||
}
|
||||
|
||||
// Register pre/post steps after recursion so that HasPre/HasPost
|
||||
// are correct (they depend on _cachedEmbeddedPreSteps/PostSteps
|
||||
// being populated by the recursive calls above).
|
||||
foreach (var action in repositoryActions)
|
||||
{
|
||||
var repoAction = action.Reference as Pipelines.RepositoryPathReference;
|
||||
if (repoAction.RepositoryType != Pipelines.PipelineConstants.SelfAlias)
|
||||
{
|
||||
var definition = LoadAction(executionContext, action);
|
||||
if (definition.Data.Execution.HasPre)
|
||||
{
|
||||
Trace.Info($"Add 'pre' execution for {action.Id}");
|
||||
// Root Step
|
||||
if (depth < 1)
|
||||
{
|
||||
var actionRunner = HostContext.CreateService<IActionRunner>();
|
||||
actionRunner.Action = action;
|
||||
actionRunner.Stage = ActionRunStage.Pre;
|
||||
actionRunner.Condition = definition.Data.Execution.InitCondition;
|
||||
state.PreStepTracker[action.Id] = actionRunner;
|
||||
}
|
||||
// Embedded Step
|
||||
else
|
||||
{
|
||||
if (!_cachedEmbeddedPreSteps.ContainsKey(parentStepId))
|
||||
{
|
||||
_cachedEmbeddedPreSteps[parentStepId] = new List<Pipelines.ActionStep>();
|
||||
}
|
||||
// Clone action so we can modify the condition without affecting the original
|
||||
var clonedAction = action.Clone() as Pipelines.ActionStep;
|
||||
clonedAction.Condition = definition.Data.Execution.InitCondition;
|
||||
_cachedEmbeddedPreSteps[parentStepId].Add(clonedAction);
|
||||
}
|
||||
}
|
||||
|
||||
if (definition.Data.Execution.HasPost && depth > 0)
|
||||
{
|
||||
if (!_cachedEmbeddedPostSteps.ContainsKey(parentStepId))
|
||||
{
|
||||
// If we haven't done so already, add the parent to the post steps
|
||||
_cachedEmbeddedPostSteps[parentStepId] = new Stack<Pipelines.ActionStep>();
|
||||
}
|
||||
// Clone action so we can modify the condition without affecting the original
|
||||
var clonedAction = action.Clone() as Pipelines.ActionStep;
|
||||
clonedAction.Condition = definition.Data.Execution.CleanupCondition;
|
||||
_cachedEmbeddedPostSteps[parentStepId].Push(clonedAction);
|
||||
}
|
||||
}
|
||||
else if (depth > 0)
|
||||
{
|
||||
// if we're in a composite action and haven't loaded the local action yet
|
||||
// we assume it has a post step
|
||||
if (!_cachedEmbeddedPostSteps.ContainsKey(parentStepId))
|
||||
{
|
||||
// If we haven't done so already, add the parent to the post steps
|
||||
_cachedEmbeddedPostSteps[parentStepId] = new Stack<Pipelines.ActionStep>();
|
||||
}
|
||||
// Clone action so we can modify the condition without affecting the original
|
||||
var clonedAction = action.Clone() as Pipelines.ActionStep;
|
||||
_cachedEmbeddedPostSteps[parentStepId].Push(clonedAction);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return state;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Legacy (non-batched) action resolution. Each composite resolves its
|
||||
/// sub-actions individually, with no cross-depth deduplication.
|
||||
/// Used when the BatchActionResolution feature flag is disabled.
|
||||
/// </summary>
|
||||
private async Task<PrepareActionsState> PrepareActionsRecursiveLegacyAsync(IExecutionContext executionContext, PrepareActionsState state, IEnumerable<Pipelines.ActionStep> actions, Int32 depth = 0, Guid parentStepId = default(Guid))
|
||||
{
|
||||
ArgUtil.NotNull(executionContext, nameof(executionContext));
|
||||
if (depth > Constants.CompositeActionsMaxDepth)
|
||||
@@ -255,7 +449,7 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
else if (setupInfo != null && setupInfo.Steps != null && setupInfo.Steps.Count > 0)
|
||||
{
|
||||
state = await PrepareActionsRecursiveAsync(executionContext, state, setupInfo.Steps, depth + 1, action.Id);
|
||||
state = await PrepareActionsRecursiveLegacyAsync(executionContext, state, setupInfo.Steps, depth + 1, action.Id);
|
||||
}
|
||||
var repoAction = action.Reference as Pipelines.RepositoryPathReference;
|
||||
if (repoAction.RepositoryType != Pipelines.PipelineConstants.SelfAlias)
|
||||
@@ -762,6 +956,33 @@ namespace GitHub.Runner.Worker
|
||||
return actionDownloadInfos.Actions;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Only resolves actions not already in resolvedDownloadInfos.
|
||||
/// Results are cached for reuse at deeper recursion levels.
|
||||
/// </summary>
|
||||
private async Task ResolveNewActionsAsync(IExecutionContext executionContext, List<Pipelines.ActionStep> actions, Dictionary<string, WebApi.ActionDownloadInfo> resolvedDownloadInfos)
|
||||
{
|
||||
var actionsToResolve = new List<Pipelines.ActionStep>();
|
||||
var pendingKeys = new HashSet<string>(StringComparer.Ordinal);
|
||||
foreach (var action in actions)
|
||||
{
|
||||
var lookupKey = GetDownloadInfoLookupKey(action);
|
||||
if (!string.IsNullOrEmpty(lookupKey) && !resolvedDownloadInfos.ContainsKey(lookupKey) && pendingKeys.Add(lookupKey))
|
||||
{
|
||||
actionsToResolve.Add(action);
|
||||
}
|
||||
}
|
||||
|
||||
if (actionsToResolve.Count > 0)
|
||||
{
|
||||
var downloadInfos = await GetDownloadInfoAsync(executionContext, actionsToResolve);
|
||||
foreach (var kvp in downloadInfos)
|
||||
{
|
||||
resolvedDownloadInfos[kvp.Key] = kvp.Value;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private async Task DownloadRepositoryActionAsync(IExecutionContext executionContext, WebApi.ActionDownloadInfo downloadInfo)
|
||||
{
|
||||
Trace.Entering();
|
||||
@@ -1146,16 +1367,29 @@ namespace GitHub.Runner.Worker
|
||||
return $"{repositoryReference.Name}@{repositoryReference.Ref}";
|
||||
}
|
||||
|
||||
private AuthenticationHeaderValue CreateAuthHeader(string token)
|
||||
private AuthenticationHeaderValue CreateAuthHeader(IExecutionContext executionContext, string downloadUrl, string token)
|
||||
{
|
||||
if (string.IsNullOrEmpty(token))
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
var base64EncodingToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"x-access-token:{token}"));
|
||||
HostContext.SecretMasker.AddValue(base64EncodingToken);
|
||||
return new AuthenticationHeaderValue("Basic", base64EncodingToken);
|
||||
if (executionContext.Global.Variables.GetBoolean(Constants.Runner.Features.UseBearerTokenForCodeload) == true &&
|
||||
Uri.TryCreate(downloadUrl, UriKind.Absolute, out var parsedUrl) &&
|
||||
!string.IsNullOrEmpty(parsedUrl?.Host) &&
|
||||
!string.IsNullOrEmpty(parsedUrl?.PathAndQuery) &&
|
||||
(parsedUrl.Host.StartsWith("codeload.", StringComparison.OrdinalIgnoreCase) || parsedUrl.PathAndQuery.StartsWith("/_codeload/", StringComparison.OrdinalIgnoreCase)))
|
||||
{
|
||||
Trace.Info("Using Bearer token for action archive download directly to codeload.");
|
||||
return new AuthenticationHeaderValue("Bearer", token);
|
||||
}
|
||||
else
|
||||
{
|
||||
Trace.Info("Using Basic token for action archive download.");
|
||||
var base64EncodingToken = Convert.ToBase64String(Encoding.UTF8.GetBytes($"x-access-token:{token}"));
|
||||
HostContext.SecretMasker.AddValue(base64EncodingToken);
|
||||
return new AuthenticationHeaderValue("Basic", base64EncodingToken);
|
||||
}
|
||||
}
|
||||
|
||||
private async Task DownloadRepositoryArchive(IExecutionContext executionContext, string downloadUrl, string downloadAuthToken, string archiveFile)
|
||||
@@ -1180,7 +1414,7 @@ namespace GitHub.Runner.Worker
|
||||
using (var httpClientHandler = HostContext.CreateHttpClientHandler())
|
||||
using (var httpClient = new HttpClient(httpClientHandler))
|
||||
{
|
||||
httpClient.DefaultRequestHeaders.Authorization = CreateAuthHeader(downloadAuthToken);
|
||||
httpClient.DefaultRequestHeaders.Authorization = CreateAuthHeader(executionContext, downloadUrl, downloadAuthToken);
|
||||
|
||||
httpClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents);
|
||||
using (var response = await httpClient.GetAsync(downloadUrl))
|
||||
|
||||
@@ -316,7 +316,6 @@ namespace GitHub.Runner.Worker
|
||||
Schema = _actionManifestSchema,
|
||||
// TODO: Switch to real tracewriter for cutover
|
||||
TraceWriter = new GitHub.Actions.WorkflowParser.ObjectTemplating.EmptyTraceWriter(),
|
||||
AllowCaseFunction = false,
|
||||
};
|
||||
|
||||
// Expression values from execution context
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
using System;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
@@ -315,7 +315,6 @@ namespace GitHub.Runner.Worker
|
||||
maxBytes: 10 * 1024 * 1024),
|
||||
Schema = _actionManifestSchema,
|
||||
TraceWriter = executionContext.ToTemplateTraceWriter(),
|
||||
AllowCaseFunction = false,
|
||||
};
|
||||
|
||||
// Expression values from execution context
|
||||
|
||||
1299
src/Runner.Worker/Dap/DapDebugger.cs
Normal file
1299
src/Runner.Worker/Dap/DapDebugger.cs
Normal file
File diff suppressed because it is too large
Load Diff
1231
src/Runner.Worker/Dap/DapMessages.cs
Normal file
1231
src/Runner.Worker/Dap/DapMessages.cs
Normal file
File diff suppressed because it is too large
Load Diff
369
src/Runner.Worker/Dap/DapReplExecutor.cs
Normal file
369
src/Runner.Worker/Dap/DapReplExecutor.cs
Normal file
@@ -0,0 +1,369 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.DistributedTask.Pipelines.ContextData;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Common.Util;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Runner.Worker.Handlers;
|
||||
|
||||
namespace GitHub.Runner.Worker.Dap
|
||||
{
|
||||
/// <summary>
|
||||
/// Executes <see cref="RunCommand"/> objects in the job's runtime context.
|
||||
///
|
||||
/// Mirrors the behavior of a normal workflow <c>run:</c> step as closely
|
||||
/// as possible by reusing the runner's existing shell-resolution logic,
|
||||
/// script fixup helpers, and process execution infrastructure.
|
||||
///
|
||||
/// Output is streamed to the debugger via DAP <c>output</c> events with
|
||||
/// secrets masked before emission.
|
||||
/// </summary>
|
||||
internal sealed class DapReplExecutor
|
||||
{
|
||||
private readonly IHostContext _hostContext;
|
||||
private readonly Action<string, string> _sendOutput;
|
||||
private readonly Tracing _trace;
|
||||
|
||||
public DapReplExecutor(IHostContext hostContext, Action<string, string> sendOutput)
|
||||
{
|
||||
_hostContext = hostContext ?? throw new ArgumentNullException(nameof(hostContext));
|
||||
_sendOutput = sendOutput ?? throw new ArgumentNullException(nameof(sendOutput));
|
||||
_trace = hostContext.GetTrace(nameof(DapReplExecutor));
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Executes a <see cref="RunCommand"/> and returns the exit code as a
|
||||
/// formatted <see cref="EvaluateResponseBody"/>.
|
||||
/// </summary>
|
||||
public async Task<EvaluateResponseBody> ExecuteRunCommandAsync(
|
||||
RunCommand command,
|
||||
IExecutionContext context,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
if (context == null)
|
||||
{
|
||||
return ErrorResult("No execution context available. The debugger must be paused at a step to run commands.");
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
return await ExecuteScriptAsync(command, context, cancellationToken);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_trace.Error($"REPL run command failed ({ex.GetType().Name})");
|
||||
var maskedError = _hostContext.SecretMasker.MaskSecrets(ex.Message);
|
||||
return ErrorResult($"Command failed: {maskedError}");
|
||||
}
|
||||
}
|
||||
|
||||
private async Task<EvaluateResponseBody> ExecuteScriptAsync(
|
||||
RunCommand command,
|
||||
IExecutionContext context,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
// 1. Resolve shell — same logic as ScriptHandler
|
||||
string shellCommand;
|
||||
string argFormat;
|
||||
|
||||
if (!string.IsNullOrEmpty(command.Shell))
|
||||
{
|
||||
// Explicit shell from the DSL
|
||||
var parsed = ScriptHandlerHelpers.ParseShellOptionString(command.Shell);
|
||||
shellCommand = parsed.shellCommand;
|
||||
argFormat = string.IsNullOrEmpty(parsed.shellArgs)
|
||||
? ScriptHandlerHelpers.GetScriptArgumentsFormat(shellCommand)
|
||||
: parsed.shellArgs;
|
||||
}
|
||||
else
|
||||
{
|
||||
// Default shell — mirrors ScriptHandler platform defaults
|
||||
shellCommand = ResolveDefaultShell(context);
|
||||
argFormat = ScriptHandlerHelpers.GetScriptArgumentsFormat(shellCommand);
|
||||
}
|
||||
|
||||
_trace.Info("Resolved REPL shell");
|
||||
|
||||
// 2. Expand ${{ }} expressions in the script body, just like
|
||||
// ActionRunner evaluates step inputs before ScriptHandler sees them
|
||||
var contents = ExpandExpressions(command.Script, context);
|
||||
contents = ScriptHandlerHelpers.FixUpScriptContents(shellCommand, contents);
|
||||
|
||||
// Write to a temp file (same pattern as ScriptHandler)
|
||||
var extension = ScriptHandlerHelpers.GetScriptFileExtension(shellCommand);
|
||||
var scriptFilePath = Path.Combine(
|
||||
_hostContext.GetDirectory(WellKnownDirectory.Temp),
|
||||
$"dap_repl_{Guid.NewGuid()}{extension}");
|
||||
|
||||
Encoding encoding = new UTF8Encoding(false);
|
||||
#if OS_WINDOWS
|
||||
contents = contents.Replace("\r\n", "\n").Replace("\n", "\r\n");
|
||||
encoding = Console.InputEncoding.CodePage != 65001
|
||||
? Console.InputEncoding
|
||||
: encoding;
|
||||
#endif
|
||||
File.WriteAllText(scriptFilePath, contents, encoding);
|
||||
|
||||
try
|
||||
{
|
||||
// 3. Format arguments with script path
|
||||
var resolvedPath = scriptFilePath.Replace("\"", "\\\"");
|
||||
if (string.IsNullOrEmpty(argFormat) || !argFormat.Contains("{0}"))
|
||||
{
|
||||
return ErrorResult($"Invalid shell option '{shellCommand}'. Shell must be a valid built-in (bash, sh, cmd, powershell, pwsh) or a format string containing '{{0}}'");
|
||||
}
|
||||
var arguments = string.Format(argFormat, resolvedPath);
|
||||
|
||||
// 4. Resolve shell command path
|
||||
string prependPath = string.Join(
|
||||
Path.PathSeparator.ToString(),
|
||||
Enumerable.Reverse(context.Global.PrependPath));
|
||||
var commandPath = WhichUtil.Which(shellCommand, false, _trace, prependPath)
|
||||
?? shellCommand;
|
||||
|
||||
// 5. Build environment — merge from execution context like a real step
|
||||
var environment = BuildEnvironment(context, command.Env);
|
||||
|
||||
// 6. Resolve working directory
|
||||
var workingDirectory = command.WorkingDirectory;
|
||||
if (string.IsNullOrEmpty(workingDirectory))
|
||||
{
|
||||
var githubContext = context.ExpressionValues.TryGetValue("github", out var gh)
|
||||
? gh as DictionaryContextData
|
||||
: null;
|
||||
var workspace = githubContext?.TryGetValue("workspace", out var ws) == true
|
||||
? (ws as StringContextData)?.Value
|
||||
: null;
|
||||
workingDirectory = workspace ?? _hostContext.GetDirectory(WellKnownDirectory.Work);
|
||||
}
|
||||
|
||||
_trace.Info("Executing REPL command");
|
||||
|
||||
// Stream execution info to debugger
|
||||
SendOutput("console", $"$ {shellCommand} {command.Script.Substring(0, Math.Min(command.Script.Length, 80))}{(command.Script.Length > 80 ? "..." : "")}\n");
|
||||
|
||||
// 7. Execute via IProcessInvoker (same as DefaultStepHost)
|
||||
int exitCode;
|
||||
using (var processInvoker = _hostContext.CreateService<IProcessInvoker>())
|
||||
{
|
||||
processInvoker.OutputDataReceived += (sender, args) =>
|
||||
{
|
||||
if (!string.IsNullOrEmpty(args.Data))
|
||||
{
|
||||
var masked = _hostContext.SecretMasker.MaskSecrets(args.Data);
|
||||
SendOutput("stdout", masked + "\n");
|
||||
}
|
||||
};
|
||||
|
||||
processInvoker.ErrorDataReceived += (sender, args) =>
|
||||
{
|
||||
if (!string.IsNullOrEmpty(args.Data))
|
||||
{
|
||||
var masked = _hostContext.SecretMasker.MaskSecrets(args.Data);
|
||||
SendOutput("stderr", masked + "\n");
|
||||
}
|
||||
};
|
||||
|
||||
exitCode = await processInvoker.ExecuteAsync(
|
||||
workingDirectory: workingDirectory,
|
||||
fileName: commandPath,
|
||||
arguments: arguments,
|
||||
environment: environment,
|
||||
requireExitCodeZero: false,
|
||||
outputEncoding: null,
|
||||
killProcessOnCancel: true,
|
||||
cancellationToken: cancellationToken);
|
||||
}
|
||||
|
||||
_trace.Info($"REPL command exited with code {exitCode}");
|
||||
|
||||
// 8. Return only the exit code summary (output was already streamed)
|
||||
return new EvaluateResponseBody
|
||||
{
|
||||
Result = exitCode == 0 ? $"(exit code: {exitCode})" : $"Process completed with exit code {exitCode}.",
|
||||
Type = exitCode == 0 ? "string" : "error",
|
||||
VariablesReference = 0
|
||||
};
|
||||
}
|
||||
finally
|
||||
{
|
||||
// Clean up temp script file
|
||||
try { File.Delete(scriptFilePath); }
|
||||
catch { /* best effort */ }
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Expands <c>${{ }}</c> expressions in the input string using the
|
||||
/// runner's template evaluator — the same evaluation path that processes
|
||||
/// step inputs before <see cref="ScriptHandler"/> runs them.
|
||||
///
|
||||
/// Each <c>${{ expr }}</c> occurrence is individually evaluated and
|
||||
/// replaced with its masked string result, mirroring the semantics of
|
||||
/// expression interpolation in a workflow <c>run:</c> step body.
|
||||
/// </summary>
|
||||
internal string ExpandExpressions(string input, IExecutionContext context)
|
||||
{
|
||||
if (string.IsNullOrEmpty(input) || !input.Contains("${{"))
|
||||
{
|
||||
return input ?? string.Empty;
|
||||
}
|
||||
|
||||
var result = new StringBuilder();
|
||||
int pos = 0;
|
||||
|
||||
while (pos < input.Length)
|
||||
{
|
||||
var start = input.IndexOf("${{", pos, StringComparison.Ordinal);
|
||||
if (start < 0)
|
||||
{
|
||||
result.Append(input, pos, input.Length - pos);
|
||||
break;
|
||||
}
|
||||
|
||||
// Append the literal text before the expression
|
||||
result.Append(input, pos, start - pos);
|
||||
|
||||
var end = input.IndexOf("}}", start + 3, StringComparison.Ordinal);
|
||||
if (end < 0)
|
||||
{
|
||||
// Unterminated expression — keep literal
|
||||
result.Append(input, start, input.Length - start);
|
||||
break;
|
||||
}
|
||||
|
||||
var expr = input.Substring(start + 3, end - start - 3).Trim();
|
||||
end += 2; // skip past "}}"
|
||||
|
||||
// Evaluate the expression
|
||||
try
|
||||
{
|
||||
var templateEvaluator = context.ToPipelineTemplateEvaluator();
|
||||
var token = new GitHub.DistributedTask.ObjectTemplating.Tokens.BasicExpressionToken(
|
||||
null, null, null, expr);
|
||||
var evaluated = templateEvaluator.EvaluateStepDisplayName(
|
||||
token,
|
||||
context.ExpressionValues,
|
||||
context.ExpressionFunctions);
|
||||
result.Append(_hostContext.SecretMasker.MaskSecrets(evaluated ?? string.Empty));
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_trace.Warning($"Expression expansion failed ({ex.GetType().Name})");
|
||||
// Keep the original expression literal on failure
|
||||
result.Append(input, start, end - start);
|
||||
}
|
||||
|
||||
pos = end;
|
||||
}
|
||||
|
||||
return result.ToString();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Resolves the default shell the same way <see cref="ScriptHandler"/>
|
||||
/// does: check job defaults, then fall back to platform default.
|
||||
/// </summary>
|
||||
internal string ResolveDefaultShell(IExecutionContext context)
|
||||
{
|
||||
// Check job defaults
|
||||
if (context.Global?.JobDefaults != null &&
|
||||
context.Global.JobDefaults.TryGetValue("run", out var runDefaults) &&
|
||||
runDefaults.TryGetValue("shell", out var defaultShell) &&
|
||||
!string.IsNullOrEmpty(defaultShell))
|
||||
{
|
||||
_trace.Info("Using job default shell");
|
||||
return defaultShell;
|
||||
}
|
||||
|
||||
#if OS_WINDOWS
|
||||
string prependPath = string.Join(
|
||||
Path.PathSeparator.ToString(),
|
||||
context.Global?.PrependPath != null ? Enumerable.Reverse(context.Global.PrependPath) : Array.Empty<string>());
|
||||
var pwshPath = WhichUtil.Which("pwsh", false, _trace, prependPath);
|
||||
return !string.IsNullOrEmpty(pwshPath) ? "pwsh" : "powershell";
|
||||
#else
|
||||
return "sh";
|
||||
#endif
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Merges the job context environment with any REPL-specific overrides.
|
||||
/// </summary>
|
||||
internal Dictionary<string, string> BuildEnvironment(
|
||||
IExecutionContext context,
|
||||
Dictionary<string, string> replEnv)
|
||||
{
|
||||
var env = new Dictionary<string, string>(VarUtil.EnvironmentVariableKeyComparer);
|
||||
|
||||
// Pull environment from the execution context (same as ActionRunner)
|
||||
if (context.ExpressionValues.TryGetValue("env", out var envData))
|
||||
{
|
||||
if (envData is DictionaryContextData dictEnv)
|
||||
{
|
||||
foreach (var pair in dictEnv)
|
||||
{
|
||||
if (pair.Value is StringContextData str)
|
||||
{
|
||||
env[pair.Key] = str.Value;
|
||||
}
|
||||
}
|
||||
}
|
||||
else if (envData is CaseSensitiveDictionaryContextData csEnv)
|
||||
{
|
||||
foreach (var pair in csEnv)
|
||||
{
|
||||
if (pair.Value is StringContextData str)
|
||||
{
|
||||
env[pair.Key] = str.Value;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Expose runtime context variables to the environment (GITHUB_*, RUNNER_*, etc.)
|
||||
foreach (var ctxPair in context.ExpressionValues)
|
||||
{
|
||||
if (ctxPair.Value is IEnvironmentContextData runtimeContext && runtimeContext != null)
|
||||
{
|
||||
foreach (var rtEnv in runtimeContext.GetRuntimeEnvironmentVariables())
|
||||
{
|
||||
env[rtEnv.Key] = rtEnv.Value;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Apply REPL-specific overrides last (so they win),
|
||||
// expanding any ${{ }} expressions in the values
|
||||
if (replEnv != null)
|
||||
{
|
||||
foreach (var pair in replEnv)
|
||||
{
|
||||
env[pair.Key] = ExpandExpressions(pair.Value, context);
|
||||
}
|
||||
}
|
||||
|
||||
return env;
|
||||
}
|
||||
|
||||
private void SendOutput(string category, string text)
|
||||
{
|
||||
_sendOutput(category, text);
|
||||
}
|
||||
|
||||
private static EvaluateResponseBody ErrorResult(string message)
|
||||
{
|
||||
return new EvaluateResponseBody
|
||||
{
|
||||
Result = message,
|
||||
Type = "error",
|
||||
VariablesReference = 0
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
411
src/Runner.Worker/Dap/DapReplParser.cs
Normal file
411
src/Runner.Worker/Dap/DapReplParser.cs
Normal file
@@ -0,0 +1,411 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Text;
|
||||
|
||||
namespace GitHub.Runner.Worker.Dap
|
||||
{
|
||||
/// <summary>
|
||||
/// Base type for all REPL DSL commands.
|
||||
/// </summary>
|
||||
internal abstract class DapReplCommand
|
||||
{
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// <c>help</c> or <c>help("run")</c>
|
||||
/// </summary>
|
||||
internal sealed class HelpCommand : DapReplCommand
|
||||
{
|
||||
public string Topic { get; set; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// <c>run("echo hello")</c> or
|
||||
/// <c>run("echo hello", shell: "bash", env: { FOO: "bar" }, working_directory: "/tmp")</c>
|
||||
/// </summary>
|
||||
internal sealed class RunCommand : DapReplCommand
|
||||
{
|
||||
public string Script { get; set; }
|
||||
public string Shell { get; set; }
|
||||
public Dictionary<string, string> Env { get; set; }
|
||||
public string WorkingDirectory { get; set; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parses REPL input into typed <see cref="DapReplCommand"/> objects.
|
||||
///
|
||||
/// Grammar (intentionally minimal — extend as the DSL grows):
|
||||
/// <code>
|
||||
/// help → HelpCommand { Topic = null }
|
||||
/// help("run") → HelpCommand { Topic = "run" }
|
||||
/// run("script body") → RunCommand { Script = "script body" }
|
||||
/// run("script", shell: "bash") → RunCommand { Shell = "bash" }
|
||||
/// run("script", env: { K: "V" }) → RunCommand { Env = { K → V } }
|
||||
/// run("script", working_directory: "p")→ RunCommand { WorkingDirectory = "p" }
|
||||
/// </code>
|
||||
///
|
||||
/// Parsing is intentionally hand-rolled rather than regex-based so it can
|
||||
/// handle nested braces, quoted strings with escapes, and grow to support
|
||||
/// future commands without accumulating regex complexity.
|
||||
/// </summary>
|
||||
internal static class DapReplParser
|
||||
{
|
||||
/// <summary>
|
||||
/// Attempts to parse REPL input into a command. Returns null if the
|
||||
/// input does not match any known DSL command (i.e. it should be
|
||||
/// treated as an expression instead).
|
||||
/// </summary>
|
||||
internal static DapReplCommand TryParse(string input, out string error)
|
||||
{
|
||||
error = null;
|
||||
if (string.IsNullOrWhiteSpace(input))
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
var trimmed = input.Trim();
|
||||
|
||||
// help / help("topic")
|
||||
if (trimmed.Equals("help", StringComparison.OrdinalIgnoreCase) ||
|
||||
trimmed.StartsWith("help(", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
return ParseHelp(trimmed, out error);
|
||||
}
|
||||
|
||||
// run("...")
|
||||
if (trimmed.StartsWith("run(", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
return ParseRun(trimmed, out error);
|
||||
}
|
||||
|
||||
// Not a DSL command
|
||||
return null;
|
||||
}
|
||||
|
||||
internal static string GetGeneralHelp()
|
||||
{
|
||||
return """
|
||||
Actions Debug Console
|
||||
|
||||
Commands:
|
||||
help Show this help
|
||||
help("run") Show help for the run command
|
||||
run("script") Execute a script (like a workflow run step)
|
||||
|
||||
Anything else is evaluated as a GitHub Actions expression.
|
||||
Example: github.repository
|
||||
Example: ${{ github.event_name }}
|
||||
|
||||
""";
|
||||
}
|
||||
|
||||
internal static string GetRunHelp()
|
||||
{
|
||||
return """
|
||||
run command — execute a script in the job context
|
||||
|
||||
Usage:
|
||||
run("echo hello")
|
||||
run("echo $FOO", shell: "bash")
|
||||
run("echo $FOO", env: { FOO: "bar" })
|
||||
run("ls", working_directory: "/tmp")
|
||||
run("echo $X", shell: "bash", env: { X: "1" }, working_directory: "/tmp")
|
||||
|
||||
Options:
|
||||
shell: Shell to use (default: job default, e.g. bash)
|
||||
env: Extra environment variables as { KEY: "value" }
|
||||
working_directory: Working directory for the command
|
||||
|
||||
Behavior:
|
||||
- Equivalent to a workflow `run:` step
|
||||
- Expressions in the script body are expanded (${{ ... }})
|
||||
- Output is streamed in real time and secrets are masked
|
||||
|
||||
""";
|
||||
}
|
||||
|
||||
#region Parsers
|
||||
|
||||
/// <summary>
/// Parses a "help" invocation: either the bare word <c>help</c> (general
/// help) or <c>help("topic")</c> (topic-specific help).
/// On failure returns null and sets <paramref name="error"/>.
/// </summary>
private static HelpCommand ParseHelp(string input, out string error)
{
    error = null;

    // Bare "help" with no arguments — general help.
    if (input.Equals("help", StringComparison.OrdinalIgnoreCase))
    {
        return new HelpCommand();
    }

    // help("topic") — unwrap the parentheses, then the quoted topic.
    var argumentText = ExtractParenthesizedArgs(input, "help", out error);
    if (error != null)
    {
        return null;
    }

    var topic = ExtractQuotedString(argumentText.Trim(), out error);
    return error != null ? null : new HelpCommand { Topic = topic };
}
|
||||
|
||||
/// <summary>
/// Parses a <c>run("script", shell: "...", env: { ... }, working_directory: "...")</c>
/// invocation into a <see cref="RunCommand"/>. The first argument must be a
/// quoted script; the remaining arguments are keyword options.
/// On failure returns null and sets <paramref name="error"/>.
/// </summary>
private static RunCommand ParseRun(string input, out string error)
{
    error = null;

    // Unwrap run( ... ).
    var argumentText = ExtractParenthesizedArgs(input, "run", out error);
    if (error != null)
    {
        return null;
    }

    // Split the argument list, honoring quotes and nested braces.
    var arguments = SplitArguments(argumentText, out error);
    if (error != null)
    {
        return null;
    }

    if (arguments.Count == 0)
    {
        error = "run() requires a script argument. Example: run(\"echo hello\")";
        return null;
    }

    // The script body is the mandatory first (quoted) argument.
    var script = ExtractQuotedString(arguments[0].Trim(), out error);
    if (error != null)
    {
        error = $"First argument to run() must be a quoted string. {error}";
        return null;
    }

    var command = new RunCommand { Script = script };

    // Everything after the script is a keyword argument: key: value.
    for (var index = 1; index < arguments.Count; index++)
    {
        var rawArgument = arguments[index].Trim();
        var separator = rawArgument.IndexOf(':');
        if (separator <= 0)
        {
            error = $"Expected keyword argument (e.g. shell: \"bash\"), got: {rawArgument}";
            return null;
        }

        var key = rawArgument.Substring(0, separator).Trim();
        var value = rawArgument.Substring(separator + 1).Trim();
        var option = key.ToLowerInvariant();

        if (option == "shell")
        {
            command.Shell = ExtractQuotedString(value, out error);
            if (error != null)
            {
                error = $"shell: {error}";
                return null;
            }
        }
        else if (option == "working_directory")
        {
            command.WorkingDirectory = ExtractQuotedString(value, out error);
            if (error != null)
            {
                error = $"working_directory: {error}";
                return null;
            }
        }
        else if (option == "env")
        {
            command.Env = ParseEnvBlock(value, out error);
            if (error != null)
            {
                error = $"env: {error}";
                return null;
            }
        }
        else
        {
            error = $"Unknown option: {key}. Valid options: shell, env, working_directory";
            return null;
        }
    }

    return command;
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Low-level parsing helpers
|
||||
|
||||
/// <summary>
/// Given <c>cmd(...)</c> returns the inner content between the outer
/// parentheses. Requires '(' immediately after <paramref name="prefix"/>
/// and ')' as the last character; otherwise sets <paramref name="error"/>
/// and returns null.
/// </summary>
private static string ExtractParenthesizedArgs(string input, string prefix, out string error)
{
    error = null;

    // Position immediately after the command word.
    var openIndex = prefix.Length;
    if (openIndex >= input.Length || input[openIndex] != '(')
    {
        error = $"Expected '(' after {prefix}";
        return null;
    }

    if (input[input.Length - 1] != ')')
    {
        error = $"Expected ')' at end of {prefix}(...)";
        return null;
    }

    // Everything strictly between the outer parentheses.
    return input.Substring(openIndex + 1, input.Length - openIndex - 2);
}
|
||||
|
||||
/// <summary>
/// Extracts a double-quoted string value, handling backslash-escaped
/// characters (the character after a backslash is emitted verbatim).
/// Only trailing whitespace may follow the closing quote. On failure
/// sets <paramref name="error"/> and returns null.
/// </summary>
internal static string ExtractQuotedString(string input, out string error)
{
    error = null;

    if (string.IsNullOrEmpty(input))
    {
        error = "Expected a quoted string, got empty input";
        return null;
    }

    if (input[0] != '"')
    {
        error = $"Expected a quoted string starting with \", got: {Truncate(input, 40)}";
        return null;
    }

    var buffer = new StringBuilder();
    var index = 1;
    while (index < input.Length)
    {
        var ch = input[index];

        if (ch == '\\' && index + 1 < input.Length)
        {
            // Escape sequence: keep the escaped character as-is.
            buffer.Append(input[index + 1]);
            index += 2;
            continue;
        }

        if (ch == '"')
        {
            // Closing quote — nothing meaningful may follow it.
            var trailing = input.Substring(index + 1).Trim();
            if (trailing.Length > 0)
            {
                error = $"Unexpected content after closing quote: {Truncate(trailing, 40)}";
                return null;
            }
            return buffer.ToString();
        }

        buffer.Append(ch);
        index++;
    }

    error = "Unterminated string (missing closing \")";
    return null;
}
|
||||
|
||||
/// <summary>
/// Splits a comma-separated argument list, respecting quoted strings
/// and nested braces so that <c>"a, b", env: { K: "V, W" }</c> is
/// correctly split into two arguments. On failure (unterminated string
/// or unmatched braces) sets <paramref name="error"/> and returns null.
/// </summary>
internal static List<string> SplitArguments(string input, out string error)
{
    error = null;
    var arguments = new List<string>();
    var token = new StringBuilder();
    var braceDepth = 0;
    var insideQuote = false;

    for (var i = 0; i < input.Length; i++)
    {
        var ch = input[i];

        // Preserve escape sequences inside quoted strings verbatim.
        if (insideQuote && ch == '\\' && i + 1 < input.Length)
        {
            token.Append(ch).Append(input[++i]);
            continue;
        }

        if (ch == '"')
        {
            insideQuote = !insideQuote;
            token.Append(ch);
            continue;
        }

        if (!insideQuote)
        {
            switch (ch)
            {
                case '{':
                    braceDepth++;
                    token.Append(ch);
                    continue;
                case '}':
                    braceDepth--;
                    token.Append(ch);
                    continue;
                case ',' when braceDepth == 0:
                    // Top-level comma: finish the current argument.
                    arguments.Add(token.ToString());
                    token.Clear();
                    continue;
            }
        }

        token.Append(ch);
    }

    if (insideQuote)
    {
        error = "Unterminated string in arguments";
        return null;
    }

    if (braceDepth != 0)
    {
        error = "Unmatched braces in arguments";
        return null;
    }

    if (token.Length > 0)
    {
        arguments.Add(token.ToString());
    }

    return arguments;
}
|
||||
|
||||
/// <summary>
/// Parses <c>{ KEY: "value", KEY2: "value2" }</c> into a dictionary.
/// Keys are compared case-insensitively. On failure sets
/// <paramref name="error"/> and returns null.
/// </summary>
internal static Dictionary<string, string> ParseEnvBlock(string input, out string error)
{
    error = null;

    var block = input.Trim();
    if (!block.StartsWith("{") || !block.EndsWith("}"))
    {
        error = "Expected env block in the form { KEY: \"value\" }";
        return null;
    }

    // Fix: use the same case-insensitive comparer for the empty-block case
    // as for the populated case, so callers get consistent lookup semantics.
    var result = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);

    var inner = block.Substring(1, block.Length - 2).Trim();
    if (string.IsNullOrEmpty(inner))
    {
        return result;
    }

    // Reuse the argument splitter so commas inside quoted values survive.
    var pairs = SplitArguments(inner, out error);
    if (error != null)
    {
        return null;
    }

    foreach (var pair in pairs)
    {
        var colonIdx = pair.IndexOf(':');
        if (colonIdx <= 0)
        {
            error = $"Expected KEY: \"value\" pair, got: {Truncate(pair.Trim(), 40)}";
            return null;
        }

        var key = pair.Substring(0, colonIdx).Trim();
        var val = ExtractQuotedString(pair.Substring(colonIdx + 1).Trim(), out error);
        if (error != null)
        {
            return null;
        }

        // Last occurrence of a duplicate key wins (indexer assignment).
        result[key] = val;
    }

    return result;
}
|
||||
|
||||
/// <summary>
/// Truncates <paramref name="value"/> to at most <paramref name="maxLength"/>
/// characters, appending "..." when shortened; null yields "(null)".
/// Used to keep parser error messages bounded.
/// </summary>
private static string Truncate(string value, int maxLength)
{
    return value switch
    {
        null => "(null)",
        _ when value.Length <= maxLength => value,
        _ => value.Substring(0, maxLength) + "...",
    };
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
}
|
||||
373
src/Runner.Worker/Dap/DapVariableProvider.cs
Normal file
373
src/Runner.Worker/Dap/DapVariableProvider.cs
Normal file
@@ -0,0 +1,373 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Globalization;
|
||||
using GitHub.DistributedTask.Logging;
|
||||
using GitHub.DistributedTask.ObjectTemplating.Tokens;
|
||||
using GitHub.DistributedTask.Pipelines.ContextData;
|
||||
|
||||
namespace GitHub.Runner.Worker.Dap
|
||||
{
|
||||
/// <summary>
/// Maps runner execution context data to DAP scopes and variables.
///
/// This is the single point where runner context values are materialized
/// for the debugger. All values pass through the runner's existing
/// <see cref="GitHub.DistributedTask.Logging.ISecretMasker"/> so the DAP
/// surface never exposes anything beyond what a normal CI log would show.
///
/// The secrets scope is intentionally opaque: keys are visible but every
/// value is replaced with a constant redaction marker.
///
/// Designed to be reusable by future DAP features (evaluate, hover, REPL)
/// so that masking policy is never duplicated.
/// </summary>
internal sealed class DapVariableProvider
{
    // Well-known scope names that map to top-level expression contexts.
    // Order matters: the index determines the stable variablesReference ID.
    private static readonly string[] _scopeNames =
    {
        "github", "env", "runner", "job", "steps",
        "secrets", "inputs", "vars", "matrix", "needs"
    };

    // Scope references occupy the range [1, ScopeReferenceMax].
    private const int _scopeReferenceBase = 1;
    private const int _scopeReferenceMax = 100;

    // Dynamic (nested) variable references start above the scope range.
    private const int _dynamicReferenceBase = 101;

    // Constant marker shown instead of every secret value.
    private const string _redactedValue = "***";

    private readonly ISecretMasker _secretMasker;

    // Maps dynamic variable reference IDs to the backing data and its
    // dot-separated path (e.g. "github.event.pull_request").
    private readonly Dictionary<int, (PipelineContextData Data, string Path)> _variableReferences = new();
    private int _nextVariableReference = _dynamicReferenceBase;

    /// <summary>
    /// Creates a provider. <paramref name="secretMasker"/> is mandatory —
    /// every value surfaced to the debugger is filtered through it.
    /// </summary>
    public DapVariableProvider(ISecretMasker secretMasker)
    {
        _secretMasker = secretMasker ?? throw new ArgumentNullException(nameof(secretMasker));
    }

    /// <summary>
    /// Clears all dynamic variable references.
    /// Call this whenever the paused execution context changes (e.g. new step)
    /// so that stale nested references are not served to the client.
    /// </summary>
    public void Reset()
    {
        _variableReferences.Clear();
        _nextVariableReference = _dynamicReferenceBase;
    }

    /// <summary>
    /// Returns the list of DAP scopes for the given execution context.
    /// Each scope corresponds to a well-known runner expression context
    /// (github, env, secrets, …) and carries a stable variablesReference
    /// that the client can use to drill into variables.
    /// </summary>
    public List<Scope> GetScopes(IExecutionContext context)
    {
        var scopes = new List<Scope>();

        if (context?.ExpressionValues == null)
        {
            return scopes;
        }

        for (int i = 0; i < _scopeNames.Length; i++)
        {
            var scopeName = _scopeNames[i];
            // Only surface contexts that actually exist for this job.
            if (!context.ExpressionValues.TryGetValue(scopeName, out var value) || value == null)
            {
                continue;
            }

            var scope = new Scope
            {
                Name = scopeName,
                VariablesReference = _scopeReferenceBase + i,
                Expensive = false,
                // "registers" hints clients to render secrets distinctly.
                PresentationHint = scopeName == "secrets" ? "registers" : null
            };

            if (value is DictionaryContextData dict)
            {
                scope.NamedVariables = dict.Count;
            }
            else if (value is CaseSensitiveDictionaryContextData csDict)
            {
                scope.NamedVariables = csDict.Count;
            }

            scopes.Add(scope);
        }

        return scopes;
    }

    /// <summary>
    /// Returns the child variables for a given variablesReference.
    /// The reference may point at a top-level scope (1–100) or a
    /// dynamically registered nested container (101+).
    /// </summary>
    public List<Variable> GetVariables(IExecutionContext context, int variablesReference)
    {
        var variables = new List<Variable>();

        if (context?.ExpressionValues == null)
        {
            return variables;
        }

        PipelineContextData data = null;
        string basePath = null;
        bool isSecretsScope = false;

        if (variablesReference >= _scopeReferenceBase && variablesReference <= _scopeReferenceMax)
        {
            // Top-level scope: resolve by index into the well-known names.
            var scopeIndex = variablesReference - _scopeReferenceBase;
            if (scopeIndex < _scopeNames.Length)
            {
                var scopeName = _scopeNames[scopeIndex];
                isSecretsScope = scopeName == "secrets";
                if (context.ExpressionValues.TryGetValue(scopeName, out data))
                {
                    basePath = scopeName;
                }
            }
        }
        else if (_variableReferences.TryGetValue(variablesReference, out var refData))
        {
            // Nested container registered by a previous GetVariables call.
            data = refData.Data;
            basePath = refData.Path;
            // Redaction follows the path so nested secret containers stay opaque.
            isSecretsScope = basePath?.StartsWith("secrets", StringComparison.OrdinalIgnoreCase) == true;
        }

        if (data == null)
        {
            return variables;
        }

        ConvertToVariables(data, basePath, isSecretsScope, variables);
        return variables;
    }

    /// <summary>
    /// Evaluates a GitHub Actions expression (e.g. "github.repository",
    /// "${{ github.event_name }}") in the context of the current step and
    /// returns a masked result suitable for the DAP evaluate response.
    ///
    /// Uses the runner's standard <see cref="GitHub.DistributedTask.Pipelines.ObjectTemplating.IPipelineTemplateEvaluator"/>
    /// so the full expression language is available (functions, operators,
    /// context access).
    /// </summary>
    public EvaluateResponseBody EvaluateExpression(string expression, IExecutionContext context)
    {
        if (context?.ExpressionValues == null)
        {
            return new EvaluateResponseBody
            {
                Result = "(no execution context available)",
                Type = "string",
                VariablesReference = 0
            };
        }

        // Strip ${{ }} wrapper if present.
        var expr = expression?.Trim() ?? string.Empty;
        if (expr.StartsWith("${{") && expr.EndsWith("}}"))
        {
            expr = expr.Substring(3, expr.Length - 5).Trim();
        }

        if (string.IsNullOrEmpty(expr))
        {
            return new EvaluateResponseBody
            {
                Result = string.Empty,
                Type = "string",
                VariablesReference = 0
            };
        }

        try
        {
            var templateEvaluator = context.ToPipelineTemplateEvaluator();
            var token = new BasicExpressionToken(null, null, null, expr);

            var result = templateEvaluator.EvaluateStepDisplayName(
                token,
                context.ExpressionValues,
                context.ExpressionFunctions);

            // Mask before anything leaves this class.
            result = _secretMasker.MaskSecrets(result ?? "null");

            return new EvaluateResponseBody
            {
                Result = result,
                Type = InferResultType(result),
                VariablesReference = 0
            };
        }
        catch (Exception ex)
        {
            // Exception text may embed context values — mask it too.
            var errorMessage = _secretMasker.MaskSecrets($"Evaluation error: {ex.Message}");
            return new EvaluateResponseBody
            {
                Result = errorMessage,
                Type = "string",
                VariablesReference = 0
            };
        }
    }

    /// <summary>
    /// Infers a simple DAP type hint from the string representation of a result.
    /// </summary>
    internal static string InferResultType(string value)
    {
        // Fix (CA1304): normalize with the invariant culture. The
        // culture-sensitive ToLower() can misclassify keyword-like values
        // under cultures with special casing rules (e.g. Turkish dotted I).
        value = value?.ToLowerInvariant();
        if (value == null || value == "null")
            return "null";
        if (value == "true" || value == "false")
            return "boolean";
        if (double.TryParse(value, NumberStyles.Any,
            CultureInfo.InvariantCulture, out _))
            return "number";
        if (value.StartsWith("{") || value.StartsWith("["))
            return "object";
        return "string";
    }

    #region Private helpers

    /// <summary>
    /// Flattens one container level of <paramref name="data"/> into
    /// <paramref name="variables"/>. Dictionaries yield named entries;
    /// arrays yield "[i]"-named entries. Other shapes yield nothing.
    /// </summary>
    private void ConvertToVariables(
        PipelineContextData data,
        string basePath,
        bool isSecretsScope,
        List<Variable> variables)
    {
        switch (data)
        {
            case DictionaryContextData dict:
                foreach (var pair in dict)
                {
                    variables.Add(CreateVariable(pair.Key, pair.Value, basePath, isSecretsScope));
                }
                break;

            case CaseSensitiveDictionaryContextData csDict:
                foreach (var pair in csDict)
                {
                    variables.Add(CreateVariable(pair.Key, pair.Value, basePath, isSecretsScope));
                }
                break;

            case ArrayContextData array:
                for (int i = 0; i < array.Count; i++)
                {
                    var variable = CreateVariable($"[{i}]", array[i], basePath, isSecretsScope);
                    variables.Add(variable);
                }
                break;
        }
    }

    /// <summary>
    /// Builds a single DAP variable for <paramref name="value"/>, applying
    /// secret redaction, masking, and registering a drill-in reference for
    /// container types.
    /// </summary>
    private Variable CreateVariable(
        string name,
        PipelineContextData value,
        string basePath,
        bool isSecretsScope)
    {
        var childPath = string.IsNullOrEmpty(basePath) ? name : $"{basePath}.{name}";
        var variable = new Variable
        {
            Name = name,
            // Lets the client copy/evaluate this value as an expression.
            EvaluateName = $"${{{{ {childPath} }}}}"
        };

        // Secrets scope: redact ALL values regardless of underlying type.
        // Keys are visible but values are always replaced with the
        // redaction marker, and nested containers are not drillable.
        if (isSecretsScope)
        {
            variable.Value = _redactedValue;
            variable.Type = "string";
            variable.VariablesReference = 0;
            return variable;
        }

        if (value == null)
        {
            variable.Value = "null";
            variable.Type = "null";
            variable.VariablesReference = 0;
            return variable;
        }

        switch (value)
        {
            case StringContextData str:
                variable.Value = _secretMasker.MaskSecrets(str.Value);
                variable.Type = "string";
                variable.VariablesReference = 0;
                break;

            case NumberContextData num:
                variable.Value = _secretMasker.MaskSecrets(num.Value.ToString("G15", CultureInfo.InvariantCulture));
                variable.Type = "number";
                variable.VariablesReference = 0;
                break;

            case BooleanContextData boolVal:
                variable.Value = boolVal.Value ? "true" : "false";
                variable.Type = "boolean";
                variable.VariablesReference = 0;
                break;

            case DictionaryContextData dict:
                variable.Value = $"Object ({dict.Count} properties)";
                variable.Type = "object";
                variable.VariablesReference = RegisterVariableReference(dict, childPath);
                variable.NamedVariables = dict.Count;
                break;

            case CaseSensitiveDictionaryContextData csDict:
                variable.Value = $"Object ({csDict.Count} properties)";
                variable.Type = "object";
                variable.VariablesReference = RegisterVariableReference(csDict, childPath);
                variable.NamedVariables = csDict.Count;
                break;

            case ArrayContextData array:
                variable.Value = $"Array ({array.Count} items)";
                variable.Type = "array";
                variable.VariablesReference = RegisterVariableReference(array, childPath);
                variable.IndexedVariables = array.Count;
                break;

            default:
                // Unknown context shape: fall back to the JSON rendering.
                var rawValue = value.ToJToken()?.ToString() ?? "unknown";
                variable.Value = _secretMasker.MaskSecrets(rawValue);
                variable.Type = value.GetType().Name;
                variable.VariablesReference = 0;
                break;
        }

        return variable;
    }

    /// <summary>
    /// Allocates the next dynamic reference ID and records the container
    /// it resolves to. IDs are only unique until the next <see cref="Reset"/>.
    /// </summary>
    private int RegisterVariableReference(PipelineContextData data, string path)
    {
        var reference = _nextVariableReference++;
        _variableReferences[reference] = (data, path);
        return reference;
    }

    #endregion
}
|
||||
}
|
||||
26
src/Runner.Worker/Dap/IDapDebugger.cs
Normal file
26
src/Runner.Worker/Dap/IDapDebugger.cs
Normal file
@@ -0,0 +1,26 @@
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.Runner.Common;
|
||||
|
||||
namespace GitHub.Runner.Worker.Dap
|
||||
{
|
||||
/// <summary>
/// Lifecycle states of a DAP (Debug Adapter Protocol) debug session.
/// Member order is part of the contract: reordering would change the
/// underlying numeric values.
/// </summary>
public enum DapSessionState
{
    // No session has been started yet.
    NotStarted,
    // Listening for a debugger client to connect.
    WaitingForConnection,
    // Client connected; initialization handshake in progress.
    Initializing,
    // Handshake complete; the session can proceed.
    Ready,
    // Execution is paused (presumably at a step boundary — confirm in DapDebugger).
    Paused,
    // Job execution is proceeding under the debugger.
    Running,
    // The session has ended.
    Terminated
}
|
||||
|
||||
/// <summary>
/// Debug Adapter Protocol debugger hooks invoked by the job runner around
/// job and step execution. Resolved via the service locator with
/// <c>DapDebugger</c> as the default implementation.
/// </summary>
[ServiceLocator(Default = typeof(DapDebugger))]
public interface IDapDebugger : IRunnerService
{
    /// <summary>Starts the debugger for the given job context (called at job start).</summary>
    Task StartAsync(IExecutionContext jobContext);

    /// <summary>Completes once the debugger is ready; the runner blocks on this before running steps.</summary>
    Task WaitUntilReadyAsync();

    /// <summary>Called before each step executes (allows pausing at the step boundary).</summary>
    Task OnStepStartingAsync(IStep step);

    /// <summary>Called after a step finishes.</summary>
    void OnStepCompleted(IStep step);

    /// <summary>Called when the job completes, to tear down the session.</summary>
    Task OnJobCompletedAsync();
}
|
||||
}
|
||||
@@ -969,6 +969,9 @@ namespace GitHub.Runner.Worker
|
||||
// Verbosity (from GitHub.Step_Debug).
|
||||
Global.WriteDebug = Global.Variables.Step_Debug ?? false;
|
||||
|
||||
// Debugger enabled flag (from acquire response).
|
||||
Global.EnableDebugger = message.EnableDebugger;
|
||||
|
||||
// Hook up JobServerQueueThrottling event, we will log warning on server tarpit.
|
||||
_jobServerQueue.JobServerQueueThrottling += JobServerQueueThrottling_EventReceived;
|
||||
}
|
||||
|
||||
@@ -27,6 +27,7 @@ namespace GitHub.Runner.Worker
|
||||
public StepsContext StepsContext { get; set; }
|
||||
public Variables Variables { get; set; }
|
||||
public bool WriteDebug { get; set; }
|
||||
public bool EnableDebugger { get; set; }
|
||||
public string InfrastructureFailureCategory { get; set; }
|
||||
public JObject ContainerHookState { get; set; }
|
||||
public bool HasTemplateEvaluatorMismatch { get; set; }
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
using System.Runtime.CompilerServices;
|
||||
|
||||
[assembly: InternalsVisibleTo("Test")]
|
||||
[assembly: InternalsVisibleTo("DynamicProxyGenAssembly2")]
|
||||
|
||||
@@ -13,6 +13,7 @@ using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Common.Util;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Runner.Worker.Dap;
|
||||
using GitHub.Services.Common;
|
||||
using GitHub.Services.WebApi;
|
||||
using Sdk.RSWebApi.Contracts;
|
||||
@@ -28,6 +29,7 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
public sealed class JobRunner : RunnerService, IJobRunner
|
||||
{
|
||||
private const string DebuggerConnectionTelemetryPrefix = "DebuggerConnectionResult";
|
||||
private IJobServerQueue _jobServerQueue;
|
||||
private RunnerSettings _runnerSettings;
|
||||
private ITempDirectoryManager _tempDirectoryManager;
|
||||
@@ -112,6 +114,7 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
IExecutionContext jobContext = null;
|
||||
CancellationTokenRegistration? runnerShutdownRegistration = null;
|
||||
IDapDebugger dapDebugger = null;
|
||||
try
|
||||
{
|
||||
// Create the job execution context.
|
||||
@@ -178,6 +181,26 @@ namespace GitHub.Runner.Worker
|
||||
_tempDirectoryManager = HostContext.GetService<ITempDirectoryManager>();
|
||||
_tempDirectoryManager.InitializeTempDirectory(jobContext);
|
||||
|
||||
// Setup the debugger
|
||||
if (jobContext.Global.EnableDebugger)
|
||||
{
|
||||
Trace.Info("Debugger enabled for this job run");
|
||||
|
||||
try
|
||||
{
|
||||
dapDebugger = HostContext.GetService<IDapDebugger>();
|
||||
await dapDebugger.StartAsync(jobContext);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Error($"Failed to start DAP debugger: {ex.Message}");
|
||||
AddDebuggerConnectionTelemetry(jobContext, $"Failed: {ex.Message}");
|
||||
jobContext.Error("Failed to start debugger.");
|
||||
return await CompleteJobAsync(server, jobContext, message, TaskResult.Failed);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// Get the job extension.
|
||||
Trace.Info("Getting job extension.");
|
||||
IJobExtension jobExtension = HostContext.CreateService<IJobExtension>();
|
||||
@@ -219,6 +242,33 @@ namespace GitHub.Runner.Worker
|
||||
await Task.WhenAny(_jobServerQueue.JobRecordUpdated.Task, Task.Delay(1000));
|
||||
}
|
||||
|
||||
// Wait for DAP debugger client connection and handshake after "Set up job"
|
||||
// so the job page shows the setup step before we block on the debugger
|
||||
if (dapDebugger != null)
|
||||
{
|
||||
try
|
||||
{
|
||||
await dapDebugger.WaitUntilReadyAsync();
|
||||
AddDebuggerConnectionTelemetry(jobContext, "Connected");
|
||||
}
|
||||
catch (OperationCanceledException) when (jobRequestCancellationToken.IsCancellationRequested)
|
||||
{
|
||||
Trace.Info("Job was cancelled before debugger client connected.");
|
||||
AddDebuggerConnectionTelemetry(jobContext, "Canceled");
|
||||
jobContext.Error("Job was cancelled before debugger client connected.");
|
||||
return await CompleteJobAsync(server, jobContext, message, TaskResult.Canceled);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
Trace.Error($"DAP debugger failed to become ready: {ex.Message}");
|
||||
AddDebuggerConnectionTelemetry(jobContext, $"Failed: {ex.Message}");
|
||||
|
||||
// If debugging was requested but the debugger is not available, fail the job
|
||||
jobContext.Error("The debugger failed to start or no debugger client connected in time.");
|
||||
return await CompleteJobAsync(server, jobContext, message, TaskResult.Failed);
|
||||
}
|
||||
}
|
||||
|
||||
// Run all job steps
|
||||
Trace.Info("Run all job steps.");
|
||||
var stepsRunner = HostContext.GetService<IStepsRunner>();
|
||||
@@ -259,6 +309,11 @@ namespace GitHub.Runner.Worker
|
||||
runnerShutdownRegistration = null;
|
||||
}
|
||||
|
||||
if (dapDebugger != null)
|
||||
{
|
||||
await dapDebugger.OnJobCompletedAsync();
|
||||
}
|
||||
|
||||
await ShutdownQueue(throwOnFailure: false);
|
||||
}
|
||||
}
|
||||
@@ -440,6 +495,15 @@ namespace GitHub.Runner.Worker
|
||||
throw new AggregateException(exceptions);
|
||||
}
|
||||
|
||||
private static void AddDebuggerConnectionTelemetry(IExecutionContext jobContext, string result)
|
||||
{
|
||||
jobContext.Global.JobTelemetry.Add(new JobTelemetry
|
||||
{
|
||||
Type = JobTelemetryType.General,
|
||||
Message = $"{DebuggerConnectionTelemetryPrefix}: {result}"
|
||||
});
|
||||
}
|
||||
|
||||
private void MaskTelemetrySecrets(List<JobTelemetry> jobTelemetry)
|
||||
{
|
||||
foreach (var telemetryItem in jobTelemetry)
|
||||
|
||||
@@ -10,6 +10,7 @@ using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Common.Util;
|
||||
using GitHub.Runner.Sdk;
|
||||
using GitHub.Runner.Worker.Dap;
|
||||
using GitHub.Runner.Worker.Expressions;
|
||||
|
||||
namespace GitHub.Runner.Worker
|
||||
@@ -50,6 +51,7 @@ namespace GitHub.Runner.Worker
|
||||
jobContext.JobContext.Status = (jobContext.Result ?? TaskResult.Succeeded).ToActionResult();
|
||||
var scopeInputs = new Dictionary<string, PipelineContextData>(StringComparer.OrdinalIgnoreCase);
|
||||
bool checkPostJobActions = false;
|
||||
var dapDebugger = HostContext.GetService<IDapDebugger>();
|
||||
while (jobContext.JobSteps.Count > 0 || !checkPostJobActions)
|
||||
{
|
||||
if (jobContext.JobSteps.Count == 0 && !checkPostJobActions)
|
||||
@@ -226,9 +228,14 @@ namespace GitHub.Runner.Worker
|
||||
}
|
||||
else
|
||||
{
|
||||
// Pause for DAP debugger before step execution
|
||||
await dapDebugger?.OnStepStartingAsync(step);
|
||||
|
||||
// Run the step
|
||||
await RunStepAsync(step, jobContext.CancellationToken);
|
||||
CompleteStep(step);
|
||||
|
||||
dapDebugger?.OnStepCompleted(step);
|
||||
}
|
||||
}
|
||||
finally
|
||||
@@ -255,6 +262,7 @@ namespace GitHub.Runner.Worker
|
||||
|
||||
Trace.Info($"Current state: job state = '{jobContext.Result}'");
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private async Task RunStepAsync(IStep step, CancellationToken jobCancellationToken)
|
||||
|
||||
@@ -17,10 +17,9 @@ namespace GitHub.DistributedTask.Expressions2
|
||||
String expression,
|
||||
ITraceWriter trace,
|
||||
IEnumerable<INamedValueInfo> namedValues,
|
||||
IEnumerable<IFunctionInfo> functions,
|
||||
Boolean allowCaseFunction = true)
|
||||
IEnumerable<IFunctionInfo> functions)
|
||||
{
|
||||
var context = new ParseContext(expression, trace, namedValues, functions, allowCaseFunction: allowCaseFunction);
|
||||
var context = new ParseContext(expression, trace, namedValues, functions);
|
||||
context.Trace.Info($"Parsing expression: <{expression}>");
|
||||
return CreateTree(context);
|
||||
}
|
||||
@@ -416,12 +415,6 @@ namespace GitHub.DistributedTask.Expressions2
|
||||
String name,
|
||||
out IFunctionInfo functionInfo)
|
||||
{
|
||||
if (String.Equals(name, "case", StringComparison.OrdinalIgnoreCase) && !context.AllowCaseFunction)
|
||||
{
|
||||
functionInfo = null;
|
||||
return false;
|
||||
}
|
||||
|
||||
return ExpressionConstants.WellKnownFunctions.TryGetValue(name, out functionInfo) ||
|
||||
context.ExtensionFunctions.TryGetValue(name, out functionInfo);
|
||||
}
|
||||
@@ -429,7 +422,6 @@ namespace GitHub.DistributedTask.Expressions2
|
||||
private sealed class ParseContext
|
||||
{
|
||||
public Boolean AllowUnknownKeywords;
|
||||
public Boolean AllowCaseFunction;
|
||||
public readonly String Expression;
|
||||
public readonly Dictionary<String, IFunctionInfo> ExtensionFunctions = new Dictionary<String, IFunctionInfo>(StringComparer.OrdinalIgnoreCase);
|
||||
public readonly Dictionary<String, INamedValueInfo> ExtensionNamedValues = new Dictionary<String, INamedValueInfo>(StringComparer.OrdinalIgnoreCase);
|
||||
@@ -445,8 +437,7 @@ namespace GitHub.DistributedTask.Expressions2
|
||||
ITraceWriter trace,
|
||||
IEnumerable<INamedValueInfo> namedValues,
|
||||
IEnumerable<IFunctionInfo> functions,
|
||||
Boolean allowUnknownKeywords = false,
|
||||
Boolean allowCaseFunction = true)
|
||||
Boolean allowUnknownKeywords = false)
|
||||
{
|
||||
Expression = expression ?? String.Empty;
|
||||
if (Expression.Length > ExpressionConstants.MaxLength)
|
||||
@@ -467,7 +458,6 @@ namespace GitHub.DistributedTask.Expressions2
|
||||
|
||||
LexicalAnalyzer = new LexicalAnalyzer(Expression);
|
||||
AllowUnknownKeywords = allowUnknownKeywords;
|
||||
AllowCaseFunction = allowCaseFunction;
|
||||
}
|
||||
|
||||
private class NoOperationTraceWriter : ITraceWriter
|
||||
|
||||
@@ -86,12 +86,6 @@ namespace GitHub.DistributedTask.ObjectTemplating
|
||||
|
||||
internal ITraceWriter TraceWriter { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Gets or sets a value indicating whether the case expression function is allowed.
|
||||
/// Defaults to true. Set to false to disable the case function.
|
||||
/// </summary>
|
||||
internal Boolean AllowCaseFunction { get; set; } = true;
|
||||
|
||||
private IDictionary<String, Int32> FileIds
|
||||
{
|
||||
get
|
||||
|
||||
@@ -57,7 +57,7 @@ namespace GitHub.DistributedTask.ObjectTemplating.Tokens
|
||||
var originalBytes = context.Memory.CurrentBytes;
|
||||
try
|
||||
{
|
||||
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions, allowCaseFunction: context.AllowCaseFunction);
|
||||
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions);
|
||||
var options = new EvaluationOptions
|
||||
{
|
||||
MaxMemory = context.Memory.MaxBytes,
|
||||
@@ -94,7 +94,7 @@ namespace GitHub.DistributedTask.ObjectTemplating.Tokens
|
||||
var originalBytes = context.Memory.CurrentBytes;
|
||||
try
|
||||
{
|
||||
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions, allowCaseFunction: context.AllowCaseFunction);
|
||||
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions);
|
||||
var options = new EvaluationOptions
|
||||
{
|
||||
MaxMemory = context.Memory.MaxBytes,
|
||||
@@ -123,7 +123,7 @@ namespace GitHub.DistributedTask.ObjectTemplating.Tokens
|
||||
var originalBytes = context.Memory.CurrentBytes;
|
||||
try
|
||||
{
|
||||
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions, allowCaseFunction: context.AllowCaseFunction);
|
||||
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions);
|
||||
var options = new EvaluationOptions
|
||||
{
|
||||
MaxMemory = context.Memory.MaxBytes,
|
||||
@@ -152,7 +152,7 @@ namespace GitHub.DistributedTask.ObjectTemplating.Tokens
|
||||
var originalBytes = context.Memory.CurrentBytes;
|
||||
try
|
||||
{
|
||||
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions, allowCaseFunction: context.AllowCaseFunction);
|
||||
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions);
|
||||
var options = new EvaluationOptions
|
||||
{
|
||||
MaxMemory = context.Memory.MaxBytes,
|
||||
|
||||
@@ -253,6 +253,13 @@ namespace GitHub.DistributedTask.Pipelines
|
||||
set;
|
||||
}
|
||||
|
||||
[DataMember(EmitDefaultValue = false)]
|
||||
public bool EnableDebugger
|
||||
{
|
||||
get;
|
||||
set;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets the collection of variables associated with the current context.
|
||||
/// </summary>
|
||||
|
||||
@@ -681,7 +681,7 @@ namespace GitHub.DistributedTask.Pipelines.ObjectTemplating
|
||||
var node = default(ExpressionNode);
|
||||
try
|
||||
{
|
||||
node = expressionParser.CreateTree(condition, null, namedValues, functions, allowCaseFunction: context.AllowCaseFunction) as ExpressionNode;
|
||||
node = expressionParser.CreateTree(condition, null, namedValues, functions) as ExpressionNode;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
|
||||
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
|
||||
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
@@ -17,10 +17,9 @@ namespace GitHub.Actions.Expressions
|
||||
String expression,
|
||||
ITraceWriter trace,
|
||||
IEnumerable<INamedValueInfo> namedValues,
|
||||
IEnumerable<IFunctionInfo> functions,
|
||||
Boolean allowCaseFunction = true)
|
||||
IEnumerable<IFunctionInfo> functions)
|
||||
{
|
||||
var context = new ParseContext(expression, trace, namedValues, functions, allowCaseFunction: allowCaseFunction);
|
||||
var context = new ParseContext(expression, trace, namedValues, functions);
|
||||
context.Trace.Info($"Parsing expression: <{expression}>");
|
||||
return CreateTree(context);
|
||||
}
|
||||
@@ -322,7 +321,7 @@ namespace GitHub.Actions.Expressions
|
||||
context.Operators.Pop();
|
||||
}
|
||||
var functionOperands = PopOperands(context, parameterCount);
|
||||
|
||||
|
||||
// Node already exists on the operand stack
|
||||
function = (Function)context.Operands.Peek();
|
||||
|
||||
@@ -416,12 +415,6 @@ namespace GitHub.Actions.Expressions
|
||||
String name,
|
||||
out IFunctionInfo functionInfo)
|
||||
{
|
||||
if (String.Equals(name, "case", StringComparison.OrdinalIgnoreCase) && !context.AllowCaseFunction)
|
||||
{
|
||||
functionInfo = null;
|
||||
return false;
|
||||
}
|
||||
|
||||
return ExpressionConstants.WellKnownFunctions.TryGetValue(name, out functionInfo) ||
|
||||
context.ExtensionFunctions.TryGetValue(name, out functionInfo);
|
||||
}
|
||||
@@ -429,7 +422,6 @@ namespace GitHub.Actions.Expressions
|
||||
private sealed class ParseContext
|
||||
{
|
||||
public Boolean AllowUnknownKeywords;
|
||||
public Boolean AllowCaseFunction;
|
||||
public readonly String Expression;
|
||||
public readonly Dictionary<String, IFunctionInfo> ExtensionFunctions = new Dictionary<String, IFunctionInfo>(StringComparer.OrdinalIgnoreCase);
|
||||
public readonly Dictionary<String, INamedValueInfo> ExtensionNamedValues = new Dictionary<String, INamedValueInfo>(StringComparer.OrdinalIgnoreCase);
|
||||
@@ -445,8 +437,7 @@ namespace GitHub.Actions.Expressions
|
||||
ITraceWriter trace,
|
||||
IEnumerable<INamedValueInfo> namedValues,
|
||||
IEnumerable<IFunctionInfo> functions,
|
||||
Boolean allowUnknownKeywords = false,
|
||||
Boolean allowCaseFunction = true)
|
||||
Boolean allowUnknownKeywords = false)
|
||||
{
|
||||
Expression = expression ?? String.Empty;
|
||||
if (Expression.Length > ExpressionConstants.MaxLength)
|
||||
@@ -467,7 +458,6 @@ namespace GitHub.Actions.Expressions
|
||||
|
||||
LexicalAnalyzer = new LexicalAnalyzer(Expression);
|
||||
AllowUnknownKeywords = allowUnknownKeywords;
|
||||
AllowCaseFunction = allowCaseFunction;
|
||||
}
|
||||
|
||||
private class NoOperationTraceWriter : ITraceWriter
|
||||
|
||||
@@ -1828,7 +1828,7 @@ namespace GitHub.Actions.WorkflowParser.Conversion
|
||||
var node = default(ExpressionNode);
|
||||
try
|
||||
{
|
||||
node = expressionParser.CreateTree(condition, null, namedValues, functions, allowCaseFunction: context.AllowCaseFunction) as ExpressionNode;
|
||||
node = expressionParser.CreateTree(condition, null, namedValues, functions) as ExpressionNode;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
|
||||
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
|
||||
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
@@ -113,12 +113,6 @@ namespace GitHub.Actions.WorkflowParser.ObjectTemplating
|
||||
/// </summary>
|
||||
internal Boolean StrictJsonParsing { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Gets or sets a value indicating whether the case expression function is allowed.
|
||||
/// Defaults to true. Set to false to disable the case function.
|
||||
/// </summary>
|
||||
internal Boolean AllowCaseFunction { get; set; } = true;
|
||||
|
||||
internal ITraceWriter TraceWriter { get; set; }
|
||||
|
||||
private IDictionary<String, Int32> FileIds
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
|
||||
#nullable disable // Consider removing in the future to minimize likelihood of NullReferenceException; refer https://learn.microsoft.com/en-us/dotnet/csharp/nullable-references
|
||||
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
@@ -55,7 +55,7 @@ namespace GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens
|
||||
var originalBytes = context.Memory.CurrentBytes;
|
||||
try
|
||||
{
|
||||
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions, allowCaseFunction: context.AllowCaseFunction);
|
||||
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions);
|
||||
var options = new EvaluationOptions
|
||||
{
|
||||
MaxMemory = context.Memory.MaxBytes,
|
||||
@@ -93,7 +93,7 @@ namespace GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens
|
||||
var originalBytes = context.Memory.CurrentBytes;
|
||||
try
|
||||
{
|
||||
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions, allowCaseFunction: context.AllowCaseFunction);
|
||||
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions);
|
||||
var options = new EvaluationOptions
|
||||
{
|
||||
MaxMemory = context.Memory.MaxBytes,
|
||||
@@ -123,7 +123,7 @@ namespace GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens
|
||||
var originalBytes = context.Memory.CurrentBytes;
|
||||
try
|
||||
{
|
||||
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions, allowCaseFunction: context.AllowCaseFunction);
|
||||
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions);
|
||||
var options = new EvaluationOptions
|
||||
{
|
||||
MaxMemory = context.Memory.MaxBytes,
|
||||
@@ -153,7 +153,7 @@ namespace GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens
|
||||
var originalBytes = context.Memory.CurrentBytes;
|
||||
try
|
||||
{
|
||||
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions, allowCaseFunction: context.AllowCaseFunction);
|
||||
var tree = new ExpressionParser().CreateTree(expression, null, context.GetExpressionNamedValues(), context.ExpressionFunctions);
|
||||
var options = new EvaluationOptions
|
||||
{
|
||||
MaxMemory = context.Memory.MaxBytes,
|
||||
@@ -289,4 +289,4 @@ namespace GitHub.Actions.WorkflowParser.ObjectTemplating.Tokens
|
||||
return result;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
using GitHub.DistributedTask.Expressions2;
|
||||
using GitHub.DistributedTask.Expressions2;
|
||||
using GitHub.DistributedTask.Expressions2.Sdk;
|
||||
using GitHub.DistributedTask.ObjectTemplating;
|
||||
using System;
|
||||
@@ -9,7 +9,7 @@ namespace GitHub.Runner.Common.Tests.Sdk
|
||||
{
|
||||
/// <summary>
|
||||
/// Regression tests for ExpressionParser.CreateTree to verify that
|
||||
/// allowCaseFunction does not accidentally set allowUnknownKeywords.
|
||||
/// the case function does not accidentally set allowUnknownKeywords.
|
||||
/// </summary>
|
||||
public sealed class ExpressionParserL0
|
||||
{
|
||||
@@ -18,7 +18,7 @@ namespace GitHub.Runner.Common.Tests.Sdk
|
||||
[Trait("Category", "Sdk")]
|
||||
public void CreateTree_RejectsUnrecognizedNamedValue()
|
||||
{
|
||||
// Regression: allowCaseFunction was passed positionally into
|
||||
// Regression: the case function parameter was passed positionally into
|
||||
// the allowUnknownKeywords parameter, causing all named values
|
||||
// to be silently accepted.
|
||||
var parser = new ExpressionParser();
|
||||
@@ -52,7 +52,7 @@ namespace GitHub.Runner.Common.Tests.Sdk
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Sdk")]
|
||||
public void CreateTree_CaseFunctionWorks_WhenAllowed()
|
||||
public void CreateTree_CaseFunctionWorks()
|
||||
{
|
||||
var parser = new ExpressionParser();
|
||||
var namedValues = new List<INamedValueInfo>
|
||||
@@ -60,35 +60,17 @@ namespace GitHub.Runner.Common.Tests.Sdk
|
||||
new NamedValueInfo<ContextValueNode>("github"),
|
||||
};
|
||||
|
||||
var node = parser.CreateTree("case(github.event_name, 'push', 'Push Event')", null, namedValues, null, allowCaseFunction: true);
|
||||
var node = parser.CreateTree("case(github.event_name, 'push', 'Push Event')", null, namedValues, null);
|
||||
|
||||
Assert.NotNull(node);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Sdk")]
|
||||
public void CreateTree_CaseFunctionRejected_WhenDisallowed()
|
||||
{
|
||||
var parser = new ExpressionParser();
|
||||
var namedValues = new List<INamedValueInfo>
|
||||
{
|
||||
new NamedValueInfo<ContextValueNode>("github"),
|
||||
};
|
||||
|
||||
var ex = Assert.Throws<ParseException>(() =>
|
||||
parser.CreateTree("case(github.event_name, 'push', 'Push Event')", null, namedValues, null, allowCaseFunction: false));
|
||||
|
||||
Assert.Contains("Unrecognized function", ex.Message);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Sdk")]
|
||||
public void CreateTree_CaseFunctionDoesNotAffectUnknownKeywords()
|
||||
{
|
||||
// The key regression test: with allowCaseFunction=true (default),
|
||||
// unrecognized named values must still be rejected.
|
||||
// The key regression test: unrecognized named values must still be rejected.
|
||||
var parser = new ExpressionParser();
|
||||
var namedValues = new List<INamedValueInfo>
|
||||
{
|
||||
@@ -96,7 +78,7 @@ namespace GitHub.Runner.Common.Tests.Sdk
|
||||
};
|
||||
|
||||
var ex = Assert.Throws<ParseException>(() =>
|
||||
parser.CreateTree("github.ref", null, namedValues, null, allowCaseFunction: true));
|
||||
parser.CreateTree("github.ref", null, namedValues, null));
|
||||
|
||||
Assert.Contains("Unrecognized named-value", ex.Message);
|
||||
}
|
||||
|
||||
76
src/Test/L0/Sdk/RSWebApi/AgentJobRequestMessageL0.cs
Normal file
76
src/Test/L0/Sdk/RSWebApi/AgentJobRequestMessageL0.cs
Normal file
@@ -0,0 +1,76 @@
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Runtime.Serialization.Json;
|
||||
using System.Text;
|
||||
using Xunit;
|
||||
using GitHub.DistributedTask.Pipelines;
|
||||
|
||||
namespace GitHub.Actions.RunService.WebApi.Tests;
|
||||
|
||||
public sealed class AgentJobRequestMessageL0
|
||||
{
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Common")]
|
||||
public void VerifyEnableDebuggerDeserialization_WithTrue()
|
||||
{
|
||||
// Arrange
|
||||
var serializer = new DataContractJsonSerializer(typeof(AgentJobRequestMessage));
|
||||
string jsonWithEnabledDebugger = DoubleQuotify("{'EnableDebugger': true}");
|
||||
|
||||
// Act
|
||||
using var stream = new MemoryStream();
|
||||
stream.Write(Encoding.UTF8.GetBytes(jsonWithEnabledDebugger));
|
||||
stream.Position = 0;
|
||||
var recoveredMessage = serializer.ReadObject(stream) as AgentJobRequestMessage;
|
||||
|
||||
// Assert
|
||||
Assert.NotNull(recoveredMessage);
|
||||
Assert.True(recoveredMessage.EnableDebugger, "EnableDebugger should be true when JSON contains 'EnableDebugger': true");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Common")]
|
||||
public void VerifyEnableDebuggerDeserialization_DefaultToFalse()
|
||||
{
|
||||
// Arrange
|
||||
var serializer = new DataContractJsonSerializer(typeof(AgentJobRequestMessage));
|
||||
string jsonWithoutDebugger = DoubleQuotify("{'messageType': 'PipelineAgentJobRequest'}");
|
||||
|
||||
// Act
|
||||
using var stream = new MemoryStream();
|
||||
stream.Write(Encoding.UTF8.GetBytes(jsonWithoutDebugger));
|
||||
stream.Position = 0;
|
||||
var recoveredMessage = serializer.ReadObject(stream) as AgentJobRequestMessage;
|
||||
|
||||
// Assert
|
||||
Assert.NotNull(recoveredMessage);
|
||||
Assert.False(recoveredMessage.EnableDebugger, "EnableDebugger should default to false when JSON field is absent");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Common")]
|
||||
public void VerifyEnableDebuggerDeserialization_WithFalse()
|
||||
{
|
||||
// Arrange
|
||||
var serializer = new DataContractJsonSerializer(typeof(AgentJobRequestMessage));
|
||||
string jsonWithDisabledDebugger = DoubleQuotify("{'EnableDebugger': false}");
|
||||
|
||||
// Act
|
||||
using var stream = new MemoryStream();
|
||||
stream.Write(Encoding.UTF8.GetBytes(jsonWithDisabledDebugger));
|
||||
stream.Position = 0;
|
||||
var recoveredMessage = serializer.ReadObject(stream) as AgentJobRequestMessage;
|
||||
|
||||
// Assert
|
||||
Assert.NotNull(recoveredMessage);
|
||||
Assert.False(recoveredMessage.EnableDebugger, "EnableDebugger should be false when JSON contains 'EnableDebugger': false");
|
||||
}
|
||||
|
||||
private static string DoubleQuotify(string text)
|
||||
{
|
||||
return text.Replace('\'', '"');
|
||||
}
|
||||
}
|
||||
@@ -2,6 +2,7 @@
|
||||
using GitHub.Runner.Listener.Check;
|
||||
using GitHub.Runner.Listener.Configuration;
|
||||
using GitHub.Runner.Worker;
|
||||
using GitHub.Runner.Worker.Dap;
|
||||
using GitHub.Runner.Worker.Container.ContainerHooks;
|
||||
using GitHub.Runner.Worker.Handlers;
|
||||
using System;
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.IO;
|
||||
using System.IO.Compression;
|
||||
using System.Net;
|
||||
@@ -1254,6 +1255,659 @@ runs:
|
||||
}
|
||||
#endif
|
||||
|
||||
// =================================================================
|
||||
// Tests for batched action resolution optimization
|
||||
// =================================================================
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public async void PrepareActions_BatchesResolutionAcrossCompositeActions()
|
||||
{
|
||||
// Verifies that when multiple composite actions at the same depth
|
||||
// reference sub-actions, those sub-actions are resolved in a single
|
||||
// batched API call rather than one call per composite.
|
||||
//
|
||||
// Action tree:
|
||||
// CompositePrestep (composite) → [Node action, CompositePrestep2 (composite)]
|
||||
// CompositePrestep2 (composite) → [Node action, Docker action]
|
||||
//
|
||||
// Without batching: 3 API calls (depth 0, depth 1 for CompositePrestep, depth 2 for CompositePrestep2)
|
||||
// With batching: still 3 calls at most, but the key is that depth-1
|
||||
// sub-actions from all composites at depth 0 are batched into 1 call.
|
||||
// And the same action appearing at multiple depths triggers only 1 resolve.
|
||||
Environment.SetEnvironmentVariable("ACTIONS_BATCH_ACTION_RESOLUTION", "true");
|
||||
try
|
||||
{
|
||||
//Arrange
|
||||
Setup();
|
||||
_hc.EnqueueInstance<IActionRunner>(new Mock<IActionRunner>().Object);
|
||||
_hc.EnqueueInstance<IActionRunner>(new Mock<IActionRunner>().Object);
|
||||
_hc.EnqueueInstance<IActionRunner>(new Mock<IActionRunner>().Object);
|
||||
|
||||
var resolveCallCount = 0;
|
||||
var resolvedActions = new List<ActionReferenceList>();
|
||||
_jobServer.Setup(x => x.ResolveActionDownloadInfoAsync(It.IsAny<Guid>(), It.IsAny<string>(), It.IsAny<Guid>(), It.IsAny<Guid>(), It.IsAny<ActionReferenceList>(), It.IsAny<CancellationToken>()))
|
||||
.Returns((Guid scopeIdentifier, string hubName, Guid planId, Guid jobId, ActionReferenceList actions, CancellationToken cancellationToken) =>
|
||||
{
|
||||
resolveCallCount++;
|
||||
resolvedActions.Add(actions);
|
||||
var result = new ActionDownloadInfoCollection { Actions = new Dictionary<string, ActionDownloadInfo>() };
|
||||
foreach (var action in actions.Actions)
|
||||
{
|
||||
var key = $"{action.NameWithOwner}@{action.Ref}";
|
||||
result.Actions[key] = new ActionDownloadInfo
|
||||
{
|
||||
NameWithOwner = action.NameWithOwner,
|
||||
Ref = action.Ref,
|
||||
ResolvedNameWithOwner = action.NameWithOwner,
|
||||
ResolvedSha = $"{action.Ref}-sha",
|
||||
TarballUrl = $"https://api.github.com/repos/{action.NameWithOwner}/tarball/{action.Ref}",
|
||||
ZipballUrl = $"https://api.github.com/repos/{action.NameWithOwner}/zipball/{action.Ref}",
|
||||
};
|
||||
}
|
||||
return Task.FromResult(result);
|
||||
});
|
||||
|
||||
var actionId = Guid.NewGuid();
|
||||
var actions = new List<Pipelines.ActionStep>
|
||||
{
|
||||
new Pipelines.ActionStep()
|
||||
{
|
||||
Name = "action",
|
||||
Id = actionId,
|
||||
Reference = new Pipelines.RepositoryPathReference()
|
||||
{
|
||||
Name = "TingluoHuang/runner_L0",
|
||||
Ref = "CompositePrestep",
|
||||
RepositoryType = "GitHub"
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
//Act
|
||||
var result = await _actionManager.PrepareActionsAsync(_ec.Object, actions);
|
||||
|
||||
//Assert
|
||||
// The composite tree is:
|
||||
// depth 0: CompositePrestep
|
||||
// depth 1: Node@RepositoryActionWithWrapperActionfile_Node + CompositePrestep2
|
||||
// depth 2: Node@RepositoryActionWithWrapperActionfile_Node + Docker@RepositoryActionWithWrapperActionfile_Docker
|
||||
//
|
||||
// With batching:
|
||||
// Call 1 (depth 0, resolve): CompositePrestep
|
||||
// Call 2 (depth 0→1, pre-resolve): Node + CompositePrestep2 in one batch
|
||||
// Call 3 (depth 1→2, pre-resolve): Docker only (Node already cached from call 2)
|
||||
Assert.Equal(3, resolveCallCount);
|
||||
|
||||
// Call 1: depth 0 resolve — just the top-level composite
|
||||
var call1Keys = resolvedActions[0].Actions.Select(a => $"{a.NameWithOwner}@{a.Ref}").OrderBy(k => k).ToList();
|
||||
Assert.Equal(new[] { "TingluoHuang/runner_L0@CompositePrestep" }, call1Keys);
|
||||
|
||||
// Call 2: depth 0→1 pre-resolve — batch both children of CompositePrestep
|
||||
var call2Keys = resolvedActions[1].Actions.Select(a => $"{a.NameWithOwner}@{a.Ref}").OrderBy(k => k).ToList();
|
||||
Assert.Equal(new[] { "TingluoHuang/runner_L0@CompositePrestep2", "TingluoHuang/runner_L0@RepositoryActionWithWrapperActionfile_Node" }, call2Keys);
|
||||
|
||||
// Call 3: depth 1→2 pre-resolve — only Docker (Node was cached in call 2)
|
||||
var call3Keys = resolvedActions[2].Actions.Select(a => $"{a.NameWithOwner}@{a.Ref}").OrderBy(k => k).ToList();
|
||||
Assert.Equal(new[] { "TingluoHuang/runner_L0@RepositoryActionWithWrapperActionfile_Docker" }, call3Keys);
|
||||
|
||||
// Verify all actions were downloaded
|
||||
Assert.True(File.Exists(Path.Combine(_hc.GetDirectory(WellKnownDirectory.Actions), "TingluoHuang/runner_L0", "CompositePrestep.completed")));
|
||||
Assert.True(File.Exists(Path.Combine(_hc.GetDirectory(WellKnownDirectory.Actions), "TingluoHuang/runner_L0", "RepositoryActionWithWrapperActionfile_Node.completed")));
|
||||
Assert.True(File.Exists(Path.Combine(_hc.GetDirectory(WellKnownDirectory.Actions), "TingluoHuang/runner_L0", "CompositePrestep2.completed")));
|
||||
Assert.True(File.Exists(Path.Combine(_hc.GetDirectory(WellKnownDirectory.Actions), "TingluoHuang/runner_L0", "RepositoryActionWithWrapperActionfile_Docker.completed")));
|
||||
|
||||
// Verify pre-step tracking still works correctly
|
||||
Assert.Equal(1, result.PreStepTracker.Count);
|
||||
}
|
||||
finally
|
||||
{
|
||||
Environment.SetEnvironmentVariable("ACTIONS_BATCH_ACTION_RESOLUTION", null);
|
||||
Teardown();
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public async void PrepareActions_DeduplicatesResolutionAcrossDepthLevels()
|
||||
{
|
||||
// Verifies that an action appearing at multiple depths in the
|
||||
// composite tree is only resolved once (not re-resolved at each level).
|
||||
//
|
||||
// CompositePrestep uses Node action at depth 1.
|
||||
// CompositePrestep2 (also at depth 1) uses the SAME Node action at depth 2.
|
||||
// The Node action should only be resolved once total.
|
||||
Environment.SetEnvironmentVariable("ACTIONS_BATCH_ACTION_RESOLUTION", "true");
|
||||
try
|
||||
{
|
||||
//Arrange
|
||||
Setup();
|
||||
_hc.EnqueueInstance<IActionRunner>(new Mock<IActionRunner>().Object);
|
||||
_hc.EnqueueInstance<IActionRunner>(new Mock<IActionRunner>().Object);
|
||||
_hc.EnqueueInstance<IActionRunner>(new Mock<IActionRunner>().Object);
|
||||
|
||||
var allResolvedKeys = new List<string>();
|
||||
_jobServer.Setup(x => x.ResolveActionDownloadInfoAsync(It.IsAny<Guid>(), It.IsAny<string>(), It.IsAny<Guid>(), It.IsAny<Guid>(), It.IsAny<ActionReferenceList>(), It.IsAny<CancellationToken>()))
|
||||
.Returns((Guid scopeIdentifier, string hubName, Guid planId, Guid jobId, ActionReferenceList actions, CancellationToken cancellationToken) =>
|
||||
{
|
||||
var result = new ActionDownloadInfoCollection { Actions = new Dictionary<string, ActionDownloadInfo>() };
|
||||
foreach (var action in actions.Actions)
|
||||
{
|
||||
var key = $"{action.NameWithOwner}@{action.Ref}";
|
||||
allResolvedKeys.Add(key);
|
||||
result.Actions[key] = new ActionDownloadInfo
|
||||
{
|
||||
NameWithOwner = action.NameWithOwner,
|
||||
Ref = action.Ref,
|
||||
ResolvedNameWithOwner = action.NameWithOwner,
|
||||
ResolvedSha = $"{action.Ref}-sha",
|
||||
TarballUrl = $"https://api.github.com/repos/{action.NameWithOwner}/tarball/{action.Ref}",
|
||||
ZipballUrl = $"https://api.github.com/repos/{action.NameWithOwner}/zipball/{action.Ref}",
|
||||
};
|
||||
}
|
||||
return Task.FromResult(result);
|
||||
});
|
||||
|
||||
var actionId = Guid.NewGuid();
|
||||
var actions = new List<Pipelines.ActionStep>
|
||||
{
|
||||
new Pipelines.ActionStep()
|
||||
{
|
||||
Name = "action",
|
||||
Id = actionId,
|
||||
Reference = new Pipelines.RepositoryPathReference()
|
||||
{
|
||||
Name = "TingluoHuang/runner_L0",
|
||||
Ref = "CompositePrestep",
|
||||
RepositoryType = "GitHub"
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
//Act
|
||||
await _actionManager.PrepareActionsAsync(_ec.Object, actions);
|
||||
|
||||
//Assert
|
||||
// TingluoHuang/runner_L0@RepositoryActionWithWrapperActionfile_Node appears
|
||||
// at both depth 1 (sub-step of CompositePrestep) and depth 2 (sub-step of
|
||||
// CompositePrestep2). With deduplication it should only be resolved once.
|
||||
var nodeActionKey = "TingluoHuang/runner_L0@RepositoryActionWithWrapperActionfile_Node";
|
||||
var nodeResolveCount = allResolvedKeys.FindAll(k => k == nodeActionKey).Count;
|
||||
Assert.Equal(1, nodeResolveCount);
|
||||
|
||||
// Verify the total number of unique actions resolved matches the tree
|
||||
var uniqueKeys = new HashSet<string>(allResolvedKeys);
|
||||
// Expected unique actions: CompositePrestep, Node, CompositePrestep2, Docker = 4
|
||||
Assert.Equal(4, uniqueKeys.Count);
|
||||
}
|
||||
finally
|
||||
{
|
||||
Environment.SetEnvironmentVariable("ACTIONS_BATCH_ACTION_RESOLUTION", null);
|
||||
Teardown();
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public async void PrepareActions_MultipleTopLevelActions_BatchesResolution()
|
||||
{
|
||||
// Verifies that multiple independent actions at depth 0 are
|
||||
// resolved in a single API call.
|
||||
Environment.SetEnvironmentVariable("ACTIONS_BATCH_ACTION_RESOLUTION", "true");
|
||||
try
|
||||
{
|
||||
//Arrange
|
||||
Setup();
|
||||
// Node action has pre+post, needs IActionRunner instances
|
||||
_hc.EnqueueInstance<IActionRunner>(new Mock<IActionRunner>().Object);
|
||||
_hc.EnqueueInstance<IActionRunner>(new Mock<IActionRunner>().Object);
|
||||
|
||||
var resolveCallCount = 0;
|
||||
var firstCallActionCount = 0;
|
||||
_jobServer.Setup(x => x.ResolveActionDownloadInfoAsync(It.IsAny<Guid>(), It.IsAny<string>(), It.IsAny<Guid>(), It.IsAny<Guid>(), It.IsAny<ActionReferenceList>(), It.IsAny<CancellationToken>()))
|
||||
.Returns((Guid scopeIdentifier, string hubName, Guid planId, Guid jobId, ActionReferenceList actions, CancellationToken cancellationToken) =>
|
||||
{
|
||||
resolveCallCount++;
|
||||
if (resolveCallCount == 1)
|
||||
{
|
||||
firstCallActionCount = actions.Actions.Count;
|
||||
}
|
||||
var result = new ActionDownloadInfoCollection { Actions = new Dictionary<string, ActionDownloadInfo>() };
|
||||
foreach (var action in actions.Actions)
|
||||
{
|
||||
var key = $"{action.NameWithOwner}@{action.Ref}";
|
||||
result.Actions[key] = new ActionDownloadInfo
|
||||
{
|
||||
NameWithOwner = action.NameWithOwner,
|
||||
Ref = action.Ref,
|
||||
ResolvedNameWithOwner = action.NameWithOwner,
|
||||
ResolvedSha = $"{action.Ref}-sha",
|
||||
TarballUrl = $"https://api.github.com/repos/{action.NameWithOwner}/tarball/{action.Ref}",
|
||||
ZipballUrl = $"https://api.github.com/repos/{action.NameWithOwner}/zipball/{action.Ref}",
|
||||
};
|
||||
}
|
||||
return Task.FromResult(result);
|
||||
});
|
||||
|
||||
var actions = new List<Pipelines.ActionStep>
|
||||
{
|
||||
new Pipelines.ActionStep()
|
||||
{
|
||||
Name = "action1",
|
||||
Id = Guid.NewGuid(),
|
||||
Reference = new Pipelines.RepositoryPathReference()
|
||||
{
|
||||
Name = "TingluoHuang/runner_L0",
|
||||
Ref = "RepositoryActionWithWrapperActionfile_Node",
|
||||
RepositoryType = "GitHub"
|
||||
}
|
||||
},
|
||||
new Pipelines.ActionStep()
|
||||
{
|
||||
Name = "action2",
|
||||
Id = Guid.NewGuid(),
|
||||
Reference = new Pipelines.RepositoryPathReference()
|
||||
{
|
||||
Name = "TingluoHuang/runner_L0",
|
||||
Ref = "RepositoryActionWithWrapperActionfile_Docker",
|
||||
RepositoryType = "GitHub"
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
//Act
|
||||
await _actionManager.PrepareActionsAsync(_ec.Object, actions);
|
||||
|
||||
//Assert
|
||||
// Both actions are at depth 0 — should be resolved in a single batch call
|
||||
Assert.Equal(1, resolveCallCount);
|
||||
Assert.Equal(2, firstCallActionCount);
|
||||
|
||||
// Verify both were downloaded
|
||||
Assert.True(File.Exists(Path.Combine(_hc.GetDirectory(WellKnownDirectory.Actions), "TingluoHuang/runner_L0", "RepositoryActionWithWrapperActionfile_Node.completed")));
|
||||
Assert.True(File.Exists(Path.Combine(_hc.GetDirectory(WellKnownDirectory.Actions), "TingluoHuang/runner_L0", "RepositoryActionWithWrapperActionfile_Docker.completed")));
|
||||
}
|
||||
finally
|
||||
{
|
||||
Environment.SetEnvironmentVariable("ACTIONS_BATCH_ACTION_RESOLUTION", null);
|
||||
Teardown();
|
||||
}
|
||||
}
|
||||
|
||||
#if OS_LINUX
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public async void PrepareActions_NestedCompositeContainers_BatchedResolution()
|
||||
{
|
||||
// Verifies batching with nested composite actions that reference
|
||||
// container actions (Linux-only since containers require Linux).
|
||||
//
|
||||
// CompositeContainerNested (composite):
|
||||
// → repositoryactionwithdockerfile (Dockerfile)
|
||||
// → CompositeContainerNested2 (composite):
|
||||
// → repositoryactionwithdockerfile (Dockerfile, same as above)
|
||||
// → notpullorbuildimagesmultipletimes1 (Dockerfile)
|
||||
Environment.SetEnvironmentVariable("ACTIONS_BATCH_ACTION_RESOLUTION", "true");
|
||||
try
|
||||
{
|
||||
//Arrange
|
||||
Setup();
|
||||
|
||||
var resolveCallCount = 0;
|
||||
_jobServer.Setup(x => x.ResolveActionDownloadInfoAsync(It.IsAny<Guid>(), It.IsAny<string>(), It.IsAny<Guid>(), It.IsAny<Guid>(), It.IsAny<ActionReferenceList>(), It.IsAny<CancellationToken>()))
|
||||
.Returns((Guid scopeIdentifier, string hubName, Guid planId, Guid jobId, ActionReferenceList actions, CancellationToken cancellationToken) =>
|
||||
{
|
||||
resolveCallCount++;
|
||||
var result = new ActionDownloadInfoCollection { Actions = new Dictionary<string, ActionDownloadInfo>() };
|
||||
foreach (var action in actions.Actions)
|
||||
{
|
||||
var key = $"{action.NameWithOwner}@{action.Ref}";
|
||||
result.Actions[key] = new ActionDownloadInfo
|
||||
{
|
||||
NameWithOwner = action.NameWithOwner,
|
||||
Ref = action.Ref,
|
||||
ResolvedNameWithOwner = action.NameWithOwner,
|
||||
ResolvedSha = $"{action.Ref}-sha",
|
||||
TarballUrl = $"https://api.github.com/repos/{action.NameWithOwner}/tarball/{action.Ref}",
|
||||
ZipballUrl = $"https://api.github.com/repos/{action.NameWithOwner}/zipball/{action.Ref}",
|
||||
};
|
||||
}
|
||||
return Task.FromResult(result);
|
||||
});
|
||||
|
||||
var actionId = Guid.NewGuid();
|
||||
var actions = new List<Pipelines.ActionStep>
|
||||
{
|
||||
new Pipelines.ActionStep()
|
||||
{
|
||||
Name = "action",
|
||||
Id = actionId,
|
||||
Reference = new Pipelines.RepositoryPathReference()
|
||||
{
|
||||
Name = "TingluoHuang/runner_L0",
|
||||
Ref = "CompositeContainerNested",
|
||||
RepositoryType = "GitHub"
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
//Act
|
||||
var result = await _actionManager.PrepareActionsAsync(_ec.Object, actions);
|
||||
|
||||
//Assert
|
||||
// Tree has 3 depth levels with 5 unique actions.
|
||||
// With batching, should need at most 3 resolve calls (one per depth level).
|
||||
Assert.True(resolveCallCount <= 3, $"Expected at most 3 resolve calls but got {resolveCallCount}");
|
||||
|
||||
// repositoryactionwithdockerfile appears at both depth 1 and depth 2.
|
||||
// Container setup should still work correctly — 2 unique Docker images.
|
||||
Assert.Equal(2, result.ContainerSetupSteps.Count);
|
||||
}
|
||||
finally
|
||||
{
|
||||
Environment.SetEnvironmentVariable("ACTIONS_BATCH_ACTION_RESOLUTION", null);
|
||||
Teardown();
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public async Task PrepareActions_ParallelDownloads_MultipleUniqueActions()
{
    // Verifies that multiple unique top-level actions are downloaded via
    // DownloadActionsInParallelAsync (the parallel code path), and that
    // all actions are correctly resolved and downloaded.
    //
    // Fixed: the method previously returned `async void`; xunit cannot
    // observe exceptions from async-void tests, so it must return Task.
    Environment.SetEnvironmentVariable("ACTIONS_BATCH_ACTION_RESOLUTION", "true");
    try
    {
        //Arrange
        Setup();
        // Node action has pre step, and CompositePrestep recurses into
        // sub-actions that also need IActionRunner instances
        _hc.EnqueueInstance<IActionRunner>(new Mock<IActionRunner>().Object);
        _hc.EnqueueInstance<IActionRunner>(new Mock<IActionRunner>().Object);
        _hc.EnqueueInstance<IActionRunner>(new Mock<IActionRunner>().Object);
        _hc.EnqueueInstance<IActionRunner>(new Mock<IActionRunner>().Object);
        _hc.EnqueueInstance<IActionRunner>(new Mock<IActionRunner>().Object);

        var resolveCallCount = 0;
        _jobServer.Setup(x => x.ResolveActionDownloadInfoAsync(It.IsAny<Guid>(), It.IsAny<string>(), It.IsAny<Guid>(), It.IsAny<Guid>(), It.IsAny<ActionReferenceList>(), It.IsAny<CancellationToken>()))
            .Returns((Guid scopeIdentifier, string hubName, Guid planId, Guid jobId, ActionReferenceList actions, CancellationToken cancellationToken) =>
            {
                // Interlocked: resolve calls may run concurrently on the parallel path.
                Interlocked.Increment(ref resolveCallCount);
                var result = new ActionDownloadInfoCollection { Actions = new Dictionary<string, ActionDownloadInfo>() };
                foreach (var action in actions.Actions)
                {
                    var key = $"{action.NameWithOwner}@{action.Ref}";
                    result.Actions[key] = new ActionDownloadInfo
                    {
                        NameWithOwner = action.NameWithOwner,
                        Ref = action.Ref,
                        ResolvedNameWithOwner = action.NameWithOwner,
                        ResolvedSha = $"{action.Ref}-sha",
                        TarballUrl = $"https://api.github.com/repos/{action.NameWithOwner}/tarball/{action.Ref}",
                        ZipballUrl = $"https://api.github.com/repos/{action.NameWithOwner}/zipball/{action.Ref}",
                    };
                }
                return Task.FromResult(result);
            });

        var actions = new List<Pipelines.ActionStep>
        {
            new Pipelines.ActionStep()
            {
                Name = "action1",
                Id = Guid.NewGuid(),
                Reference = new Pipelines.RepositoryPathReference()
                {
                    Name = "TingluoHuang/runner_L0",
                    Ref = "RepositoryActionWithWrapperActionfile_Node",
                    RepositoryType = "GitHub"
                }
            },
            new Pipelines.ActionStep()
            {
                Name = "action2",
                Id = Guid.NewGuid(),
                Reference = new Pipelines.RepositoryPathReference()
                {
                    Name = "TingluoHuang/runner_L0",
                    Ref = "RepositoryActionWithWrapperActionfile_Docker",
                    RepositoryType = "GitHub"
                }
            },
            new Pipelines.ActionStep()
            {
                Name = "action3",
                Id = Guid.NewGuid(),
                Reference = new Pipelines.RepositoryPathReference()
                {
                    Name = "TingluoHuang/runner_L0",
                    Ref = "CompositePrestep",
                    RepositoryType = "GitHub"
                }
            }
        };

        //Act
        await _actionManager.PrepareActionsAsync(_ec.Object, actions);

        //Assert
        // 3 unique actions at depth 0 → triggers DownloadActionsInParallelAsync
        // (parallel path used when uniqueDownloads.Count > 1)
        var nodeCompleted = Path.Combine(_hc.GetDirectory(WellKnownDirectory.Actions), "TingluoHuang/runner_L0", "RepositoryActionWithWrapperActionfile_Node.completed");
        var dockerCompleted = Path.Combine(_hc.GetDirectory(WellKnownDirectory.Actions), "TingluoHuang/runner_L0", "RepositoryActionWithWrapperActionfile_Docker.completed");
        var compositeCompleted = Path.Combine(_hc.GetDirectory(WellKnownDirectory.Actions), "TingluoHuang/runner_L0", "CompositePrestep.completed");

        Assert.True(File.Exists(nodeCompleted), $"Expected watermark at {nodeCompleted}");
        Assert.True(File.Exists(dockerCompleted), $"Expected watermark at {dockerCompleted}");
        Assert.True(File.Exists(compositeCompleted), $"Expected watermark at {compositeCompleted}");

        // All depth-0 actions resolved in a single batch call.
        // Composite sub-actions may add 1-2 more calls.
        Assert.True(resolveCallCount >= 1, "Expected at least 1 resolve call");
    }
    finally
    {
        Environment.SetEnvironmentVariable("ACTIONS_BATCH_ACTION_RESOLUTION", null);
        Teardown();
    }
}
|
||||
|
||||
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public async Task PrepareActions_DownloadsNextLevelActionsBeforeRecursing()
{
    // Verifies that depth-1 actions are downloaded before the depth-2
    // pre-resolve fires. We detect this by snapshotting watermark state
    // inside the 3rd ResolveActionDownloadInfoAsync callback (which is
    // the depth-2 pre-resolve). If pre-download works, depth-1 watermarks
    // already exist at that point.
    //
    // Action tree:
    //   CompositePrestep (composite) → [Node, CompositePrestep2 (composite)]
    //   CompositePrestep2 (composite) → [Node, Docker]
    //
    // Without pre-download: downloads happen during recursion (serial per depth)
    // With pre-download: depth 1 actions (Node + CompositePrestep2) are
    // downloaded in parallel before recursing, so recursion is a no-op
    // for downloads.
    //
    // Fixed: returns Task (not async void) so xunit observes failures, and
    // the resolve counter uses Interlocked for consistency with the other
    // batch-resolution tests (callbacks can fire from parallel downloads).
    Environment.SetEnvironmentVariable("ACTIONS_BATCH_ACTION_RESOLUTION", "true");
    try
    {
        //Arrange
        Setup();
        _hc.EnqueueInstance<IActionRunner>(new Mock<IActionRunner>().Object);
        _hc.EnqueueInstance<IActionRunner>(new Mock<IActionRunner>().Object);
        _hc.EnqueueInstance<IActionRunner>(new Mock<IActionRunner>().Object);

        // Track watermark state at the time of each resolve call.
        // If pre-download works, when the 3rd resolve fires (depth 2
        // pre-resolve for Docker), the depth-1 actions (Node +
        // CompositePrestep2) should already have watermarks on disk.
        var resolveCallCount = 0;
        var watermarksAtResolve3 = new Dictionary<string, bool>();
        _jobServer.Setup(x => x.ResolveActionDownloadInfoAsync(It.IsAny<Guid>(), It.IsAny<string>(), It.IsAny<Guid>(), It.IsAny<Guid>(), It.IsAny<ActionReferenceList>(), It.IsAny<CancellationToken>()))
            .Returns((Guid scopeIdentifier, string hubName, Guid planId, Guid jobId, ActionReferenceList actions, CancellationToken cancellationToken) =>
            {
                var callNumber = Interlocked.Increment(ref resolveCallCount);
                if (callNumber == 3)
                {
                    // At the time of the 3rd resolve, check if depth-1 actions
                    // are already downloaded (pre-download should have done this)
                    var actionsDir2 = _hc.GetDirectory(WellKnownDirectory.Actions);
                    watermarksAtResolve3["Node"] = File.Exists(Path.Combine(actionsDir2, "TingluoHuang/runner_L0", "RepositoryActionWithWrapperActionfile_Node.completed"));
                    watermarksAtResolve3["CompositePrestep2"] = File.Exists(Path.Combine(actionsDir2, "TingluoHuang/runner_L0", "CompositePrestep2.completed"));
                }
                var result = new ActionDownloadInfoCollection { Actions = new Dictionary<string, ActionDownloadInfo>() };
                foreach (var action in actions.Actions)
                {
                    var key = $"{action.NameWithOwner}@{action.Ref}";
                    result.Actions[key] = new ActionDownloadInfo
                    {
                        NameWithOwner = action.NameWithOwner,
                        Ref = action.Ref,
                        ResolvedNameWithOwner = action.NameWithOwner,
                        ResolvedSha = $"{action.Ref}-sha",
                        TarballUrl = $"https://api.github.com/repos/{action.NameWithOwner}/tarball/{action.Ref}",
                        ZipballUrl = $"https://api.github.com/repos/{action.NameWithOwner}/zipball/{action.Ref}",
                    };
                }
                return Task.FromResult(result);
            });

        var actionId = Guid.NewGuid();
        var actions = new List<Pipelines.ActionStep>
        {
            new Pipelines.ActionStep()
            {
                Name = "action",
                Id = actionId,
                Reference = new Pipelines.RepositoryPathReference()
                {
                    Name = "TingluoHuang/runner_L0",
                    Ref = "CompositePrestep",
                    RepositoryType = "GitHub"
                }
            }
        };

        //Act
        await _actionManager.PrepareActionsAsync(_ec.Object, actions);

        //Assert
        // All actions should be downloaded (watermarks exist)
        var actionsDir = _hc.GetDirectory(WellKnownDirectory.Actions);
        Assert.True(File.Exists(Path.Combine(actionsDir, "TingluoHuang/runner_L0", "CompositePrestep.completed")));
        Assert.True(File.Exists(Path.Combine(actionsDir, "TingluoHuang/runner_L0", "RepositoryActionWithWrapperActionfile_Node.completed")));
        Assert.True(File.Exists(Path.Combine(actionsDir, "TingluoHuang/runner_L0", "CompositePrestep2.completed")));
        Assert.True(File.Exists(Path.Combine(actionsDir, "TingluoHuang/runner_L0", "RepositoryActionWithWrapperActionfile_Docker.completed")));

        // 3 resolve calls total
        Assert.Equal(3, resolveCallCount);

        // The key assertion: at the time of the 3rd resolve call
        // (pre-resolve for depth 2), the depth-1 actions should
        // ALREADY be downloaded thanks to pre-download.
        // Without pre-download, these watermarks wouldn't exist yet
        // because depth-1 downloads would only happen during recursion.
        Assert.True(watermarksAtResolve3["Node"],
            "Node action should be pre-downloaded before depth 2 pre-resolve");
        Assert.True(watermarksAtResolve3["CompositePrestep2"],
            "CompositePrestep2 should be pre-downloaded before depth 2 pre-resolve");
    }
    finally
    {
        Environment.SetEnvironmentVariable("ACTIONS_BATCH_ACTION_RESOLUTION", null);
        Teardown();
    }
}
|
||||
|
||||
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public async Task PrepareActions_ParallelDownloadsAtSameDepth()
{
    // Verifies that multiple unique actions at the same depth are
    // downloaded concurrently (Task.WhenAll) rather than sequentially.
    // We detect this by checking that all watermarks exist after a
    // single PrepareActionsAsync call with multiple top-level actions.
    //
    // Fixed: the method previously returned `async void`; xunit cannot
    // observe exceptions from async-void tests, so it must return Task.
    Environment.SetEnvironmentVariable("ACTIONS_BATCH_ACTION_RESOLUTION", "true");
    try
    {
        //Arrange
        Setup();
        _hc.EnqueueInstance<IActionRunner>(new Mock<IActionRunner>().Object);
        _hc.EnqueueInstance<IActionRunner>(new Mock<IActionRunner>().Object);

        _jobServer.Setup(x => x.ResolveActionDownloadInfoAsync(It.IsAny<Guid>(), It.IsAny<string>(), It.IsAny<Guid>(), It.IsAny<Guid>(), It.IsAny<ActionReferenceList>(), It.IsAny<CancellationToken>()))
            .Returns((Guid scopeIdentifier, string hubName, Guid planId, Guid jobId, ActionReferenceList actions, CancellationToken cancellationToken) =>
            {
                var result = new ActionDownloadInfoCollection { Actions = new Dictionary<string, ActionDownloadInfo>() };
                foreach (var action in actions.Actions)
                {
                    var key = $"{action.NameWithOwner}@{action.Ref}";
                    result.Actions[key] = new ActionDownloadInfo
                    {
                        NameWithOwner = action.NameWithOwner,
                        Ref = action.Ref,
                        ResolvedNameWithOwner = action.NameWithOwner,
                        ResolvedSha = $"{action.Ref}-sha",
                        TarballUrl = $"https://api.github.com/repos/{action.NameWithOwner}/tarball/{action.Ref}",
                        ZipballUrl = $"https://api.github.com/repos/{action.NameWithOwner}/zipball/{action.Ref}",
                    };
                }
                return Task.FromResult(result);
            });

        var actions = new List<Pipelines.ActionStep>
        {
            new Pipelines.ActionStep()
            {
                Name = "action1",
                Id = Guid.NewGuid(),
                Reference = new Pipelines.RepositoryPathReference()
                {
                    Name = "TingluoHuang/runner_L0",
                    Ref = "RepositoryActionWithWrapperActionfile_Node",
                    RepositoryType = "GitHub"
                }
            },
            new Pipelines.ActionStep()
            {
                Name = "action2",
                Id = Guid.NewGuid(),
                Reference = new Pipelines.RepositoryPathReference()
                {
                    Name = "TingluoHuang/runner_L0",
                    Ref = "RepositoryActionWithWrapperActionfile_Docker",
                    RepositoryType = "GitHub"
                }
            }
        };

        //Act
        await _actionManager.PrepareActionsAsync(_ec.Object, actions);

        //Assert - both downloaded (parallel path used when > 1 unique download)
        var actionsDir = _hc.GetDirectory(WellKnownDirectory.Actions);
        Assert.True(File.Exists(Path.Combine(actionsDir, "TingluoHuang/runner_L0", "RepositoryActionWithWrapperActionfile_Node.completed")));
        Assert.True(File.Exists(Path.Combine(actionsDir, "TingluoHuang/runner_L0", "RepositoryActionWithWrapperActionfile_Docker.completed")));
    }
    finally
    {
        Environment.SetEnvironmentVariable("ACTIONS_BATCH_ACTION_RESOLUTION", null);
        Teardown();
    }
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
|
||||
@@ -504,7 +504,7 @@ namespace GitHub.Runner.Common.Tests.Worker
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void Load_Node24Action()
|
||||
@@ -1006,6 +1006,45 @@ namespace GitHub.Runner.Common.Tests.Worker
|
||||
_ec.Setup(x => x.AddIssue(It.IsAny<Issue>(), It.IsAny<ExecutionContextLogOptions>())).Callback((Issue issue, ExecutionContextLogOptions logOptions) => { _hc.GetTrace().Info($"[{issue.Type}]{logOptions.LogMessageOverride ?? issue.Message}"); });
|
||||
}
|
||||
|
||||
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void Evaluate_Default_Input_Case_Function()
{
    // Evaluates a case() expression used as an action input default and
    // expects the value of the first matching branch.
    try
    {
        //Arrange
        Setup();

        var manifestManager = new ActionManifestManager();
        manifestManager.Initialize(_hc);

        // Minimal set of expression contexts an input-default evaluation reads.
        _ec.Object.ExpressionValues["github"] = new LegacyContextData.DictionaryContextData
        {
            { "ref", new LegacyContextData.StringContextData("refs/heads/main") },
        };
        foreach (var contextName in new[] { "strategy", "matrix", "steps", "job", "runner", "env" })
        {
            _ec.Object.ExpressionValues[contextName] = new LegacyContextData.DictionaryContextData();
        }
        _ec.Object.ExpressionFunctions.Add(new LegacyExpressions.FunctionInfo<GitHub.Runner.Worker.Expressions.HashFilesFunction>("hashFiles", 1, 255));

        //Act — evaluate a case() expression as a default input value
        // (presumably gated on a feature flag set by Setup() — TODO confirm).
        var expressionToken = new BasicExpressionToken(null, null, null, "case(true, 'matched', 'default')");
        var evaluated = manifestManager.EvaluateDefaultInput(_ec.Object, "testInput", expressionToken);

        //Assert — case() picks the first matching branch
        Assert.Equal("matched", evaluated);
    }
    finally
    {
        Teardown();
    }
}
|
||||
|
||||
private void Teardown()
|
||||
{
|
||||
_hc?.Dispose();
|
||||
|
||||
616
src/Test/L0/Worker/DapDebuggerL0.cs
Normal file
616
src/Test/L0/Worker/DapDebuggerL0.cs
Normal file
@@ -0,0 +1,616 @@
|
||||
using System;
using System.Globalization;
using System.IO;
using System.Net;
using System.Net.Sockets;
using System.Runtime.CompilerServices;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using GitHub.Runner.Worker;
using GitHub.Runner.Worker.Dap;
using Moq;
using Newtonsoft.Json;
using Xunit;
|
||||
|
||||
namespace GitHub.Runner.Common.Tests.Worker
|
||||
{
|
||||
public sealed class DapDebuggerL0
|
||||
{
|
||||
// Environment variables the debugger reads for its listener configuration
// (names must match what DapDebugger itself looks up).
private const string PortEnvironmentVariable = "ACTIONS_RUNNER_DAP_PORT";
private const string TimeoutEnvironmentVariable = "ACTIONS_RUNNER_DAP_CONNECTION_TIMEOUT";

// Debugger under test; re-created per test by CreateTestContext.
private DapDebugger _debugger;
|
||||
|
||||
private TestHostContext CreateTestContext([CallerMemberName] string testName = "")
{
    // Builds a host context named after the calling test and wires up a
    // fresh debugger instance into the _debugger field.
    var hostContext = new TestHostContext(this, testName);
    _debugger = new DapDebugger();
    _debugger.Initialize(hostContext);
    return hostContext;
}
|
||||
|
||||
private static async Task WithEnvironmentVariableAsync(string name, string value, Func<Task> action)
{
    // Runs the async action with the environment variable temporarily
    // overridden, restoring the prior value even if the action throws.
    var previous = Environment.GetEnvironmentVariable(name);
    Environment.SetEnvironmentVariable(name, value);
    try
    {
        await action();
    }
    finally
    {
        Environment.SetEnvironmentVariable(name, previous);
    }
}
|
||||
|
||||
private static void WithEnvironmentVariable(string name, string value, Action action)
{
    // Synchronous twin of WithEnvironmentVariableAsync: override the
    // variable for the duration of the action, then restore it.
    var previous = Environment.GetEnvironmentVariable(name);
    Environment.SetEnvironmentVariable(name, value);
    try
    {
        action();
    }
    finally
    {
        Environment.SetEnvironmentVariable(name, previous);
    }
}
|
||||
|
||||
private static int GetFreePort()
{
    // Ask the OS for an ephemeral loopback port, then release the probe
    // listener so the test can bind the same port itself.
    using var probe = new TcpListener(IPAddress.Loopback, 0);
    probe.Start();
    return ((IPEndPoint)probe.LocalEndpoint).Port;
}
|
||||
|
||||
private static async Task<TcpClient> ConnectClientAsync(int port)
{
    // Connects a raw TCP client to the debugger's loopback listener.
    // Caller owns (and must dispose) the returned client.
    var tcpClient = new TcpClient();
    await tcpClient.ConnectAsync(IPAddress.Loopback, port);
    return tcpClient;
}
|
||||
|
||||
private static async Task SendRequestAsync(NetworkStream stream, Request request)
{
    // Writes one DAP-framed message: a Content-Length header terminated
    // by a blank line, followed by the UTF-8 JSON body.
    var payload = Encoding.UTF8.GetBytes(JsonConvert.SerializeObject(request));
    var headerBytes = Encoding.ASCII.GetBytes($"Content-Length: {payload.Length}\r\n\r\n");

    await stream.WriteAsync(headerBytes, 0, headerBytes.Length);
    await stream.WriteAsync(payload, 0, payload.Length);
    await stream.FlushAsync();
}
|
||||
|
||||
/// <summary>
/// Reads a single DAP-framed message from a stream with a timeout.
/// Parses the Content-Length header, reads exactly that many bytes,
/// and returns the JSON body. The timeout surfaces as an
/// OperationCanceledException from the underlying reads.
/// </summary>
private static async Task<string> ReadDapMessageAsync(NetworkStream stream, TimeSpan timeout)
{
    using var cts = new CancellationTokenSource(timeout);
    var token = cts.Token;

    // Read headers one byte at a time so no body bytes are consumed
    // before the blank-line terminator is seen.
    var headerBuilder = new StringBuilder();
    var buffer = new byte[1];
    var contentLength = -1;

    while (true)
    {
        var bytesRead = await stream.ReadAsync(buffer, 0, 1, token);
        if (bytesRead == 0)
        {
            throw new EndOfStreamException("Connection closed while reading DAP headers");
        }

        headerBuilder.Append((char)buffer[0]);
        var headers = headerBuilder.ToString();
        if (headers.EndsWith("\r\n\r\n", StringComparison.Ordinal))
        {
            foreach (var line in headers.Split(new[] { "\r\n" }, StringSplitOptions.RemoveEmptyEntries))
            {
                if (line.StartsWith("Content-Length: ", StringComparison.OrdinalIgnoreCase))
                {
                    // Wire data is machine-readable: parse with the invariant
                    // culture so the result never depends on OS locale (CA1305).
                    contentLength = int.Parse(line.Substring("Content-Length: ".Length).Trim(), CultureInfo.InvariantCulture);
                }
            }
            break;
        }
    }

    if (contentLength < 0)
    {
        throw new InvalidOperationException("No Content-Length header found in DAP message");
    }

    // Loop until the full body arrives; a single ReadAsync may return
    // fewer bytes than requested.
    var body = new byte[contentLength];
    var totalRead = 0;
    while (totalRead < contentLength)
    {
        var bytesRead = await stream.ReadAsync(body, totalRead, contentLength - totalRead, token);
        if (bytesRead == 0)
        {
            throw new EndOfStreamException("Connection closed while reading DAP body");
        }
        totalRead += bytesRead;
    }

    return Encoding.UTF8.GetString(body);
}
|
||||
|
||||
private static Mock<IExecutionContext> CreateJobContext(CancellationToken cancellationToken, string jobName = null)
{
    // Minimal job-level execution context mock: exposes the supplied
    // cancellation token and answers GetGitHubContext("job") with jobName
    // (null for every other context name).
    var contextMock = new Mock<IExecutionContext>();
    contextMock.Setup(x => x.CancellationToken).Returns(cancellationToken);
    contextMock
        .Setup(x => x.GetGitHubContext(It.IsAny<string>()))
        .Returns((string contextName) => string.Equals(contextName, "job", StringComparison.Ordinal) ? jobName : null);
    return contextMock;
}
|
||||
|
||||
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void InitializeSucceeds()
{
    // After Initialize the debugger exists but no session is active yet.
    using var _ = CreateTestContext();

    Assert.NotNull(_debugger);
    Assert.False(_debugger.IsActive);
}
|
||||
|
||||
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void ResolvePortUsesCustomPortFromEnvironment()
{
    // A valid ACTIONS_RUNNER_DAP_PORT value overrides the default port.
    using var _ = CreateTestContext();
    WithEnvironmentVariable(PortEnvironmentVariable, "9999",
        () => Assert.Equal(9999, _debugger.ResolvePort()));
}
|
||||
|
||||
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void ResolvePortIgnoresInvalidPortFromEnvironment()
{
    // A non-numeric port value falls back to the default (4711).
    using var _ = CreateTestContext();
    WithEnvironmentVariable(PortEnvironmentVariable, "not-a-number",
        () => Assert.Equal(4711, _debugger.ResolvePort()));
}
|
||||
|
||||
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void ResolvePortIgnoresOutOfRangePortFromEnvironment()
{
    // A port outside the valid TCP range falls back to the default (4711).
    using var _ = CreateTestContext();
    WithEnvironmentVariable(PortEnvironmentVariable, "99999",
        () => Assert.Equal(4711, _debugger.ResolvePort()));
}
|
||||
|
||||
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void ResolveTimeoutUsesCustomTimeoutFromEnvironment()
{
    // A valid ACTIONS_RUNNER_DAP_CONNECTION_TIMEOUT overrides the default.
    using var _ = CreateTestContext();
    WithEnvironmentVariable(TimeoutEnvironmentVariable, "30",
        () => Assert.Equal(30, _debugger.ResolveTimeout()));
}
|
||||
|
||||
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void ResolveTimeoutIgnoresInvalidTimeoutFromEnvironment()
{
    // A non-numeric timeout value falls back to the default (15).
    using var _ = CreateTestContext();
    WithEnvironmentVariable(TimeoutEnvironmentVariable, "not-a-number",
        () => Assert.Equal(15, _debugger.ResolveTimeout()));
}
|
||||
|
||||
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void ResolveTimeoutIgnoresZeroTimeoutFromEnvironment()
{
    // A zero timeout is rejected and falls back to the default (15).
    using var _ = CreateTestContext();
    WithEnvironmentVariable(TimeoutEnvironmentVariable, "0",
        () => Assert.Equal(15, _debugger.ResolveTimeout()));
}
|
||||
|
||||
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public async Task StartAndStopLifecycle()
{
    // StartAsync listens on the configured port, a client can connect,
    // and StopAsync tears the listener down without error.
    using var _ = CreateTestContext();
    var port = GetFreePort();
    await WithEnvironmentVariableAsync(PortEnvironmentVariable, $"{port}", async () =>
    {
        using var timeout = new CancellationTokenSource(TimeSpan.FromSeconds(10));
        var jobContext = CreateJobContext(timeout.Token);
        await _debugger.StartAsync(jobContext.Object);
        using var client = await ConnectClientAsync(port);
        Assert.True(client.Connected);
        await _debugger.StopAsync();
    });
}
|
||||
|
||||
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public async Task StartAndStopMultipleTimesDoesNotThrow()
{
    // The debugger must survive repeated start/stop cycles on fresh ports.
    using var _ = CreateTestContext();
    foreach (var port in new[] { GetFreePort(), GetFreePort() })
    {
        await WithEnvironmentVariableAsync(PortEnvironmentVariable, $"{port}", async () =>
        {
            using var timeout = new CancellationTokenSource(TimeSpan.FromSeconds(10));
            await _debugger.StartAsync(CreateJobContext(timeout.Token).Object);
            await _debugger.StopAsync();
        });
    }
}
|
||||
|
||||
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public async Task WaitUntilReadyCompletesAfterClientConnectionAndConfigurationDone()
{
    // WaitUntilReadyAsync unblocks once a client connects and sends
    // configurationDone, leaving the session in the Ready state.
    using var _ = CreateTestContext();
    var port = GetFreePort();
    await WithEnvironmentVariableAsync(PortEnvironmentVariable, $"{port}", async () =>
    {
        using var timeout = new CancellationTokenSource(TimeSpan.FromSeconds(10));
        await _debugger.StartAsync(CreateJobContext(timeout.Token).Object);

        var readyTask = _debugger.WaitUntilReadyAsync();
        using var client = await ConnectClientAsync(port);
        await SendRequestAsync(client.GetStream(), new Request
        {
            Seq = 1,
            Type = "request",
            Command = "configurationDone"
        });

        await readyTask;
        Assert.Equal(DapSessionState.Ready, _debugger.State);
        await _debugger.StopAsync();
    });
}
|
||||
|
||||
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public async Task StartStoresJobContextForThreadsRequest()
{
    // The job context captured by StartAsync supplies the job name that
    // answers a DAP "threads" request.
    using (CreateTestContext())
    {
        var port = GetFreePort();
        await WithEnvironmentVariableAsync(PortEnvironmentVariable, port.ToString(), async () =>
        {
            using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));
            var jobContext = CreateJobContext(cts.Token, "ci-job");
            await _debugger.StartAsync(jobContext.Object);
            using var client = await ConnectClientAsync(port);
            // Capture the stream once and reuse it (the original called
            // client.GetStream() a second time for the send).
            var stream = client.GetStream();
            await SendRequestAsync(stream, new Request
            {
                Seq = 1,
                Type = "request",
                Command = "threads"
            });

            var response = await ReadDapMessageAsync(stream, TimeSpan.FromSeconds(5));
            Assert.Contains("\"command\":\"threads\"", response);
            Assert.Contains("\"name\":\"Job: ci-job\"", response);
            await _debugger.StopAsync();
        });
    }
}
|
||||
|
||||
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public async Task CancellationUnblocksAndOnJobCompletedTerminates()
{
    // Job cancellation only releases pending waits; OnJobCompletedAsync
    // (always invoked by JobRunner from a finally block in the real
    // runner) performs the state transition and cleanup.
    using var _ = CreateTestContext();
    var port = GetFreePort();
    await WithEnvironmentVariableAsync(PortEnvironmentVariable, $"{port}", async () =>
    {
        using var jobCts = new CancellationTokenSource(TimeSpan.FromSeconds(10));
        await _debugger.StartAsync(CreateJobContext(jobCts.Token).Object);

        var readyTask = _debugger.WaitUntilReadyAsync();
        using var client = await ConnectClientAsync(port);
        await SendRequestAsync(client.GetStream(), new Request
        {
            Seq = 1,
            Type = "request",
            Command = "configurationDone"
        });

        await readyTask;
        jobCts.Cancel();

        await _debugger.OnJobCompletedAsync();
        Assert.Equal(DapSessionState.Terminated, _debugger.State);
    });
}
|
||||
|
||||
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public async Task StopWithoutStartDoesNotThrow()
{
    // StopAsync must be a safe no-op when no session was ever started.
    using var _ = CreateTestContext();
    await _debugger.StopAsync();
}
|
||||
|
||||
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public async Task OnJobCompletedTerminatesSession()
{
    // After a completed handshake, OnJobCompletedAsync moves the session
    // into the Terminated state.
    using var _ = CreateTestContext();
    var port = GetFreePort();
    await WithEnvironmentVariableAsync(PortEnvironmentVariable, $"{port}", async () =>
    {
        using var timeout = new CancellationTokenSource(TimeSpan.FromSeconds(10));
        await _debugger.StartAsync(CreateJobContext(timeout.Token).Object);

        var readyTask = _debugger.WaitUntilReadyAsync();
        using var client = await ConnectClientAsync(port);
        await SendRequestAsync(client.GetStream(), new Request
        {
            Seq = 1,
            Type = "request",
            Command = "configurationDone"
        });

        await readyTask;
        await _debugger.OnJobCompletedAsync();
        Assert.Equal(DapSessionState.Terminated, _debugger.State);
    });
}
|
||||
|
||||
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public async Task WaitUntilReadyBeforeStartIsNoOp()
{
    // With no session started there is nothing to wait for; the call
    // must return without blocking or throwing.
    using var _ = CreateTestContext();
    await _debugger.WaitUntilReadyAsync();
}
|
||||
|
||||
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public async Task WaitUntilReadyJobCancellationPropagatesAsOperationCancelledException()
{
    // Cancelling the job token while the handshake is pending must
    // surface as cancellation, not as a timeout.
    using var _ = CreateTestContext();
    var port = GetFreePort();
    await WithEnvironmentVariableAsync(PortEnvironmentVariable, $"{port}", async () =>
    {
        using var jobCts = new CancellationTokenSource(TimeSpan.FromSeconds(10));
        await _debugger.StartAsync(CreateJobContext(jobCts.Token).Object);

        var readyTask = _debugger.WaitUntilReadyAsync();
        // Give the wait a moment to begin before cancelling.
        await Task.Delay(50);
        jobCts.Cancel();

        var caught = await Assert.ThrowsAnyAsync<OperationCanceledException>(() => readyTask);
        Assert.IsNotType<TimeoutException>(caught);
        await _debugger.StopAsync();
    });
}
|
||||
|
||||
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public async Task InitializeRequestOverSocketPreservesProtocolMetadataWhenSecretsCollide()
{
    // Registers secret values that collide with DAP protocol keywords and
    // verifies the wire responses still carry their protocol metadata.
    using var hc = CreateTestContext();
    foreach (var collidingSecret in new[] { "response", "initialize", "event", "initialized" })
    {
        hc.SecretMasker.AddValue(collidingSecret);
    }

    var port = GetFreePort();
    await WithEnvironmentVariableAsync(PortEnvironmentVariable, $"{port}", async () =>
    {
        using var timeout = new CancellationTokenSource(TimeSpan.FromSeconds(10));
        await _debugger.StartAsync(CreateJobContext(timeout.Token).Object);
        using var client = await ConnectClientAsync(port);
        var stream = client.GetStream();

        await SendRequestAsync(stream, new Request
        {
            Seq = 1,
            Type = "request",
            Command = "initialize"
        });

        var response = await ReadDapMessageAsync(stream, TimeSpan.FromSeconds(5));
        Assert.Contains("\"type\":\"response\"", response);
        Assert.Contains("\"command\":\"initialize\"", response);
        Assert.Contains("\"success\":true", response);

        var initializedEvent = await ReadDapMessageAsync(stream, TimeSpan.FromSeconds(5));
        Assert.Contains("\"type\":\"event\"", initializedEvent);
        Assert.Contains("\"event\":\"initialized\"", initializedEvent);

        await _debugger.StopAsync();
    });
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public async Task CancellationDuringStepPauseReleasesWait()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
var port = GetFreePort();
|
||||
await WithEnvironmentVariableAsync(PortEnvironmentVariable, port.ToString(), async () =>
|
||||
{
|
||||
using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));
|
||||
var jobContext = CreateJobContext(cts.Token);
|
||||
await _debugger.StartAsync(jobContext.Object);
|
||||
|
||||
// Complete handshake so session is ready
|
||||
var waitTask = _debugger.WaitUntilReadyAsync();
|
||||
using var client = await ConnectClientAsync(port);
|
||||
var stream = client.GetStream();
|
||||
await SendRequestAsync(stream, new Request
|
||||
{
|
||||
Seq = 1,
|
||||
Type = "request",
|
||||
Command = "configurationDone"
|
||||
});
|
||||
await waitTask;
|
||||
|
||||
// Simulate a step starting (which pauses)
|
||||
var step = new Mock<IStep>();
|
||||
step.Setup(s => s.DisplayName).Returns("Test Step");
|
||||
step.Setup(s => s.ExecutionContext).Returns((IExecutionContext)null);
|
||||
var stepTask = _debugger.OnStepStartingAsync(step.Object);
|
||||
|
||||
// Give the step time to pause
|
||||
await Task.Delay(50);
|
||||
|
||||
// Cancel the job — should release the step pause
|
||||
cts.Cancel();
|
||||
await stepTask;
|
||||
|
||||
// In the real runner, OnJobCompletedAsync always follows.
|
||||
await _debugger.OnJobCompletedAsync();
|
||||
Assert.Equal(DapSessionState.Terminated, _debugger.State);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public async Task StopAsyncSafeAtAnyLifecyclePoint()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
// StopAsync before start
|
||||
await _debugger.StopAsync();
|
||||
|
||||
// Start then immediate stop (no connection, no WaitUntilReady)
|
||||
var port = GetFreePort();
|
||||
await WithEnvironmentVariableAsync(PortEnvironmentVariable, port.ToString(), async () =>
|
||||
{
|
||||
using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));
|
||||
var jobContext = CreateJobContext(cts.Token);
|
||||
await _debugger.StartAsync(jobContext.Object);
|
||||
await _debugger.StopAsync();
|
||||
});
|
||||
|
||||
// StopAsync after already stopped
|
||||
await _debugger.StopAsync();
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public async Task OnJobCompletedSendsTerminatedAndExitedEvents()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
var port = GetFreePort();
|
||||
await WithEnvironmentVariableAsync(PortEnvironmentVariable, port.ToString(), async () =>
|
||||
{
|
||||
using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));
|
||||
var jobContext = CreateJobContext(cts.Token);
|
||||
await _debugger.StartAsync(jobContext.Object);
|
||||
|
||||
var waitTask = _debugger.WaitUntilReadyAsync();
|
||||
using var client = await ConnectClientAsync(port);
|
||||
var stream = client.GetStream();
|
||||
await SendRequestAsync(stream, new Request
|
||||
{
|
||||
Seq = 1,
|
||||
Type = "request",
|
||||
Command = "configurationDone"
|
||||
});
|
||||
|
||||
// Read the configurationDone response
|
||||
await ReadDapMessageAsync(stream, TimeSpan.FromSeconds(5));
|
||||
await waitTask;
|
||||
|
||||
// Complete the job — events are sent via OnJobCompletedAsync
|
||||
await _debugger.OnJobCompletedAsync();
|
||||
|
||||
var msg1 = await ReadDapMessageAsync(stream, TimeSpan.FromSeconds(5));
|
||||
var msg2 = await ReadDapMessageAsync(stream, TimeSpan.FromSeconds(5));
|
||||
|
||||
// Both events should arrive (order may vary)
|
||||
var combined = msg1 + msg2;
|
||||
Assert.Contains("\"event\":\"terminated\"", combined);
|
||||
Assert.Contains("\"event\":\"exited\"", combined);
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
233
src/Test/L0/Worker/DapMessagesL0.cs
Normal file
233
src/Test/L0/Worker/DapMessagesL0.cs
Normal file
@@ -0,0 +1,233 @@
|
||||
using System.Collections.Generic;
|
||||
using Newtonsoft.Json;
|
||||
using Newtonsoft.Json.Linq;
|
||||
using Xunit;
|
||||
using GitHub.Runner.Worker.Dap;
|
||||
|
||||
namespace GitHub.Runner.Common.Tests.Worker
|
||||
{
|
||||
public sealed class DapMessagesL0
|
||||
{
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void RequestSerializesCorrectly()
|
||||
{
|
||||
var request = new Request
|
||||
{
|
||||
Seq = 1,
|
||||
Type = "request",
|
||||
Command = "initialize",
|
||||
Arguments = JObject.FromObject(new { clientID = "test-client" })
|
||||
};
|
||||
|
||||
var json = JsonConvert.SerializeObject(request);
|
||||
var deserialized = JsonConvert.DeserializeObject<Request>(json);
|
||||
|
||||
Assert.Equal(1, deserialized.Seq);
|
||||
Assert.Equal("request", deserialized.Type);
|
||||
Assert.Equal("initialize", deserialized.Command);
|
||||
Assert.Equal("test-client", deserialized.Arguments["clientID"].ToString());
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void ResponseSerializesCorrectly()
|
||||
{
|
||||
var response = new Response
|
||||
{
|
||||
Seq = 2,
|
||||
Type = "response",
|
||||
RequestSeq = 1,
|
||||
Success = true,
|
||||
Command = "initialize",
|
||||
Body = new Capabilities { SupportsConfigurationDoneRequest = true }
|
||||
};
|
||||
|
||||
var json = JsonConvert.SerializeObject(response);
|
||||
var deserialized = JsonConvert.DeserializeObject<Response>(json);
|
||||
|
||||
Assert.Equal(2, deserialized.Seq);
|
||||
Assert.Equal("response", deserialized.Type);
|
||||
Assert.Equal(1, deserialized.RequestSeq);
|
||||
Assert.True(deserialized.Success);
|
||||
Assert.Equal("initialize", deserialized.Command);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void EventSerializesWithCorrectType()
|
||||
{
|
||||
var evt = new Event
|
||||
{
|
||||
EventType = "stopped",
|
||||
Body = new StoppedEventBody
|
||||
{
|
||||
Reason = "entry",
|
||||
Description = "Stopped at entry",
|
||||
ThreadId = 1,
|
||||
AllThreadsStopped = true
|
||||
}
|
||||
};
|
||||
|
||||
Assert.Equal("event", evt.Type);
|
||||
|
||||
var json = JsonConvert.SerializeObject(evt);
|
||||
Assert.Contains("\"type\":\"event\"", json);
|
||||
Assert.Contains("\"event\":\"stopped\"", json);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void StoppedEventBodyOmitsNullFields()
|
||||
{
|
||||
var body = new StoppedEventBody
|
||||
{
|
||||
Reason = "step"
|
||||
};
|
||||
|
||||
var json = JsonConvert.SerializeObject(body);
|
||||
Assert.Contains("\"reason\":\"step\"", json);
|
||||
Assert.DoesNotContain("\"threadId\"", json);
|
||||
Assert.DoesNotContain("\"allThreadsStopped\"", json);
|
||||
Assert.DoesNotContain("\"description\"", json);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void CapabilitiesMvpDefaults()
|
||||
{
|
||||
var caps = new Capabilities
|
||||
{
|
||||
SupportsConfigurationDoneRequest = true,
|
||||
SupportsFunctionBreakpoints = false,
|
||||
SupportsStepBack = false
|
||||
};
|
||||
|
||||
var json = JsonConvert.SerializeObject(caps);
|
||||
var deserialized = JsonConvert.DeserializeObject<Capabilities>(json);
|
||||
|
||||
Assert.True(deserialized.SupportsConfigurationDoneRequest);
|
||||
Assert.False(deserialized.SupportsFunctionBreakpoints);
|
||||
Assert.False(deserialized.SupportsStepBack);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void ContinueResponseBodySerialization()
|
||||
{
|
||||
var body = new ContinueResponseBody { AllThreadsContinued = true };
|
||||
var json = JsonConvert.SerializeObject(body);
|
||||
var deserialized = JsonConvert.DeserializeObject<ContinueResponseBody>(json);
|
||||
|
||||
Assert.True(deserialized.AllThreadsContinued);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void ThreadsResponseBodySerialization()
|
||||
{
|
||||
var body = new ThreadsResponseBody
|
||||
{
|
||||
Threads = new List<Thread>
|
||||
{
|
||||
new Thread { Id = 1, Name = "Job Thread" }
|
||||
}
|
||||
};
|
||||
|
||||
var json = JsonConvert.SerializeObject(body);
|
||||
var deserialized = JsonConvert.DeserializeObject<ThreadsResponseBody>(json);
|
||||
|
||||
Assert.Single(deserialized.Threads);
|
||||
Assert.Equal(1, deserialized.Threads[0].Id);
|
||||
Assert.Equal("Job Thread", deserialized.Threads[0].Name);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void StackFrameSerialization()
|
||||
{
|
||||
var frame = new StackFrame
|
||||
{
|
||||
Id = 1,
|
||||
Name = "Step: Checkout",
|
||||
Line = 1,
|
||||
Column = 1,
|
||||
PresentationHint = "normal"
|
||||
};
|
||||
|
||||
var json = JsonConvert.SerializeObject(frame);
|
||||
var deserialized = JsonConvert.DeserializeObject<StackFrame>(json);
|
||||
|
||||
Assert.Equal(1, deserialized.Id);
|
||||
Assert.Equal("Step: Checkout", deserialized.Name);
|
||||
Assert.Equal("normal", deserialized.PresentationHint);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void ExitedEventBodySerialization()
|
||||
{
|
||||
var body = new ExitedEventBody { ExitCode = 130 };
|
||||
var json = JsonConvert.SerializeObject(body);
|
||||
var deserialized = JsonConvert.DeserializeObject<ExitedEventBody>(json);
|
||||
|
||||
Assert.Equal(130, deserialized.ExitCode);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void DapCommandEnumValues()
|
||||
{
|
||||
Assert.Equal(0, (int)DapCommand.Continue);
|
||||
Assert.Equal(1, (int)DapCommand.Next);
|
||||
Assert.Equal(4, (int)DapCommand.Disconnect);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void RequestDeserializesFromRawJson()
|
||||
{
|
||||
var json = @"{""seq"":5,""type"":""request"",""command"":""continue"",""arguments"":{""threadId"":1}}";
|
||||
var request = JsonConvert.DeserializeObject<Request>(json);
|
||||
|
||||
Assert.Equal(5, request.Seq);
|
||||
Assert.Equal("request", request.Type);
|
||||
Assert.Equal("continue", request.Command);
|
||||
Assert.Equal(1, request.Arguments["threadId"].Value<int>());
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void ErrorResponseBodySerialization()
|
||||
{
|
||||
var body = new ErrorResponseBody
|
||||
{
|
||||
Error = new Message
|
||||
{
|
||||
Id = 1,
|
||||
Format = "Something went wrong",
|
||||
ShowUser = true
|
||||
}
|
||||
};
|
||||
|
||||
var json = JsonConvert.SerializeObject(body);
|
||||
var deserialized = JsonConvert.DeserializeObject<ErrorResponseBody>(json);
|
||||
|
||||
Assert.Equal(1, deserialized.Error.Id);
|
||||
Assert.Equal("Something went wrong", deserialized.Error.Format);
|
||||
Assert.True(deserialized.Error.ShowUser);
|
||||
}
|
||||
}
|
||||
}
|
||||
237
src/Test/L0/Worker/DapReplExecutorL0.cs
Normal file
237
src/Test/L0/Worker/DapReplExecutorL0.cs
Normal file
@@ -0,0 +1,237 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Runtime.CompilerServices;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using GitHub.DistributedTask.Expressions2;
|
||||
using GitHub.DistributedTask.Pipelines.ContextData;
|
||||
using GitHub.Runner.Common.Tests;
|
||||
using GitHub.Runner.Worker;
|
||||
using GitHub.Runner.Worker.Dap;
|
||||
using Moq;
|
||||
using Xunit;
|
||||
|
||||
namespace GitHub.Runner.Common.Tests.Worker
|
||||
{
|
||||
public sealed class DapReplExecutorL0
|
||||
{
|
||||
private TestHostContext _hc;
|
||||
private DapReplExecutor _executor;
|
||||
private List<Event> _sentEvents;
|
||||
|
||||
private TestHostContext CreateTestContext([CallerMemberName] string testName = "")
|
||||
{
|
||||
_hc = new TestHostContext(this, testName);
|
||||
_sentEvents = new List<Event>();
|
||||
_executor = new DapReplExecutor(_hc, (category, text) =>
|
||||
{
|
||||
_sentEvents.Add(new Event
|
||||
{
|
||||
EventType = "output",
|
||||
Body = new OutputEventBody
|
||||
{
|
||||
Category = category,
|
||||
Output = text
|
||||
}
|
||||
});
|
||||
});
|
||||
return _hc;
|
||||
}
|
||||
|
||||
private Mock<IExecutionContext> CreateMockContext(
|
||||
DictionaryContextData exprValues = null,
|
||||
IDictionary<string, IDictionary<string, string>> jobDefaults = null)
|
||||
{
|
||||
var mock = new Mock<IExecutionContext>();
|
||||
mock.Setup(x => x.ExpressionValues).Returns(exprValues ?? new DictionaryContextData());
|
||||
mock.Setup(x => x.ExpressionFunctions).Returns(new List<IFunctionInfo>());
|
||||
|
||||
var global = new GlobalContext
|
||||
{
|
||||
PrependPath = new List<string>(),
|
||||
JobDefaults = jobDefaults
|
||||
?? new Dictionary<string, IDictionary<string, string>>(StringComparer.OrdinalIgnoreCase),
|
||||
};
|
||||
mock.Setup(x => x.Global).Returns(global);
|
||||
|
||||
return mock;
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public async Task ExecuteRunCommand_NullContext_ReturnsError()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
var command = new RunCommand { Script = "echo hello" };
|
||||
var result = await _executor.ExecuteRunCommandAsync(command, null, CancellationToken.None);
|
||||
|
||||
Assert.Equal("error", result.Type);
|
||||
Assert.Contains("No execution context available", result.Result);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void ExpandExpressions_NoExpressions_ReturnsInput()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
var context = CreateMockContext();
|
||||
var result = _executor.ExpandExpressions("echo hello", context.Object);
|
||||
|
||||
Assert.Equal("echo hello", result);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void ExpandExpressions_NullInput_ReturnsEmpty()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
var context = CreateMockContext();
|
||||
var result = _executor.ExpandExpressions(null, context.Object);
|
||||
|
||||
Assert.Equal(string.Empty, result);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void ExpandExpressions_EmptyInput_ReturnsEmpty()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
var context = CreateMockContext();
|
||||
var result = _executor.ExpandExpressions("", context.Object);
|
||||
|
||||
Assert.Equal(string.Empty, result);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void ExpandExpressions_UnterminatedExpression_KeepsLiteral()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
var context = CreateMockContext();
|
||||
var result = _executor.ExpandExpressions("echo ${{ github.repo", context.Object);
|
||||
|
||||
Assert.Equal("echo ${{ github.repo", result);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void ResolveDefaultShell_NoJobDefaults_ReturnsPlatformDefault()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
var context = CreateMockContext();
|
||||
var result = _executor.ResolveDefaultShell(context.Object);
|
||||
|
||||
#if OS_WINDOWS
|
||||
Assert.True(result == "pwsh" || result == "powershell");
|
||||
#else
|
||||
Assert.Equal("sh", result);
|
||||
#endif
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void ResolveDefaultShell_WithJobDefault_ReturnsJobDefault()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
var jobDefaults = new Dictionary<string, IDictionary<string, string>>(StringComparer.OrdinalIgnoreCase)
|
||||
{
|
||||
["run"] = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
|
||||
{
|
||||
["shell"] = "bash"
|
||||
}
|
||||
};
|
||||
var context = CreateMockContext(jobDefaults: jobDefaults);
|
||||
var result = _executor.ResolveDefaultShell(context.Object);
|
||||
|
||||
Assert.Equal("bash", result);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void BuildEnvironment_MergesEnvContextAndReplOverrides()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
var exprValues = new DictionaryContextData();
|
||||
var envData = new DictionaryContextData
|
||||
{
|
||||
["FOO"] = new StringContextData("bar"),
|
||||
};
|
||||
exprValues["env"] = envData;
|
||||
|
||||
var context = CreateMockContext(exprValues);
|
||||
var replEnv = new Dictionary<string, string> { { "BAZ", "qux" } };
|
||||
var result = _executor.BuildEnvironment(context.Object, replEnv);
|
||||
|
||||
Assert.Equal("bar", result["FOO"]);
|
||||
Assert.Equal("qux", result["BAZ"]);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void BuildEnvironment_ReplOverridesWin()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
var exprValues = new DictionaryContextData();
|
||||
var envData = new DictionaryContextData
|
||||
{
|
||||
["FOO"] = new StringContextData("original"),
|
||||
};
|
||||
exprValues["env"] = envData;
|
||||
|
||||
var context = CreateMockContext(exprValues);
|
||||
var replEnv = new Dictionary<string, string> { { "FOO", "override" } };
|
||||
var result = _executor.BuildEnvironment(context.Object, replEnv);
|
||||
|
||||
Assert.Equal("override", result["FOO"]);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void BuildEnvironment_NullReplEnv_ReturnsContextEnvOnly()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
var exprValues = new DictionaryContextData();
|
||||
var envData = new DictionaryContextData
|
||||
{
|
||||
["FOO"] = new StringContextData("bar"),
|
||||
};
|
||||
exprValues["env"] = envData;
|
||||
|
||||
var context = CreateMockContext(exprValues);
|
||||
var result = _executor.BuildEnvironment(context.Object, null);
|
||||
|
||||
Assert.Equal("bar", result["FOO"]);
|
||||
Assert.False(result.ContainsKey("BAZ"));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
314
src/Test/L0/Worker/DapReplParserL0.cs
Normal file
314
src/Test/L0/Worker/DapReplParserL0.cs
Normal file
@@ -0,0 +1,314 @@
|
||||
using System.Collections.Generic;
|
||||
using System.Runtime.CompilerServices;
|
||||
using GitHub.Runner.Common.Tests;
|
||||
using GitHub.Runner.Worker.Dap;
|
||||
using Xunit;
|
||||
|
||||
namespace GitHub.Runner.Common.Tests.Worker
|
||||
{
|
||||
public sealed class DapReplParserL0
|
||||
{
|
||||
#region help command
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void Parse_HelpReturnsHelpCommand()
|
||||
{
|
||||
var cmd = DapReplParser.TryParse("help", out var error);
|
||||
|
||||
Assert.Null(error);
|
||||
var help = Assert.IsType<HelpCommand>(cmd);
|
||||
Assert.Null(help.Topic);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void Parse_HelpCaseInsensitive()
|
||||
{
|
||||
var cmd = DapReplParser.TryParse("Help", out var error);
|
||||
Assert.Null(error);
|
||||
Assert.IsType<HelpCommand>(cmd);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void Parse_HelpWithTopic()
|
||||
{
|
||||
var cmd = DapReplParser.TryParse("help(\"run\")", out var error);
|
||||
|
||||
Assert.Null(error);
|
||||
var help = Assert.IsType<HelpCommand>(cmd);
|
||||
Assert.Equal("run", help.Topic);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region run command — basic
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void Parse_RunSimpleScript()
|
||||
{
|
||||
var cmd = DapReplParser.TryParse("run(\"echo hello\")", out var error);
|
||||
|
||||
Assert.Null(error);
|
||||
var run = Assert.IsType<RunCommand>(cmd);
|
||||
Assert.Equal("echo hello", run.Script);
|
||||
Assert.Null(run.Shell);
|
||||
Assert.Null(run.Env);
|
||||
Assert.Null(run.WorkingDirectory);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void Parse_RunWithShell()
|
||||
{
|
||||
var cmd = DapReplParser.TryParse("run(\"echo hello\", shell: \"bash\")", out var error);
|
||||
|
||||
Assert.Null(error);
|
||||
var run = Assert.IsType<RunCommand>(cmd);
|
||||
Assert.Equal("echo hello", run.Script);
|
||||
Assert.Equal("bash", run.Shell);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void Parse_RunWithWorkingDirectory()
|
||||
{
|
||||
var cmd = DapReplParser.TryParse("run(\"ls\", working_directory: \"/tmp\")", out var error);
|
||||
|
||||
Assert.Null(error);
|
||||
var run = Assert.IsType<RunCommand>(cmd);
|
||||
Assert.Equal("ls", run.Script);
|
||||
Assert.Equal("/tmp", run.WorkingDirectory);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void Parse_RunWithEnv()
|
||||
{
|
||||
var cmd = DapReplParser.TryParse("run(\"echo $FOO\", env: { FOO: \"bar\" })", out var error);
|
||||
|
||||
Assert.Null(error);
|
||||
var run = Assert.IsType<RunCommand>(cmd);
|
||||
Assert.Equal("echo $FOO", run.Script);
|
||||
Assert.NotNull(run.Env);
|
||||
Assert.Equal("bar", run.Env["FOO"]);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void Parse_RunWithMultipleEnvVars()
|
||||
{
|
||||
var cmd = DapReplParser.TryParse("run(\"echo\", env: { A: \"1\", B: \"2\" })", out var error);
|
||||
|
||||
Assert.Null(error);
|
||||
var run = Assert.IsType<RunCommand>(cmd);
|
||||
Assert.Equal(2, run.Env.Count);
|
||||
Assert.Equal("1", run.Env["A"]);
|
||||
Assert.Equal("2", run.Env["B"]);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void Parse_RunWithAllOptions()
|
||||
{
|
||||
var input = "run(\"echo $X\", shell: \"zsh\", env: { X: \"1\" }, working_directory: \"/tmp\")";
|
||||
var cmd = DapReplParser.TryParse(input, out var error);
|
||||
|
||||
Assert.Null(error);
|
||||
var run = Assert.IsType<RunCommand>(cmd);
|
||||
Assert.Equal("echo $X", run.Script);
|
||||
Assert.Equal("zsh", run.Shell);
|
||||
Assert.Equal("1", run.Env["X"]);
|
||||
Assert.Equal("/tmp", run.WorkingDirectory);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region run command — edge cases
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void Parse_RunWithEscapedQuotes()
|
||||
{
|
||||
var cmd = DapReplParser.TryParse("run(\"echo \\\"hello\\\"\")", out var error);
|
||||
|
||||
Assert.Null(error);
|
||||
var run = Assert.IsType<RunCommand>(cmd);
|
||||
Assert.Equal("echo \"hello\"", run.Script);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void Parse_RunWithCommaInEnvValue()
|
||||
{
|
||||
var cmd = DapReplParser.TryParse("run(\"echo\", env: { CSV: \"a,b,c\" })", out var error);
|
||||
|
||||
Assert.Null(error);
|
||||
var run = Assert.IsType<RunCommand>(cmd);
|
||||
Assert.Equal("a,b,c", run.Env["CSV"]);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region error cases
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void Parse_RunEmptyArgsReturnsError()
|
||||
{
|
||||
var cmd = DapReplParser.TryParse("run()", out var error);
|
||||
|
||||
Assert.NotNull(error);
|
||||
Assert.Null(cmd);
|
||||
Assert.Contains("requires a script argument", error);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void Parse_RunUnquotedArgReturnsError()
|
||||
{
|
||||
var cmd = DapReplParser.TryParse("run(echo hello)", out var error);
|
||||
|
||||
Assert.NotNull(error);
|
||||
Assert.Null(cmd);
|
||||
Assert.Contains("quoted string", error);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void Parse_RunUnknownOptionReturnsError()
|
||||
{
|
||||
var cmd = DapReplParser.TryParse("run(\"echo\", timeout: \"10\")", out var error);
|
||||
|
||||
Assert.NotNull(error);
|
||||
Assert.Null(cmd);
|
||||
Assert.Contains("Unknown option", error);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void Parse_RunMissingClosingParenReturnsError()
|
||||
{
|
||||
var cmd = DapReplParser.TryParse("run(\"echo\"", out var error);
|
||||
|
||||
Assert.NotNull(error);
|
||||
Assert.Null(cmd);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region non-DSL input falls through
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void Parse_ExpressionReturnsNull()
|
||||
{
|
||||
var cmd = DapReplParser.TryParse("github.repository", out var error);
|
||||
|
||||
Assert.Null(error);
|
||||
Assert.Null(cmd);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void Parse_WrappedExpressionReturnsNull()
|
||||
{
|
||||
var cmd = DapReplParser.TryParse("${{ github.event_name }}", out var error);
|
||||
|
||||
Assert.Null(error);
|
||||
Assert.Null(cmd);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void Parse_EmptyInputReturnsNull()
|
||||
{
|
||||
var cmd = DapReplParser.TryParse("", out var error);
|
||||
Assert.Null(error);
|
||||
Assert.Null(cmd);
|
||||
|
||||
cmd = DapReplParser.TryParse(null, out error);
|
||||
Assert.Null(error);
|
||||
Assert.Null(cmd);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region help text
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void GetGeneralHelp_ContainsCommands()
|
||||
{
|
||||
var help = DapReplParser.GetGeneralHelp();
|
||||
|
||||
Assert.Contains("help", help);
|
||||
Assert.Contains("run", help);
|
||||
Assert.Contains("expression", help, System.StringComparison.OrdinalIgnoreCase);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void GetRunHelp_ContainsOptions()
|
||||
{
|
||||
var help = DapReplParser.GetRunHelp();
|
||||
|
||||
Assert.Contains("shell", help);
|
||||
Assert.Contains("env", help);
|
||||
Assert.Contains("working_directory", help);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region internal parser helpers
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void SplitArguments_HandlesNestedBraces()
|
||||
{
|
||||
var args = DapReplParser.SplitArguments("\"hello\", env: { A: \"1\", B: \"2\" }", out var error);
|
||||
|
||||
Assert.Null(error);
|
||||
Assert.Equal(2, args.Count);
|
||||
Assert.Equal("\"hello\"", args[0].Trim());
|
||||
Assert.Contains("A:", args[1]);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void ParseEnvBlock_HandlesEmptyBlock()
|
||||
{
|
||||
var result = DapReplParser.ParseEnvBlock("{ }", out var error);
|
||||
|
||||
Assert.Null(error);
|
||||
Assert.NotNull(result);
|
||||
Assert.Empty(result);
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
}
|
||||
728
src/Test/L0/Worker/DapVariableProviderL0.cs
Normal file
728
src/Test/L0/Worker/DapVariableProviderL0.cs
Normal file
@@ -0,0 +1,728 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Runtime.CompilerServices;
|
||||
using GitHub.DistributedTask.Pipelines.ContextData;
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Common;
|
||||
using GitHub.Runner.Common.Tests;
|
||||
using GitHub.Runner.Worker;
|
||||
using GitHub.Runner.Worker.Dap;
|
||||
using Xunit;
|
||||
|
||||
namespace GitHub.Runner.Common.Tests.Worker
|
||||
{
|
||||
public sealed class DapVariableProviderL0
|
||||
{
|
||||
private TestHostContext _hc;
|
||||
private DapVariableProvider _provider;
|
||||
|
||||
private TestHostContext CreateTestContext([CallerMemberName] string testName = "")
|
||||
{
|
||||
_hc = new TestHostContext(this, testName);
|
||||
_provider = new DapVariableProvider(_hc.SecretMasker);
|
||||
return _hc;
|
||||
}
|
||||
|
||||
private Moq.Mock<GitHub.Runner.Worker.IExecutionContext> CreateMockContext(DictionaryContextData expressionValues)
|
||||
{
|
||||
var mock = new Moq.Mock<GitHub.Runner.Worker.IExecutionContext>();
|
||||
mock.Setup(x => x.ExpressionValues).Returns(expressionValues);
|
||||
return mock;
|
||||
}
|
||||
|
||||
#region GetScopes tests
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void GetScopes_ReturnsEmptyWhenContextIsNull()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
var scopes = _provider.GetScopes(null);
|
||||
Assert.Empty(scopes);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void GetScopes_ReturnsOnlyPopulatedScopes()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
var exprValues = new DictionaryContextData();
|
||||
exprValues["github"] = new DictionaryContextData
|
||||
{
|
||||
{ "repository", new StringContextData("owner/repo") }
|
||||
};
|
||||
exprValues["env"] = new DictionaryContextData
|
||||
{
|
||||
{ "CI", new StringContextData("true") },
|
||||
{ "HOME", new StringContextData("/home/runner") }
|
||||
};
|
||||
// "runner" is not set — should not appear in scopes
|
||||
|
||||
var ctx = CreateMockContext(exprValues);
|
||||
var scopes = _provider.GetScopes(ctx.Object);
|
||||
|
||||
Assert.Equal(2, scopes.Count);
|
||||
Assert.Equal("github", scopes[0].Name);
|
||||
Assert.Equal("env", scopes[1].Name);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void GetScopes_ReportsNamedVariableCount()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
var exprValues = new DictionaryContextData();
|
||||
exprValues["env"] = new DictionaryContextData
|
||||
{
|
||||
{ "A", new StringContextData("1") },
|
||||
{ "B", new StringContextData("2") },
|
||||
{ "C", new StringContextData("3") }
|
||||
};
|
||||
|
||||
var ctx = CreateMockContext(exprValues);
|
||||
var scopes = _provider.GetScopes(ctx.Object);
|
||||
|
||||
Assert.Single(scopes);
|
||||
Assert.Equal(3, scopes[0].NamedVariables);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void GetScopes_SecretsGetSpecialPresentationHint()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
var exprValues = new DictionaryContextData();
|
||||
exprValues["secrets"] = new DictionaryContextData
|
||||
{
|
||||
{ "MY_SECRET", new StringContextData("super-secret") }
|
||||
};
|
||||
exprValues["env"] = new DictionaryContextData
|
||||
{
|
||||
{ "CI", new StringContextData("true") }
|
||||
};
|
||||
|
||||
var ctx = CreateMockContext(exprValues);
|
||||
var scopes = _provider.GetScopes(ctx.Object);
|
||||
|
||||
var envScope = scopes.Find(s => s.Name == "env");
|
||||
var secretsScope = scopes.Find(s => s.Name == "secrets");
|
||||
|
||||
Assert.NotNull(envScope);
|
||||
Assert.Null(envScope.PresentationHint);
|
||||
|
||||
Assert.NotNull(secretsScope);
|
||||
Assert.Equal("registers", secretsScope.PresentationHint);
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region GetVariables — basic types
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void GetVariables_ReturnsEmptyWhenContextIsNull()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
var variables = _provider.GetVariables(null, 1);
|
||||
Assert.Empty(variables);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void GetVariables_ReturnsStringVariables()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
var exprValues = new DictionaryContextData();
|
||||
exprValues["env"] = new DictionaryContextData
|
||||
{
|
||||
{ "CI", new StringContextData("true") },
|
||||
{ "HOME", new StringContextData("/home/runner") }
|
||||
};
|
||||
|
||||
var ctx = CreateMockContext(exprValues);
|
||||
// "env" is at ScopeNames index 1 → variablesReference = 2
|
||||
var variables = _provider.GetVariables(ctx.Object, 2);
|
||||
|
||||
Assert.Equal(2, variables.Count);
|
||||
|
||||
var ciVar = variables.Find(v => v.Name == "CI");
|
||||
Assert.NotNull(ciVar);
|
||||
Assert.Equal("true", ciVar.Value);
|
||||
Assert.Equal("string", ciVar.Type);
|
||||
Assert.Equal(0, ciVar.VariablesReference);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void GetVariables_ReturnsBooleanVariables()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
var exprValues = new DictionaryContextData();
|
||||
exprValues["github"] = new DictionaryContextData
|
||||
{
|
||||
{ "event_name", new StringContextData("push") },
|
||||
};
|
||||
// Use a nested dict with boolean to test
|
||||
var jobDict = new DictionaryContextData();
|
||||
// BooleanContextData is a valid PipelineContextData type
|
||||
// but job context typically has strings. Use env scope instead.
|
||||
exprValues["env"] = new DictionaryContextData
|
||||
{
|
||||
{ "flag", new BooleanContextData(true) }
|
||||
};
|
||||
|
||||
var ctx = CreateMockContext(exprValues);
|
||||
// "env" is at index 1 → ref 2
|
||||
var variables = _provider.GetVariables(ctx.Object, 2);
|
||||
|
||||
var flagVar = variables.Find(v => v.Name == "flag");
|
||||
Assert.NotNull(flagVar);
|
||||
Assert.Equal("true", flagVar.Value);
|
||||
Assert.Equal("boolean", flagVar.Type);
|
||||
Assert.Equal(0, flagVar.VariablesReference);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void GetVariables_ReturnsNumberVariables()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
var exprValues = new DictionaryContextData();
|
||||
exprValues["env"] = new DictionaryContextData
|
||||
{
|
||||
{ "count", new NumberContextData(42) }
|
||||
};
|
||||
|
||||
var ctx = CreateMockContext(exprValues);
|
||||
var variables = _provider.GetVariables(ctx.Object, 2);
|
||||
|
||||
var countVar = variables.Find(v => v.Name == "count");
|
||||
Assert.NotNull(countVar);
|
||||
Assert.Equal("42", countVar.Value);
|
||||
Assert.Equal("number", countVar.Type);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void GetVariables_HandlesNullValues()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
var exprValues = new DictionaryContextData();
|
||||
var dict = new DictionaryContextData();
|
||||
dict["present"] = new StringContextData("yes");
|
||||
dict["missing"] = null;
|
||||
exprValues["env"] = dict;
|
||||
|
||||
var ctx = CreateMockContext(exprValues);
|
||||
var variables = _provider.GetVariables(ctx.Object, 2);
|
||||
|
||||
var nullVar = variables.Find(v => v.Name == "missing");
|
||||
Assert.NotNull(nullVar);
|
||||
Assert.Equal("null", nullVar.Value);
|
||||
Assert.Equal("null", nullVar.Type);
|
||||
Assert.Equal(0, nullVar.VariablesReference);
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region GetVariables — nested expansion
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void GetVariables_NestedDictionaryIsExpandable()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
var innerDict = new DictionaryContextData
|
||||
{
|
||||
{ "name", new StringContextData("push") },
|
||||
{ "ref", new StringContextData("refs/heads/main") }
|
||||
};
|
||||
var exprValues = new DictionaryContextData();
|
||||
exprValues["github"] = new DictionaryContextData
|
||||
{
|
||||
{ "event", innerDict }
|
||||
};
|
||||
|
||||
var ctx = CreateMockContext(exprValues);
|
||||
// "github" is at index 0 → ref 1
|
||||
var variables = _provider.GetVariables(ctx.Object, 1);
|
||||
|
||||
var eventVar = variables.Find(v => v.Name == "event");
|
||||
Assert.NotNull(eventVar);
|
||||
Assert.Equal("object", eventVar.Type);
|
||||
Assert.True(eventVar.VariablesReference > 0, "Nested dict should have a non-zero variablesReference");
|
||||
Assert.Equal(2, eventVar.NamedVariables);
|
||||
|
||||
// Now expand it
|
||||
var children = _provider.GetVariables(ctx.Object, eventVar.VariablesReference);
|
||||
Assert.Equal(2, children.Count);
|
||||
|
||||
var nameVar = children.Find(v => v.Name == "name");
|
||||
Assert.NotNull(nameVar);
|
||||
Assert.Equal("push", nameVar.Value);
|
||||
Assert.Equal("${{ github.event.name }}", nameVar.EvaluateName);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void GetVariables_NestedArrayIsExpandable()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
var array = new ArrayContextData();
|
||||
array.Add(new StringContextData("item0"));
|
||||
array.Add(new StringContextData("item1"));
|
||||
|
||||
var exprValues = new DictionaryContextData();
|
||||
exprValues["env"] = new DictionaryContextData
|
||||
{
|
||||
{ "list", array }
|
||||
};
|
||||
|
||||
var ctx = CreateMockContext(exprValues);
|
||||
var variables = _provider.GetVariables(ctx.Object, 2);
|
||||
|
||||
var listVar = variables.Find(v => v.Name == "list");
|
||||
Assert.NotNull(listVar);
|
||||
Assert.Equal("array", listVar.Type);
|
||||
Assert.True(listVar.VariablesReference > 0);
|
||||
Assert.Equal(2, listVar.IndexedVariables);
|
||||
|
||||
// Expand the array
|
||||
var items = _provider.GetVariables(ctx.Object, listVar.VariablesReference);
|
||||
Assert.Equal(2, items.Count);
|
||||
Assert.Equal("[0]", items[0].Name);
|
||||
Assert.Equal("item0", items[0].Value);
|
||||
Assert.Equal("[1]", items[1].Name);
|
||||
Assert.Equal("item1", items[1].Value);
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Secret masking
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void GetVariables_SecretsScopeValuesAreRedacted()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
var exprValues = new DictionaryContextData();
|
||||
exprValues["secrets"] = new DictionaryContextData
|
||||
{
|
||||
{ "MY_TOKEN", new StringContextData("ghp_abc123secret") },
|
||||
{ "DB_PASSWORD", new StringContextData("p@ssword!") }
|
||||
};
|
||||
|
||||
var ctx = CreateMockContext(exprValues);
|
||||
// "secrets" is at index 5 → ref 6
|
||||
var variables = _provider.GetVariables(ctx.Object, 6);
|
||||
|
||||
Assert.Equal(2, variables.Count);
|
||||
foreach (var v in variables)
|
||||
{
|
||||
Assert.Equal("***", v.Value);
|
||||
Assert.Equal("string", v.Type);
|
||||
}
|
||||
|
||||
// Keys should still be visible
|
||||
Assert.Contains(variables, v => v.Name == "MY_TOKEN");
|
||||
Assert.Contains(variables, v => v.Name == "DB_PASSWORD");
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void GetVariables_NonSecretScopeValuesMaskedBySecretMasker()
|
||||
{
|
||||
using (var hc = CreateTestContext())
|
||||
{
|
||||
// Register a known secret value with the masker
|
||||
hc.SecretMasker.AddValue("super-secret-token");
|
||||
|
||||
var exprValues = new DictionaryContextData();
|
||||
exprValues["env"] = new DictionaryContextData
|
||||
{
|
||||
{ "SAFE", new StringContextData("hello world") },
|
||||
{ "LEAKED", new StringContextData("prefix-super-secret-token-suffix") }
|
||||
};
|
||||
|
||||
var ctx = CreateMockContext(exprValues);
|
||||
var variables = _provider.GetVariables(ctx.Object, 2);
|
||||
|
||||
var safeVar = variables.Find(v => v.Name == "SAFE");
|
||||
Assert.NotNull(safeVar);
|
||||
Assert.Equal("hello world", safeVar.Value);
|
||||
|
||||
var leakedVar = variables.Find(v => v.Name == "LEAKED");
|
||||
Assert.NotNull(leakedVar);
|
||||
Assert.DoesNotContain("super-secret-token", leakedVar.Value);
|
||||
Assert.Contains("***", leakedVar.Value);
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Reset
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void Reset_InvalidatesNestedReferences()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
var innerDict = new DictionaryContextData
|
||||
{
|
||||
{ "name", new StringContextData("push") }
|
||||
};
|
||||
var exprValues = new DictionaryContextData();
|
||||
exprValues["github"] = new DictionaryContextData
|
||||
{
|
||||
{ "event", innerDict }
|
||||
};
|
||||
|
||||
var ctx = CreateMockContext(exprValues);
|
||||
var variables = _provider.GetVariables(ctx.Object, 1);
|
||||
var eventVar = variables.Find(v => v.Name == "event");
|
||||
Assert.True(eventVar.VariablesReference > 0);
|
||||
|
||||
var savedRef = eventVar.VariablesReference;
|
||||
|
||||
// Reset should clear all dynamic references
|
||||
_provider.Reset();
|
||||
|
||||
var children = _provider.GetVariables(ctx.Object, savedRef);
|
||||
Assert.Empty(children);
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region EvaluateName
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void GetVariables_SetsEvaluateNameWithDotPath()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
var exprValues = new DictionaryContextData();
|
||||
exprValues["github"] = new DictionaryContextData
|
||||
{
|
||||
{ "repository", new StringContextData("owner/repo") }
|
||||
};
|
||||
|
||||
var ctx = CreateMockContext(exprValues);
|
||||
var variables = _provider.GetVariables(ctx.Object, 1);
|
||||
|
||||
var repoVar = variables.Find(v => v.Name == "repository");
|
||||
Assert.NotNull(repoVar);
|
||||
Assert.Equal("${{ github.repository }}", repoVar.EvaluateName);
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region EvaluateExpression
|
||||
|
||||
/// <summary>
|
||||
/// Creates a mock execution context with Global set up so that
|
||||
/// ToPipelineTemplateEvaluator() works for real expression evaluation.
|
||||
/// </summary>
|
||||
private Moq.Mock<IExecutionContext> CreateEvaluatableContext(
|
||||
TestHostContext hc,
|
||||
DictionaryContextData expressionValues)
|
||||
{
|
||||
var mock = new Moq.Mock<IExecutionContext>();
|
||||
mock.Setup(x => x.ExpressionValues).Returns(expressionValues);
|
||||
mock.Setup(x => x.ExpressionFunctions)
|
||||
.Returns(new List<GitHub.DistributedTask.Expressions2.IFunctionInfo>());
|
||||
mock.Setup(x => x.Global).Returns(new GlobalContext
|
||||
{
|
||||
FileTable = new List<string>(),
|
||||
Variables = new Variables(hc, new Dictionary<string, VariableValue>()),
|
||||
});
|
||||
// ToPipelineTemplateEvaluator uses ToTemplateTraceWriter which calls
|
||||
// context.Write — provide a no-op so it doesn't NRE.
|
||||
mock.Setup(x => x.Write(Moq.It.IsAny<string>(), Moq.It.IsAny<string>()));
|
||||
return mock;
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void EvaluateExpression_ReturnsValueForSimpleExpression()
|
||||
{
|
||||
using (var hc = CreateTestContext())
|
||||
{
|
||||
var exprValues = new DictionaryContextData();
|
||||
exprValues["github"] = new DictionaryContextData
|
||||
{
|
||||
{ "repository", new StringContextData("owner/repo") }
|
||||
};
|
||||
|
||||
var ctx = CreateEvaluatableContext(hc, exprValues);
|
||||
var result = _provider.EvaluateExpression("github.repository", ctx.Object);
|
||||
|
||||
Assert.Equal("owner/repo", result.Result);
|
||||
Assert.Equal("string", result.Type);
|
||||
Assert.Equal(0, result.VariablesReference);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void EvaluateExpression_StripsWrapperSyntax()
|
||||
{
|
||||
using (var hc = CreateTestContext())
|
||||
{
|
||||
var exprValues = new DictionaryContextData();
|
||||
exprValues["github"] = new DictionaryContextData
|
||||
{
|
||||
{ "event_name", new StringContextData("push") }
|
||||
};
|
||||
|
||||
var ctx = CreateEvaluatableContext(hc, exprValues);
|
||||
var result = _provider.EvaluateExpression("${{ github.event_name }}", ctx.Object);
|
||||
|
||||
Assert.Equal("push", result.Result);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void EvaluateExpression_MasksSecretInResult()
|
||||
{
|
||||
using (var hc = CreateTestContext())
|
||||
{
|
||||
hc.SecretMasker.AddValue("super-secret");
|
||||
|
||||
var exprValues = new DictionaryContextData();
|
||||
exprValues["env"] = new DictionaryContextData
|
||||
{
|
||||
{ "TOKEN", new StringContextData("super-secret") }
|
||||
};
|
||||
|
||||
var ctx = CreateEvaluatableContext(hc, exprValues);
|
||||
var result = _provider.EvaluateExpression("env.TOKEN", ctx.Object);
|
||||
|
||||
Assert.DoesNotContain("super-secret", result.Result);
|
||||
Assert.Contains("***", result.Result);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void EvaluateExpression_ReturnsErrorForInvalidExpression()
|
||||
{
|
||||
using (var hc = CreateTestContext())
|
||||
{
|
||||
var exprValues = new DictionaryContextData();
|
||||
exprValues["github"] = new DictionaryContextData();
|
||||
|
||||
var ctx = CreateEvaluatableContext(hc, exprValues);
|
||||
// An invalid expression syntax should not throw — it should
|
||||
// return an error result.
|
||||
var result = _provider.EvaluateExpression("!!!invalid[[", ctx.Object);
|
||||
|
||||
Assert.Contains("error", result.Result, StringComparison.OrdinalIgnoreCase);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void EvaluateExpression_ReturnsMessageWhenNoContext()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
var result = _provider.EvaluateExpression("github.repository", null);
|
||||
|
||||
Assert.Contains("no execution context", result.Result, StringComparison.OrdinalIgnoreCase);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void EvaluateExpression_ReturnsEmptyForEmptyExpression()
|
||||
{
|
||||
using (var hc = CreateTestContext())
|
||||
{
|
||||
var exprValues = new DictionaryContextData();
|
||||
var ctx = CreateEvaluatableContext(hc, exprValues);
|
||||
var result = _provider.EvaluateExpression("", ctx.Object);
|
||||
|
||||
Assert.Equal(string.Empty, result.Result);
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region InferResultType
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void InferResultType_ClassifiesCorrectly()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
Assert.Equal("null", DapVariableProvider.InferResultType(null));
|
||||
Assert.Equal("null", DapVariableProvider.InferResultType("null"));
|
||||
Assert.Equal("boolean", DapVariableProvider.InferResultType("true"));
|
||||
Assert.Equal("boolean", DapVariableProvider.InferResultType("false"));
|
||||
Assert.Equal("number", DapVariableProvider.InferResultType("42"));
|
||||
Assert.Equal("number", DapVariableProvider.InferResultType("3.14"));
|
||||
Assert.Equal("object", DapVariableProvider.InferResultType("{\"key\":\"val\"}"));
|
||||
Assert.Equal("object", DapVariableProvider.InferResultType("[1,2,3]"));
|
||||
Assert.Equal("string", DapVariableProvider.InferResultType("hello world"));
|
||||
Assert.Equal("string", DapVariableProvider.InferResultType("owner/repo"));
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Non-string secret type redaction
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void GetVariables_SecretsScopeRedactsNumberContextData()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
var exprValues = new DictionaryContextData();
|
||||
exprValues["secrets"] = new DictionaryContextData
|
||||
{
|
||||
{ "NUMERIC_SECRET", new NumberContextData(12345) }
|
||||
};
|
||||
|
||||
var ctx = CreateMockContext(exprValues);
|
||||
var variables = _provider.GetVariables(ctx.Object, 6);
|
||||
|
||||
Assert.Single(variables);
|
||||
Assert.Equal("NUMERIC_SECRET", variables[0].Name);
|
||||
Assert.Equal("***", variables[0].Value);
|
||||
Assert.Equal("string", variables[0].Type);
|
||||
Assert.Equal(0, variables[0].VariablesReference);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void GetVariables_SecretsScopeRedactsBooleanContextData()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
var exprValues = new DictionaryContextData();
|
||||
exprValues["secrets"] = new DictionaryContextData
|
||||
{
|
||||
{ "BOOL_SECRET", new BooleanContextData(true) }
|
||||
};
|
||||
|
||||
var ctx = CreateMockContext(exprValues);
|
||||
var variables = _provider.GetVariables(ctx.Object, 6);
|
||||
|
||||
Assert.Single(variables);
|
||||
Assert.Equal("BOOL_SECRET", variables[0].Name);
|
||||
Assert.Equal("***", variables[0].Value);
|
||||
Assert.Equal("string", variables[0].Type);
|
||||
Assert.Equal(0, variables[0].VariablesReference);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void GetVariables_SecretsScopeRedactsNestedDictionary()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
var exprValues = new DictionaryContextData();
|
||||
exprValues["secrets"] = new DictionaryContextData
|
||||
{
|
||||
{ "NESTED_SECRET", new DictionaryContextData
|
||||
{
|
||||
{ "inner_key", new StringContextData("inner_value") }
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
var ctx = CreateMockContext(exprValues);
|
||||
var variables = _provider.GetVariables(ctx.Object, 6);
|
||||
|
||||
Assert.Single(variables);
|
||||
Assert.Equal("NESTED_SECRET", variables[0].Name);
|
||||
Assert.Equal("***", variables[0].Value);
|
||||
Assert.Equal("string", variables[0].Type);
|
||||
// Nested container should NOT be drillable under secrets
|
||||
Assert.Equal(0, variables[0].VariablesReference);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
[Trait("Level", "L0")]
|
||||
[Trait("Category", "Worker")]
|
||||
public void GetVariables_SecretsScopeRedactsNullValue()
|
||||
{
|
||||
using (CreateTestContext())
|
||||
{
|
||||
var exprValues = new DictionaryContextData();
|
||||
var secrets = new DictionaryContextData();
|
||||
secrets["NULL_SECRET"] = null;
|
||||
exprValues["secrets"] = secrets;
|
||||
|
||||
var ctx = CreateMockContext(exprValues);
|
||||
var variables = _provider.GetVariables(ctx.Object, 6);
|
||||
|
||||
Assert.Single(variables);
|
||||
Assert.Equal("NULL_SECRET", variables[0].Name);
|
||||
Assert.Equal("***", variables[0].Value);
|
||||
Assert.Equal(0, variables[0].VariablesReference);
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
}
|
||||
@@ -8,6 +8,7 @@ using GitHub.DistributedTask.ObjectTemplating.Tokens;
|
||||
using GitHub.DistributedTask.Pipelines.ObjectTemplating;
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Worker;
|
||||
using GitHub.Runner.Worker.Dap;
|
||||
using Moq;
|
||||
using Xunit;
|
||||
using Pipelines = GitHub.DistributedTask.Pipelines;
|
||||
@@ -547,6 +548,10 @@ namespace GitHub.Runner.Common.Tests.Worker
|
||||
|
||||
var _stepsRunner = new StepsRunner();
|
||||
_stepsRunner.Initialize(hc);
|
||||
|
||||
var mockDapDebugger = new Mock<IDapDebugger>();
|
||||
hc.SetSingleton(mockDapDebugger.Object);
|
||||
|
||||
await _stepsRunner.RunAsync(_jobEc);
|
||||
|
||||
Assert.Equal("Create custom image", snapshotStep.DisplayName);
|
||||
|
||||
@@ -12,6 +12,7 @@ using GitHub.DistributedTask.ObjectTemplating.Tokens;
|
||||
using GitHub.DistributedTask.WebApi;
|
||||
using GitHub.Runner.Common.Util;
|
||||
using GitHub.Runner.Worker;
|
||||
using GitHub.Runner.Worker.Dap;
|
||||
|
||||
namespace GitHub.Runner.Common.Tests.Worker
|
||||
{
|
||||
@@ -61,6 +62,10 @@ namespace GitHub.Runner.Common.Tests.Worker
|
||||
|
||||
_stepsRunner = new StepsRunner();
|
||||
_stepsRunner.Initialize(hc);
|
||||
|
||||
var mockDapDebugger = new Mock<IDapDebugger>();
|
||||
hc.SetSingleton(mockDapDebugger.Object);
|
||||
|
||||
return hc;
|
||||
}
|
||||
|
||||
|
||||
Reference in New Issue
Block a user