Mirror of https://github.com/actions/runner.git (synced 2025-12-11 21:06:55 +00:00)

Compare commits: v2.308.0...feature/va (38 commits)
| SHA1 |
|---|
| ab7aa2e431 |
| 3a8cb43022 |
| 80a17a2f0c |
| 16834edc67 |
| 2908d82845 |
| 3f5b813499 |
| 7b703d667d |
| d2f0a46865 |
| 143639ddac |
| 474d0fb354 |
| d0300c34f2 |
| 15c0fe6c1d |
| 2b66cbe699 |
| 0e9e9f1e8d |
| ee0ba3616c |
| 1d1aaed09a |
| 7c4b0f6e88 |
| 7d3cbb0494 |
| be65955a9d |
| e419ae3c7e |
| bb40cd2788 |
| e0acb14bfc |
| 1ff8ad7860 |
| 8dd2cec3af |
| 7b53c38294 |
| e22452c2d6 |
| 9bbfed0740 |
| cf5afc63da |
| a00db53b0d |
| 73ef82ff85 |
| 7892066256 |
| 8b9a81c952 |
| 460d9ae5a8 |
| e94e744bed |
| 94080812f7 |
| 1183100ab8 |
| 4f40f29cff |
| d88823c634 |
.devcontainer/devcontainer.json
@@ -1,11 +1,10 @@
-// For format details, see https://aka.ms/devcontainer.json. For config options, see the README at:
 {
     "name": "Actions Runner Devcontainer",
     "image": "mcr.microsoft.com/devcontainers/base:focal",
     "features": {
         "ghcr.io/devcontainers/features/docker-in-docker:1": {},
         "ghcr.io/devcontainers/features/dotnet": {
-            "version": "6.0.412"
+            "version": "6.0.414"
         },
         "ghcr.io/devcontainers/features/node:1": {
             "version": "16"
@@ -20,8 +19,6 @@
         ]
     }
   },
-  // dotnet restore to install dependencies so OmniSharp works out of the box
-  // src/Test restores all other projects it references, src/Runner.PluginHost is not one of them
   "postCreateCommand": "dotnet restore src/Test && dotnet restore src/Runner.PluginHost",
   "remoteUser": "vscode"
 }
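The devcontainer pins the same SDK patch that the new dotnet-upgrade workflow writes into src/global.json. A minimal sketch for checking the two stay in sync locally, assuming jq is installed and the file stays free of JSONC comments so jq can parse it (the comment removal above likely serves exactly that purpose):

```
#!/usr/bin/env bash
set -euo pipefail

# SDK version pinned for the devcontainer feature
devcontainer_version=$(jq -r '.features."ghcr.io/devcontainers/features/dotnet".version' .devcontainer/devcontainer.json)

# SDK version pinned for the build itself
global_json_version=$(jq -r '.sdk.version' src/global.json)

if [ "$devcontainer_version" != "$global_json_version" ]; then
  echo "Mismatch: devcontainer=$devcontainer_version global.json=$global_json_version" >&2
  exit 1
fi
echo "Both pin dotnet SDK $devcontainer_version"
```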
.github/workflows/build.yml (vendored): 143 changed lines

@@ -17,6 +17,7 @@ on:
 jobs:
   build:
     strategy:
+      fail-fast: false
       matrix:
         runtime: [ linux-x64, linux-arm64, linux-arm, win-x64, win-arm64, osx-x64, osx-arm64 ]
         include:
@@ -60,6 +61,8 @@ jobs:

       # Check runtime/externals hash
       - name: Compute/Compare runtime and externals Hash
+        id: compute-hash
+        continue-on-error: true
        shell: bash
        run: |
          echo "Current dotnet runtime hash result: $DOTNET_RUNTIME_HASH"
@@ -68,25 +71,74 @@ jobs:
          NeedUpdate=0
          if [ "$EXTERNALS_HASH" != "$(cat ./src/Misc/contentHash/externals/${{ matrix.runtime }})" ] ;then
            echo Hash mismatch, Update ./src/Misc/contentHash/externals/${{ matrix.runtime }} to $EXTERNALS_HASH

+           echo "EXTERNAL_HASH=$EXTERNALS_HASH" >> $GITHUB_OUTPUT
            NeedUpdate=1
          fi

          if [ "$DOTNET_RUNTIME_HASH" != "$(cat ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }})" ] ;then
            echo Hash mismatch, Update ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }} to $DOTNET_RUNTIME_HASH

+           echo "DOTNET_RUNTIME_HASH=$DOTNET_RUNTIME_HASH" >> $GITHUB_OUTPUT
            NeedUpdate=1
          fi

-          exit $NeedUpdate
+          echo "NEED_UPDATE=$NeedUpdate" >> $GITHUB_OUTPUT
        env:
          DOTNET_RUNTIME_HASH: ${{hashFiles('**/_layout_trims/runtime/**/*')}}
          EXTERNALS_HASH: ${{hashFiles('**/_layout_trims/externals/**/*')}}
+      - name: update hash
+        if: ${{ ! github.event.pull_request.head.repo.fork && github.event_name == 'pull_request' && steps.compute-hash.outputs.NEED_UPDATE == 1 }}
+        shell: bash
+        run: |
+          ExternalHash=${{ steps.compute-hash.outputs.EXTERNAL_HASH }}
+          DotNetRuntimeHash=${{ steps.compute-hash.outputs.DOTNET_RUNTIME_HASH }}
+
+          if [ -n "$ExternalHash" ]; then
+            echo "$ExternalHash" > ./src/Misc/contentHash/externals/${{ matrix.runtime }}
+          fi
+
+          if [ -n "$DotNetRuntimeHash" ]; then
+            echo "$DotNetRuntimeHash" > ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }}
+          fi
+      - name: cache updated hashes
+        if: ${{ ! github.event.pull_request.head.repo.fork && github.event_name == 'pull_request' && steps.compute-hash.outputs.NEED_UPDATE == 1 }}
+        uses: actions/cache/save@v3
+        with:
+          enableCrossOsArchive: true
+          path: |
+            ./src/Misc/contentHash/externals/${{ matrix.runtime }}
+            ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }}
+          key: compute-hashes-${{ matrix.runtime }}-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
+      - name: Create an warning annotation if computed hashes will automatically be updated
+        if: ${{ ! github.event.pull_request.head.repo.fork && github.event_name == 'pull_request' && steps.compute-hash.outputs.NEED_UPDATE == 1 }}
+        shell: bash
+        run: echo "::warning ::Computed hashes do not match, we will automatically update these for you, you can safely ignore the errors on this job" && exit 1
+      - name: Create an error annotation if computed hashes need to be updated for a fork
+        if: ${{ github.event.pull_request.head.repo.fork && github.event_name == 'pull_request' && steps.compute-hash.outputs.NEED_UPDATE == 1 }}
+        shell: bash
+        run: |
+          ExternalHash=${{ steps.compute-hash.outputs.EXTERNAL_HASH }}
+          DotNetRuntimeHash=${{ steps.compute-hash.outputs.DOTNET_RUNTIME_HASH }}
+
+          if [ -n "$ExternalHash" ]; then
+            echo "::error ::Hash mismatch, Update ./src/Misc/contentHash/externals/${{ matrix.runtime }} to $ExternalHash"
+          fi
+
+          if [ -n "$DotNetRuntimeHash" ]; then
+            echo "::error ::Hash mismatch, Update ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }} to $DotNetRuntimeHash"
+          fi
+
+          if [[ -n "$ExternalHash" || -n "$DotNetRuntimeHash" ]]; then
+            exit 1
+          fi

       # Run tests
       - name: L0
         run: |
           ${{ matrix.devScript }} test
         working-directory: src
-        if: matrix.runtime != 'linux-arm64' && matrix.runtime != 'linux-arm' && matrix.runtime != 'osx-arm64' && matrix.runtime != 'win-arm64'
+        if: ${{ steps.compute-hash.outputs.NEED_UPDATE == 0 && matrix.runtime != 'linux-arm64' && matrix.runtime != 'linux-arm' && matrix.runtime != 'osx-arm64' && matrix.runtime != 'win-arm64' }}

       # Create runner package tar.gz/zip
       - name: Package Release
@@ -106,3 +158,90 @@ jobs:
             _package_trims/trim_externals
             _package_trims/trim_runtime
             _package_trims/trim_runtime_externals
+
+  hash-update:
+    needs: [build]
+    # only run this if we get a failure from the build step - most likely meaning we need a hash update
+    if: ${{ always() && contains(needs.build.result, 'failure') && github.event_name == 'pull_request' && ! github.event.pull_request.head.repo.fork }}
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          ref: ${{ github.head_ref }}
+      - name: Restore cached hashes - linux-x64
+        id: cache-restore-linux-x64
+        uses: actions/cache/restore@v3
+        with:
+          enableCrossOsArchive: true
+          path: |
+            ./src/Misc/contentHash/externals/linux-x64
+            ./src/Misc/contentHash/dotnetRuntime/linux-x64
+          key: compute-hashes-linux-x64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
+      - name: Restore cached hashes - linux-arm64
+        id: cache-restore-linux-arm64
+        uses: actions/cache/restore@v3
+        with:
+          enableCrossOsArchive: true
+          path: |
+            ./src/Misc/contentHash/externals/linux-arm64
+            ./src/Misc/contentHash/dotnetRuntime/linux-arm64
+          key: compute-hashes-linux-arm64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
+      - name: Restore cached hashes - linux-arm
+        id: cache-restore-linux-arm
+        uses: actions/cache/restore@v3
+        with:
+          enableCrossOsArchive: true
+          path: |
+            ./src/Misc/contentHash/externals/linux-arm
+            ./src/Misc/contentHash/dotnetRuntime/linux-arm
+          key: compute-hashes-linux-arm-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
+      - name: Restore cached hashes - osx-x64
+        id: cache-restore-osx-x64
+        uses: actions/cache/restore@v3
+        with:
+          enableCrossOsArchive: true
+          path: |
+            ./src/Misc/contentHash/externals/osx-x64
+            ./src/Misc/contentHash/dotnetRuntime/osx-x64
+          key: compute-hashes-osx-x64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
+      - name: Restore cached hashes - osx-arm64
+        id: cache-restore-osx-arm64
+        uses: actions/cache/restore@v3
+        with:
+          enableCrossOsArchive: true
+          path: |
+            ./src/Misc/contentHash/externals/osx-arm64
+            ./src/Misc/contentHash/dotnetRuntime/osx-arm64
+          key: compute-hashes-osx-arm64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
+      - name: Restore cached hashes - win-x64
+        id: cache-restore-win-x64
+        uses: actions/cache/restore@v3
+        with:
+          enableCrossOsArchive: true
+          path: |
+            ./src/Misc/contentHash/externals/win-x64
+            ./src/Misc/contentHash/dotnetRuntime/win-x64
+          key: compute-hashes-win-x64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
+      - name: Restore cached hashes - win-arm64
+        id: cache-restore-win-arm64
+        uses: actions/cache/restore@v3
+        with:
+          enableCrossOsArchive: true
+          path: |
+            ./src/Misc/contentHash/externals/win-arm64
+            ./src/Misc/contentHash/dotnetRuntime/win-arm64
+          key: compute-hashes-win-arm64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
+      - name: Fetch cached computed hashes
+        if: steps.cache-restore-linux-x64.outputs.cache-hit == 'true' ||
+            steps.cache-restore-linux-arm64.outputs.cache-hit == 'true' ||
+            steps.cache-restore-linux-arm.outputs.cache-hit == 'true' ||
+            steps.cache-restore-win-x64.outputs.cache-hit == 'true' ||
+            steps.cache-restore-win-arm64.outputs.cache-hit == 'true' ||
+            steps.cache-restore-osx-x64.outputs.cache-hit == 'true' ||
+            steps.cache-restore-osx-arm64.outputs.cache-hit == 'true'
+        shell: bash
+        run: |
+          git config --global user.name "github-actions[bot]"
+          git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
+          git commit -a -m "Update computed hashes"
+          git push
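The core change above is that a hash mismatch no longer fails the compute step directly; the step publishes NEED_UPDATE and the new hashes as step outputs, and later steps rewrite and cache the content-hash files. A condensed sketch of that logic for a single runtime, assuming EXTERNALS_HASH and DOTNET_RUNTIME_HASH are already provided (in the workflow they come from ${{ hashFiles(...) }}) and RUNTIME stands in for matrix.runtime:

```
#!/usr/bin/env bash
set -uo pipefail

RUNTIME="${RUNTIME:-linux-x64}"
NeedUpdate=0

for kind in externals dotnetRuntime; do
  case "$kind" in
    externals)     current="$EXTERNALS_HASH" ;;
    dotnetRuntime) current="$DOTNET_RUNTIME_HASH" ;;
  esac
  checked_in="$(cat "./src/Misc/contentHash/$kind/$RUNTIME")"
  if [ "$current" != "$checked_in" ]; then
    echo "Hash mismatch for $kind/$RUNTIME: expected $current"
    NeedUpdate=1
  fi
done

# Instead of failing here (the old `exit $NeedUpdate`), publish the result
# so follow-up steps can update the files and cache them for the hash-update job.
echo "NEED_UPDATE=$NeedUpdate" >> "${GITHUB_OUTPUT:-/dev/stdout}"
```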
.github/workflows/dotnet-upgrade.yml (vendored, new file): 306 lines

@@ -0,0 +1,306 @@
+name: "DotNet SDK Upgrade"
+
+on:
+  schedule:
+    - cron: '0 0 * * 1'
+  workflow_dispatch:
+
+jobs:
+  dotnet-update:
+    runs-on: ubuntu-latest
+    outputs:
+      SHOULD_UPDATE: ${{ steps.fetch_latest_version.outputs.SHOULD_UPDATE }}
+      BRANCH_EXISTS: ${{ steps.fetch_latest_version.outputs.BRANCH_EXISTS }}
+      DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION: ${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}
+      DOTNET_CURRENT_MAJOR_MINOR_VERSION: ${{ steps.fetch_current_version.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }}
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v3
+      - name: Get current major minor version
+        id: fetch_current_version
+        shell: bash
+        run: |
+          current_major_minor_patch_version=$(jq .sdk.version ./src/global.json | xargs)
+          current_major_minor_version=$(cut -d '.' -f 1,2 <<< "$current_major_minor_patch_version")
+
+          echo "DOTNET_CURRENT_MAJOR_MINOR_PATCH_VERSION=${current_major_minor_patch_version}" >> $GITHUB_OUTPUT
+          echo "DOTNET_CURRENT_MAJOR_MINOR_VERSION=${current_major_minor_version}" >> $GITHUB_OUTPUT
+      - name: Check patch version
+        id: fetch_latest_version
+        shell: bash
+        run: |
+          latest_patch_version=$(curl -sb -H "Accept: application/json" "https://dotnetcli.blob.core.windows.net/dotnet/Sdk/${{ steps.fetch_current_version.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }}/latest.version")
+          current_patch_version=${{ steps.fetch_current_version.outputs.DOTNET_CURRENT_MAJOR_MINOR_PATCH_VERSION }}
+
+          should_update=0
+          [ "$current_patch_version" != "$latest_patch_version" ] && should_update=1
+
+          # check if git branch already exists for the upgrade
+          branch_already_exists=0
+
+          if git ls-remote --heads --exit-code origin refs/heads/feature/dotnetsdk-upgrade/${latest_patch_version};
+          then
+            branch_already_exists=1
+            should_update=0
+          fi
+          echo "DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION=${latest_patch_version}" >> $GITHUB_OUTPUT
+          echo "SHOULD_UPDATE=${should_update}" >> $GITHUB_OUTPUT
+          echo "BRANCH_EXISTS=${branch_already_exists}" >> $GITHUB_OUTPUT
+      - name: Create an error annotation if branch exists
+        if: ${{ steps.fetch_latest_version.outputs.BRANCH_EXISTS == 1 }}
+        run: echo "::error links::feature/dotnet-sdk-upgrade${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }} https://github.com/actions/runner/tree/feature/dotnet-sdk-upgrade${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}::Branch feature/dotnetsdk-upgrade/${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }} already exists. Please take a look and delete that branch if you wish to recreate"
+      - name: Create a warning annotation if no need to update
+        if: ${{ steps.fetch_latest_version.outputs.SHOULD_UPDATE == 0 && steps.fetch_latest_version.outputs.BRANCH_EXISTS == 0 }}
+        run: echo "::warning ::Latest DotNet SDK patch is ${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}, and we are on ${{ steps.fetch_latest_version.outputs.DOTNET_CURRENT_MAJOR_MINOR_PATCH_VERSION }}. No need to update"
+      - name: Update patch version
+        if: ${{ steps.fetch_latest_version.outputs.SHOULD_UPDATE == 1 && steps.fetch_latest_version.outputs.BRANCH_EXISTS == 0 }}
+        shell: bash
+        run: |
+          patch_version="${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}"
+          current_version="${{ steps.fetch_current_version.outputs.DOTNET_CURRENT_MAJOR_MINOR_PATCH_VERSION }}"
+
+          # Update globals
+          echo Updating globals
+          globals_temp=$(mktemp)
+          jq --unbuffered --arg patch_version "$patch_version" '.sdk.version = $patch_version' ./src/global.json > "$globals_temp" && mv "$globals_temp" ./src/global.json
+
+          # Update devcontainer
+          echo Updating devcontainer
+          devcontainer_temp=$(mktemp)
+          jq --unbuffered --arg patch_version "$patch_version" '.features."ghcr.io/devcontainers/features/dotnet".version = $patch_version' ./.devcontainer/devcontainer.json > "$devcontainer_temp" && mv "$devcontainer_temp" ./.devcontainer/devcontainer.json
+
+          # Update dev.sh
+          echo Updating start script
+          sed -i "s/DOTNETSDK_VERSION=\"$current_version\"/DOTNETSDK_VERSION=\"$patch_version\"/g" ./src/dev.sh
+      - name: GIT commit and push all changed files
+        if: ${{ steps.fetch_latest_version.outputs.SHOULD_UPDATE == 1 && steps.fetch_latest_version.outputs.BRANCH_EXISTS == 0 }}
+        id: create_branch
+        run: |
+          branch_name="feature/dotnetsdk-upgrade/${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}"
+          git config --global user.name "github-actions[bot]"
+          git config --global user.email "<41898282+github-actions[bot]@users.noreply.github.com>"
+
+          git checkout -b $branch_name
+          git commit -a -m "Upgrade dotnet sdk to v${{ steps.fetch_latest_version.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}"
+          git push --set-upstream origin $branch_name
+
+  build-hashes:
+    if: ${{ needs.dotnet-update.outputs.SHOULD_UPDATE == 1 && needs.dotnet-update.outputs.BRANCH_EXISTS == 0 }}
+    needs: [dotnet-update]
+    outputs:
+      # pass outputs from this job to create-pr for use
+      DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION: ${{ needs.dotnet-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}
+      DOTNET_CURRENT_MAJOR_MINOR_VERSION: ${{ needs.dotnet-update.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }}
+      NEEDS_HASH_UPDATE: ${{ steps.compute-hash.outputs.NEED_UPDATE }}
+    strategy:
+      fail-fast: false
+      matrix:
+        runtime: [ linux-x64, linux-arm64, linux-arm, win-x64, win-arm64, osx-x64, osx-arm64 ]
+        include:
+          - runtime: linux-x64
+            os: ubuntu-latest
+            devScript: ./dev.sh
+
+          - runtime: linux-arm64
+            os: ubuntu-latest
+            devScript: ./dev.sh
+
+          - runtime: linux-arm
+            os: ubuntu-latest
+            devScript: ./dev.sh
+
+          - runtime: osx-x64
+            os: macOS-latest
+            devScript: ./dev.sh
+
+          - runtime: osx-arm64
+            os: macOS-latest
+            devScript: ./dev.sh
+
+          - runtime: win-x64
+            os: windows-2019
+            devScript: ./dev
+
+          - runtime: win-arm64
+            os: windows-latest
+            devScript: ./dev
+
+    runs-on: ${{ matrix.os }}
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          ref: feature/dotnetsdk-upgrade/${{ needs.dotnet-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}
+
+      # Build runner layout
+      - name: Build & Layout Release
+        run: |
+          ${{ matrix.devScript }} layout Release ${{ matrix.runtime }}
+        working-directory: src
+
+      # Check runtime/externals hash
+      - name: Compute/Compare runtime and externals Hash
+        id: compute-hash
+        continue-on-error: true
+        shell: bash
+        run: |
+          echo "Current dotnet runtime hash result: $DOTNET_RUNTIME_HASH"
+          echo "Current Externals hash result: $EXTERNALS_HASH"
+
+          NeedUpdate=0
+          if [ "$EXTERNALS_HASH" != "$(cat ./src/Misc/contentHash/externals/${{ matrix.runtime }})" ] ;then
+            echo Hash mismatch, Update ./src/Misc/contentHash/externals/${{ matrix.runtime }} to $EXTERNALS_HASH
+
+            echo "EXTERNAL_HASH=$EXTERNALS_HASH" >> $GITHUB_OUTPUT
+            NeedUpdate=1
+          fi
+
+          if [ "$DOTNET_RUNTIME_HASH" != "$(cat ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }})" ] ;then
+            echo Hash mismatch, Update ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }} to $DOTNET_RUNTIME_HASH
+
+            echo "DOTNET_RUNTIME_HASH=$DOTNET_RUNTIME_HASH" >> $GITHUB_OUTPUT
+            NeedUpdate=1
+          fi
+
+          echo "NEED_UPDATE=$NeedUpdate" >> $GITHUB_OUTPUT
+        env:
+          DOTNET_RUNTIME_HASH: ${{hashFiles('**/_layout_trims/runtime/**/*')}}
+          EXTERNALS_HASH: ${{hashFiles('**/_layout_trims/externals/**/*')}}
+      - name: update hash
+        if: ${{ steps.compute-hash.outputs.NEED_UPDATE == 1 }}
+        shell: bash
+        run: |
+          ExternalHash=${{ steps.compute-hash.outputs.EXTERNAL_HASH }}
+          DotNetRuntimeHash=${{ steps.compute-hash.outputs.DOTNET_RUNTIME_HASH }}
+
+          if [ -n "$ExternalHash" ]; then
+            echo "$ExternalHash" > ./src/Misc/contentHash/externals/${{ matrix.runtime }}
+          fi
+
+          if [ -n "$DotNetRuntimeHash" ]; then
+            echo "$DotNetRuntimeHash" > ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }}
+          fi
+      - name: cache updated hashes
+        if: ${{ steps.compute-hash.outputs.NEED_UPDATE == 1 }}
+        uses: actions/cache/save@v3
+        with:
+          enableCrossOsArchive: true
+          path: |
+            ./src/Misc/contentHash/externals/${{ matrix.runtime }}
+            ./src/Misc/contentHash/dotnetRuntime/${{ matrix.runtime }}
+          key: compute-hashes-${{ matrix.runtime }}-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
+
+
+  hash-update:
+    needs: [build-hashes]
+    if: ${{ needs.build-hashes.outputs.NEEDS_HASH_UPDATE == 1 }}
+    outputs:
+      # pass outputs from this job to create-pr for use
+      DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION: ${{ needs.build-hashes.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}
+      DOTNET_CURRENT_MAJOR_MINOR_VERSION: ${{ needs.build-hashes.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }}
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          ref: feature/dotnetsdk-upgrade/${{ needs.build-hashes.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}
+      - name: Restore cached hashes - linux-x64
+        id: cache-restore-linux-x64
+        uses: actions/cache/restore@v3
+        with:
+          enableCrossOsArchive: true
+          path: |
+            ./src/Misc/contentHash/externals/linux-x64
+            ./src/Misc/contentHash/dotnetRuntime/linux-x64
+          key: compute-hashes-linux-x64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
+      - name: Restore cached hashes - linux-arm64
+        id: cache-restore-linux-arm64
+        uses: actions/cache/restore@v3
+        with:
+          enableCrossOsArchive: true
+          path: |
+            ./src/Misc/contentHash/externals/linux-arm64
+            ./src/Misc/contentHash/dotnetRuntime/linux-arm64
+          key: compute-hashes-linux-arm64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
+      - name: Restore cached hashes - linux-arm
+        id: cache-restore-linux-arm
+        uses: actions/cache/restore@v3
+        with:
+          enableCrossOsArchive: true
+          path: |
+            ./src/Misc/contentHash/externals/linux-arm
+            ./src/Misc/contentHash/dotnetRuntime/linux-arm
+          key: compute-hashes-linux-arm-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
+      - name: Restore cached hashes - osx-x64
+        id: cache-restore-osx-x64
+        uses: actions/cache/restore@v3
+        with:
+          enableCrossOsArchive: true
+          path: |
+            ./src/Misc/contentHash/externals/osx-x64
+            ./src/Misc/contentHash/dotnetRuntime/osx-x64
+          key: compute-hashes-osx-x64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
+      - name: Restore cached hashes - osx-arm64
+        id: cache-restore-osx-arm64
+        uses: actions/cache/restore@v3
+        with:
+          enableCrossOsArchive: true
+          path: |
+            ./src/Misc/contentHash/externals/osx-arm64
+            ./src/Misc/contentHash/dotnetRuntime/osx-arm64
+          key: compute-hashes-osx-arm64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
+      - name: Restore cached hashes - win-x64
+        id: cache-restore-win-x64
+        uses: actions/cache/restore@v3
+        with:
+          enableCrossOsArchive: true
+          path: |
+            ./src/Misc/contentHash/externals/win-x64
+            ./src/Misc/contentHash/dotnetRuntime/win-x64
+          key: compute-hashes-win-x64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
+      - name: Restore cached hashes - win-arm64
+        id: cache-restore-win-arm64
+        uses: actions/cache/restore@v3
+        with:
+          enableCrossOsArchive: true
+          path: |
+            ./src/Misc/contentHash/externals/win-arm64
+            ./src/Misc/contentHash/dotnetRuntime/win-arm64
+          key: compute-hashes-win-arm64-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
+      - name: Fetch cached computed hashes
+        if: steps.cache-restore-linux-x64.outputs.cache-hit == 'true' ||
+            steps.cache-restore-linux-arm64.outputs.cache-hit == 'true' ||
+            steps.cache-restore-linux-arm.outputs.cache-hit == 'true' ||
+            steps.cache-restore-win-x64.outputs.cache-hit == 'true' ||
+            steps.cache-restore-win-arm64.outputs.cache-hit == 'true' ||
+            steps.cache-restore-osx-x64.outputs.cache-hit == 'true' ||
+            steps.cache-restore-osx-arm64.outputs.cache-hit == 'true'
+        shell: bash
+        run: |
+          Environments=( "linux-x64" "linux-arm64" "linux-arm" "win-x64" "win-arm64" "osx-x64" "osx-arm64" )
+
+          git config --global user.name "github-actions[bot]"
+          git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
+          git commit -a -m "Update computed hashes"
+          git push --set-upstream origin feature/dotnetsdk-upgrade/${{ needs.build-hashes.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}
+
+  create-pr:
+    needs: [hash-update]
+    outputs:
+      # pass outputs from this job to run-tests for use
+      DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION: ${{ needs.hash-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}
+      DOTNET_CURRENT_MAJOR_MINOR_VERSION: ${{ needs.hash-update.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }}
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          ref: feature/dotnetsdk-upgrade/${{ needs.hash-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}
+      - name: Create Pull Request
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        run: |
+          gh pr create -B main -H feature/dotnetsdk-upgrade/${{ needs.hash-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }} --title "Update dotnet sdk to latest version @${{ needs.hash-update.outputs.DOTNET_LATEST_MAJOR_MINOR_PATCH_VERSION }}" --body "
+          https://dotnetcli.blob.core.windows.net/dotnet/Sdk/${{ needs.hash-update.outputs.DOTNET_CURRENT_MAJOR_MINOR_VERSION }}/latest.version
+
+
+          ---
+
+          Autogenerated by [DotNet SDK Upgrade Workflow](https://github.com/actions/runner/blob/main/.github/workflows/dotnet-upgrade.yml)"
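The heart of the new workflow is the patch-version check. A minimal sketch of the same logic, runnable from the repository root, assuming jq and curl are available (dotnetcli.blob.core.windows.net is the feed the workflow itself queries):

```
#!/usr/bin/env bash
set -euo pipefail

# Current SDK pinned in the repo, and its major.minor channel
current=$(jq -r '.sdk.version' ./src/global.json)
major_minor=$(cut -d '.' -f 1,2 <<< "$current")

# Latest patch published for that channel
latest=$(curl -s "https://dotnetcli.blob.core.windows.net/dotnet/Sdk/${major_minor}/latest.version")

if [ "$current" != "$latest" ]; then
  echo "SDK update available: $current -> $latest"
else
  echo "Already on the latest ${major_minor} patch ($current)"
fi
```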
.github/workflows/stale-bot.yml (vendored): 9 changed lines

@@ -1,4 +1,4 @@
-name: ‘Close stale Runner issues’
+name: Stale Bot
 on:
   workflow_dispatch:
   schedule:
@@ -9,9 +9,8 @@ jobs:
     steps:
      - uses: actions/stale@v8
        with:
-          stale-issue-message: ‘This issue is stale because it has been open 365 days with no activity. Remove stale label or comment or this will be closed in 15 days.’
+          stale-issue-message: "This issue is stale because it has been open 365 days with no activity. Remove stale label or comment or this will be closed in 15 days."
-          close-issue-message: ‘This issue was closed because it has been stalled for 15 days with no activity.’
+          close-issue-message: "This issue was closed because it has been stalled for 15 days with no activity."
-          exempt-issue-labels: ‘keep’
+          exempt-issue-labels: "keep"
           days-before-stale: 365
           days-before-close: 15
-          debug-only: true
.husky/pre-commit (new executable file): 6 lines

@@ -0,0 +1,6 @@
+#!/usr/bin/env sh
+. "$(dirname -- "$0")/_/husky.sh"
+
+cd src/Misc/expressionFunc/hashFiles
+
+npx lint-staged
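The hook only fires if husky has been installed locally. A short sketch of the one-time setup, assuming the "prepare" script defined later in hashFiles/package.json:

```
# One-time setup so the pre-commit hook above actually runs
cd src/Misc/expressionFunc/hashFiles
npm install
npm run prepare     # runs `husky install` from the repository root

# From then on, `git commit` triggers .husky/pre-commit, which cds into the
# hashFiles package and runs `npx lint-staged` against the staged files.
```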
@@ -9,11 +9,13 @@ Make sure the runner has access to actions service for GitHub.com or GitHub Ente
 - The runner needs to access `https://api.github.com` for downloading actions.
 - The runner needs to access `https://vstoken.actions.githubusercontent.com/_apis/.../` for requesting an access token.
 - The runner needs to access `https://pipelines.actions.githubusercontent.com/_apis/.../` for receiving workflow jobs.
+---
+**NOTE:** for the full list of domains that are required to be in the firewall allow list refer to the [GitHub self-hosted runners requirements documentation](https://docs.github.com/en/actions/hosting-your-own-runners/managing-self-hosted-runners/about-self-hosted-runners#communication-between-self-hosted-runners-and-github).

 These can by tested by running the following `curl` commands from your self-hosted runner machine:

 ```
-curl -v https://api.github.com/api/v3/zen
+curl -v https://api.github.com/zen
 curl -v https://vstoken.actions.githubusercontent.com/_apis/health
 curl -v https://pipelines.actions.githubusercontent.com/_apis/health
 ```
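The same connectivity checks can be scripted. A small sketch that loops over the three endpoints listed in the doc above and reports reachability only (no authentication is attempted):

```
#!/usr/bin/env bash
for url in \
  https://api.github.com/zen \
  https://vstoken.actions.githubusercontent.com/_apis/health \
  https://pipelines.actions.githubusercontent.com/_apis/health; do
  echo "Checking $url"
  if curl -fsS -o /dev/null "$url"; then
    echo "  reachable"
  else
    echo "  FAILED"
  fi
done
```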
@@ -14,7 +14,7 @@

 - A Proxy may try to modify the HTTPS request (like add or change some http headers) and causes the request become incompatible with the Actions Service (ASP.NetCore), Ex: [Nginx](https://github.com/dotnet/aspnetcore/issues/17081)

-- Firewall rules that block action runner from accessing certain hosts, ex: `*.github.com`, `*.actions.githubusercontent.com`, etc
+- Firewall rules that block action runner from accessing [certain hosts](https://docs.github.com/en/actions/hosting-your-own-runners/managing-self-hosted-runners/about-self-hosted-runners#communication-between-self-hosted-runners-and-github), ex: `*.github.com`, `*.actions.githubusercontent.com`, etc


 ### Identify and solve these problems
@@ -5,7 +5,7 @@ ARG TARGETOS
 ARG TARGETARCH
 ARG RUNNER_VERSION
 ARG RUNNER_CONTAINER_HOOKS_VERSION=0.3.2
-ARG DOCKER_VERSION=20.10.23
+ARG DOCKER_VERSION=23.0.6

 RUN apt update -y && apt install curl unzip -y

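For context, the ARG values above can be overridden at build time. A hedged example only: the Dockerfile location (./images) and the image tag are assumptions for illustration, but the build-arg names match the ARG declarations shown, and 2.309.0 is the release this changeset targets:

```
docker build ./images \
  --build-arg RUNNER_VERSION=2.309.0 \
  --build-arg RUNNER_CONTAINER_HOOKS_VERSION=0.3.2 \
  --build-arg DOCKER_VERSION=23.0.6 \
  -t actions-runner:local
```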
@@ -1,19 +1,37 @@
-## Features
+## What's Changed
-- Support linux/arm64 docker build (#2601)
+* Bump @types/node from 12.12.14 to 20.4.10 in /src/Misc/expressionFunc/hashFiles by @dependabot in https://github.com/actions/runner/pull/2759
-- Add node20 to runner (#2732)
+* Trace x-github-request-id when download action tarball. by @TingluoHuang in https://github.com/actions/runner/pull/2755
-- Update node16 to latest version (#2736)
+* Fix typo by @kyanny in https://github.com/actions/runner/pull/2741
-- Remove node12 from runner (#2717)
+* Bump prettier from 3.0.1 to 3.0.2 in /src/Misc/expressionFunc/hashFiles by @dependabot in https://github.com/actions/runner/pull/2772
+* Bump @types/node from 20.4.10 to 20.5.0 in /src/Misc/expressionFunc/hashFiles by @dependabot in https://github.com/actions/runner/pull/2773
+* Revert "Fixed a bug where a misplaced `=` character could bypass here… by @cory-miller in https://github.com/actions/runner/pull/2774
+* Filter NODE_OPTIONS from env for file output by @cory-miller in https://github.com/actions/runner/pull/2775
+* Bump @types/node from 20.5.0 to 20.5.1 in /src/Misc/expressionFunc/hashFiles by @dependabot in https://github.com/actions/runner/pull/2781
+* Update Docker Version in Images by @ajschmidt8 in https://github.com/actions/runner/pull/2694
+* Bump @types/node from 20.5.1 to 20.5.4 in /src/Misc/expressionFunc/hashFiles by @dependabot in https://github.com/actions/runner/pull/2789
+* Bump @typescript-eslint/parser from 6.4.0 to 6.4.1 in /src/Misc/expressionFunc/hashFiles by @dependabot in https://github.com/actions/runner/pull/2785
+* Bump Microsoft.AspNet.WebApi.Client from 5.2.4 to 5.2.9 in /src by @dependabot in https://github.com/actions/runner/pull/2751
+* Bump System.Buffers from 4.3.0 to 4.5.1 in /src by @dependabot in https://github.com/actions/runner/pull/2749
+* Bump dotnet/runtime-deps from 6.0-jammy to 7.0-jammy in /images by @dependabot in https://github.com/actions/runner/pull/2745
+* Remove need to manually compile JS binary for hashFiles utility by @vanZeben in https://github.com/actions/runner/pull/2770
+* Revert "Bump dotnet/runtime-deps from 6.0-jammy to 7.0-jammy in /images" by @TingluoHuang in https://github.com/actions/runner/pull/2790
+* Query runner by name on server side. by @TingluoHuang in https://github.com/actions/runner/pull/2771
+* Bump typescript from 5.1.6 to 5.2.2 in /src/Misc/expressionFunc/hashFiles by @dependabot in https://github.com/actions/runner/pull/2795
+* Bump @types/node from 20.5.4 to 20.5.6 in /src/Misc/expressionFunc/hashFiles by @dependabot in https://github.com/actions/runner/pull/2796
+* Bump Newtonsoft.Json from 13.0.1 to 13.0.3 in /src by @dependabot in https://github.com/actions/runner/pull/2797
+* Support replacing runners in v2 flow by @luketomlinson in https://github.com/actions/runner/pull/2791
+* Delegating handler for Http redirects by @paveliak in https://github.com/actions/runner/pull/2814
+* Add references to the firewall requirements docs by @paveliak in https://github.com/actions/runner/pull/2815
+* Create automated workflow that will auto-generate dotnet sdk patches by @vanZeben in https://github.com/actions/runner/pull/2776
+* Fixes minor issues with using proper output varaibles by @vanZeben in https://github.com/actions/runner/pull/2818
+* Throw NonRetryableException on GetNextMessage from broker as needed. by @TingluoHuang in https://github.com/actions/runner/pull/2828
+* Mark action download failures as infra failures by @cory-miller in https://github.com/actions/runner/pull/2827

-## Misc
+## New Contributors
-- Pass timeout in ExecutionContext instead of StepsRunner (#2714)
+* @kyanny made their first contribution in https://github.com/actions/runner/pull/2741
-- Return early on invalid_client OAuth exception (#2721)
+* @ajschmidt8 made their first contribution in https://github.com/actions/runner/pull/2694
-- Expose results service endpoint as environment variable (#2726)
-- Update HTTPEventSourceListener to trace the right events (#2727)
+**Full Changelog**: https://github.com/actions/runner/compare/v2.308.0...v2.309.0
-- Change RunnerId/AgentId from int32 to uint64 (#2661)
-- Configure stale bot for Runner (#2729)
-- Add in dependabot security scanning/updates (#2743)
-- Bump dotnet sdk to latest version (#2733)
-- Switch from InnerException to ErrorCode on disableupdate check (#2718)

 _Note: Actions Runner follows a progressive release policy, so the latest release might not be available to your enterprise, organization, or repository yet.
 To confirm which version of the Actions Runner you should expect, please view the download instructions for your enterprise, organization, or repository.
@@ -1 +1 @@
-2.308.0
+<Update to ./src/runnerversion when creating release>
@@ -1 +1 @@
-7b78ca2997fbe048642d3717ab7321cdd359752b97158f3c67eb3df8786e21d3
+7539d33c35b0bc94ee67e3c0de1a6bac5ef89ce8e8efaa110131fa0520a54fb4
@@ -1 +1 @@
-6f34c1d501c87c2e22c2278df7152999aca628c66ee4176d32325773487da6d7
+d71a31f9a17e1a41d6e1edea596edfa68a0db5948ed160e86f2154a547f4dd10
@@ -1 +1 @@
-921ca58050be56e0b84af05e544cab4a151cb66405e815e19c0e0928ef7313f5
+3c2f700d8a995efe7895614ee07d9c7880f872d214b45983ad6163e1931870ab
@@ -1 +1 @@
-50f5c147074fc4943b4198b2d9b57c5e94344ab21350b0880ec8e2b85d27152b
+b2d85c95ecad13d352f4c7d31c64dbb0d9c6381b48fa5874c4c72a43a025a8a1
@@ -1 +1 @@
-16269548335b1f2add41a409aa3558c56581b63f280a9a26956707b6370558bd
+417d835c1a108619886b4bb5d25988cb6c138eb7b4c00320b1d9455c5630bff9
@@ -1 +1 @@
-e4aa6003ec77a2b21f3021927fed48727bde379fafff300f39565ff2fff4dd87
+8f35aaecfb53426ea10816442e23065142bab9dd0fb712a29e0fc471d13c44ac
@@ -1 +1 @@
-16ab4c166c58bc4c5600ff055be7ce0a9bb0dd993388114a76efea51e4ea14cb
+811c7debdfc54d074385b063b83c997e5360c8a9160cd20fe777713968370063
@@ -1,3 +1,4 @@
+
 {
   "printWidth": 80,
   "tabWidth": 2,
@@ -7,5 +8,12 @@
   "trailingComma": "none",
   "bracketSpacing": false,
   "arrowParens": "avoid",
-  "parser": "typescript"
+  "overrides": [
+    {
+      "files": "*.{js,ts,json}",
+      "options": {
+        "tabWidth": 2
+      }
+    }
+  ]
 }
@@ -1,4 +1,3 @@
-To compile this package (output will be stored in `Misc/layoutbin`) run `npm install && npm run all`.
+To compile this package (output will be stored in `Misc/layoutbin`) run `npm install && npm run prepare && npm run all`.

-> Note: this package also needs to be recompiled for dependabot PRs updating one of
-> its dependencies.
+When you commit changes to the JSON or Typescript file, the javascript binary will be automatically re-compiled and added to the latest commit.
src/Misc/expressionFunc/hashFiles/package-lock.json (generated): 2226 changed lines (diff suppressed because it is too large)
@@ -9,7 +9,9 @@
     "format-check": "prettier --check **/*.ts",
     "lint": "eslint src/**/*.ts",
     "pack": "ncc build -o ../../layoutbin/hashFiles",
-    "all": "npm run build && npm run format && npm run lint && npm run pack"
+    "all": "npm run format && npm run lint && npm run build && npm run pack",
+    "prepare": "cd ../../../../ && husky install"
+
   },
   "repository": {
     "type": "git",
@@ -18,20 +20,32 @@
   "keywords": [
     "actions"
   ],
+  "lint-staged": {
+    "*.md": [
+      "prettier --write",
+      "git add ."
+    ],
+    "*.{ts,json}": [
+      "sh -c 'npm run all'",
+      "git add ."
+    ]
+  },
   "author": "GitHub Actions",
   "license": "MIT",
   "dependencies": {
-    "@actions/glob": "^0.1.0"
+    "@actions/glob": "^0.4.0"
   },
   "devDependencies": {
-    "@types/node": "^12.7.12",
+    "@types/node": "^20.5.6",
-    "@typescript-eslint/eslint-plugin": "^6.3.0",
+    "@typescript-eslint/eslint-plugin": "^6.4.0",
-    "@typescript-eslint/parser": "^6.3.0",
+    "@typescript-eslint/parser": "^6.4.1",
     "@vercel/ncc": "^0.36.1",
-    "eslint": "^8.11.0",
+    "eslint": "^8.47.0",
     "eslint-plugin-github": "^4.9.2",
     "eslint-plugin-prettier": "^5.0.0",
     "prettier": "^3.0.1",
-    "typescript": "^5.1.6"
+    "typescript": "^5.2.2",
+    "husky": "^8.0.3",
+    "lint-staged": "^14.0.0"
   }
 }
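With the lint-staged block above, committing a staged TypeScript or JSON change in this package effectively runs the following, re-staging the regenerated bundle so it rides along with the commit. A hedged sketch of that sequence, run manually:

```
cd src/Misc/expressionFunc/hashFiles
npm run all   # format + lint + tsc build + ncc pack into ../../layoutbin/hashFiles
git add .     # re-stage the recompiled javascript binary
```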
@@ -52,12 +52,13 @@ async function run(): Promise<void> {
   }
 }

-run()
-  .then(out => {
+;(async () => {
+  try {
+    const out = await run()
     console.log(out)
     process.exit(0)
-  })
-  .catch(err => {
+  } catch (err) {
     console.error(err)
     process.exit(1)
-  })
+  }
+})()
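The hunks that follow are the regenerated ncc bundle produced from the TypeScript source above. To reproduce them locally, per the package README and the package.json scripts shown earlier:

```
cd src/Misc/expressionFunc/hashFiles
npm install
npm run prepare   # installs the husky git hooks from the repository root
npm run all       # format, lint, tsc build, then ncc pack into src/Misc/layoutbin/hashFiles
```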
@@ -6,6 +6,29 @@

 "use strict";

+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
 var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
     function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
     return new (P || (P = Promise))(function (resolve, reject) {
@@ -22,13 +45,6 @@ var __asyncValues = (this && this.__asyncValues) || function (o) {
     function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
     function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
 };
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
-    result["default"] = mod;
-    return result;
-};
 Object.defineProperty(exports, "__esModule", ({ value: true }));
 const crypto = __importStar(__nccwpck_require__(6113));
 const fs = __importStar(__nccwpck_require__(7147));
@@ -37,7 +53,7 @@ const path = __importStar(__nccwpck_require__(1017));
 const stream = __importStar(__nccwpck_require__(2781));
 const util = __importStar(__nccwpck_require__(3837));
 function run() {
-    var e_1, _a;
+    var _a, e_1, _b, _c;
     return __awaiter(this, void 0, void 0, function* () {
         // arg0 -> node
         // arg1 -> hashFiles.js
@@ -56,8 +72,10 @@ function run() {
         let count = 0;
         const globber = yield glob.create(matchPatterns, { followSymbolicLinks });
         try {
-            for (var _b = __asyncValues(globber.globGenerator()), _c; _c = yield _b.next(), !_c.done;) {
+            for (var _d = true, _e = __asyncValues(globber.globGenerator()), _f; _f = yield _e.next(), _a = _f.done, !_a; _d = true) {
-                const file = _c.value;
+                _c = _f.value;
+                _d = false;
+                const file = _c;
                 console.log(file);
                 if (!file.startsWith(`${githubWorkspace}${path.sep}`)) {
                     console.log(`Ignore '${file}' since it is not under GITHUB_WORKSPACE.`);
@@ -80,7 +98,7 @@ function run() {
         catch (e_1_1) { e_1 = { error: e_1_1 }; }
         finally {
             try {
-                if (_c && !_c.done && (_a = _b.return)) yield _a.call(_b);
+                if (!_d && !_a && (_b = _e.return)) yield _b.call(_e);
             }
             finally { if (e_1) throw e_1.error; }
         }
@@ -94,15 +112,18 @@ function run() {
     }
     });
 }
-run()
-  .then(out => {
+;
+(() => __awaiter(void 0, void 0, void 0, function* () {
+    try {
+        const out = yield run();
         console.log(out);
         process.exit(0);
-  })
-  .catch(err => {
+    }
+    catch (err) {
         console.error(err);
         process.exit(1);
-  });
+    }
+}))();


 /***/ }),
@@ -246,7 +267,6 @@ const file_command_1 = __nccwpck_require__(717);
 const utils_1 = __nccwpck_require__(5278);
 const os = __importStar(__nccwpck_require__(2037));
 const path = __importStar(__nccwpck_require__(1017));
-const uuid_1 = __nccwpck_require__(5840);
 const oidc_utils_1 = __nccwpck_require__(8041);
 /**
  * The code to exit an action
@@ -276,21 +296,10 @@ function exportVariable(name, val) {
     process.env[name] = convertedVal;
     const filePath = process.env['GITHUB_ENV'] || '';
     if (filePath) {
-        const delimiter = `ghadelimiter_${uuid_1.v4()}`;
+        return file_command_1.issueFileCommand('ENV', file_command_1.prepareKeyValueMessage(name, val));
-        // These should realistically never happen, but just in case someone finds a way to exploit uuid generation let's not allow keys or values that contain the delimiter.
-        if (name.includes(delimiter)) {
-            throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`);
     }
-        if (convertedVal.includes(delimiter)) {
-            throw new Error(`Unexpected input: value should not contain the delimiter "${delimiter}"`);
-        }
-        const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`;
-        file_command_1.issueCommand('ENV', commandValue);
-    }
-    else {
     command_1.issueCommand('set-env', { name }, convertedVal);
 }
-}
 exports.exportVariable = exportVariable;
 /**
  * Registers a secret which will get masked from logs
@@ -307,7 +316,7 @@ exports.setSecret = setSecret;
 function addPath(inputPath) {
     const filePath = process.env['GITHUB_PATH'] || '';
     if (filePath) {
-        file_command_1.issueCommand('PATH', inputPath);
+        file_command_1.issueFileCommand('PATH', inputPath);
     }
     else {
         command_1.issueCommand('add-path', {}, inputPath);
@@ -347,8 +356,11 @@ function getMultilineInput(name, options) {
     const inputs = getInput(name, options)
         .split('\n')
         .filter(x => x !== '');
+    if (options && options.trimWhitespace === false) {
         return inputs;
     }
+    return inputs.map(input => input.trim());
+}
 exports.getMultilineInput = getMultilineInput;
 /**
  * Gets the input value of the boolean type in the YAML 1.2 "core schema" specification.
@@ -380,8 +392,12 @@ exports.getBooleanInput = getBooleanInput;
  */
 // eslint-disable-next-line @typescript-eslint/no-explicit-any
 function setOutput(name, value) {
+    const filePath = process.env['GITHUB_OUTPUT'] || '';
+    if (filePath) {
+        return file_command_1.issueFileCommand('OUTPUT', file_command_1.prepareKeyValueMessage(name, value));
+    }
     process.stdout.write(os.EOL);
-    command_1.issueCommand('set-output', { name }, value);
+    command_1.issueCommand('set-output', { name }, utils_1.toCommandValue(value));
 }
 exports.setOutput = setOutput;
 /**
@@ -510,7 +526,11 @@ exports.group = group;
  */
 // eslint-disable-next-line @typescript-eslint/no-explicit-any
 function saveState(name, value) {
-    command_1.issueCommand('save-state', { name }, value);
+    const filePath = process.env['GITHUB_STATE'] || '';
+    if (filePath) {
+        return file_command_1.issueFileCommand('STATE', file_command_1.prepareKeyValueMessage(name, value));
+    }
+    command_1.issueCommand('save-state', { name }, utils_1.toCommandValue(value));
 }
 exports.saveState = saveState;
 /**
@@ -576,13 +596,14 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
|||||||
return result;
|
return result;
|
||||||
};
|
};
|
||||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||||
exports.issueCommand = void 0;
|
exports.prepareKeyValueMessage = exports.issueFileCommand = void 0;
|
||||||
// We use any as a valid input type
|
// We use any as a valid input type
|
||||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||||
const fs = __importStar(__nccwpck_require__(7147));
|
const fs = __importStar(__nccwpck_require__(7147));
|
||||||
const os = __importStar(__nccwpck_require__(2037));
|
const os = __importStar(__nccwpck_require__(2037));
|
||||||
|
const uuid_1 = __nccwpck_require__(5840);
|
||||||
const utils_1 = __nccwpck_require__(5278);
|
const utils_1 = __nccwpck_require__(5278);
|
||||||
function issueCommand(command, message) {
|
function issueFileCommand(command, message) {
|
||||||
const filePath = process.env[`GITHUB_${command}`];
|
const filePath = process.env[`GITHUB_${command}`];
|
||||||
if (!filePath) {
|
if (!filePath) {
|
||||||
throw new Error(`Unable to find environment variable for file command ${command}`);
|
throw new Error(`Unable to find environment variable for file command ${command}`);
|
||||||
@@ -594,7 +615,22 @@ function issueCommand(command, message) {
encoding: 'utf8'
});
}
exports.issueCommand = issueCommand;
exports.issueFileCommand = issueFileCommand;
function prepareKeyValueMessage(key, value) {
const delimiter = `ghadelimiter_${uuid_1.v4()}`;
const convertedValue = utils_1.toCommandValue(value);
// These should realistically never happen, but just in case someone finds a
// way to exploit uuid generation let's not allow keys or values that contain
// the delimiter.
if (key.includes(delimiter)) {
throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`);
}
if (convertedValue.includes(delimiter)) {
throw new Error(`Unexpected input: value should not contain the delimiter "${delimiter}"`);
}
return `${key}<<${delimiter}${os.EOL}${convertedValue}${os.EOL}${delimiter}`;
}
exports.prepareKeyValueMessage = prepareKeyValueMessage;
//# sourceMappingURL=file-command.js.map

/***/ }),
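prepareKeyValueMessage is what issueFileCommand appends to the GITHUB_OUTPUT or GITHUB_STATE file: a heredoc-style record fenced by a random ghadelimiter_<uuid> marker. A rough stand-alone sketch of that record shape, assuming only Node's built-in os, fs and crypto modules; it illustrates the format, it is not the bundled implementation itself:

import * as os from 'os';
import * as fs from 'fs';
import { randomUUID } from 'crypto';

// Produces: key<<ghadelimiter_<uuid> EOL value EOL ghadelimiter_<uuid>
function sketchKeyValueMessage(key: string, value: string): string {
  const delimiter = `ghadelimiter_${randomUUID()}`;
  if (key.includes(delimiter) || value.includes(delimiter)) {
    throw new Error('key and value must not contain the delimiter');
  }
  return `${key}<<${delimiter}${os.EOL}${value}${os.EOL}${delimiter}`;
}

// Usage sketch: append one record per output to the file named by GITHUB_OUTPUT.
if (process.env.GITHUB_OUTPUT) {
  fs.appendFileSync(process.env.GITHUB_OUTPUT, sketchKeyValueMessage('greeting', 'hello') + os.EOL);
}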
@@ -1100,7 +1136,9 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
});
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.hashFiles = exports.create = void 0;
const internal_globber_1 = __nccwpck_require__(8298);
const internal_hash_files_1 = __nccwpck_require__(2448);
/**
* Constructs a globber
*
@@ -1113,17 +1151,56 @@ function create(patterns, options) {
});
}
exports.create = create;
/**
* Computes the sha256 hash of a glob
*
* @param patterns Patterns separated by newlines
* @param currentWorkspace Workspace used when matching files
* @param options Glob options
* @param verbose Enables verbose logging
*/
function hashFiles(patterns, currentWorkspace = '', options, verbose = false) {
return __awaiter(this, void 0, void 0, function* () {
let followSymbolicLinks = true;
if (options && typeof options.followSymbolicLinks === 'boolean') {
followSymbolicLinks = options.followSymbolicLinks;
}
const globber = yield create(patterns, { followSymbolicLinks });
return internal_hash_files_1.hashFiles(globber, currentWorkspace, verbose);
});
}
exports.hashFiles = hashFiles;
//# sourceMappingURL=glob.js.map

/***/ }),
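The bundled @actions/glob now exports hashFiles next to create, wrapping the new internal-hash-files module. A hedged usage sketch, assuming a package version that exposes hashFiles as this bundle does; the patterns are examples only:

import * as glob from '@actions/glob';

async function run(): Promise<void> {
  // Hashes every matching file with sha256 and returns '' when nothing matches.
  const lockHash = await glob.hashFiles('**/package-lock.json');
  console.log(`lockfile hash: ${lockHash || '<no matches>'}`);

  // create() still returns a globber; options such as followSymbolicLinks pass through.
  const globber = await glob.create('**/*.md\n!node_modules/**', { followSymbolicLinks: false });
  console.log((await globber.glob()).length, 'markdown files');
}

run();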
|
|
||||||
/***/ 1026:
|
/***/ 1026:
|
||||||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
|
|
||||||
|
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||||
|
if (k2 === undefined) k2 = k;
|
||||||
|
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||||
|
}) : (function(o, m, k, k2) {
|
||||||
|
if (k2 === undefined) k2 = k;
|
||||||
|
o[k2] = m[k];
|
||||||
|
}));
|
||||||
|
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||||
|
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||||
|
}) : function(o, v) {
|
||||||
|
o["default"] = v;
|
||||||
|
});
|
||||||
|
var __importStar = (this && this.__importStar) || function (mod) {
|
||||||
|
if (mod && mod.__esModule) return mod;
|
||||||
|
var result = {};
|
||||||
|
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||||
|
__setModuleDefault(result, mod);
|
||||||
|
return result;
|
||||||
|
};
|
||||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||||
const core = __nccwpck_require__(2186);
|
exports.getOptions = void 0;
|
||||||
|
const core = __importStar(__nccwpck_require__(2186));
|
||||||
/**
|
/**
|
||||||
* Returns a copy with defaults filled in.
|
* Returns a copy with defaults filled in.
|
||||||
*/
|
*/
|
||||||
@@ -1131,6 +1208,7 @@ function getOptions(copy) {
const result = {
followSymbolicLinks: true,
implicitDescendants: true,
matchDirectories: true,
omitBrokenSymbolicLinks: true
};
if (copy) {
@@ -1142,6 +1220,10 @@ function getOptions(copy) {
result.implicitDescendants = copy.implicitDescendants;
core.debug(`implicitDescendants '${result.implicitDescendants}'`);
}
if (typeof copy.matchDirectories === 'boolean') {
result.matchDirectories = copy.matchDirectories;
core.debug(`matchDirectories '${result.matchDirectories}'`);
}
if (typeof copy.omitBrokenSymbolicLinks === 'boolean') {
result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks;
core.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`);
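getOptions now defaults matchDirectories to true, and the globber only yields a matched directory when that flag is on. A small sketch of opting out, assuming the matchDirectories option is exposed on the published GlobOptions as it is in this bundle; the pattern is illustrative:

import * as glob from '@actions/glob';

async function listFilesOnly(): Promise<string[]> {
  // matchDirectories: false keeps directory paths out of the results,
  // mirroring the new options.matchDirectories check in DefaultGlobber.
  const globber = await glob.create('src/**', { matchDirectories: false });
  return globber.glob();
}

listFilesOnly().then(files => console.log(files.length, 'files'));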
@@ -1159,6 +1241,25 @@ exports.getOptions = getOptions;
|
|||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
|
|
||||||
|
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||||
|
if (k2 === undefined) k2 = k;
|
||||||
|
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||||
|
}) : (function(o, m, k, k2) {
|
||||||
|
if (k2 === undefined) k2 = k;
|
||||||
|
o[k2] = m[k];
|
||||||
|
}));
|
||||||
|
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||||
|
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||||
|
}) : function(o, v) {
|
||||||
|
o["default"] = v;
|
||||||
|
});
|
||||||
|
var __importStar = (this && this.__importStar) || function (mod) {
|
||||||
|
if (mod && mod.__esModule) return mod;
|
||||||
|
var result = {};
|
||||||
|
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||||
|
__setModuleDefault(result, mod);
|
||||||
|
return result;
|
||||||
|
};
|
||||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||||
return new (P || (P = Promise))(function (resolve, reject) {
|
return new (P || (P = Promise))(function (resolve, reject) {
|
||||||
@@ -1188,11 +1289,12 @@ var __asyncGenerator = (this && this.__asyncGenerator) || function (thisArg, _ar
|
|||||||
function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }
|
function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); }
|
||||||
};
|
};
|
||||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||||
const core = __nccwpck_require__(2186);
|
exports.DefaultGlobber = void 0;
|
||||||
const fs = __nccwpck_require__(7147);
|
const core = __importStar(__nccwpck_require__(2186));
|
||||||
const globOptionsHelper = __nccwpck_require__(1026);
|
const fs = __importStar(__nccwpck_require__(7147));
|
||||||
const path = __nccwpck_require__(1017);
|
const globOptionsHelper = __importStar(__nccwpck_require__(1026));
|
||||||
const patternHelper = __nccwpck_require__(9005);
|
const path = __importStar(__nccwpck_require__(1017));
|
||||||
|
const patternHelper = __importStar(__nccwpck_require__(9005));
|
||||||
const internal_match_kind_1 = __nccwpck_require__(1063);
|
const internal_match_kind_1 = __nccwpck_require__(1063);
|
||||||
const internal_pattern_1 = __nccwpck_require__(4536);
|
const internal_pattern_1 = __nccwpck_require__(4536);
|
||||||
const internal_search_state_1 = __nccwpck_require__(9117);
|
const internal_search_state_1 = __nccwpck_require__(9117);
|
||||||
@@ -1238,7 +1340,7 @@ class DefaultGlobber {
if (options.implicitDescendants &&
(pattern.trailingSeparator ||
pattern.segments[pattern.segments.length - 1] !== '**')) {
patterns.push(new internal_pattern_1.Pattern(pattern.negate, pattern.segments.concat('**')));
patterns.push(new internal_pattern_1.Pattern(pattern.negate, true, pattern.segments.concat('**')));
}
}
// Push the search paths
@@ -1281,7 +1383,7 @@ class DefaultGlobber {
// Directory
if (stats.isDirectory()) {
// Matched
if (match & internal_match_kind_1.MatchKind.Directory) {
if (match & internal_match_kind_1.MatchKind.Directory && options.matchDirectories) {
yield yield __await(item.path);
}
// Descend?
@@ -1376,12 +1478,117 @@ exports.DefaultGlobber = DefaultGlobber;
|
|||||||
|
|
||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
|
/***/ 2448:
|
||||||
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||||||
|
|
||||||
|
"use strict";
|
||||||
|
|
||||||
|
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||||
|
if (k2 === undefined) k2 = k;
|
||||||
|
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||||
|
}) : (function(o, m, k, k2) {
|
||||||
|
if (k2 === undefined) k2 = k;
|
||||||
|
o[k2] = m[k];
|
||||||
|
}));
|
||||||
|
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||||
|
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||||
|
}) : function(o, v) {
|
||||||
|
o["default"] = v;
|
||||||
|
});
|
||||||
|
var __importStar = (this && this.__importStar) || function (mod) {
|
||||||
|
if (mod && mod.__esModule) return mod;
|
||||||
|
var result = {};
|
||||||
|
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||||
|
__setModuleDefault(result, mod);
|
||||||
|
return result;
|
||||||
|
};
|
||||||
|
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||||
|
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
||||||
|
return new (P || (P = Promise))(function (resolve, reject) {
|
||||||
|
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||||
|
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||||
|
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
||||||
|
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||||
|
});
|
||||||
|
};
|
||||||
|
var __asyncValues = (this && this.__asyncValues) || function (o) {
|
||||||
|
if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
|
||||||
|
var m = o[Symbol.asyncIterator], i;
|
||||||
|
return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
|
||||||
|
function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
|
||||||
|
function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
|
||||||
|
};
|
||||||
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||||
|
exports.hashFiles = void 0;
|
||||||
|
const crypto = __importStar(__nccwpck_require__(6113));
|
||||||
|
const core = __importStar(__nccwpck_require__(2186));
|
||||||
|
const fs = __importStar(__nccwpck_require__(7147));
|
||||||
|
const stream = __importStar(__nccwpck_require__(2781));
|
||||||
|
const util = __importStar(__nccwpck_require__(3837));
|
||||||
|
const path = __importStar(__nccwpck_require__(1017));
|
||||||
|
function hashFiles(globber, currentWorkspace, verbose = false) {
|
||||||
|
var e_1, _a;
|
||||||
|
var _b;
|
||||||
|
return __awaiter(this, void 0, void 0, function* () {
|
||||||
|
const writeDelegate = verbose ? core.info : core.debug;
|
||||||
|
let hasMatch = false;
|
||||||
|
const githubWorkspace = currentWorkspace
|
||||||
|
? currentWorkspace
|
||||||
|
: (_b = process.env['GITHUB_WORKSPACE']) !== null && _b !== void 0 ? _b : process.cwd();
|
||||||
|
const result = crypto.createHash('sha256');
|
||||||
|
let count = 0;
|
||||||
|
try {
|
||||||
|
for (var _c = __asyncValues(globber.globGenerator()), _d; _d = yield _c.next(), !_d.done;) {
|
||||||
|
const file = _d.value;
|
||||||
|
writeDelegate(file);
|
||||||
|
if (!file.startsWith(`${githubWorkspace}${path.sep}`)) {
|
||||||
|
writeDelegate(`Ignore '${file}' since it is not under GITHUB_WORKSPACE.`);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
if (fs.statSync(file).isDirectory()) {
|
||||||
|
writeDelegate(`Skip directory '${file}'.`);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
const hash = crypto.createHash('sha256');
|
||||||
|
const pipeline = util.promisify(stream.pipeline);
|
||||||
|
yield pipeline(fs.createReadStream(file), hash);
|
||||||
|
result.write(hash.digest());
|
||||||
|
count++;
|
||||||
|
if (!hasMatch) {
|
||||||
|
hasMatch = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
catch (e_1_1) { e_1 = { error: e_1_1 }; }
|
||||||
|
finally {
|
||||||
|
try {
|
||||||
|
if (_d && !_d.done && (_a = _c.return)) yield _a.call(_c);
|
||||||
|
}
|
||||||
|
finally { if (e_1) throw e_1.error; }
|
||||||
|
}
|
||||||
|
result.end();
|
||||||
|
if (hasMatch) {
|
||||||
|
writeDelegate(`Found ${count} files to hash.`);
|
||||||
|
return result.digest('hex');
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
writeDelegate(`No matches found for glob`);
|
||||||
|
return '';
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
exports.hashFiles = hashFiles;
|
||||||
|
//# sourceMappingURL=internal-hash-files.js.map
|
||||||
|
|
||||||
|
/***/ }),
|
||||||
|
|
||||||
/***/ 1063:
|
/***/ 1063:
|
||||||
/***/ ((__unused_webpack_module, exports) => {
|
/***/ ((__unused_webpack_module, exports) => {
|
||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
|
|
||||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||||
|
exports.MatchKind = void 0;
|
||||||
/**
|
/**
|
||||||
* Indicates whether a pattern matches a path
|
* Indicates whether a pattern matches a path
|
||||||
*/
|
*/
|
||||||
@@ -1401,13 +1608,36 @@ var MatchKind;
|
|||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
/***/ 1849:
|
/***/ 1849:
|
||||||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
|
|
||||||
|
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||||
|
if (k2 === undefined) k2 = k;
|
||||||
|
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||||
|
}) : (function(o, m, k, k2) {
|
||||||
|
if (k2 === undefined) k2 = k;
|
||||||
|
o[k2] = m[k];
|
||||||
|
}));
|
||||||
|
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||||
|
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||||
|
}) : function(o, v) {
|
||||||
|
o["default"] = v;
|
||||||
|
});
|
||||||
|
var __importStar = (this && this.__importStar) || function (mod) {
|
||||||
|
if (mod && mod.__esModule) return mod;
|
||||||
|
var result = {};
|
||||||
|
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||||
|
__setModuleDefault(result, mod);
|
||||||
|
return result;
|
||||||
|
};
|
||||||
|
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||||
|
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||||
|
};
|
||||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||||
const assert = __nccwpck_require__(9491);
|
exports.safeTrimTrailingSeparator = exports.normalizeSeparators = exports.hasRoot = exports.hasAbsoluteRoot = exports.ensureAbsoluteRoot = exports.dirname = void 0;
|
||||||
const path = __nccwpck_require__(1017);
|
const path = __importStar(__nccwpck_require__(1017));
|
||||||
|
const assert_1 = __importDefault(__nccwpck_require__(9491));
|
||||||
const IS_WINDOWS = process.platform === 'win32';
|
const IS_WINDOWS = process.platform === 'win32';
|
||||||
/**
|
/**
|
||||||
* Similar to path.dirname except normalizes the path separators and slightly better handling for Windows UNC paths.
|
* Similar to path.dirname except normalizes the path separators and slightly better handling for Windows UNC paths.
|
||||||
@@ -1447,8 +1677,8 @@ exports.dirname = dirname;
|
|||||||
* or `C:` are expanded based on the current working directory.
|
* or `C:` are expanded based on the current working directory.
|
||||||
*/
|
*/
|
||||||
function ensureAbsoluteRoot(root, itemPath) {
|
function ensureAbsoluteRoot(root, itemPath) {
|
||||||
assert(root, `ensureAbsoluteRoot parameter 'root' must not be empty`);
|
assert_1.default(root, `ensureAbsoluteRoot parameter 'root' must not be empty`);
|
||||||
assert(itemPath, `ensureAbsoluteRoot parameter 'itemPath' must not be empty`);
|
assert_1.default(itemPath, `ensureAbsoluteRoot parameter 'itemPath' must not be empty`);
|
||||||
// Already rooted
|
// Already rooted
|
||||||
if (hasAbsoluteRoot(itemPath)) {
|
if (hasAbsoluteRoot(itemPath)) {
|
||||||
return itemPath;
|
return itemPath;
|
||||||
@@ -1458,7 +1688,7 @@ function ensureAbsoluteRoot(root, itemPath) {
|
|||||||
// Check for itemPath like C: or C:foo
|
// Check for itemPath like C: or C:foo
|
||||||
if (itemPath.match(/^[A-Z]:[^\\/]|^[A-Z]:$/i)) {
|
if (itemPath.match(/^[A-Z]:[^\\/]|^[A-Z]:$/i)) {
|
||||||
let cwd = process.cwd();
|
let cwd = process.cwd();
|
||||||
assert(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`);
|
assert_1.default(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`);
|
||||||
// Drive letter matches cwd? Expand to cwd
|
// Drive letter matches cwd? Expand to cwd
|
||||||
if (itemPath[0].toUpperCase() === cwd[0].toUpperCase()) {
|
if (itemPath[0].toUpperCase() === cwd[0].toUpperCase()) {
|
||||||
// Drive only, e.g. C:
|
// Drive only, e.g. C:
|
||||||
@@ -1483,11 +1713,11 @@ function ensureAbsoluteRoot(root, itemPath) {
|
|||||||
// Check for itemPath like \ or \foo
|
// Check for itemPath like \ or \foo
|
||||||
else if (normalizeSeparators(itemPath).match(/^\\$|^\\[^\\]/)) {
|
else if (normalizeSeparators(itemPath).match(/^\\$|^\\[^\\]/)) {
|
||||||
const cwd = process.cwd();
|
const cwd = process.cwd();
|
||||||
assert(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`);
|
assert_1.default(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`);
|
||||||
return `${cwd[0]}:\\${itemPath.substr(1)}`;
|
return `${cwd[0]}:\\${itemPath.substr(1)}`;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
assert(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`);
|
assert_1.default(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`);
|
||||||
// Otherwise ensure root ends with a separator
|
// Otherwise ensure root ends with a separator
|
||||||
if (root.endsWith('/') || (IS_WINDOWS && root.endsWith('\\'))) {
|
if (root.endsWith('/') || (IS_WINDOWS && root.endsWith('\\'))) {
|
||||||
// Intentionally empty
|
// Intentionally empty
|
||||||
@@ -1504,7 +1734,7 @@ exports.ensureAbsoluteRoot = ensureAbsoluteRoot;
|
|||||||
* `\\hello\share` and `C:\hello` (and using alternate separator).
|
* `\\hello\share` and `C:\hello` (and using alternate separator).
|
||||||
*/
|
*/
|
||||||
function hasAbsoluteRoot(itemPath) {
|
function hasAbsoluteRoot(itemPath) {
|
||||||
assert(itemPath, `hasAbsoluteRoot parameter 'itemPath' must not be empty`);
|
assert_1.default(itemPath, `hasAbsoluteRoot parameter 'itemPath' must not be empty`);
|
||||||
// Normalize separators
|
// Normalize separators
|
||||||
itemPath = normalizeSeparators(itemPath);
|
itemPath = normalizeSeparators(itemPath);
|
||||||
// Windows
|
// Windows
|
||||||
@@ -1521,7 +1751,7 @@ exports.hasAbsoluteRoot = hasAbsoluteRoot;
|
|||||||
* `\`, `\hello`, `\\hello\share`, `C:`, and `C:\hello` (and using alternate separator).
|
* `\`, `\hello`, `\\hello\share`, `C:`, and `C:\hello` (and using alternate separator).
|
||||||
*/
|
*/
|
||||||
function hasRoot(itemPath) {
|
function hasRoot(itemPath) {
|
||||||
assert(itemPath, `isRooted parameter 'itemPath' must not be empty`);
|
assert_1.default(itemPath, `isRooted parameter 'itemPath' must not be empty`);
|
||||||
// Normalize separators
|
// Normalize separators
|
||||||
itemPath = normalizeSeparators(itemPath);
|
itemPath = normalizeSeparators(itemPath);
|
||||||
// Windows
|
// Windows
|
||||||
@@ -1583,14 +1813,37 @@ exports.safeTrimTrailingSeparator = safeTrimTrailingSeparator;
|
|||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
/***/ 6836:
|
/***/ 6836:
|
||||||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
|
|
||||||
|
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||||
|
if (k2 === undefined) k2 = k;
|
||||||
|
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||||
|
}) : (function(o, m, k, k2) {
|
||||||
|
if (k2 === undefined) k2 = k;
|
||||||
|
o[k2] = m[k];
|
||||||
|
}));
|
||||||
|
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||||
|
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||||
|
}) : function(o, v) {
|
||||||
|
o["default"] = v;
|
||||||
|
});
|
||||||
|
var __importStar = (this && this.__importStar) || function (mod) {
|
||||||
|
if (mod && mod.__esModule) return mod;
|
||||||
|
var result = {};
|
||||||
|
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||||
|
__setModuleDefault(result, mod);
|
||||||
|
return result;
|
||||||
|
};
|
||||||
|
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||||
|
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||||
|
};
|
||||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||||
const assert = __nccwpck_require__(9491);
|
exports.Path = void 0;
|
||||||
const path = __nccwpck_require__(1017);
|
const path = __importStar(__nccwpck_require__(1017));
|
||||||
const pathHelper = __nccwpck_require__(1849);
|
const pathHelper = __importStar(__nccwpck_require__(1849));
|
||||||
|
const assert_1 = __importDefault(__nccwpck_require__(9491));
|
||||||
const IS_WINDOWS = process.platform === 'win32';
|
const IS_WINDOWS = process.platform === 'win32';
|
||||||
/**
|
/**
|
||||||
* Helper class for parsing paths into segments
|
* Helper class for parsing paths into segments
|
||||||
@@ -1604,7 +1857,7 @@ class Path {
|
|||||||
this.segments = [];
|
this.segments = [];
|
||||||
// String
|
// String
|
||||||
if (typeof itemPath === 'string') {
|
if (typeof itemPath === 'string') {
|
||||||
assert(itemPath, `Parameter 'itemPath' must not be empty`);
|
assert_1.default(itemPath, `Parameter 'itemPath' must not be empty`);
|
||||||
// Normalize slashes and trim unnecessary trailing slash
|
// Normalize slashes and trim unnecessary trailing slash
|
||||||
itemPath = pathHelper.safeTrimTrailingSeparator(itemPath);
|
itemPath = pathHelper.safeTrimTrailingSeparator(itemPath);
|
||||||
// Not rooted
|
// Not rooted
|
||||||
@@ -1631,24 +1884,24 @@ class Path {
|
|||||||
// Array
|
// Array
|
||||||
else {
|
else {
|
||||||
// Must not be empty
|
// Must not be empty
|
||||||
assert(itemPath.length > 0, `Parameter 'itemPath' must not be an empty array`);
|
assert_1.default(itemPath.length > 0, `Parameter 'itemPath' must not be an empty array`);
|
||||||
// Each segment
|
// Each segment
|
||||||
for (let i = 0; i < itemPath.length; i++) {
|
for (let i = 0; i < itemPath.length; i++) {
|
||||||
let segment = itemPath[i];
|
let segment = itemPath[i];
|
||||||
// Must not be empty
|
// Must not be empty
|
||||||
assert(segment, `Parameter 'itemPath' must not contain any empty segments`);
|
assert_1.default(segment, `Parameter 'itemPath' must not contain any empty segments`);
|
||||||
// Normalize slashes
|
// Normalize slashes
|
||||||
segment = pathHelper.normalizeSeparators(itemPath[i]);
|
segment = pathHelper.normalizeSeparators(itemPath[i]);
|
||||||
// Root segment
|
// Root segment
|
||||||
if (i === 0 && pathHelper.hasRoot(segment)) {
|
if (i === 0 && pathHelper.hasRoot(segment)) {
|
||||||
segment = pathHelper.safeTrimTrailingSeparator(segment);
|
segment = pathHelper.safeTrimTrailingSeparator(segment);
|
||||||
assert(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`);
|
assert_1.default(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`);
|
||||||
this.segments.push(segment);
|
this.segments.push(segment);
|
||||||
}
|
}
|
||||||
// All other segments
|
// All other segments
|
||||||
else {
|
else {
|
||||||
// Must not contain slash
|
// Must not contain slash
|
||||||
assert(!segment.includes(path.sep), `Parameter 'itemPath' contains unexpected path separators`);
|
assert_1.default(!segment.includes(path.sep), `Parameter 'itemPath' contains unexpected path separators`);
|
||||||
this.segments.push(segment);
|
this.segments.push(segment);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -1680,12 +1933,32 @@ exports.Path = Path;
|
|||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
/***/ 9005:
|
/***/ 9005:
|
||||||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
|
|
||||||
|
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||||
|
if (k2 === undefined) k2 = k;
|
||||||
|
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||||
|
}) : (function(o, m, k, k2) {
|
||||||
|
if (k2 === undefined) k2 = k;
|
||||||
|
o[k2] = m[k];
|
||||||
|
}));
|
||||||
|
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||||
|
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||||
|
}) : function(o, v) {
|
||||||
|
o["default"] = v;
|
||||||
|
});
|
||||||
|
var __importStar = (this && this.__importStar) || function (mod) {
|
||||||
|
if (mod && mod.__esModule) return mod;
|
||||||
|
var result = {};
|
||||||
|
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||||
|
__setModuleDefault(result, mod);
|
||||||
|
return result;
|
||||||
|
};
|
||||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||||
const pathHelper = __nccwpck_require__(1849);
|
exports.partialMatch = exports.match = exports.getSearchPaths = void 0;
|
||||||
|
const pathHelper = __importStar(__nccwpck_require__(1849));
|
||||||
const internal_match_kind_1 = __nccwpck_require__(1063);
|
const internal_match_kind_1 = __nccwpck_require__(1063);
|
||||||
const IS_WINDOWS = process.platform === 'win32';
|
const IS_WINDOWS = process.platform === 'win32';
|
||||||
/**
|
/**
|
||||||
@@ -1761,21 +2034,44 @@ exports.partialMatch = partialMatch;
|
|||||||
/***/ }),
|
/***/ }),
|
||||||
|
|
||||||
/***/ 4536:
|
/***/ 4536:
|
||||||
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
|
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
|
||||||
|
|
||||||
"use strict";
|
"use strict";
|
||||||
|
|
||||||
|
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||||
|
if (k2 === undefined) k2 = k;
|
||||||
|
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
|
||||||
|
}) : (function(o, m, k, k2) {
|
||||||
|
if (k2 === undefined) k2 = k;
|
||||||
|
o[k2] = m[k];
|
||||||
|
}));
|
||||||
|
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||||
|
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||||
|
}) : function(o, v) {
|
||||||
|
o["default"] = v;
|
||||||
|
});
|
||||||
|
var __importStar = (this && this.__importStar) || function (mod) {
|
||||||
|
if (mod && mod.__esModule) return mod;
|
||||||
|
var result = {};
|
||||||
|
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||||
|
__setModuleDefault(result, mod);
|
||||||
|
return result;
|
||||||
|
};
|
||||||
|
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||||
|
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||||
|
};
|
||||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||||
const assert = __nccwpck_require__(9491);
|
exports.Pattern = void 0;
|
||||||
const os = __nccwpck_require__(2037);
|
const os = __importStar(__nccwpck_require__(2037));
|
||||||
const path = __nccwpck_require__(1017);
|
const path = __importStar(__nccwpck_require__(1017));
|
||||||
const pathHelper = __nccwpck_require__(1849);
|
const pathHelper = __importStar(__nccwpck_require__(1849));
|
||||||
|
const assert_1 = __importDefault(__nccwpck_require__(9491));
|
||||||
const minimatch_1 = __nccwpck_require__(3973);
|
const minimatch_1 = __nccwpck_require__(3973);
|
||||||
const internal_match_kind_1 = __nccwpck_require__(1063);
|
const internal_match_kind_1 = __nccwpck_require__(1063);
|
||||||
const internal_path_1 = __nccwpck_require__(6836);
|
const internal_path_1 = __nccwpck_require__(6836);
|
||||||
const IS_WINDOWS = process.platform === 'win32';
|
const IS_WINDOWS = process.platform === 'win32';
|
||||||
class Pattern {
|
class Pattern {
|
||||||
constructor(patternOrNegate, segments) {
|
constructor(patternOrNegate, isImplicitPattern = false, segments, homedir) {
|
||||||
/**
|
/**
|
||||||
* Indicates whether matches should be excluded from the result set
|
* Indicates whether matches should be excluded from the result set
|
||||||
*/
|
*/
|
||||||
@@ -1789,9 +2085,9 @@ class Pattern {
|
|||||||
else {
|
else {
|
||||||
// Convert to pattern
|
// Convert to pattern
|
||||||
segments = segments || [];
|
segments = segments || [];
|
||||||
assert(segments.length, `Parameter 'segments' must not empty`);
|
assert_1.default(segments.length, `Parameter 'segments' must not empty`);
|
||||||
const root = Pattern.getLiteral(segments[0]);
|
const root = Pattern.getLiteral(segments[0]);
|
||||||
assert(root && pathHelper.hasAbsoluteRoot(root), `Parameter 'segments' first element must be a root path`);
|
assert_1.default(root && pathHelper.hasAbsoluteRoot(root), `Parameter 'segments' first element must be a root path`);
|
||||||
pattern = new internal_path_1.Path(segments).toString().trim();
|
pattern = new internal_path_1.Path(segments).toString().trim();
|
||||||
if (patternOrNegate) {
|
if (patternOrNegate) {
|
||||||
pattern = `!${pattern}`;
|
pattern = `!${pattern}`;
|
||||||
@@ -1803,7 +2099,7 @@ class Pattern {
|
|||||||
pattern = pattern.substr(1).trim();
|
pattern = pattern.substr(1).trim();
|
||||||
}
|
}
|
||||||
// Normalize slashes and ensures absolute root
|
// Normalize slashes and ensures absolute root
|
||||||
pattern = Pattern.fixupPattern(pattern);
|
pattern = Pattern.fixupPattern(pattern, homedir);
|
||||||
// Segments
|
// Segments
|
||||||
this.segments = new internal_path_1.Path(pattern).segments;
|
this.segments = new internal_path_1.Path(pattern).segments;
|
||||||
// Trailing slash indicates the pattern should only match directories, not regular files
|
// Trailing slash indicates the pattern should only match directories, not regular files
|
||||||
@@ -1819,6 +2115,7 @@ class Pattern {
|
|||||||
this.searchPath = new internal_path_1.Path(searchSegments).toString();
|
this.searchPath = new internal_path_1.Path(searchSegments).toString();
|
||||||
// Root RegExp (required when determining partial match)
|
// Root RegExp (required when determining partial match)
|
||||||
this.rootRegExp = new RegExp(Pattern.regExpEscape(searchSegments[0]), IS_WINDOWS ? 'i' : '');
|
this.rootRegExp = new RegExp(Pattern.regExpEscape(searchSegments[0]), IS_WINDOWS ? 'i' : '');
|
||||||
|
this.isImplicitPattern = isImplicitPattern;
|
||||||
// Create minimatch
|
// Create minimatch
|
||||||
const minimatchOptions = {
|
const minimatchOptions = {
|
||||||
dot: true,
|
dot: true,
|
||||||
@@ -1840,11 +2137,11 @@ class Pattern {
|
|||||||
// Normalize slashes
|
// Normalize slashes
|
||||||
itemPath = pathHelper.normalizeSeparators(itemPath);
|
itemPath = pathHelper.normalizeSeparators(itemPath);
|
||||||
// Append a trailing slash. Otherwise Minimatch will not match the directory immediately
|
// Append a trailing slash. Otherwise Minimatch will not match the directory immediately
|
||||||
// preceeding the globstar. For example, given the pattern `/foo/**`, Minimatch returns
|
// preceding the globstar. For example, given the pattern `/foo/**`, Minimatch returns
|
||||||
// false for `/foo` but returns true for `/foo/`. Append a trailing slash to handle that quirk.
|
// false for `/foo` but returns true for `/foo/`. Append a trailing slash to handle that quirk.
|
||||||
if (!itemPath.endsWith(path.sep)) {
|
if (!itemPath.endsWith(path.sep) && this.isImplicitPattern === false) {
|
||||||
// Note, this is safe because the constructor ensures the pattern has an absolute root.
|
// Note, this is safe because the constructor ensures the pattern has an absolute root.
|
||||||
// For example, formats like C: and C:foo on Windows are resolved to an aboslute root.
|
// For example, formats like C: and C:foo on Windows are resolved to an absolute root.
|
||||||
itemPath = `${itemPath}${path.sep}`;
|
itemPath = `${itemPath}${path.sep}`;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -1882,15 +2179,15 @@ class Pattern {
|
|||||||
/**
|
/**
|
||||||
* Normalizes slashes and ensures absolute root
|
* Normalizes slashes and ensures absolute root
|
||||||
*/
|
*/
|
||||||
static fixupPattern(pattern) {
|
static fixupPattern(pattern, homedir) {
|
||||||
// Empty
|
// Empty
|
||||||
assert(pattern, 'pattern cannot be empty');
|
assert_1.default(pattern, 'pattern cannot be empty');
|
||||||
// Must not contain `.` segment, unless first segment
|
// Must not contain `.` segment, unless first segment
|
||||||
// Must not contain `..` segment
|
// Must not contain `..` segment
|
||||||
const literalSegments = new internal_path_1.Path(pattern).segments.map(x => Pattern.getLiteral(x));
|
const literalSegments = new internal_path_1.Path(pattern).segments.map(x => Pattern.getLiteral(x));
|
||||||
assert(literalSegments.every((x, i) => (x !== '.' || i === 0) && x !== '..'), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`);
|
assert_1.default(literalSegments.every((x, i) => (x !== '.' || i === 0) && x !== '..'), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`);
|
||||||
// Must not contain globs in root, e.g. Windows UNC path \\foo\b*r
|
// Must not contain globs in root, e.g. Windows UNC path \\foo\b*r
|
||||||
assert(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`);
|
assert_1.default(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`);
|
||||||
// Normalize slashes
|
// Normalize slashes
|
||||||
pattern = pathHelper.normalizeSeparators(pattern);
|
pattern = pathHelper.normalizeSeparators(pattern);
|
||||||
// Replace leading `.` segment
|
// Replace leading `.` segment
|
||||||
@@ -1899,9 +2196,9 @@ class Pattern {
|
|||||||
}
|
}
|
||||||
// Replace leading `~` segment
|
// Replace leading `~` segment
|
||||||
else if (pattern === '~' || pattern.startsWith(`~${path.sep}`)) {
|
else if (pattern === '~' || pattern.startsWith(`~${path.sep}`)) {
|
||||||
const homedir = os.homedir();
|
homedir = homedir || os.homedir();
|
||||||
assert(homedir, 'Unable to determine HOME directory');
|
assert_1.default(homedir, 'Unable to determine HOME directory');
|
||||||
assert(pathHelper.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. Actual '${homedir}'`);
|
assert_1.default(pathHelper.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. Actual '${homedir}'`);
|
||||||
pattern = Pattern.globEscape(homedir) + pattern.substr(1);
|
pattern = Pattern.globEscape(homedir) + pattern.substr(1);
|
||||||
}
|
}
|
||||||
// Replace relative drive root, e.g. pattern is C: or C:foo
|
// Replace relative drive root, e.g. pattern is C: or C:foo
|
||||||
@@ -2004,6 +2301,7 @@ exports.Pattern = Pattern;
|
|||||||
"use strict";
|
"use strict";
|
||||||
|
|
||||||
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
Object.defineProperty(exports, "__esModule", ({ value: true }));
|
||||||
|
exports.SearchState = void 0;
|
||||||
class SearchState {
|
class SearchState {
|
||||||
constructor(path, level) {
|
constructor(path, level) {
|
||||||
this.path = path;
|
this.path = path;
|
||||||
@@ -2232,6 +2530,19 @@ class HttpClientResponse {
}));
});
}
readBodyBuffer() {
return __awaiter(this, void 0, void 0, function* () {
return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {
const chunks = [];
this.message.on('data', (chunk) => {
chunks.push(chunk);
});
this.message.on('end', () => {
resolve(Buffer.concat(chunks));
});
}));
});
}
}
exports.HttpClientResponse = HttpClientResponse;
function isHttps(requestUrl) {
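HttpClientResponse picks up readBodyBuffer, which collects the raw data chunks into a single Buffer instead of decoding a string. A hedged sketch with @actions/http-client; the URL and user agent are placeholders, and the optional call guards against versions that predate the method:

import { HttpClient } from '@actions/http-client';

async function download(url: string): Promise<Buffer> {
  const http = new HttpClient('example-user-agent');
  const response = await http.get(url);
  // readBodyBuffer concatenates the 'data' chunks and resolves on 'end'.
  return (await response.readBodyBuffer?.()) ?? Buffer.alloc(0);
}

download('https://example.com/archive.tar.gz').then(buf => console.log(`downloaded ${buf.length} bytes`));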
@@ -2736,8 +3047,14 @@ function getProxyUrl(reqUrl) {
}
})();
if (proxyVar) {
try {
return new URL(proxyVar);
}
catch (_a) {
if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://'))
return new URL(`http://${proxyVar}`);
}
}
else {
return undefined;
}
@@ -2747,6 +3064,10 @@ function checkBypass(reqUrl) {
if (!reqUrl.hostname) {
return false;
}
const reqHost = reqUrl.hostname;
if (isLoopbackAddress(reqHost)) {
return true;
}
const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
if (!noProxy) {
return false;
@@ -2772,13 +3093,24 @@ function checkBypass(reqUrl) {
.split(',')
.map(x => x.trim().toUpperCase())
.filter(x => x)) {
if (upperReqHosts.some(x => x === upperNoProxyItem)) {
if (upperNoProxyItem === '*' ||
upperReqHosts.some(x => x === upperNoProxyItem ||
x.endsWith(`.${upperNoProxyItem}`) ||
(upperNoProxyItem.startsWith('.') &&
x.endsWith(`${upperNoProxyItem}`)))) {
return true;
}
}
return false;
}
exports.checkBypass = checkBypass;
function isLoopbackAddress(host) {
const hostLower = host.toLowerCase();
return (hostLower === 'localhost' ||
hostLower.startsWith('127.') ||
hostLower.startsWith('[::1]') ||
hostLower.startsWith('[0:0:0:0:0:0:0:1]'));
}
//# sourceMappingURL=proxy.js.map

/***/ }),
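checkBypass now short-circuits for loopback hosts and accepts '*', exact, sub-domain and leading-dot entries in no_proxy. A quick sketch of the new matching; the hostnames are examples, and the deep import path is an assumption about the published @actions/http-client layout (in this repository the helper only exists inside the bundle above):

// Assumed import path for @actions/http-client v2; adjust if the helper is bundled differently.
import { checkBypass } from '@actions/http-client/lib/proxy';

process.env.NO_PROXY = 'example.com,.internal';

console.log(checkBypass(new URL('https://example.com')));     // true: exact match
console.log(checkBypass(new URL('https://api.example.com'))); // true: sub-domain of example.com
console.log(checkBypass(new URL('https://build.internal')));  // true: leading-dot suffix match
console.log(checkBypass(new URL('http://127.0.0.1:8080')));   // true: loopback bypasses regardless
console.log(checkBypass(new URL('https://github.com')));      // false: no entry matches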
@@ -2817,6 +3149,9 @@ function range(a, b, str) {
var i = ai;

if (ai >= 0 && bi > 0) {
if(a===b) {
return [ai, bi];
}
begs = [];
left = str.length;

@@ -76,6 +76,7 @@ mscordaccore_amd64_amd64_6.0.522.21309.dll
mscordaccore_arm64_arm64_6.0.522.21309.dll
mscordaccore_amd64_amd64_6.0.1322.58009.dll
mscordaccore_amd64_amd64_6.0.2023.32017.dll
mscordaccore_amd64_amd64_6.0.2223.42425.dll
mscordbi.dll
mscorlib.dll
mscorrc.debug.dll
@@ -203,7 +203,7 @@ namespace GitHub.Runner.Common

if (StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_TLS_NO_VERIFY")))
{
_trace.Warning($"Runner is running under insecure mode: HTTPS server certifcate validation has been turned off by GITHUB_ACTIONS_RUNNER_TLS_NO_VERIFY environment variable.");
_trace.Warning($"Runner is running under insecure mode: HTTPS server certificate validation has been turned off by GITHUB_ACTIONS_RUNNER_TLS_NO_VERIFY environment variable.");
}

var credFile = GetConfigFile(WellKnownConfigFile.Credentials);
73
src/Runner.Common/RedirectMessageHandler.cs
Normal file
@@ -0,0 +1,73 @@
using System;
using System.ComponentModel;
using System.Net;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
using GitHub.Runner.Sdk;
using GitHub.Services.Common;

namespace GitHub.Runner.Common
{
/// <summary>
/// Handles redirects for Http requests
/// </summary>
[EditorBrowsable(EditorBrowsableState.Never)]
public class RedirectMessageHandler : DelegatingHandler
{
public RedirectMessageHandler(ITraceWriter trace)
{
Trace = trace;
}

protected override async Task<HttpResponseMessage> SendAsync(
HttpRequestMessage request,
CancellationToken cancellationToken)
{
HttpResponseMessage response = await base.SendAsync(request, cancellationToken).ConfigureAwait(false);

if (response != null &&
IsRedirect(response.StatusCode) &&
response.Headers.Location != null)
{
Trace.Info($"Redirecting to '{response.Headers.Location}'.");

request = await CloneAsync(request, response.Headers.Location).ConfigureAwait(false);

response.Dispose();

response = await base.SendAsync(request, cancellationToken).ConfigureAwait(false);
}

return response;
}

private static bool IsRedirect(HttpStatusCode statusCode)
{
return (int)statusCode >= 300 && (int)statusCode < 400;
}

private static async Task<HttpRequestMessage> CloneAsync(HttpRequestMessage request, Uri requestUri)
{
var clone = new HttpRequestMessage(request.Method, requestUri)
{
Version = request.Version
};

request.Headers.ForEach(header => clone.Headers.TryAddWithoutValidation(header.Key, header.Value));

request.Options.ForEach(option => clone.Options.Set(new HttpRequestOptionsKey<object>(option.Key), option.Value));

if (request.Content != null)
{
clone.Content = new ByteArrayContent(await request.Content.ReadAsByteArrayAsync().ConfigureAwait(false));

request.Content.Headers.ForEach(header => clone.Content.Headers.TryAddWithoutValidation(header.Key, header.Value));
}

return clone;
}

private readonly ITraceWriter Trace;
}
}
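The new RedirectMessageHandler re-issues a request once when the response is a 3xx with a Location header, cloning method, headers, options and content. The same idea as a minimal TypeScript sketch, assuming Node 18+ global fetch (which surfaces the status and Location header under redirect: 'manual'); this illustrates the pattern, not the runner's own HTTP pipeline:

// Follow a single redirect by hand, analogous to the C# DelegatingHandler above.
async function sendFollowingOneRedirect(url: string, init: RequestInit = {}): Promise<Response> {
  const first = await fetch(url, { ...init, redirect: 'manual' });
  const location = first.headers.get('location');
  if (first.status >= 300 && first.status < 400 && location) {
    // Re-send the original request (method, headers, body) against the redirect target.
    return fetch(new URL(location, url), init);
  }
  return first;
}

sendFollowingOneRedirect('https://example.com/moved').then(r => console.log(r.status));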
@@ -1,4 +1,4 @@
<Project Sdk="Microsoft.NET.Sdk">

<PropertyGroup>
<TargetFramework>net6.0</TargetFramework>
@@ -16,7 +16,7 @@

<ItemGroup>
<PackageReference Include="Microsoft.Win32.Registry" Version="4.4.0" />
<PackageReference Include="Newtonsoft.Json" Version="13.0.1" />
<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
<PackageReference Include="System.Security.Cryptography.ProtectedData" Version="4.4.0" />
<PackageReference Include="System.Text.Encoding.CodePages" Version="4.4.0" />
<PackageReference Include="System.Threading.Channels" Version="4.4.0" />
@@ -15,12 +15,11 @@ namespace GitHub.Runner.Common
[ServiceLocator(Default = typeof(RunnerDotcomServer))]
public interface IRunnerDotcomServer : IRunnerService
{
Task<List<TaskAgent>> GetRunnersAsync(int runnerGroupId, string githubUrl, string githubToken, string agentName);
Task<List<TaskAgent>> GetRunnerByNameAsync(string githubUrl, string githubToken, string agentName);

Task<DistributedTask.WebApi.Runner> AddRunnerAsync(int runnerGroupId, TaskAgent agent, string githubUrl, string githubToken, string publicKey);
Task<DistributedTask.WebApi.Runner> ReplaceRunnerAsync(int runnerGroupId, TaskAgent agent, string githubUrl, string githubToken, string publicKey);
Task<List<TaskAgentPool>> GetRunnerGroupsAsync(string githubUrl, string githubToken);

string GetGitHubRequestId(HttpResponseHeaders headers);
}

public enum RequestType
@@ -42,7 +41,7 @@ namespace GitHub.Runner.Common
}


public async Task<List<TaskAgent>> GetRunnersAsync(int runnerGroupId, string githubUrl, string githubToken, string agentName = null)
public async Task<List<TaskAgent>> GetRunnerByNameAsync(string githubUrl, string githubToken, string agentName)
{
var githubApiUrl = "";
var gitHubUrlBuilder = new UriBuilder(githubUrl);
@@ -52,11 +51,11 @@ namespace GitHub.Runner.Common
// org runner
if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/orgs/{path[0]}/actions/runner-groups/{runnerGroupId}/runners";
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/orgs/{path[0]}/actions/runners?name={Uri.EscapeDataString(agentName)}";
}
else
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/orgs/{path[0]}/actions/runner-groups/{runnerGroupId}/runners";
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/orgs/{path[0]}/actions/runners?name={Uri.EscapeDataString(agentName)}";
}
}
else if (path.Length == 2)
@@ -69,11 +68,11 @@ namespace GitHub.Runner.Common

if (UrlUtil.IsHostedServer(gitHubUrlBuilder))
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/{path[0]}/{path[1]}/actions/runner-groups/{runnerGroupId}/runners";
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://api.{gitHubUrlBuilder.Host}/{path[0]}/{path[1]}/actions/runners?name={Uri.EscapeDataString(agentName)}";
}
else
{
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/{path[0]}/{path[1]}/actions/runner-groups/{runnerGroupId}/runners";
githubApiUrl = $"{gitHubUrlBuilder.Scheme}://{gitHubUrlBuilder.Host}/api/v3/{path[0]}/{path[1]}/actions/runners?name={Uri.EscapeDataString(agentName)}";
}
}
else
@@ -82,14 +81,8 @@ namespace GitHub.Runner.Common
}

var runnersList = await RetryRequest<ListRunnersResponse>(githubApiUrl, githubToken, RequestType.Get, 3, "Failed to get agents pools");
var agents = runnersList.ToTaskAgents();

if (string.IsNullOrEmpty(agentName))
return runnersList.ToTaskAgents();
{
return agents;
}

return agents.Where(x => string.Equals(x.Name, agentName, StringComparison.OrdinalIgnoreCase)).ToList();
}

public async Task<List<TaskAgentPool>> GetRunnerGroupsAsync(string githubUrl, string githubToken)
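GetRunnerByNameAsync now asks the standard runners endpoint to filter by name via the ?name= query parameter instead of paging through a runner group. A hedged sketch of the equivalent REST call for the org case (GHES prefixes the path with /api/v3, as the hunk shows); org, runner name and token are placeholders:

// GET https://api.github.com/orgs/{org}/actions/runners?name={runnerName}
async function getRunnersByName(org: string, runnerName: string, token: string): Promise<unknown[]> {
  const url = `https://api.github.com/orgs/${org}/actions/runners?name=${encodeURIComponent(runnerName)}`;
  const response = await fetch(url, {
    headers: { Authorization: `Bearer ${token}`, Accept: 'application/vnd.github+json' },
  });
  if (!response.ok) {
    throw new Error(`Failed to list runners: ${response.status}`);
  }
  const body = (await response.json()) as { runners?: unknown[] };
  return body.runners ?? [];
}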
@@ -137,6 +130,16 @@ namespace GitHub.Runner.Common
}

public async Task<DistributedTask.WebApi.Runner> AddRunnerAsync(int runnerGroupId, TaskAgent agent, string githubUrl, string githubToken, string publicKey)
{
return await AddOrReplaceRunner(runnerGroupId, agent, githubUrl, githubToken, publicKey, false);
}

public async Task<DistributedTask.WebApi.Runner> ReplaceRunnerAsync(int runnerGroupId, TaskAgent agent, string githubUrl, string githubToken, string publicKey)
{
return await AddOrReplaceRunner(runnerGroupId, agent, githubUrl, githubToken, publicKey, true);
}

private async Task<DistributedTask.WebApi.Runner> AddOrReplaceRunner(int runnerGroupId, TaskAgent agent, string githubUrl, string githubToken, string publicKey, bool replace)
{
var gitHubUrlBuilder = new UriBuilder(githubUrl);
var path = gitHubUrlBuilder.Path.Split('/', '\\', StringSplitOptions.RemoveEmptyEntries);
@@ -159,9 +162,15 @@ namespace GitHub.Runner.Common
{"updates_disabled", agent.DisableUpdate},
{"ephemeral", agent.Ephemeral},
{"labels", agent.Labels},
{"public_key", publicKey}
{"public_key", publicKey},
};

if (replace)
{
bodyObject.Add("runner_id", agent.Id);
bodyObject.Add("replace", replace);
}

var body = new StringContent(StringUtil.ConvertToJson(bodyObject), null, "application/json");

return await RetryRequest<DistributedTask.WebApi.Runner>(githubApiUrl, githubToken, RequestType.Post, 3, "Failed to add agent", body);
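AddRunnerAsync and ReplaceRunnerAsync now funnel into AddOrReplaceRunner; the replace path only differs by adding runner_id and replace to the JSON body. A sketch of the two payload shapes using only the fields visible in the hunk (every value is a placeholder, and the real body carries more fields):

// Fields shared by add and replace, as constructed above (placeholder values).
const baseBody = {
  updates_disabled: false,
  ephemeral: false,
  labels: ['self-hosted', 'linux'],
  public_key: '<RSA public key XML>',
};

// ReplaceRunnerAsync additionally identifies the runner being replaced.
const replaceBody = { ...baseBody, runner_id: 42, replace: true };

console.log(JSON.stringify(replaceBody, null, 2));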
@@ -195,7 +204,7 @@ namespace GitHub.Runner.Common
if (response != null)
{
responseStatus = response.StatusCode;
var githubRequestId = GetGitHubRequestId(response.Headers);
var githubRequestId = UrlUtil.GetGitHubRequestId(response.Headers);

if (response.IsSuccessStatusCode)
{
@@ -224,14 +233,5 @@ namespace GitHub.Runner.Common
await Task.Delay(backOff);
}
}

public string GetGitHubRequestId(HttpResponseHeaders headers)
{
if (headers.TryGetValues("x-github-request-id", out var headerValues))
{
return headerValues.FirstOrDefault();
}
return string.Empty;
}
}
}
@@ -108,7 +108,7 @@ namespace GitHub.Runner.Listener

if (!IsGetNextMessageExceptionRetriable(ex))
{
throw;
throw new NonRetryableException("Get next message failed with non-retryable error.", ex);
}
else
{
@@ -244,7 +244,7 @@ namespace GitHub.Runner.Listener.Configuration
List<TaskAgent> agents;
if (runnerSettings.UseV2Flow)
{
-agents = await _dotcomServer.GetRunnersAsync(runnerSettings.PoolId, runnerSettings.GitHubUrl, registerToken, runnerSettings.AgentName);
+agents = await _dotcomServer.GetRunnerByNameAsync(runnerSettings.GitHubUrl, registerToken, runnerSettings.AgentName);
}
else
{
@@ -262,8 +262,24 @@ namespace GitHub.Runner.Listener.Configuration
agent = UpdateExistingAgent(agent, publicKey, userLabels, runnerSettings.Ephemeral, command.DisableUpdate, command.NoDefaultLabels);

try
+{
+if (runnerSettings.UseV2Flow)
+{
+var runner = await _dotcomServer.ReplaceRunnerAsync(runnerSettings.PoolId, agent, runnerSettings.GitHubUrl, registerToken, publicKeyXML);
+runnerSettings.ServerUrlV2 = runner.RunnerAuthorization.ServerUrl;
+
+agent.Id = runner.Id;
+agent.Authorization = new TaskAgentAuthorization()
+{
+AuthorizationUrl = runner.RunnerAuthorization.AuthorizationUrl,
+ClientId = new Guid(runner.RunnerAuthorization.ClientId)
+};
+}
+else
{
agent = await _runnerServer.ReplaceAgentAsync(runnerSettings.PoolId, agent);
+}
+
if (command.DisableUpdate &&
command.DisableUpdate != agent.DisableUpdate)
{
@@ -709,7 +725,7 @@ namespace GitHub.Runner.Listener.Configuration
{
var response = await httpClient.PostAsync(githubApiUrl, new StringContent(string.Empty));
responseStatus = response.StatusCode;
-var githubRequestId = _dotcomServer.GetGitHubRequestId(response.Headers);
+var githubRequestId = UrlUtil.GetGitHubRequestId(response.Headers);

if (response.IsSuccessStatusCode)
{
@@ -772,7 +788,7 @@ namespace GitHub.Runner.Listener.Configuration
{
var response = await httpClient.PostAsync(githubApiUrl, new StringContent(StringUtil.ConvertToJson(bodyObject), null, "application/json"));
responseStatus = response.StatusCode;
-var githubRequestId = _dotcomServer.GetGitHubRequestId(response.Headers);
+var githubRequestId = UrlUtil.GetGitHubRequestId(response.Headers);

if (response.IsSuccessStatusCode)
{
@@ -1,4 +1,4 @@
<Project Sdk="Microsoft.NET.Sdk">

<PropertyGroup>
<TargetFramework>net6.0</TargetFramework>
@@ -19,7 +19,7 @@

<ItemGroup>
<PackageReference Include="Microsoft.Win32.Registry" Version="4.4.0" />
-<PackageReference Include="Newtonsoft.Json" Version="13.0.1" />
+<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
<PackageReference Include="System.IO.FileSystem.AccessControl" Version="4.4.0" />
<PackageReference Include="System.Security.Cryptography.ProtectedData" Version="4.4.0" />
<PackageReference Include="System.ServiceProcess.ServiceController" Version="4.4.0" />
@@ -1,4 +1,6 @@
using System;
+using System.Linq;
+using System.Net.Http.Headers;

namespace GitHub.Runner.Sdk
{
@@ -48,5 +50,15 @@ namespace GitHub.Runner.Sdk

return credUri.Uri;
}
+
+public static string GetGitHubRequestId(HttpResponseHeaders headers)
+{
+if (headers != null &&
+headers.TryGetValues("x-github-request-id", out var headerValues))
+{
+return headerValues.FirstOrDefault();
+}
+return string.Empty;
+}
}
}
@@ -10,15 +10,15 @@ using System.Text;
using System.Threading;
using System.Threading.Tasks;
using GitHub.DistributedTask.ObjectTemplating.Tokens;
+using GitHub.DistributedTask.WebApi;
using GitHub.Runner.Common;
using GitHub.Runner.Common.Util;
using GitHub.Runner.Sdk;
using GitHub.Runner.Worker.Container;
using GitHub.Services.Common;
-using WebApi = GitHub.DistributedTask.WebApi;
using Pipelines = GitHub.DistributedTask.Pipelines;
using PipelineTemplateConstants = GitHub.DistributedTask.Pipelines.ObjectTemplating.PipelineTemplateConstants;
-using GitHub.DistributedTask.WebApi;
+using WebApi = GitHub.DistributedTask.WebApi;

namespace GitHub.Runner.Worker
{
@@ -115,6 +115,14 @@ namespace GitHub.Runner.Worker
executionContext.Result = TaskResult.Failed;
throw;
}
+catch (InvalidActionArchiveException ex)
+{
+// Log the error and fail the PrepareActionsAsync Initialization.
+Trace.Error($"Caught exception from PrepareActionsAsync Initialization: {ex}");
+executionContext.InfrastructureError(ex.Message);
+executionContext.Result = TaskResult.Failed;
+throw;
+}
if (!FeatureManager.IsContainerHooksEnabled(executionContext.Global.Variables))
{
if (state.ImagesToPull.Count > 0)
@@ -835,6 +843,12 @@ namespace GitHub.Runner.Worker
httpClient.DefaultRequestHeaders.UserAgent.AddRange(HostContext.UserAgents);
using (var response = await httpClient.GetAsync(link))
{
+var requestId = UrlUtil.GetGitHubRequestId(response.Headers);
+if (!string.IsNullOrEmpty(requestId))
+{
+Trace.Info($"Request URL: {link} X-GitHub-Request-Id: {requestId} Http Status: {response.StatusCode}");
+}
+
if (response.IsSuccessStatusCode)
{
using (var result = await response.Content.ReadAsStreamAsync())
@@ -849,7 +863,7 @@ namespace GitHub.Runner.Worker
else if (response.StatusCode == HttpStatusCode.NotFound)
{
// It doesn't make sense to retry in this case, so just stop
-throw new ActionNotFoundException(new Uri(link));
+throw new ActionNotFoundException(new Uri(link), requestId);
}
else
{
@@ -901,7 +915,14 @@ namespace GitHub.Runner.Worker
Directory.CreateDirectory(stagingDirectory);

#if OS_WINDOWS
+try
+{
ZipFile.ExtractToDirectory(archiveFile, stagingDirectory);
+}
+catch (InvalidDataException e)
+{
+throw new InvalidActionArchiveException($"Can't un-zip archive file: {archiveFile}. action being checked out: {downloadInfo.NameWithOwner}@{downloadInfo.Ref}. error: {e}.");
+}
#else
string tar = WhichUtil.Which("tar", require: true, trace: Trace);

@@ -927,7 +948,7 @@ namespace GitHub.Runner.Worker
int exitCode = await processInvoker.ExecuteAsync(stagingDirectory, tar, $"-xzf \"{archiveFile}\"", null, executionContext.CancellationToken);
if (exitCode != 0)
{
-throw new NotSupportedException($"Can't use 'tar -xzf' extract archive file: {archiveFile}. return code: {exitCode}.");
+throw new InvalidActionArchiveException($"Can't use 'tar -xzf' extract archive file: {archiveFile}. Action being checked out: {downloadInfo.NameWithOwner}@{downloadInfo.Ref}. return code: {exitCode}.");
}
}
#endif
@@ -5,8 +5,8 @@ namespace GitHub.Runner.Worker
{
public class ActionNotFoundException : Exception
{
-public ActionNotFoundException(Uri actionUri)
-: base(FormatMessage(actionUri))
+public ActionNotFoundException(Uri actionUri, string requestId)
+: base(FormatMessage(actionUri, requestId))
{
}

@@ -25,8 +25,13 @@ namespace GitHub.Runner.Worker
{
}

-private static string FormatMessage(Uri actionUri)
+private static string FormatMessage(Uri actionUri, string requestId)
{
+if (!string.IsNullOrEmpty(requestId))
+{
+return $"An action could not be found at the URI '{actionUri}' ({requestId})";
+}
+
return $"An action could not be found at the URI '{actionUri}'";
}
}
@@ -2,6 +2,7 @@
using System.Collections.Generic;
using System.IO;
using System.Linq;
+using System.Text;
using System.Text.RegularExpressions;
using System.Threading;
using System.Threading.Channels;
@@ -46,7 +47,9 @@ namespace GitHub.Runner.Worker.Container
{
base.Initialize(hostContext);
DockerPath = WhichUtil.Which("docker", true, Trace);
-DockerInstanceLabel = IOUtil.GetSha256Hash(hostContext.GetDirectory(WellKnownDirectory.Root)).Substring(0, 6);
+string path = Path.Combine(HostContext.GetDirectory(WellKnownDirectory.Root), ".runner");
+string json = File.ReadAllText(path, Encoding.UTF8);
+DockerInstanceLabel = IOUtil.GetSha256Hash(json).Substring(0, 6);
}

public async Task<DockerVersion> DockerVersion(IExecutionContext context)
@@ -141,6 +141,28 @@ namespace GitHub.Runner.Worker
var pairs = new EnvFileKeyValuePairs(context, filePath);
foreach (var pair in pairs)
{
+var isBlocked = false;
+foreach (var blocked in _setEnvBlockList)
+{
+if (string.Equals(blocked, pair.Key, StringComparison.OrdinalIgnoreCase))
+{
+// Log Telemetry and let user know they shouldn't do this
+var issue = new Issue()
+{
+Type = IssueType.Error,
+Message = $"Can't store {blocked} output parameter using '$GITHUB_ENV' command."
+};
+issue.Data[Constants.Runner.InternalTelemetryIssueDataKey] = $"{Constants.Runner.UnsupportedCommand}_{pair.Key}";
+context.AddIssue(issue, ExecutionContextLogOptions.Default);
+
+isBlocked = true;
+break;
+}
+}
+if (isBlocked)
+{
+continue;
+}
SetEnvironmentVariable(context, pair.Key, pair.Value);
}
}
@@ -154,6 +176,11 @@ namespace GitHub.Runner.Worker
context.SetEnvContext(name, value);
context.Debug($"{name}='{value}'");
}
+
+private string[] _setEnvBlockList =
+{
+"NODE_OPTIONS"
+};
}

public sealed class CreateStepSummaryCommand : RunnerService, IFileCommandExtension
@@ -322,9 +349,21 @@ namespace GitHub.Runner.Worker
var equalsIndex = line.IndexOf("=", StringComparison.Ordinal);
var heredocIndex = line.IndexOf("<<", StringComparison.Ordinal);

-// Heredoc style NAME<<EOF (where EOF is typically randomly-generated Base64 and may include an '=' character)
-// see https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#multiline-strings
-if (heredocIndex >= 0 && (equalsIndex < 0 || heredocIndex < equalsIndex))
+// Normal style NAME=VALUE
+if (equalsIndex >= 0 && (heredocIndex < 0 || equalsIndex < heredocIndex))
+{
+var split = line.Split(new[] { '=' }, 2, StringSplitOptions.None);
+if (string.IsNullOrEmpty(line))
+{
+throw new Exception($"Invalid format '{line}'. Name must not be empty");
+}
+
+key = split[0];
+output = split[1];
+}
+
+// Heredoc style NAME<<EOF
+else if (heredocIndex >= 0 && (equalsIndex < 0 || heredocIndex < equalsIndex))
{
var split = line.Split(new[] { "<<" }, 2, StringSplitOptions.None);
if (string.IsNullOrEmpty(split[0]) || string.IsNullOrEmpty(split[1]))
@@ -352,18 +391,6 @@ namespace GitHub.Runner.Worker

output = endIndex > startIndex ? text.Substring(startIndex, endIndex - startIndex) : string.Empty;
}
-// Normal style NAME=VALUE
-else if (equalsIndex >= 0 && heredocIndex < 0)
-{
-var split = line.Split(new[] { '=' }, 2, StringSplitOptions.None);
-if (string.IsNullOrEmpty(line))
-{
-throw new Exception($"Invalid format '{line}'. Name must not be empty");
-}
-
-key = split[0];
-output = split[1];
-}
else
{
throw new Exception($"Invalid format '{line}'");
@@ -84,7 +84,14 @@ namespace GitHub.Runner.Worker
Trace.Info($"Creating job server with URL: {jobServerUrl}");
// jobServerQueue is the throttling reporter.
_jobServerQueue = HostContext.GetService<IJobServerQueue>();
-VssConnection jobConnection = VssUtil.CreateConnection(jobServerUrl, jobServerCredential, new DelegatingHandler[] { new ThrottlingReportHandler(_jobServerQueue) });
+var delegatingHandlers = new List<DelegatingHandler>() { new ThrottlingReportHandler(_jobServerQueue) };
+message.Variables.TryGetValue("Actions.EnableHttpRedirects", out VariableValue enableHttpRedirects);
+if (StringUtil.ConvertToBoolean(enableHttpRedirects?.Value) &&
+!StringUtil.ConvertToBoolean(Environment.GetEnvironmentVariable("GITHUB_ACTIONS_RUNNER_NO_HTTP_REDIRECTS")))
+{
+delegatingHandlers.Add(new RedirectMessageHandler(Trace));
+}
+VssConnection jobConnection = VssUtil.CreateConnection(jobServerUrl, jobServerCredential, delegatingHandlers);
await jobServer.ConnectAsync(jobConnection);

_jobServerQueue.Start(message);
@@ -2516,4 +2516,23 @@ namespace GitHub.DistributedTask.WebApi
{
}
}
+
+[Serializable]
+public sealed class InvalidActionArchiveException : DistributedTaskException
+{
+public InvalidActionArchiveException(String message)
+: base(message)
+{
+}
+
+public InvalidActionArchiveException(String message, Exception innerException)
+: base(message, innerException)
+{
+}
+
+private InvalidActionArchiveException(SerializationInfo info, StreamingContext context)
+: base(info, context)
+{
+}
+}
}
@@ -41,7 +41,7 @@ namespace GitHub.DistributedTask.WebApi

public List<TaskAgent> ToTaskAgents()
{
-return Runners.Select(runner => new TaskAgent() { Name = runner.Name }).ToList();
+return Runners.Select(runner => new TaskAgent() { Id = runner.Id, Name = runner.Name }).ToList();
}
}

@@ -14,8 +14,8 @@

<ItemGroup>
<PackageReference Include="Microsoft.Win32.Registry" Version="4.4.0" />
-<PackageReference Include="Newtonsoft.Json" Version="13.0.1" />
-<PackageReference Include="Microsoft.AspNet.WebApi.Client" Version="5.2.4" />
+<PackageReference Include="Newtonsoft.Json" Version="13.0.3" />
+<PackageReference Include="Microsoft.AspNet.WebApi.Client" Version="5.2.9" />
<PackageReference Include="System.IdentityModel.Tokens.Jwt" Version="5.2.1" />
<PackageReference Include="System.Security.Cryptography.Cng" Version="4.4.0" />
<PackageReference Include="System.Security.Cryptography.Pkcs" Version="4.4.0" />
@@ -115,7 +115,7 @@ namespace GitHub.Runner.Common.Tests.Listener.Configuration
_runnerServer.Setup(x => x.AddAgentAsync(It.IsAny<int>(), It.IsAny<TaskAgent>())).Returns(Task.FromResult(expectedAgent));
_runnerServer.Setup(x => x.ReplaceAgentAsync(It.IsAny<int>(), It.IsAny<TaskAgent>())).Returns(Task.FromResult(expectedAgent));

-_dotcomServer.Setup(x => x.GetRunnersAsync(It.IsAny<int>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>())).Returns(Task.FromResult(expectedAgents));
+_dotcomServer.Setup(x => x.GetRunnerByNameAsync(It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>())).Returns(Task.FromResult(expectedAgents));
_dotcomServer.Setup(x => x.GetRunnerGroupsAsync(It.IsAny<string>(), It.IsAny<string>())).Returns(Task.FromResult(expectedPools));
_dotcomServer.Setup(x => x.AddRunnerAsync(It.IsAny<int>(), It.IsAny<TaskAgent>(), It.IsAny<string>(), It.IsAny<string>(), It.IsAny<string>())).Returns(Task.FromResult(expectedRunner));

@@ -1,21 +1,10 @@
-using System;
-using System.Collections.Generic;
-using System.IO;
-using System.Runtime.CompilerServices;
-using System.Text;
+using System.IO;
using Xunit;
using GitHub.Runner.Sdk;
-using System.Linq;
+using System.Runtime.CompilerServices;

namespace GitHub.Runner.Common.Tests
{
-public enum LineEndingType
-{
-Native,
-Linux = 0x__0A,
-Windows = 0x0D0A
-}
-
public static class TestUtil
{
private const string Src = "src";
@@ -52,24 +41,5 @@ namespace GitHub.Runner.Common.Tests
Assert.True(Directory.Exists(testDataDir));
return testDataDir;
}
-
-public static void WriteContent(string path, string content, LineEndingType lineEnding = LineEndingType.Native)
-{
-WriteContent(path, Enumerable.Repeat(content, 1), lineEnding);
-}
-
-public static void WriteContent(string path, IEnumerable<string> content, LineEndingType lineEnding = LineEndingType.Native)
-{
-string newline = lineEnding switch
-{
-LineEndingType.Linux => "\n",
-LineEndingType.Windows => "\r\n",
-_ => Environment.NewLine,
-};
-var encoding = new UTF8Encoding(true); // Emit BOM
-var contentStr = string.Join(newline, content);
-File.WriteAllText(path, contentStr, encoding);
-}
-
}
}
@@ -96,6 +96,63 @@ namespace GitHub.Runner.Common.Tests.Worker
}
}

+[Fact]
+[Trait("Level", "L0")]
+[Trait("Category", "Worker")]
+public async void PrepareActions_DownloadActionFromDotCom_ZipFileError()
+{
+try
+{
+// Arrange
+Setup();
+const string ActionName = "ownerName/sample-action";
+var actions = new List<Pipelines.ActionStep>
+{
+new Pipelines.ActionStep()
+{
+Name = "action",
+Id = Guid.NewGuid(),
+Reference = new Pipelines.RepositoryPathReference()
+{
+Name = ActionName,
+Ref = "main",
+RepositoryType = "GitHub"
+}
+}
+};
+
+// Create a corrupted ZIP file for testing
+var tempDir = _hc.GetDirectory(WellKnownDirectory.Temp);
+Directory.CreateDirectory(tempDir);
+var archiveFile = Path.Combine(tempDir, Path.GetRandomFileName());
+using (var fileStream = new FileStream(archiveFile, FileMode.Create))
+{
+// Used Co-Pilot for magic bytes here. They represent the tar header and just need to be invalid for the CLI to break.
+var buffer = new byte[] { 0x50, 0x4B, 0x03, 0x04, 0x0A, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00 };
+fileStream.Write(buffer, 0, buffer.Length);
+}
+using var stream = File.OpenRead(archiveFile);
+
+string dotcomArchiveLink = GetLinkToActionArchive("https://api.github.com", ActionName, "main");
+var mockClientHandler = new Mock<HttpClientHandler>();
+mockClientHandler.Protected().Setup<Task<HttpResponseMessage>>("SendAsync", ItExpr.Is<HttpRequestMessage>(m => m.RequestUri == new Uri(dotcomArchiveLink)), ItExpr.IsAny<CancellationToken>())
+.ReturnsAsync(new HttpResponseMessage(HttpStatusCode.OK) { Content = new StreamContent(stream) });
+
+var mockHandlerFactory = new Mock<IHttpClientHandlerFactory>();
+mockHandlerFactory.Setup(p => p.CreateClientHandler(It.IsAny<RunnerWebProxy>())).Returns(mockClientHandler.Object);
+_hc.SetSingleton(mockHandlerFactory.Object);
+
+_configurationStore.Object.GetSettings().IsHostedServer = true;
+
+// Act + Assert
+await Assert.ThrowsAsync<InvalidActionArchiveException>(async () => await _actionManager.PrepareActionsAsync(_ec.Object, actions));
+}
+finally
+{
+Teardown();
+}
+}
+
[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
@@ -124,7 +124,7 @@ namespace GitHub.Runner.Common.Tests.Worker
"",
"## This is more markdown content",
};
-TestUtil.WriteContent(stepSummaryFile, content);
+WriteContent(stepSummaryFile, content);

_createStepCommand.ProcessCommand(_executionContext.Object, stepSummaryFile, null);
_jobExecutionContext.Complete();
@@ -153,7 +153,7 @@ namespace GitHub.Runner.Common.Tests.Worker
"",
"# GITHUB_TOKEN ghs_verysecuretoken",
};
-TestUtil.WriteContent(stepSummaryFile, content);
+WriteContent(stepSummaryFile, content);

_createStepCommand.ProcessCommand(_executionContext.Object, stepSummaryFile, null);

@@ -167,6 +167,21 @@ namespace GitHub.Runner.Common.Tests.Worker
}
}

+private void WriteContent(
+string path,
+List<string> content,
+string newline = null)
+{
+if (string.IsNullOrEmpty(newline))
+{
+newline = Environment.NewLine;
+}
+
+var encoding = new UTF8Encoding(true); // Emit BOM
+var contentStr = string.Join(newline, content);
+File.WriteAllText(path, contentStr, encoding);
+}
+
private TestHostContext Setup([CallerMemberName] string name = "")
{
var hostContext = new TestHostContext(this, name);
@@ -1,420 +0,0 @@
-using System;
-using System.Collections.Generic;
-using System.IO;
-using System.Linq;
-using System.Runtime.CompilerServices;
-using GitHub.Runner.Common.Util;
-using GitHub.Runner.Sdk;
-using GitHub.Runner.Worker;
-using Moq;
-using Xunit;
-using DTWebApi = GitHub.DistributedTask.WebApi;
-
-namespace GitHub.Runner.Common.Tests.Worker
-{
-public abstract class FileCommandTestBase<T>
-where T : IFileCommandExtension, new()
-{
-
-protected void TestDirectoryNotFound()
-{
-using (var hostContext = Setup())
-{
-var stateFile = Path.Combine(_rootDirectory, "directory-not-found", "env");
-_fileCmdExtension.ProcessCommand(_executionContext.Object, stateFile, null);
-Assert.Equal(0, _issues.Count);
-Assert.Equal(0, _store.Count);
-}
-}
-
-protected void TestNotFound()
-{
-using (var hostContext = Setup())
-{
-var stateFile = Path.Combine(_rootDirectory, "file-not-found");
-_fileCmdExtension.ProcessCommand(_executionContext.Object, stateFile, null);
-Assert.Equal(0, _issues.Count);
-Assert.Equal(0, _store.Count);
-}
-}
-
-protected void TestEmptyFile()
-{
-using (var hostContext = Setup())
-{
-var stateFile = Path.Combine(_rootDirectory, "empty-file");
-var content = new List<string>();
-TestUtil.WriteContent(stateFile, content);
-_fileCmdExtension.ProcessCommand(_executionContext.Object, stateFile, null);
-Assert.Equal(0, _issues.Count);
-Assert.Equal(0, _store.Count);
-}
-}
-
-protected void TestSimple()
-{
-using (var hostContext = Setup())
-{
-var stateFile = Path.Combine(_rootDirectory, "simple");
-var content = new List<string>
-{
-"MY_KEY=MY VALUE",
-};
-TestUtil.WriteContent(stateFile, content);
-_fileCmdExtension.ProcessCommand(_executionContext.Object, stateFile, null);
-Assert.Equal(0, _issues.Count);
-Assert.Equal(1, _store.Count);
-Assert.Equal("MY VALUE", _store["MY_KEY"]);
-}
-}
-
-protected void TestSimple_SkipEmptyLines()
-{
-using (var hostContext = Setup())
-{
-var stateFile = Path.Combine(_rootDirectory, "simple");
-var content = new List<string>
-{
-string.Empty,
-"MY_KEY=my value",
-string.Empty,
-"MY_KEY_2=my second value",
-string.Empty,
-};
-TestUtil.WriteContent(stateFile, content);
-_fileCmdExtension.ProcessCommand(_executionContext.Object, stateFile, null);
-Assert.Equal(0, _issues.Count);
-Assert.Equal(2, _store.Count);
-Assert.Equal("my value", _store["MY_KEY"]);
-Assert.Equal("my second value", _store["MY_KEY_2"]);
-}
-}
-
-protected void TestSimple_EmptyValue()
-{
-using (var hostContext = Setup())
-{
-var stateFile = Path.Combine(_rootDirectory, "simple-empty-value");
-var content = new List<string>
-{
-"MY_KEY=",
-};
-TestUtil.WriteContent(stateFile, content);
-_fileCmdExtension.ProcessCommand(_executionContext.Object, stateFile, null);
-Assert.Equal(0, _issues.Count);
-Assert.Equal(1, _store.Count);
-Assert.Equal(string.Empty, _store["MY_KEY"]);
-}
-}
-
-protected void TestSimple_MultipleValues()
-{
-using (var hostContext = Setup())
-{
-var stateFile = Path.Combine(_rootDirectory, "simple");
-var content = new List<string>
-{
-"MY_KEY=my value",
-"MY_KEY_2=",
-"MY_KEY_3=my third value",
-};
-TestUtil.WriteContent(stateFile, content);
-_fileCmdExtension.ProcessCommand(_executionContext.Object, stateFile, null);
-Assert.Equal(0, _issues.Count);
-Assert.Equal(3, _store.Count);
-Assert.Equal("my value", _store["MY_KEY"]);
-Assert.Equal(string.Empty, _store["MY_KEY_2"]);
-Assert.Equal("my third value", _store["MY_KEY_3"]);
-}
-}
-
-protected void TestSimple_SpecialCharacters()
-{
-using (var hostContext = Setup())
-{
-var stateFile = Path.Combine(_rootDirectory, "simple");
-var content = new List<string>
-{
-"MY_KEY==abc",
-"MY_KEY_2=def=ghi",
-"MY_KEY_3=jkl=",
-};
-TestUtil.WriteContent(stateFile, content);
-_fileCmdExtension.ProcessCommand(_executionContext.Object, stateFile, null);
-Assert.Equal(0, _issues.Count);
-Assert.Equal(3, _store.Count);
-Assert.Equal("=abc", _store["MY_KEY"]);
-Assert.Equal("def=ghi", _store["MY_KEY_2"]);
-Assert.Equal("jkl=", _store["MY_KEY_3"]);
-}
-}
-
-protected void TestHeredoc()
-{
-using (var hostContext = Setup())
-{
-var stateFile = Path.Combine(_rootDirectory, "heredoc");
-var content = new List<string>
-{
-"MY_KEY<<EOF",
-"line one",
-"line two",
-"line three",
-"EOF",
-};
-TestUtil.WriteContent(stateFile, content);
-_fileCmdExtension.ProcessCommand(_executionContext.Object, stateFile, null);
-Assert.Equal(0, _issues.Count);
-Assert.Equal(1, _store.Count);
-Assert.Equal($"line one{BREAK}line two{BREAK}line three", _store["MY_KEY"]);
-}
-}
-
-protected void TestHeredoc_EmptyValue()
-{
-using (var hostContext = Setup())
-{
-var stateFile = Path.Combine(_rootDirectory, "heredoc");
-var content = new List<string>
-{
-"MY_KEY<<EOF",
-"EOF",
-};
-TestUtil.WriteContent(stateFile, content);
-_fileCmdExtension.ProcessCommand(_executionContext.Object, stateFile, null);
-Assert.Equal(0, _issues.Count);
-Assert.Equal(1, _store.Count);
-Assert.Equal(string.Empty, _store["MY_KEY"]);
-}
-}
-
-protected void TestHeredoc_SkipEmptyLines()
-{
-using (var hostContext = Setup())
-{
-var stateFile = Path.Combine(_rootDirectory, "heredoc");
-var content = new List<string>
-{
-string.Empty,
-"MY_KEY<<EOF",
-"hello",
-"world",
-"EOF",
-string.Empty,
-"MY_KEY_2<<EOF",
-"HELLO",
-"AGAIN",
-"EOF",
-string.Empty,
-};
-TestUtil.WriteContent(stateFile, content);
-_fileCmdExtension.ProcessCommand(_executionContext.Object, stateFile, null);
-Assert.Equal(0, _issues.Count);
-Assert.Equal(2, _store.Count);
-Assert.Equal($"hello{BREAK}world", _store["MY_KEY"]);
-Assert.Equal($"HELLO{BREAK}AGAIN", _store["MY_KEY_2"]);
-}
-}
-
-protected void TestHeredoc_EdgeCases()
-{
-using (var hostContext = Setup())
-{
-var stateFile = Path.Combine(_rootDirectory, "heredoc");
-var content = new List<string>
-{
-"MY_KEY_1<<EOF",
-"hello",
-string.Empty,
-"three",
-string.Empty,
-"EOF",
-"MY_KEY_2<<EOF",
-"hello=two",
-"EOF",
-"MY_KEY_3<<EOF",
-" EOF",
-"EOF",
-"MY_KEY_4<<EOF",
-"EOF EOF",
-"EOF",
-};
-TestUtil.WriteContent(stateFile, content);
-_fileCmdExtension.ProcessCommand(_executionContext.Object, stateFile, null);
-Assert.Equal(0, _issues.Count);
-Assert.Equal(4, _store.Count);
-Assert.Equal($"hello{BREAK}{BREAK}three{BREAK}", _store["MY_KEY_1"]);
-Assert.Equal($"hello=two", _store["MY_KEY_2"]);
-Assert.Equal($" EOF", _store["MY_KEY_3"]);
-Assert.Equal($"EOF EOF", _store["MY_KEY_4"]);
-}
-}
-
-protected void TestHeredoc_EndMarkerVariations(string validEndMarker)
-{
-using (var hostContext = Setup())
-{
-var stateFile = Path.Combine(_rootDirectory, "heredoc");
-string eof = validEndMarker;
-var content = new List<string>
-{
-$"MY_KEY_1<<{eof}",
-$"hello",
-$"one",
-$"{eof}",
-$"MY_KEY_2<<{eof}",
-$"hello=two",
-$"{eof}",
-$"MY_KEY_3<<{eof}",
-$" {eof}",
-$"{eof}",
-$"MY_KEY_4<<{eof}",
-$"{eof} {eof}",
-$"{eof}",
-};
-TestUtil.WriteContent(stateFile, content);
-_fileCmdExtension.ProcessCommand(_executionContext.Object, stateFile, null);
-Assert.Equal(0, _issues.Count);
-Assert.Equal(4, _store.Count);
-Assert.Equal($"hello{BREAK}one", _store["MY_KEY_1"]);
-Assert.Equal($"hello=two", _store["MY_KEY_2"]);
-Assert.Equal($" {eof}", _store["MY_KEY_3"]);
-Assert.Equal($"{eof} {eof}", _store["MY_KEY_4"]);
-}
-}
-
-protected void TestHeredoc_EqualBeforeMultilineIndicator()
-{
-using var hostContext = Setup();
-var stateFile = Path.Combine(_rootDirectory, "heredoc");
-
-// Define a hypothetical injectable payload that just happens to contain the '=' character.
-string contrivedGitHubIssueTitle = "Issue 999: Better handling for the `=` character";
-
-// The docs recommend using randomly-generated EOF markers.
-// Here's a randomly-generated base64 EOF marker that just happens to contain an '=' character. ('=' is a padding character in base64.)
-// see https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#multiline-strings
-string randomizedEOF = "khkIhPxsVA==";
-var content = new List<string>
-{
-// In a real world scenario, "%INJECT%" might instead be something like "${{ github.event.issue.title }}"
-$"PREFIX_%INJECT%<<{randomizedEOF}".Replace("%INJECT%", contrivedGitHubIssueTitle),
-"RandomDataThatJustHappensToContainAnEquals=Character",
-randomizedEOF,
-};
-TestUtil.WriteContent(stateFile, content);
-var ex = Assert.Throws<Exception>(() => _fileCmdExtension.ProcessCommand(_executionContext.Object, stateFile, null));
-Assert.StartsWith("Invalid format", ex.Message);
-}
-
-protected void TestHeredoc_MissingNewLine()
-{
-using (var hostContext = Setup())
-{
-var stateFile = Path.Combine(_rootDirectory, "heredoc");
-string content = "MY_KEY<<EOF line one line two line three EOF";
-TestUtil.WriteContent(stateFile, content);
-var ex = Assert.Throws<Exception>(() => _fileCmdExtension.ProcessCommand(_executionContext.Object, stateFile, null));
-Assert.Contains("Matching delimiter not found", ex.Message);
-}
-}
-
-protected void TestHeredoc_MissingNewLineMultipleLines()
-{
-using (var hostContext = Setup())
-{
-var stateFile = Path.Combine(_rootDirectory, "heredoc");
-string multilineFragment = @"line one
-line two
-line three";
-
-// Note that the final EOF does not appear on it's own line.
-string content = $"MY_KEY<<EOF {multilineFragment} EOF";
-TestUtil.WriteContent(stateFile, content);
-var ex = Assert.Throws<Exception>(() => _fileCmdExtension.ProcessCommand(_executionContext.Object, stateFile, null));
-Assert.Contains("EOF marker missing new line", ex.Message);
-}
-}
-
-protected void TestHeredoc_PreservesNewline()
-{
-using (var hostContext = Setup())
-{
-var newline = "\n";
-var stateFile = Path.Combine(_rootDirectory, "heredoc");
-var content = new List<string>
-{
-"MY_KEY<<EOF",
-"hello",
-"world",
-"EOF",
-};
-TestUtil.WriteContent(stateFile, content, LineEndingType.Linux);
-_fileCmdExtension.ProcessCommand(_executionContext.Object, stateFile, null);
-Assert.Equal(0, _issues.Count);
-Assert.Equal(1, _store.Count);
-Assert.Equal($"hello{newline}world", _store["MY_KEY"]);
-}
-}
-
-protected TestHostContext Setup([CallerMemberName] string name = "")
-{
-_issues = new List<Tuple<DTWebApi.Issue, string>>();
-
-var hostContext = new TestHostContext(this, name);
-
-// Trace
-_trace = hostContext.GetTrace();
-
-// Directory for test data
-var workDirectory = hostContext.GetDirectory(WellKnownDirectory.Work);
-ArgUtil.NotNullOrEmpty(workDirectory, nameof(workDirectory));
-Directory.CreateDirectory(workDirectory);
-_rootDirectory = Path.Combine(workDirectory, nameof(T));
-Directory.CreateDirectory(_rootDirectory);
-
-// Execution context
-_executionContext = new Mock<IExecutionContext>();
-_executionContext.Setup(x => x.Global)
-.Returns(new GlobalContext
-{
-EnvironmentVariables = new Dictionary<string, string>(VarUtil.EnvironmentVariableKeyComparer),
-WriteDebug = true,
-});
-_executionContext.Setup(x => x.AddIssue(It.IsAny<DTWebApi.Issue>(), It.IsAny<ExecutionContextLogOptions>()))
-.Callback((DTWebApi.Issue issue, ExecutionContextLogOptions logOptions) =>
-{
-var resolvedMessage = issue.Message;
-if (logOptions.WriteToLog && !string.IsNullOrEmpty(logOptions.LogMessageOverride))
-{
-resolvedMessage = logOptions.LogMessageOverride;
-}
-_issues.Add(new(issue, resolvedMessage));
-_trace.Info($"Issue '{issue.Type}': {resolvedMessage}");
-});
-_executionContext.Setup(x => x.Write(It.IsAny<string>(), It.IsAny<string>()))
-.Callback((string tag, string message) =>
-{
-_trace.Info($"{tag}{message}");
-});
-
-_store = PostSetup();
-
-_fileCmdExtension = new T();
-_fileCmdExtension.Initialize(hostContext);
-
-return hostContext;
-}
-
-protected abstract IDictionary<string, string> PostSetup();
-
-protected static readonly string BREAK = Environment.NewLine;
-
-protected IFileCommandExtension _fileCmdExtension { get; private set; }
-protected Mock<IExecutionContext> _executionContext { get; private set; }
-protected List<Tuple<DTWebApi.Issue, string>> _issues { get; private set; }
-protected IDictionary<string, string> _store { get; private set; }
-protected string _rootDirectory { get; private set; }
-protected ITraceWriter _trace { get; private set; }
-}
-}
@@ -1,27 +1,44 @@
using System;
using System.Collections.Generic;
+using System.Globalization;
+using System.IO;
using System.Linq;
+using System.Text;
+using System.Threading;
+using System.Threading.Tasks;
+using System.Runtime.CompilerServices;
+using GitHub.Runner.Common.Util;
+using GitHub.Runner.Sdk;
using GitHub.Runner.Worker;
+using GitHub.Runner.Worker.Container;
+using GitHub.Runner.Worker.Handlers;
+using Moq;
using Xunit;
+using DTWebApi = GitHub.DistributedTask.WebApi;

namespace GitHub.Runner.Common.Tests.Worker
{
-public sealed class SaveStateFileCommandL0 : FileCommandTestBase<SaveStateFileCommand>
+public sealed class SaveStateFileCommandL0
{
-protected override IDictionary<string, string> PostSetup()
-{
-var intraActionState = new Dictionary<string, string>();
-_executionContext.Setup(x => x.IntraActionState).Returns(intraActionState);
-return intraActionState;
-}
+private Mock<IExecutionContext> _executionContext;
+private List<Tuple<DTWebApi.Issue, string>> _issues;
+private string _rootDirectory;
+private SaveStateFileCommand _saveStateFileCommand;
+private Dictionary<string, string> _intraActionState;
+private ITraceWriter _trace;

[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
public void SaveStateFileCommand_DirectoryNotFound()
{
-base.TestDirectoryNotFound();
+using (var hostContext = Setup())
+{
+var stateFile = Path.Combine(_rootDirectory, "directory-not-found", "env");
+_saveStateFileCommand.ProcessCommand(_executionContext.Object, stateFile, null);
+Assert.Equal(0, _issues.Count);
+Assert.Equal(0, _intraActionState.Count);
+}
}

[Fact]
@@ -29,7 +46,13 @@ namespace GitHub.Runner.Common.Tests.Worker
[Trait("Category", "Worker")]
public void SaveStateFileCommand_NotFound()
{
-base.TestNotFound();
+using (var hostContext = Setup())
+{
+var stateFile = Path.Combine(_rootDirectory, "file-not-found");
+_saveStateFileCommand.ProcessCommand(_executionContext.Object, stateFile, null);
+Assert.Equal(0, _issues.Count);
+Assert.Equal(0, _intraActionState.Count);
+}
}

[Fact]
@@ -37,7 +60,15 @@
[Trait("Category", "Worker")]
public void SaveStateFileCommand_EmptyFile()
{
-base.TestEmptyFile();
+using (var hostContext = Setup())
+{
+var stateFile = Path.Combine(_rootDirectory, "empty-file");
+var content = new List<string>();
+WriteContent(stateFile, content);
+_saveStateFileCommand.ProcessCommand(_executionContext.Object, stateFile, null);
+Assert.Equal(0, _issues.Count);
+Assert.Equal(0, _intraActionState.Count);
+}
}

[Fact]
@@ -45,7 +76,19 @@
[Trait("Category", "Worker")]
public void SaveStateFileCommand_Simple()
{
-base.TestSimple();
+using (var hostContext = Setup())
+{
+var stateFile = Path.Combine(_rootDirectory, "simple");
+var content = new List<string>
+{
+"MY_STATE=MY VALUE",
+};
+WriteContent(stateFile, content);
+_saveStateFileCommand.ProcessCommand(_executionContext.Object, stateFile, null);
+Assert.Equal(0, _issues.Count);
+Assert.Equal(1, _intraActionState.Count);
+Assert.Equal("MY VALUE", _intraActionState["MY_STATE"]);
+}
}

[Fact]
@@ -53,7 +96,24 @@
[Trait("Category", "Worker")]
public void SaveStateFileCommand_Simple_SkipEmptyLines()
{
-base.TestSimple_SkipEmptyLines();
+using (var hostContext = Setup())
+{
+var stateFile = Path.Combine(_rootDirectory, "simple");
+var content = new List<string>
+{
+string.Empty,
+"MY_STATE=my value",
+string.Empty,
+"MY_STATE_2=my second value",
+string.Empty,
+};
+WriteContent(stateFile, content);
+_saveStateFileCommand.ProcessCommand(_executionContext.Object, stateFile, null);
+Assert.Equal(0, _issues.Count);
+Assert.Equal(2, _intraActionState.Count);
+Assert.Equal("my value", _intraActionState["MY_STATE"]);
+Assert.Equal("my second value", _intraActionState["MY_STATE_2"]);
+}
}

[Fact]
@@ -61,7 +121,19 @@
[Trait("Category", "Worker")]
public void SaveStateFileCommand_Simple_EmptyValue()
{
-base.TestSimple_EmptyValue();
+using (var hostContext = Setup())
+{
+var stateFile = Path.Combine(_rootDirectory, "simple-empty-value");
+var content = new List<string>
+{
+"MY_STATE=",
+};
+WriteContent(stateFile, content);
+_saveStateFileCommand.ProcessCommand(_executionContext.Object, stateFile, null);
+Assert.Equal(0, _issues.Count);
+Assert.Equal(1, _intraActionState.Count);
+Assert.Equal(string.Empty, _intraActionState["MY_STATE"]);
+}
}

[Fact]
@@ -69,7 +141,23 @@
[Trait("Category", "Worker")]
public void SaveStateFileCommand_Simple_MultipleValues()
{
-base.TestSimple_MultipleValues();
+using (var hostContext = Setup())
+{
+var stateFile = Path.Combine(_rootDirectory, "simple");
+var content = new List<string>
+{
+"MY_STATE=my value",
+"MY_STATE_2=",
+"MY_STATE_3=my third value",
+};
+WriteContent(stateFile, content);
+_saveStateFileCommand.ProcessCommand(_executionContext.Object, stateFile, null);
+Assert.Equal(0, _issues.Count);
+Assert.Equal(3, _intraActionState.Count);
+Assert.Equal("my value", _intraActionState["MY_STATE"]);
+Assert.Equal(string.Empty, _intraActionState["MY_STATE_2"]);
+Assert.Equal("my third value", _intraActionState["MY_STATE_3"]);
+}
}

[Fact]
@@ -77,7 +165,23 @@
[Trait("Category", "Worker")]
public void SaveStateFileCommand_Simple_SpecialCharacters()
{
-base.TestSimple_SpecialCharacters();
+using (var hostContext = Setup())
+{
+var stateFile = Path.Combine(_rootDirectory, "simple");
+var content = new List<string>
+{
+"MY_STATE==abc",
+"MY_STATE_2=def=ghi",
+"MY_STATE_3=jkl=",
+};
+WriteContent(stateFile, content);
+_saveStateFileCommand.ProcessCommand(_executionContext.Object, stateFile, null);
+Assert.Equal(0, _issues.Count);
+Assert.Equal(3, _intraActionState.Count);
+Assert.Equal("=abc", _intraActionState["MY_STATE"]);
+Assert.Equal("def=ghi", _intraActionState["MY_STATE_2"]);
+Assert.Equal("jkl=", _intraActionState["MY_STATE_3"]);
+}
}

[Fact]
@@ -85,7 +189,23 @@
[Trait("Category", "Worker")]
public void SaveStateFileCommand_Heredoc()
{
-base.TestHeredoc();
+using (var hostContext = Setup())
+{
+var stateFile = Path.Combine(_rootDirectory, "heredoc");
+var content = new List<string>
+{
+"MY_STATE<<EOF",
+"line one",
+"line two",
+"line three",
+"EOF",
+};
+WriteContent(stateFile, content);
+_saveStateFileCommand.ProcessCommand(_executionContext.Object, stateFile, null);
+Assert.Equal(0, _issues.Count);
+Assert.Equal(1, _intraActionState.Count);
+Assert.Equal($"line one{Environment.NewLine}line two{Environment.NewLine}line three", _intraActionState["MY_STATE"]);
+}
}

[Fact]
@@ -93,7 +213,20 @@
[Trait("Category", "Worker")]
public void SaveStateFileCommand_Heredoc_EmptyValue()
{
-base.TestHeredoc_EmptyValue();
+using (var hostContext = Setup())
+{
+var stateFile = Path.Combine(_rootDirectory, "heredoc");
+var content = new List<string>
+{
+"MY_STATE<<EOF",
+"EOF",
+};
+WriteContent(stateFile, content);
+_saveStateFileCommand.ProcessCommand(_executionContext.Object, stateFile, null);
+Assert.Equal(0, _issues.Count);
+Assert.Equal(1, _intraActionState.Count);
+Assert.Equal(string.Empty, _intraActionState["MY_STATE"]);
+}
}

[Fact]
@@ -101,52 +234,73 @@
[Trait("Category", "Worker")]
public void SaveStateFileCommand_Heredoc_SkipEmptyLines()
{
-base.TestHeredoc_SkipEmptyLines();
+using (var hostContext = Setup())
+{
+var stateFile = Path.Combine(_rootDirectory, "heredoc");
+var content = new List<string>
+{
+string.Empty,
+"MY_STATE<<EOF",
+"hello",
+"world",
+"EOF",
+string.Empty,
+"MY_STATE_2<<EOF",
+"HELLO",
+"AGAIN",
+"EOF",
+string.Empty,
+};
+WriteContent(stateFile, content);
+_saveStateFileCommand.ProcessCommand(_executionContext.Object, stateFile, null);
+Assert.Equal(0, _issues.Count);
+Assert.Equal(2, _intraActionState.Count);
+Assert.Equal($"hello{Environment.NewLine}world", _intraActionState["MY_STATE"]);
+Assert.Equal($"HELLO{Environment.NewLine}AGAIN", _intraActionState["MY_STATE_2"]);
+}
}

[Fact]
[Trait("Level", "L0")]
[Trait("Category", "Worker")]
-public void SaveStateFileCommand_Heredoc_EdgeCases()
+public void SaveStateFileCommand_Heredoc_SpecialCharacters()
{
-base.TestHeredoc_EdgeCases();
+using (var hostContext = Setup())
+{
+var stateFile = Path.Combine(_rootDirectory, "heredoc");
+var content = new List<string>
+{
+"MY_STATE<<=EOF",
+"hello",
+"one",
+"=EOF",
+"MY_STATE_2<<<EOF",
+"hello",
+"two",
+"<EOF",
+"MY_STATE_3<<EOF",
+"hello",
+string.Empty,
+"three",
+string.Empty,
+"EOF",
+"MY_STATE_4<<EOF",
+"hello=four",
+"EOF",
+"MY_STATE_5<<EOF",
+" EOF",
+"EOF",
+};
+WriteContent(stateFile, content);
+_saveStateFileCommand.ProcessCommand(_executionContext.Object, stateFile, null);
+Assert.Equal(0, _issues.Count);
+Assert.Equal(5, _intraActionState.Count);
+Assert.Equal($"hello{Environment.NewLine}one", _intraActionState["MY_STATE"]);
+Assert.Equal($"hello{Environment.NewLine}two", _intraActionState["MY_STATE_2"]);
+Assert.Equal($"hello{Environment.NewLine}{Environment.NewLine}three{Environment.NewLine}", _intraActionState["MY_STATE_3"]);
+Assert.Equal($"hello=four", _intraActionState["MY_STATE_4"]);
+Assert.Equal($" EOF", _intraActionState["MY_STATE_5"]);
}

-[Theory]
-[Trait("Level", "L0")]
-[Trait("Category", "Worker")]
-// All of the following are not only valid, but quite plausible end markers.
-// Most are derived straight from the example at https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#multiline-strings
-#pragma warning disable format
-[InlineData("=EOF")][InlineData("==EOF")][InlineData("EO=F")][InlineData("EO==F")][InlineData("EOF=")][InlineData("EOF==")]
-[InlineData("<EOF")][InlineData("<<EOF")][InlineData("EO<F")][InlineData("EO<<F")][InlineData("EOF<")][InlineData("EOF<<")]
-[InlineData("+EOF")][InlineData("++EOF")][InlineData("EO+F")][InlineData("EO++F")][InlineData("EOF+")][InlineData("EOF++")]
-[InlineData("/EOF")][InlineData("//EOF")][InlineData("EO/F")][InlineData("EO//F")][InlineData("EOF/")][InlineData("EOF//")]
-#pragma warning restore format
-[InlineData("<<//++==")]
-[InlineData("contrivedBase64==")]
-[InlineData("khkIhPxsVA==")]
-[InlineData("D+Y8zE/EOw==")]
-[InlineData("wuOWG4S6FQ==")]
-[InlineData("7wigCJ//iw==")]
-[InlineData("uifTuYTs8K4=")]
-[InlineData("M7N2ITg/04c=")]
-[InlineData("Xhh+qp+Y6iM=")]
-[InlineData("5tdblQajc/b+EGBZXo0w")]
-[InlineData("jk/UMjIx/N0eVcQYOUfw")]
-[InlineData("/n5lsw73Cwl35Hfuscdz")]
|
|
||||||
[InlineData("ZvnAEW+9O0tXp3Fmb3Oh")]
|
|
||||||
public void SaveStateFileCommand_Heredoc_EndMarkerVariations(string validEndMarker)
|
|
||||||
{
|
|
||||||
base.TestHeredoc_EndMarkerVariations(validEndMarker);
|
|
||||||
}
|
|
||||||
|
|
||||||
[Fact]
|
|
||||||
[Trait("Level", "L0")]
|
|
||||||
[Trait("Category", "Worker")]
|
|
||||||
public void SaveStateFileCommand_Heredoc_EqualBeforeMultilineIndicator()
|
|
||||||
{
|
|
||||||
base.TestHeredoc_EqualBeforeMultilineIndicator();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
[Fact]
|
[Fact]
|
||||||
@@ -154,7 +308,21 @@ namespace GitHub.Runner.Common.Tests.Worker
|
|||||||
[Trait("Category", "Worker")]
|
[Trait("Category", "Worker")]
|
||||||
public void SaveStateFileCommand_Heredoc_MissingNewLine()
|
public void SaveStateFileCommand_Heredoc_MissingNewLine()
|
||||||
{
|
{
|
||||||
base.TestHeredoc_MissingNewLine();
|
using (var hostContext = Setup())
|
||||||
|
{
|
||||||
|
var stateFile = Path.Combine(_rootDirectory, "heredoc");
|
||||||
|
var content = new List<string>
|
||||||
|
{
|
||||||
|
"MY_STATE<<EOF",
|
||||||
|
"line one",
|
||||||
|
"line two",
|
||||||
|
"line three",
|
||||||
|
"EOF",
|
||||||
|
};
|
||||||
|
WriteContent(stateFile, content, " ");
|
||||||
|
var ex = Assert.Throws<Exception>(() => _saveStateFileCommand.ProcessCommand(_executionContext.Object, stateFile, null));
|
||||||
|
Assert.Contains("Matching delimiter not found", ex.Message);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
[Fact]
|
[Fact]
|
||||||
@@ -162,7 +330,21 @@ namespace GitHub.Runner.Common.Tests.Worker
|
|||||||
[Trait("Category", "Worker")]
|
[Trait("Category", "Worker")]
|
||||||
public void SaveStateFileCommand_Heredoc_MissingNewLineMultipleLines()
|
public void SaveStateFileCommand_Heredoc_MissingNewLineMultipleLines()
|
||||||
{
|
{
|
||||||
base.TestHeredoc_MissingNewLineMultipleLines();
|
using (var hostContext = Setup())
|
||||||
|
{
|
||||||
|
var stateFile = Path.Combine(_rootDirectory, "heredoc");
|
||||||
|
var content = new List<string>
|
||||||
|
{
|
||||||
|
"MY_STATE<<EOF",
|
||||||
|
@"line one
|
||||||
|
line two
|
||||||
|
line three",
|
||||||
|
"EOF",
|
||||||
|
};
|
||||||
|
WriteContent(stateFile, content, " ");
|
||||||
|
var ex = Assert.Throws<Exception>(() => _saveStateFileCommand.ProcessCommand(_executionContext.Object, stateFile, null));
|
||||||
|
Assert.Contains("EOF marker missing new line", ex.Message);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#if OS_WINDOWS
|
#if OS_WINDOWS
|
||||||
@@ -171,9 +353,90 @@ namespace GitHub.Runner.Common.Tests.Worker
|
|||||||
[Trait("Category", "Worker")]
|
[Trait("Category", "Worker")]
|
||||||
public void SaveStateFileCommand_Heredoc_PreservesNewline()
|
public void SaveStateFileCommand_Heredoc_PreservesNewline()
|
||||||
{
|
{
|
||||||
base.TestHeredoc_PreservesNewline();
|
using (var hostContext = Setup())
|
||||||
|
{
|
||||||
|
var newline = "\n";
|
||||||
|
var stateFile = Path.Combine(_rootDirectory, "heredoc");
|
||||||
|
var content = new List<string>
|
||||||
|
{
|
||||||
|
"MY_STATE<<EOF",
|
||||||
|
"hello",
|
||||||
|
"world",
|
||||||
|
"EOF",
|
||||||
|
};
|
||||||
|
WriteContent(stateFile, content, newline: newline);
|
||||||
|
_saveStateFileCommand.ProcessCommand(_executionContext.Object, stateFile, null);
|
||||||
|
Assert.Equal(0, _issues.Count);
|
||||||
|
Assert.Equal(1, _intraActionState.Count);
|
||||||
|
Assert.Equal($"hello{newline}world", _intraActionState["MY_STATE"]);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
|
private void WriteContent(
|
||||||
|
string path,
|
||||||
|
List<string> content,
|
||||||
|
string newline = null)
|
||||||
|
{
|
||||||
|
if (string.IsNullOrEmpty(newline))
|
||||||
|
{
|
||||||
|
newline = Environment.NewLine;
|
||||||
|
}
|
||||||
|
|
||||||
|
var encoding = new UTF8Encoding(true); // Emit BOM
|
||||||
|
var contentStr = string.Join(newline, content);
|
||||||
|
File.WriteAllText(path, contentStr, encoding);
|
||||||
|
}
|
||||||
|
|
||||||
|
private TestHostContext Setup([CallerMemberName] string name = "")
|
||||||
|
{
|
||||||
|
_issues = new List<Tuple<DTWebApi.Issue, string>>();
|
||||||
|
_intraActionState = new Dictionary<string, string>();
|
||||||
|
|
||||||
|
var hostContext = new TestHostContext(this, name);
|
||||||
|
|
||||||
|
// Trace
|
||||||
|
_trace = hostContext.GetTrace();
|
||||||
|
|
||||||
|
// Directory for test data
|
||||||
|
var workDirectory = hostContext.GetDirectory(WellKnownDirectory.Work);
|
||||||
|
ArgUtil.NotNullOrEmpty(workDirectory, nameof(workDirectory));
|
||||||
|
Directory.CreateDirectory(workDirectory);
|
||||||
|
_rootDirectory = Path.Combine(workDirectory, nameof(SaveStateFileCommandL0));
|
||||||
|
Directory.CreateDirectory(_rootDirectory);
|
||||||
|
|
||||||
|
// Execution context
|
||||||
|
_executionContext = new Mock<IExecutionContext>();
|
||||||
|
_executionContext.Setup(x => x.Global)
|
||||||
|
.Returns(new GlobalContext
|
||||||
|
{
|
||||||
|
EnvironmentVariables = new Dictionary<string, string>(VarUtil.EnvironmentVariableKeyComparer),
|
||||||
|
WriteDebug = true,
|
||||||
|
});
|
||||||
|
_executionContext.Setup(x => x.AddIssue(It.IsAny<DTWebApi.Issue>(), It.IsAny<ExecutionContextLogOptions>()))
|
||||||
|
.Callback((DTWebApi.Issue issue, ExecutionContextLogOptions logOptions) =>
|
||||||
|
{
|
||||||
|
var resolvedMessage = issue.Message;
|
||||||
|
if (logOptions.WriteToLog && !string.IsNullOrEmpty(logOptions.LogMessageOverride))
|
||||||
|
{
|
||||||
|
resolvedMessage = logOptions.LogMessageOverride;
|
||||||
|
}
|
||||||
|
_issues.Add(new(issue, resolvedMessage));
|
||||||
|
_trace.Info($"Issue '{issue.Type}': {resolvedMessage}");
|
||||||
|
});
|
||||||
|
_executionContext.Setup(x => x.Write(It.IsAny<string>(), It.IsAny<string>()))
|
||||||
|
.Callback((string tag, string message) =>
|
||||||
|
{
|
||||||
|
_trace.Info($"{tag}{message}");
|
||||||
|
});
|
||||||
|
_executionContext.Setup(x => x.IntraActionState)
|
||||||
|
.Returns(_intraActionState);
|
||||||
|
|
||||||
|
// SaveStateFileCommand
|
||||||
|
_saveStateFileCommand = new SaveStateFileCommand();
|
||||||
|
_saveStateFileCommand.Initialize(hostContext);
|
||||||
|
|
||||||
|
return hostContext;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,25 +1,43 @@
|
|||||||
using System;
|
using System;
|
||||||
using System.Collections.Generic;
|
using System.Collections.Generic;
|
||||||
|
using System.Globalization;
|
||||||
|
using System.IO;
|
||||||
using System.Linq;
|
using System.Linq;
|
||||||
|
using System.Text;
|
||||||
|
using System.Threading;
|
||||||
|
using System.Threading.Tasks;
|
||||||
|
using System.Runtime.CompilerServices;
|
||||||
|
using GitHub.Runner.Common.Util;
|
||||||
|
using GitHub.Runner.Sdk;
|
||||||
using GitHub.Runner.Worker;
|
using GitHub.Runner.Worker;
|
||||||
|
using GitHub.Runner.Worker.Container;
|
||||||
|
using GitHub.Runner.Worker.Handlers;
|
||||||
|
using Moq;
|
||||||
using Xunit;
|
using Xunit;
|
||||||
|
using DTWebApi = GitHub.DistributedTask.WebApi;
|
||||||
|
|
||||||
namespace GitHub.Runner.Common.Tests.Worker
|
namespace GitHub.Runner.Common.Tests.Worker
|
||||||
{
|
{
|
||||||
public sealed class SetEnvFileCommandL0 : FileCommandTestBase<SetEnvFileCommand>
|
public sealed class SetEnvFileCommandL0
|
||||||
{
|
{
|
||||||
|
private Mock<IExecutionContext> _executionContext;
|
||||||
protected override IDictionary<string, string> PostSetup()
|
private List<Tuple<DTWebApi.Issue, string>> _issues;
|
||||||
{
|
private string _rootDirectory;
|
||||||
return _executionContext.Object.Global.EnvironmentVariables;
|
private SetEnvFileCommand _setEnvFileCommand;
|
||||||
}
|
private ITraceWriter _trace;
|
||||||
|
|
||||||
[Fact]
|
[Fact]
|
||||||
[Trait("Level", "L0")]
|
[Trait("Level", "L0")]
|
||||||
[Trait("Category", "Worker")]
|
[Trait("Category", "Worker")]
|
||||||
public void SetEnvFileCommand_DirectoryNotFound()
|
public void SetEnvFileCommand_DirectoryNotFound()
|
||||||
{
|
{
|
||||||
base.TestDirectoryNotFound();
|
using (var hostContext = Setup())
|
||||||
|
{
|
||||||
|
var envFile = Path.Combine(_rootDirectory, "directory-not-found", "env");
|
||||||
|
_setEnvFileCommand.ProcessCommand(_executionContext.Object, envFile, null);
|
||||||
|
Assert.Equal(0, _issues.Count);
|
||||||
|
Assert.Equal(0, _executionContext.Object.Global.EnvironmentVariables.Count);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
[Fact]
|
[Fact]
|
||||||
@@ -27,7 +45,13 @@ namespace GitHub.Runner.Common.Tests.Worker
|
|||||||
[Trait("Category", "Worker")]
|
[Trait("Category", "Worker")]
|
||||||
public void SetEnvFileCommand_NotFound()
|
public void SetEnvFileCommand_NotFound()
|
||||||
{
|
{
|
||||||
base.TestNotFound();
|
using (var hostContext = Setup())
|
||||||
|
{
|
||||||
|
var envFile = Path.Combine(_rootDirectory, "file-not-found");
|
||||||
|
_setEnvFileCommand.ProcessCommand(_executionContext.Object, envFile, null);
|
||||||
|
Assert.Equal(0, _issues.Count);
|
||||||
|
Assert.Equal(0, _executionContext.Object.Global.EnvironmentVariables.Count);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
[Fact]
|
[Fact]
|
||||||
@@ -35,7 +59,15 @@ namespace GitHub.Runner.Common.Tests.Worker
|
|||||||
[Trait("Category", "Worker")]
|
[Trait("Category", "Worker")]
|
||||||
public void SetEnvFileCommand_EmptyFile()
|
public void SetEnvFileCommand_EmptyFile()
|
||||||
{
|
{
|
||||||
base.TestEmptyFile();
|
using (var hostContext = Setup())
|
||||||
|
{
|
||||||
|
var envFile = Path.Combine(_rootDirectory, "empty-file");
|
||||||
|
var content = new List<string>();
|
||||||
|
WriteContent(envFile, content);
|
||||||
|
_setEnvFileCommand.ProcessCommand(_executionContext.Object, envFile, null);
|
||||||
|
Assert.Equal(0, _issues.Count);
|
||||||
|
Assert.Equal(0, _executionContext.Object.Global.EnvironmentVariables.Count);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
[Fact]
|
[Fact]
|
||||||
@@ -43,7 +75,19 @@ namespace GitHub.Runner.Common.Tests.Worker
|
|||||||
[Trait("Category", "Worker")]
|
[Trait("Category", "Worker")]
|
||||||
public void SetEnvFileCommand_Simple()
|
public void SetEnvFileCommand_Simple()
|
||||||
{
|
{
|
||||||
base.TestSimple();
|
using (var hostContext = Setup())
|
||||||
|
{
|
||||||
|
var envFile = Path.Combine(_rootDirectory, "simple");
|
||||||
|
var content = new List<string>
|
||||||
|
{
|
||||||
|
"MY_ENV=MY VALUE",
|
||||||
|
};
|
||||||
|
WriteContent(envFile, content);
|
||||||
|
_setEnvFileCommand.ProcessCommand(_executionContext.Object, envFile, null);
|
||||||
|
Assert.Equal(0, _issues.Count);
|
||||||
|
Assert.Equal(1, _executionContext.Object.Global.EnvironmentVariables.Count);
|
||||||
|
Assert.Equal("MY VALUE", _executionContext.Object.Global.EnvironmentVariables["MY_ENV"]);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
[Fact]
|
[Fact]
|
||||||
@@ -51,7 +95,24 @@ namespace GitHub.Runner.Common.Tests.Worker
|
|||||||
[Trait("Category", "Worker")]
|
[Trait("Category", "Worker")]
|
||||||
public void SetEnvFileCommand_Simple_SkipEmptyLines()
|
public void SetEnvFileCommand_Simple_SkipEmptyLines()
|
||||||
{
|
{
|
||||||
base.TestSimple_SkipEmptyLines();
|
using (var hostContext = Setup())
|
||||||
|
{
|
||||||
|
var envFile = Path.Combine(_rootDirectory, "simple");
|
||||||
|
var content = new List<string>
|
||||||
|
{
|
||||||
|
string.Empty,
|
||||||
|
"MY_ENV=my value",
|
||||||
|
string.Empty,
|
||||||
|
"MY_ENV_2=my second value",
|
||||||
|
string.Empty,
|
||||||
|
};
|
||||||
|
WriteContent(envFile, content);
|
||||||
|
_setEnvFileCommand.ProcessCommand(_executionContext.Object, envFile, null);
|
||||||
|
Assert.Equal(0, _issues.Count);
|
||||||
|
Assert.Equal(2, _executionContext.Object.Global.EnvironmentVariables.Count);
|
||||||
|
Assert.Equal("my value", _executionContext.Object.Global.EnvironmentVariables["MY_ENV"]);
|
||||||
|
Assert.Equal("my second value", _executionContext.Object.Global.EnvironmentVariables["MY_ENV_2"]);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
[Fact]
|
[Fact]
|
||||||
@@ -59,7 +120,19 @@ namespace GitHub.Runner.Common.Tests.Worker
|
|||||||
[Trait("Category", "Worker")]
|
[Trait("Category", "Worker")]
|
||||||
public void SetEnvFileCommand_Simple_EmptyValue()
|
public void SetEnvFileCommand_Simple_EmptyValue()
|
||||||
{
|
{
|
||||||
base.TestSimple_EmptyValue();
|
using (var hostContext = Setup())
|
||||||
|
{
|
||||||
|
var envFile = Path.Combine(_rootDirectory, "simple-empty-value");
|
||||||
|
var content = new List<string>
|
||||||
|
{
|
||||||
|
"MY_ENV=",
|
||||||
|
};
|
||||||
|
WriteContent(envFile, content);
|
||||||
|
_setEnvFileCommand.ProcessCommand(_executionContext.Object, envFile, null);
|
||||||
|
Assert.Equal(0, _issues.Count);
|
||||||
|
Assert.Equal(1, _executionContext.Object.Global.EnvironmentVariables.Count);
|
||||||
|
Assert.Equal(string.Empty, _executionContext.Object.Global.EnvironmentVariables["MY_ENV"]);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
[Fact]
|
[Fact]
|
||||||
@@ -67,7 +140,23 @@ namespace GitHub.Runner.Common.Tests.Worker
|
|||||||
[Trait("Category", "Worker")]
|
[Trait("Category", "Worker")]
|
||||||
public void SetEnvFileCommand_Simple_MultipleValues()
|
public void SetEnvFileCommand_Simple_MultipleValues()
|
||||||
{
|
{
|
||||||
base.TestSimple_MultipleValues();
|
using (var hostContext = Setup())
|
||||||
|
{
|
||||||
|
var envFile = Path.Combine(_rootDirectory, "simple");
|
||||||
|
var content = new List<string>
|
||||||
|
{
|
||||||
|
"MY_ENV=my value",
|
||||||
|
"MY_ENV_2=",
|
||||||
|
"MY_ENV_3=my third value",
|
||||||
|
};
|
||||||
|
WriteContent(envFile, content);
|
||||||
|
_setEnvFileCommand.ProcessCommand(_executionContext.Object, envFile, null);
|
||||||
|
Assert.Equal(0, _issues.Count);
|
||||||
|
Assert.Equal(3, _executionContext.Object.Global.EnvironmentVariables.Count);
|
||||||
|
Assert.Equal("my value", _executionContext.Object.Global.EnvironmentVariables["MY_ENV"]);
|
||||||
|
Assert.Equal(string.Empty, _executionContext.Object.Global.EnvironmentVariables["MY_ENV_2"]);
|
||||||
|
Assert.Equal("my third value", _executionContext.Object.Global.EnvironmentVariables["MY_ENV_3"]);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
[Fact]
|
[Fact]
|
||||||
@@ -75,7 +164,64 @@ namespace GitHub.Runner.Common.Tests.Worker
|
|||||||
[Trait("Category", "Worker")]
|
[Trait("Category", "Worker")]
|
||||||
public void SetEnvFileCommand_Simple_SpecialCharacters()
|
public void SetEnvFileCommand_Simple_SpecialCharacters()
|
||||||
{
|
{
|
||||||
base.TestSimple_SpecialCharacters();
|
using (var hostContext = Setup())
|
||||||
|
{
|
||||||
|
var envFile = Path.Combine(_rootDirectory, "simple");
|
||||||
|
var content = new List<string>
|
||||||
|
{
|
||||||
|
"MY_ENV==abc",
|
||||||
|
"MY_ENV_2=def=ghi",
|
||||||
|
"MY_ENV_3=jkl=",
|
||||||
|
};
|
||||||
|
WriteContent(envFile, content);
|
||||||
|
_setEnvFileCommand.ProcessCommand(_executionContext.Object, envFile, null);
|
||||||
|
Assert.Equal(0, _issues.Count);
|
||||||
|
Assert.Equal(3, _executionContext.Object.Global.EnvironmentVariables.Count);
|
||||||
|
Assert.Equal("=abc", _executionContext.Object.Global.EnvironmentVariables["MY_ENV"]);
|
||||||
|
Assert.Equal("def=ghi", _executionContext.Object.Global.EnvironmentVariables["MY_ENV_2"]);
|
||||||
|
Assert.Equal("jkl=", _executionContext.Object.Global.EnvironmentVariables["MY_ENV_3"]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
[Trait("Level", "L0")]
|
||||||
|
[Trait("Category", "Worker")]
|
||||||
|
public void SetEnvFileCommand_BlockListItemsFiltered()
|
||||||
|
{
|
||||||
|
using (var hostContext = Setup())
|
||||||
|
{
|
||||||
|
var stateFile = Path.Combine(_rootDirectory, "simple");
|
||||||
|
var content = new List<string>
|
||||||
|
{
|
||||||
|
"NODE_OPTIONS=asdf",
|
||||||
|
};
|
||||||
|
WriteContent(stateFile, content);
|
||||||
|
_setEnvFileCommand.ProcessCommand(_executionContext.Object, stateFile, null);
|
||||||
|
Assert.Equal(1, _issues.Count);
|
||||||
|
Assert.Equal(0, _executionContext.Object.Global.EnvironmentVariables.Count);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
[Fact]
|
||||||
|
[Trait("Level", "L0")]
|
||||||
|
[Trait("Category", "Worker")]
|
||||||
|
public void SetEnvFileCommand_BlockListItemsFiltered_Heredoc()
|
||||||
|
{
|
||||||
|
using (var hostContext = Setup())
|
||||||
|
{
|
||||||
|
var stateFile = Path.Combine(_rootDirectory, "simple");
|
||||||
|
var content = new List<string>
|
||||||
|
{
|
||||||
|
"NODE_OPTIONS<<EOF",
|
||||||
|
"asdf",
|
||||||
|
"EOF",
|
||||||
|
};
|
||||||
|
WriteContent(stateFile, content);
|
||||||
|
_setEnvFileCommand.ProcessCommand(_executionContext.Object, stateFile, null);
|
||||||
|
Assert.Equal(1, _issues.Count);
|
||||||
|
Assert.Equal(0, _executionContext.Object.Global.EnvironmentVariables.Count);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
[Fact]
|
[Fact]
|
||||||
@@ -83,7 +229,23 @@ namespace GitHub.Runner.Common.Tests.Worker
|
|||||||
[Trait("Category", "Worker")]
|
[Trait("Category", "Worker")]
|
||||||
public void SetEnvFileCommand_Heredoc()
|
public void SetEnvFileCommand_Heredoc()
|
||||||
{
|
{
|
||||||
base.TestHeredoc();
|
using (var hostContext = Setup())
|
||||||
|
{
|
||||||
|
var envFile = Path.Combine(_rootDirectory, "heredoc");
|
||||||
|
var content = new List<string>
|
||||||
|
{
|
||||||
|
"MY_ENV<<EOF",
|
||||||
|
"line one",
|
||||||
|
"line two",
|
||||||
|
"line three",
|
||||||
|
"EOF",
|
||||||
|
};
|
||||||
|
WriteContent(envFile, content);
|
||||||
|
_setEnvFileCommand.ProcessCommand(_executionContext.Object, envFile, null);
|
||||||
|
Assert.Equal(0, _issues.Count);
|
||||||
|
Assert.Equal(1, _executionContext.Object.Global.EnvironmentVariables.Count);
|
||||||
|
Assert.Equal($"line one{Environment.NewLine}line two{Environment.NewLine}line three", _executionContext.Object.Global.EnvironmentVariables["MY_ENV"]);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
[Fact]
|
[Fact]
|
||||||
@@ -91,7 +253,20 @@ namespace GitHub.Runner.Common.Tests.Worker
|
|||||||
[Trait("Category", "Worker")]
|
[Trait("Category", "Worker")]
|
||||||
public void SetEnvFileCommand_Heredoc_EmptyValue()
|
public void SetEnvFileCommand_Heredoc_EmptyValue()
|
||||||
{
|
{
|
||||||
base.TestHeredoc_EmptyValue();
|
using (var hostContext = Setup())
|
||||||
|
{
|
||||||
|
var envFile = Path.Combine(_rootDirectory, "heredoc");
|
||||||
|
var content = new List<string>
|
||||||
|
{
|
||||||
|
"MY_ENV<<EOF",
|
||||||
|
"EOF",
|
||||||
|
};
|
||||||
|
WriteContent(envFile, content);
|
||||||
|
_setEnvFileCommand.ProcessCommand(_executionContext.Object, envFile, null);
|
||||||
|
Assert.Equal(0, _issues.Count);
|
||||||
|
Assert.Equal(1, _executionContext.Object.Global.EnvironmentVariables.Count);
|
||||||
|
Assert.Equal(string.Empty, _executionContext.Object.Global.EnvironmentVariables["MY_ENV"]);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
[Fact]
|
[Fact]
|
||||||
@@ -99,52 +274,73 @@ namespace GitHub.Runner.Common.Tests.Worker
|
|||||||
[Trait("Category", "Worker")]
|
[Trait("Category", "Worker")]
|
||||||
public void SetEnvFileCommand_Heredoc_SkipEmptyLines()
|
public void SetEnvFileCommand_Heredoc_SkipEmptyLines()
|
||||||
{
|
{
|
||||||
base.TestHeredoc_SkipEmptyLines();
|
using (var hostContext = Setup())
|
||||||
|
{
|
||||||
|
var envFile = Path.Combine(_rootDirectory, "heredoc");
|
||||||
|
var content = new List<string>
|
||||||
|
{
|
||||||
|
string.Empty,
|
||||||
|
"MY_ENV<<EOF",
|
||||||
|
"hello",
|
||||||
|
"world",
|
||||||
|
"EOF",
|
||||||
|
string.Empty,
|
||||||
|
"MY_ENV_2<<EOF",
|
||||||
|
"HELLO",
|
||||||
|
"AGAIN",
|
||||||
|
"EOF",
|
||||||
|
string.Empty,
|
||||||
|
};
|
||||||
|
WriteContent(envFile, content);
|
||||||
|
_setEnvFileCommand.ProcessCommand(_executionContext.Object, envFile, null);
|
||||||
|
Assert.Equal(0, _issues.Count);
|
||||||
|
Assert.Equal(2, _executionContext.Object.Global.EnvironmentVariables.Count);
|
||||||
|
Assert.Equal($"hello{Environment.NewLine}world", _executionContext.Object.Global.EnvironmentVariables["MY_ENV"]);
|
||||||
|
Assert.Equal($"HELLO{Environment.NewLine}AGAIN", _executionContext.Object.Global.EnvironmentVariables["MY_ENV_2"]);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
[Fact]
|
[Fact]
|
||||||
[Trait("Level", "L0")]
|
[Trait("Level", "L0")]
|
||||||
[Trait("Category", "Worker")]
|
[Trait("Category", "Worker")]
|
||||||
public void SetEnvFileCommand_Heredoc_EdgeCases()
|
public void SetEnvFileCommand_Heredoc_SpecialCharacters()
|
||||||
{
|
{
|
||||||
base.TestHeredoc_EdgeCases();
|
using (var hostContext = Setup())
|
||||||
|
{
|
||||||
|
var envFile = Path.Combine(_rootDirectory, "heredoc");
|
||||||
|
var content = new List<string>
|
||||||
|
{
|
||||||
|
"MY_ENV<<=EOF",
|
||||||
|
"hello",
|
||||||
|
"one",
|
||||||
|
"=EOF",
|
||||||
|
"MY_ENV_2<<<EOF",
|
||||||
|
"hello",
|
||||||
|
"two",
|
||||||
|
"<EOF",
|
||||||
|
"MY_ENV_3<<EOF",
|
||||||
|
"hello",
|
||||||
|
string.Empty,
|
||||||
|
"three",
|
||||||
|
string.Empty,
|
||||||
|
"EOF",
|
||||||
|
"MY_ENV_4<<EOF",
|
||||||
|
"hello=four",
|
||||||
|
"EOF",
|
||||||
|
"MY_ENV_5<<EOF",
|
||||||
|
" EOF",
|
||||||
|
"EOF",
|
||||||
|
};
|
||||||
|
WriteContent(envFile, content);
|
||||||
|
_setEnvFileCommand.ProcessCommand(_executionContext.Object, envFile, null);
|
||||||
|
Assert.Equal(0, _issues.Count);
|
||||||
|
Assert.Equal(5, _executionContext.Object.Global.EnvironmentVariables.Count);
|
||||||
|
Assert.Equal($"hello{Environment.NewLine}one", _executionContext.Object.Global.EnvironmentVariables["MY_ENV"]);
|
||||||
|
Assert.Equal($"hello{Environment.NewLine}two", _executionContext.Object.Global.EnvironmentVariables["MY_ENV_2"]);
|
||||||
|
Assert.Equal($"hello{Environment.NewLine}{Environment.NewLine}three{Environment.NewLine}", _executionContext.Object.Global.EnvironmentVariables["MY_ENV_3"]);
|
||||||
|
Assert.Equal($"hello=four", _executionContext.Object.Global.EnvironmentVariables["MY_ENV_4"]);
|
||||||
|
Assert.Equal($" EOF", _executionContext.Object.Global.EnvironmentVariables["MY_ENV_5"]);
|
||||||
}
|
}
|
||||||
|
|
||||||
[Theory]
|
|
||||||
[Trait("Level", "L0")]
|
|
||||||
[Trait("Category", "Worker")]
|
|
||||||
// All of the following are not only valid, but quite plausible end markers.
|
|
||||||
// Most are derived straight from the example at https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#multiline-strings
|
|
||||||
#pragma warning disable format
|
|
||||||
[InlineData("=EOF")][InlineData("==EOF")][InlineData("EO=F")][InlineData("EO==F")][InlineData("EOF=")][InlineData("EOF==")]
|
|
||||||
[InlineData("<EOF")][InlineData("<<EOF")][InlineData("EO<F")][InlineData("EO<<F")][InlineData("EOF<")][InlineData("EOF<<")]
|
|
||||||
[InlineData("+EOF")][InlineData("++EOF")][InlineData("EO+F")][InlineData("EO++F")][InlineData("EOF+")][InlineData("EOF++")]
|
|
||||||
[InlineData("/EOF")][InlineData("//EOF")][InlineData("EO/F")][InlineData("EO//F")][InlineData("EOF/")][InlineData("EOF//")]
|
|
||||||
#pragma warning restore format
|
|
||||||
[InlineData("<<//++==")]
|
|
||||||
[InlineData("contrivedBase64==")]
|
|
||||||
[InlineData("khkIhPxsVA==")]
|
|
||||||
[InlineData("D+Y8zE/EOw==")]
|
|
||||||
[InlineData("wuOWG4S6FQ==")]
|
|
||||||
[InlineData("7wigCJ//iw==")]
|
|
||||||
[InlineData("uifTuYTs8K4=")]
|
|
||||||
[InlineData("M7N2ITg/04c=")]
|
|
||||||
[InlineData("Xhh+qp+Y6iM=")]
|
|
||||||
[InlineData("5tdblQajc/b+EGBZXo0w")]
|
|
||||||
[InlineData("jk/UMjIx/N0eVcQYOUfw")]
|
|
||||||
[InlineData("/n5lsw73Cwl35Hfuscdz")]
|
|
||||||
[InlineData("ZvnAEW+9O0tXp3Fmb3Oh")]
|
|
||||||
public void SetEnvFileCommand_Heredoc_EndMarkerVariations(string validEndMarker)
|
|
||||||
{
|
|
||||||
base.TestHeredoc_EndMarkerVariations(validEndMarker);
|
|
||||||
}
|
|
||||||
|
|
||||||
[Fact]
|
|
||||||
[Trait("Level", "L0")]
|
|
||||||
[Trait("Category", "Worker")]
|
|
||||||
public void SetEnvFileCommand_Heredoc_EqualBeforeMultilineIndicator()
|
|
||||||
{
|
|
||||||
base.TestHeredoc_EqualBeforeMultilineIndicator();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
[Fact]
|
[Fact]
|
||||||
@@ -152,15 +348,43 @@ namespace GitHub.Runner.Common.Tests.Worker
|
|||||||
[Trait("Category", "Worker")]
|
[Trait("Category", "Worker")]
|
||||||
public void SetEnvFileCommand_Heredoc_MissingNewLine()
|
public void SetEnvFileCommand_Heredoc_MissingNewLine()
|
||||||
{
|
{
|
||||||
base.TestHeredoc_MissingNewLine();
|
using (var hostContext = Setup())
|
||||||
|
{
|
||||||
|
var envFile = Path.Combine(_rootDirectory, "heredoc");
|
||||||
|
var content = new List<string>
|
||||||
|
{
|
||||||
|
"MY_ENV<<EOF",
|
||||||
|
"line one",
|
||||||
|
"line two",
|
||||||
|
"line three",
|
||||||
|
"EOF",
|
||||||
|
};
|
||||||
|
WriteContent(envFile, content, " ");
|
||||||
|
var ex = Assert.Throws<Exception>(() => _setEnvFileCommand.ProcessCommand(_executionContext.Object, envFile, null));
|
||||||
|
Assert.Contains("Matching delimiter not found", ex.Message);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
[Fact]
|
[Fact]
|
||||||
[Trait("Level", "L0")]
|
[Trait("Level", "L0")]
|
||||||
[Trait("Category", "Worker")]
|
[Trait("Category", "Worker")]
|
||||||
public void SetEnvFileCommand_Heredoc_MissingNewLineMultipleLines()
|
public void SetEnvFileCommand_Heredoc_MissingNewLineMultipleLinesEnv()
|
||||||
{
|
{
|
||||||
base.TestHeredoc_MissingNewLineMultipleLines();
|
using (var hostContext = Setup())
|
||||||
|
{
|
||||||
|
var envFile = Path.Combine(_rootDirectory, "heredoc");
|
||||||
|
var content = new List<string>
|
||||||
|
{
|
||||||
|
"MY_ENV<<EOF",
|
||||||
|
@"line one
|
||||||
|
line two
|
||||||
|
line three",
|
||||||
|
"EOF",
|
||||||
|
};
|
||||||
|
WriteContent(envFile, content, " ");
|
||||||
|
var ex = Assert.Throws<Exception>(() => _setEnvFileCommand.ProcessCommand(_executionContext.Object, envFile, null));
|
||||||
|
Assert.Contains("EOF marker missing new line", ex.Message);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#if OS_WINDOWS
|
#if OS_WINDOWS
|
||||||
@@ -169,9 +393,87 @@ namespace GitHub.Runner.Common.Tests.Worker
|
|||||||
[Trait("Category", "Worker")]
|
[Trait("Category", "Worker")]
|
||||||
public void SetEnvFileCommand_Heredoc_PreservesNewline()
|
public void SetEnvFileCommand_Heredoc_PreservesNewline()
|
||||||
{
|
{
|
||||||
base.TestHeredoc_PreservesNewline();
|
using (var hostContext = Setup())
|
||||||
|
{
|
||||||
|
var newline = "\n";
|
||||||
|
var envFile = Path.Combine(_rootDirectory, "heredoc");
|
||||||
|
var content = new List<string>
|
||||||
|
{
|
||||||
|
"MY_ENV<<EOF",
|
||||||
|
"hello",
|
||||||
|
"world",
|
||||||
|
"EOF",
|
||||||
|
};
|
||||||
|
WriteContent(envFile, content, newline: newline);
|
||||||
|
_setEnvFileCommand.ProcessCommand(_executionContext.Object, envFile, null);
|
||||||
|
Assert.Equal(0, _issues.Count);
|
||||||
|
Assert.Equal(1, _executionContext.Object.Global.EnvironmentVariables.Count);
|
||||||
|
Assert.Equal($"hello{newline}world", _executionContext.Object.Global.EnvironmentVariables["MY_ENV"]);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
|
private void WriteContent(
|
||||||
|
string path,
|
||||||
|
List<string> content,
|
||||||
|
string newline = null)
|
||||||
|
{
|
||||||
|
if (string.IsNullOrEmpty(newline))
|
||||||
|
{
|
||||||
|
newline = Environment.NewLine;
|
||||||
|
}
|
||||||
|
|
||||||
|
var encoding = new UTF8Encoding(true); // Emit BOM
|
||||||
|
var contentStr = string.Join(newline, content);
|
||||||
|
File.WriteAllText(path, contentStr, encoding);
|
||||||
|
}
|
||||||
|
|
||||||
|
private TestHostContext Setup([CallerMemberName] string name = "")
|
||||||
|
{
|
||||||
|
_issues = new List<Tuple<DTWebApi.Issue, string>>();
|
||||||
|
|
||||||
|
var hostContext = new TestHostContext(this, name);
|
||||||
|
|
||||||
|
// Trace
|
||||||
|
_trace = hostContext.GetTrace();
|
||||||
|
|
||||||
|
// Directory for test data
|
||||||
|
var workDirectory = hostContext.GetDirectory(WellKnownDirectory.Work);
|
||||||
|
ArgUtil.NotNullOrEmpty(workDirectory, nameof(workDirectory));
|
||||||
|
Directory.CreateDirectory(workDirectory);
|
||||||
|
_rootDirectory = Path.Combine(workDirectory, nameof(SetEnvFileCommandL0));
|
||||||
|
Directory.CreateDirectory(_rootDirectory);
|
||||||
|
|
||||||
|
// Execution context
|
||||||
|
_executionContext = new Mock<IExecutionContext>();
|
||||||
|
_executionContext.Setup(x => x.Global)
|
||||||
|
.Returns(new GlobalContext
|
||||||
|
{
|
||||||
|
EnvironmentVariables = new Dictionary<string, string>(VarUtil.EnvironmentVariableKeyComparer),
|
||||||
|
WriteDebug = true,
|
||||||
|
});
|
||||||
|
_executionContext.Setup(x => x.AddIssue(It.IsAny<DTWebApi.Issue>(), It.IsAny<ExecutionContextLogOptions>()))
|
||||||
|
.Callback((DTWebApi.Issue issue, ExecutionContextLogOptions logOptions) =>
|
||||||
|
{
|
||||||
|
var resolvedMessage = issue.Message;
|
||||||
|
if (logOptions.WriteToLog && !string.IsNullOrEmpty(logOptions.LogMessageOverride))
|
||||||
|
{
|
||||||
|
resolvedMessage = logOptions.LogMessageOverride;
|
||||||
|
}
|
||||||
|
_issues.Add(new(issue, resolvedMessage));
|
||||||
|
_trace.Info($"Issue '{issue.Type}': {resolvedMessage}");
|
||||||
|
});
|
||||||
|
_executionContext.Setup(x => x.Write(It.IsAny<string>(), It.IsAny<string>()))
|
||||||
|
.Callback((string tag, string message) =>
|
||||||
|
{
|
||||||
|
_trace.Info($"{tag}{message}");
|
||||||
|
});
|
||||||
|
|
||||||
|
// SetEnvFileCommand
|
||||||
|
_setEnvFileCommand = new SetEnvFileCommand();
|
||||||
|
_setEnvFileCommand.Initialize(hostContext);
|
||||||
|
|
||||||
|
return hostContext;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,36 +1,44 @@
|
|||||||
using System;
|
using System;
|
||||||
using System.Collections.Generic;
|
using System.Collections.Generic;
|
||||||
|
using System.Globalization;
|
||||||
|
using System.IO;
|
||||||
using System.Linq;
|
using System.Linq;
|
||||||
|
using System.Text;
|
||||||
|
using System.Threading;
|
||||||
|
using System.Threading.Tasks;
|
||||||
|
using System.Runtime.CompilerServices;
|
||||||
|
using GitHub.Runner.Common.Util;
|
||||||
|
using GitHub.Runner.Sdk;
|
||||||
using GitHub.Runner.Worker;
|
using GitHub.Runner.Worker;
|
||||||
|
using GitHub.Runner.Worker.Container;
|
||||||
|
using GitHub.Runner.Worker.Handlers;
|
||||||
using Moq;
|
using Moq;
|
||||||
using Xunit;
|
using Xunit;
|
||||||
|
using DTWebApi = GitHub.DistributedTask.WebApi;
|
||||||
|
|
||||||
namespace GitHub.Runner.Common.Tests.Worker
|
namespace GitHub.Runner.Common.Tests.Worker
|
||||||
{
|
{
|
||||||
public sealed class SetOutputFileCommandL0 : FileCommandTestBase<SetOutputFileCommand>
|
public sealed class SetOutputFileCommandL0
|
||||||
{
|
{
|
||||||
|
private Mock<IExecutionContext> _executionContext;
|
||||||
protected override IDictionary<string, string> PostSetup()
|
private List<Tuple<DTWebApi.Issue, string>> _issues;
|
||||||
{
|
private Dictionary<string, string> _outputs;
|
||||||
var outputs = new Dictionary<string, string>();
|
private string _rootDirectory;
|
||||||
var reference = string.Empty;
|
private SetOutputFileCommand _setOutputFileCommand;
|
||||||
_executionContext.Setup(x => x.SetOutput(It.IsAny<string>(), It.IsAny<string>(), out reference))
|
private ITraceWriter _trace;
|
||||||
.Callback((string name, string value, out string reference) =>
|
|
||||||
{
|
|
||||||
reference = value;
|
|
||||||
outputs[name] = value;
|
|
||||||
});
|
|
||||||
|
|
||||||
return outputs;
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
[Fact]
|
[Fact]
|
||||||
[Trait("Level", "L0")]
|
[Trait("Level", "L0")]
|
||||||
[Trait("Category", "Worker")]
|
[Trait("Category", "Worker")]
|
||||||
public void SetOutputFileCommand_DirectoryNotFound()
|
public void SetOutputFileCommand_DirectoryNotFound()
|
||||||
{
|
{
|
||||||
base.TestDirectoryNotFound();
|
using (var hostContext = Setup())
|
||||||
|
{
|
||||||
|
var stateFile = Path.Combine(_rootDirectory, "directory-not-found", "env");
|
||||||
|
_setOutputFileCommand.ProcessCommand(_executionContext.Object, stateFile, null);
|
||||||
|
Assert.Equal(0, _issues.Count);
|
||||||
|
Assert.Equal(0, _outputs.Count);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
[Fact]
|
[Fact]
|
||||||
@@ -38,7 +46,13 @@ namespace GitHub.Runner.Common.Tests.Worker
|
|||||||
[Trait("Category", "Worker")]
|
[Trait("Category", "Worker")]
|
||||||
public void SetOutputFileCommand_NotFound()
|
public void SetOutputFileCommand_NotFound()
|
||||||
{
|
{
|
||||||
base.TestNotFound();
|
using (var hostContext = Setup())
|
||||||
|
{
|
||||||
|
var stateFile = Path.Combine(_rootDirectory, "file-not-found");
|
||||||
|
_setOutputFileCommand.ProcessCommand(_executionContext.Object, stateFile, null);
|
||||||
|
Assert.Equal(0, _issues.Count);
|
||||||
|
Assert.Equal(0, _outputs.Count);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
[Fact]
|
[Fact]
|
||||||
@@ -46,7 +60,15 @@ namespace GitHub.Runner.Common.Tests.Worker
|
|||||||
[Trait("Category", "Worker")]
|
[Trait("Category", "Worker")]
|
||||||
public void SetOutputFileCommand_EmptyFile()
|
public void SetOutputFileCommand_EmptyFile()
|
||||||
{
|
{
|
||||||
base.TestEmptyFile();
|
using (var hostContext = Setup())
|
||||||
|
{
|
||||||
|
var stateFile = Path.Combine(_rootDirectory, "empty-file");
|
||||||
|
var content = new List<string>();
|
||||||
|
WriteContent(stateFile, content);
|
||||||
|
_setOutputFileCommand.ProcessCommand(_executionContext.Object, stateFile, null);
|
||||||
|
Assert.Equal(0, _issues.Count);
|
||||||
|
Assert.Equal(0, _outputs.Count);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
[Fact]
|
[Fact]
|
||||||
@@ -54,7 +76,19 @@ namespace GitHub.Runner.Common.Tests.Worker
|
|||||||
[Trait("Category", "Worker")]
|
[Trait("Category", "Worker")]
|
||||||
public void SetOutputFileCommand_Simple()
|
public void SetOutputFileCommand_Simple()
|
||||||
{
|
{
|
||||||
base.TestSimple();
|
using (var hostContext = Setup())
|
||||||
|
{
|
||||||
|
var stateFile = Path.Combine(_rootDirectory, "simple");
|
||||||
|
var content = new List<string>
|
||||||
|
{
|
||||||
|
"MY_OUTPUT=MY VALUE",
|
||||||
|
};
|
||||||
|
WriteContent(stateFile, content);
|
||||||
|
_setOutputFileCommand.ProcessCommand(_executionContext.Object, stateFile, null);
|
||||||
|
Assert.Equal(0, _issues.Count);
|
||||||
|
Assert.Equal(1, _outputs.Count);
|
||||||
|
Assert.Equal("MY VALUE", _outputs["MY_OUTPUT"]);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
[Fact]
|
[Fact]
|
||||||
@@ -62,7 +96,24 @@ namespace GitHub.Runner.Common.Tests.Worker
|
|||||||
[Trait("Category", "Worker")]
|
[Trait("Category", "Worker")]
|
||||||
public void SetOutputFileCommand_Simple_SkipEmptyLines()
|
public void SetOutputFileCommand_Simple_SkipEmptyLines()
|
||||||
{
|
{
|
||||||
base.TestSimple_SkipEmptyLines();
|
using (var hostContext = Setup())
|
||||||
|
{
|
||||||
|
var stateFile = Path.Combine(_rootDirectory, "simple");
|
||||||
|
var content = new List<string>
|
||||||
|
{
|
||||||
|
string.Empty,
|
||||||
|
"MY_OUTPUT=my value",
|
||||||
|
string.Empty,
|
||||||
|
"MY_OUTPUT_2=my second value",
|
||||||
|
string.Empty,
|
||||||
|
};
|
||||||
|
WriteContent(stateFile, content);
|
||||||
|
_setOutputFileCommand.ProcessCommand(_executionContext.Object, stateFile, null);
|
||||||
|
Assert.Equal(0, _issues.Count);
|
||||||
|
Assert.Equal(2, _outputs.Count);
|
||||||
|
Assert.Equal("my value", _outputs["MY_OUTPUT"]);
|
||||||
|
Assert.Equal("my second value", _outputs["MY_OUTPUT_2"]);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
[Fact]
|
[Fact]
|
||||||
@@ -70,7 +121,19 @@ namespace GitHub.Runner.Common.Tests.Worker
|
|||||||
[Trait("Category", "Worker")]
|
[Trait("Category", "Worker")]
|
||||||
public void SetOutputFileCommand_Simple_EmptyValue()
|
public void SetOutputFileCommand_Simple_EmptyValue()
|
||||||
{
|
{
|
||||||
base.TestSimple_EmptyValue();
|
using (var hostContext = Setup())
|
||||||
|
{
|
||||||
|
var stateFile = Path.Combine(_rootDirectory, "simple-empty-value");
|
||||||
|
var content = new List<string>
|
||||||
|
{
|
||||||
|
"MY_OUTPUT=",
|
||||||
|
};
|
||||||
|
WriteContent(stateFile, content);
|
||||||
|
_setOutputFileCommand.ProcessCommand(_executionContext.Object, stateFile, null);
|
||||||
|
Assert.Equal(0, _issues.Count);
|
||||||
|
Assert.Equal(1, _outputs.Count);
|
||||||
|
Assert.Equal(string.Empty, _outputs["MY_OUTPUT"]);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
[Fact]
|
[Fact]
|
||||||
@@ -78,7 +141,23 @@ namespace GitHub.Runner.Common.Tests.Worker
|
|||||||
[Trait("Category", "Worker")]
|
[Trait("Category", "Worker")]
|
||||||
public void SetOutputFileCommand_Simple_MultipleValues()
|
public void SetOutputFileCommand_Simple_MultipleValues()
|
||||||
{
|
{
|
||||||
base.TestSimple_MultipleValues();
|
using (var hostContext = Setup())
|
||||||
|
{
|
||||||
|
var stateFile = Path.Combine(_rootDirectory, "simple");
|
||||||
|
var content = new List<string>
|
||||||
|
{
|
||||||
|
"MY_OUTPUT=my value",
|
||||||
|
"MY_OUTPUT_2=",
|
||||||
|
"MY_OUTPUT_3=my third value",
|
||||||
|
};
|
||||||
|
WriteContent(stateFile, content);
|
||||||
|
_setOutputFileCommand.ProcessCommand(_executionContext.Object, stateFile, null);
|
||||||
|
Assert.Equal(0, _issues.Count);
|
||||||
|
Assert.Equal(3, _outputs.Count);
|
||||||
|
Assert.Equal("my value", _outputs["MY_OUTPUT"]);
|
||||||
|
Assert.Equal(string.Empty, _outputs["MY_OUTPUT_2"]);
|
||||||
|
Assert.Equal("my third value", _outputs["MY_OUTPUT_3"]);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
[Fact]
|
[Fact]
|
||||||
@@ -86,7 +165,23 @@ namespace GitHub.Runner.Common.Tests.Worker
|
|||||||
[Trait("Category", "Worker")]
|
[Trait("Category", "Worker")]
|
||||||
public void SetOutputFileCommand_Simple_SpecialCharacters()
|
public void SetOutputFileCommand_Simple_SpecialCharacters()
|
||||||
{
|
{
|
||||||
base.TestSimple_SpecialCharacters();
|
using (var hostContext = Setup())
|
||||||
|
{
|
||||||
|
var stateFile = Path.Combine(_rootDirectory, "simple");
|
||||||
|
var content = new List<string>
|
||||||
|
{
|
||||||
|
"MY_OUTPUT==abc",
|
||||||
|
"MY_OUTPUT_2=def=ghi",
|
||||||
|
"MY_OUTPUT_3=jkl=",
|
||||||
|
};
|
||||||
|
WriteContent(stateFile, content);
|
||||||
|
_setOutputFileCommand.ProcessCommand(_executionContext.Object, stateFile, null);
|
||||||
|
Assert.Equal(0, _issues.Count);
|
||||||
|
Assert.Equal(3, _outputs.Count);
|
||||||
|
Assert.Equal("=abc", _outputs["MY_OUTPUT"]);
|
||||||
|
Assert.Equal("def=ghi", _outputs["MY_OUTPUT_2"]);
|
||||||
|
Assert.Equal("jkl=", _outputs["MY_OUTPUT_3"]);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
[Fact]
|
[Fact]
|
||||||
@@ -94,7 +189,23 @@ namespace GitHub.Runner.Common.Tests.Worker
|
|||||||
[Trait("Category", "Worker")]
|
[Trait("Category", "Worker")]
|
||||||
public void SetOutputFileCommand_Heredoc()
|
public void SetOutputFileCommand_Heredoc()
|
||||||
{
|
{
|
||||||
base.TestHeredoc();
|
using (var hostContext = Setup())
|
||||||
|
{
|
||||||
|
var stateFile = Path.Combine(_rootDirectory, "heredoc");
|
||||||
|
var content = new List<string>
|
||||||
|
{
|
||||||
|
"MY_OUTPUT<<EOF",
|
||||||
|
"line one",
|
||||||
|
"line two",
|
||||||
|
"line three",
|
||||||
|
"EOF",
|
||||||
|
};
|
||||||
|
WriteContent(stateFile, content);
|
||||||
|
_setOutputFileCommand.ProcessCommand(_executionContext.Object, stateFile, null);
|
||||||
|
Assert.Equal(0, _issues.Count);
|
||||||
|
Assert.Equal(1, _outputs.Count);
|
||||||
|
Assert.Equal($"line one{Environment.NewLine}line two{Environment.NewLine}line three", _outputs["MY_OUTPUT"]);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
[Fact]
|
[Fact]
|
||||||
@@ -102,7 +213,20 @@ namespace GitHub.Runner.Common.Tests.Worker
|
|||||||
[Trait("Category", "Worker")]
|
[Trait("Category", "Worker")]
|
||||||
public void SetOutputFileCommand_Heredoc_EmptyValue()
|
public void SetOutputFileCommand_Heredoc_EmptyValue()
|
||||||
{
|
{
|
||||||
base.TestHeredoc_EmptyValue();
|
using (var hostContext = Setup())
|
||||||
|
{
|
||||||
|
var stateFile = Path.Combine(_rootDirectory, "heredoc");
|
||||||
|
var content = new List<string>
|
||||||
|
{
|
||||||
|
"MY_OUTPUT<<EOF",
|
||||||
|
"EOF",
|
||||||
|
};
|
||||||
|
WriteContent(stateFile, content);
|
||||||
|
_setOutputFileCommand.ProcessCommand(_executionContext.Object, stateFile, null);
|
||||||
|
Assert.Equal(0, _issues.Count);
|
||||||
|
Assert.Equal(1, _outputs.Count);
|
||||||
|
Assert.Equal(string.Empty, _outputs["MY_OUTPUT"]);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
[Fact]
|
[Fact]
|
||||||
@@ -110,52 +234,73 @@ namespace GitHub.Runner.Common.Tests.Worker
|
|||||||
[Trait("Category", "Worker")]
|
[Trait("Category", "Worker")]
|
||||||
public void SetOutputFileCommand_Heredoc_SkipEmptyLines()
|
public void SetOutputFileCommand_Heredoc_SkipEmptyLines()
|
||||||
{
|
{
|
||||||
base.TestHeredoc_SkipEmptyLines();
|
using (var hostContext = Setup())
|
||||||
|
{
|
||||||
|
var stateFile = Path.Combine(_rootDirectory, "heredoc");
|
||||||
|
var content = new List<string>
|
||||||
|
{
|
||||||
|
string.Empty,
|
||||||
|
"MY_OUTPUT<<EOF",
|
||||||
|
"hello",
|
||||||
|
"world",
|
||||||
|
"EOF",
|
||||||
|
string.Empty,
|
||||||
|
"MY_OUTPUT_2<<EOF",
|
||||||
|
"HELLO",
|
||||||
|
"AGAIN",
|
||||||
|
"EOF",
|
||||||
|
string.Empty,
|
||||||
|
};
|
||||||
|
WriteContent(stateFile, content);
|
||||||
|
_setOutputFileCommand.ProcessCommand(_executionContext.Object, stateFile, null);
|
||||||
|
Assert.Equal(0, _issues.Count);
|
||||||
|
Assert.Equal(2, _outputs.Count);
|
||||||
|
Assert.Equal($"hello{Environment.NewLine}world", _outputs["MY_OUTPUT"]);
|
||||||
|
Assert.Equal($"HELLO{Environment.NewLine}AGAIN", _outputs["MY_OUTPUT_2"]);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
[Fact]
|
[Fact]
|
||||||
[Trait("Level", "L0")]
|
[Trait("Level", "L0")]
|
||||||
[Trait("Category", "Worker")]
|
[Trait("Category", "Worker")]
|
||||||
public void SetOutputFileCommand_Heredoc_EdgeCases()
|
public void SetOutputFileCommand_Heredoc_SpecialCharacters()
|
||||||
{
|
{
|
||||||
base.TestHeredoc_EdgeCases();
|
using (var hostContext = Setup())
|
||||||
|
{
|
||||||
|
var stateFile = Path.Combine(_rootDirectory, "heredoc");
|
||||||
|
var content = new List<string>
|
||||||
|
{
|
||||||
|
"MY_OUTPUT<<=EOF",
|
||||||
|
"hello",
|
||||||
|
"one",
|
||||||
|
"=EOF",
|
||||||
|
"MY_OUTPUT_2<<<EOF",
|
||||||
|
"hello",
|
||||||
|
"two",
|
||||||
|
"<EOF",
|
||||||
|
"MY_OUTPUT_3<<EOF",
|
||||||
|
"hello",
|
||||||
|
string.Empty,
|
||||||
|
"three",
|
||||||
|
string.Empty,
|
||||||
|
"EOF",
|
||||||
|
"MY_OUTPUT_4<<EOF",
|
||||||
|
"hello=four",
|
||||||
|
"EOF",
|
||||||
|
"MY_OUTPUT_5<<EOF",
|
||||||
|
" EOF",
|
||||||
|
"EOF",
|
||||||
|
};
|
||||||
|
WriteContent(stateFile, content);
|
||||||
|
_setOutputFileCommand.ProcessCommand(_executionContext.Object, stateFile, null);
|
||||||
|
Assert.Equal(0, _issues.Count);
|
||||||
|
Assert.Equal(5, _outputs.Count);
|
||||||
|
Assert.Equal($"hello{Environment.NewLine}one", _outputs["MY_OUTPUT"]);
|
||||||
|
Assert.Equal($"hello{Environment.NewLine}two", _outputs["MY_OUTPUT_2"]);
|
||||||
|
Assert.Equal($"hello{Environment.NewLine}{Environment.NewLine}three{Environment.NewLine}", _outputs["MY_OUTPUT_3"]);
|
||||||
|
Assert.Equal($"hello=four", _outputs["MY_OUTPUT_4"]);
|
||||||
|
Assert.Equal($" EOF", _outputs["MY_OUTPUT_5"]);
|
||||||
}
|
}
|
||||||
|
|
||||||
[Theory]
|
|
||||||
[Trait("Level", "L0")]
|
|
||||||
[Trait("Category", "Worker")]
|
|
||||||
// All of the following are not only valid, but quite plausible end markers.
|
|
||||||
// Most are derived straight from the example at https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#multiline-strings
|
|
||||||
#pragma warning disable format
|
|
||||||
[InlineData("=EOF")][InlineData("==EOF")][InlineData("EO=F")][InlineData("EO==F")][InlineData("EOF=")][InlineData("EOF==")]
|
|
||||||
[InlineData("<EOF")][InlineData("<<EOF")][InlineData("EO<F")][InlineData("EO<<F")][InlineData("EOF<")][InlineData("EOF<<")]
|
|
||||||
[InlineData("+EOF")][InlineData("++EOF")][InlineData("EO+F")][InlineData("EO++F")][InlineData("EOF+")][InlineData("EOF++")]
|
|
||||||
[InlineData("/EOF")][InlineData("//EOF")][InlineData("EO/F")][InlineData("EO//F")][InlineData("EOF/")][InlineData("EOF//")]
|
|
||||||
#pragma warning restore format
|
|
||||||
[InlineData("<<//++==")]
|
|
||||||
[InlineData("contrivedBase64==")]
|
|
||||||
[InlineData("khkIhPxsVA==")]
|
|
||||||
[InlineData("D+Y8zE/EOw==")]
|
|
||||||
[InlineData("wuOWG4S6FQ==")]
|
|
||||||
[InlineData("7wigCJ//iw==")]
|
|
||||||
[InlineData("uifTuYTs8K4=")]
|
|
||||||
[InlineData("M7N2ITg/04c=")]
|
|
||||||
[InlineData("Xhh+qp+Y6iM=")]
|
|
||||||
[InlineData("5tdblQajc/b+EGBZXo0w")]
|
|
||||||
[InlineData("jk/UMjIx/N0eVcQYOUfw")]
|
|
||||||
[InlineData("/n5lsw73Cwl35Hfuscdz")]
|
|
||||||
[InlineData("ZvnAEW+9O0tXp3Fmb3Oh")]
|
|
||||||
public void SetOutputFileCommand_Heredoc_EndMarkerVariations(string validEndMarker)
|
|
||||||
{
|
|
||||||
base.TestHeredoc_EndMarkerVariations(validEndMarker);
|
|
||||||
}
|
|
||||||
|
|
||||||
[Fact]
|
|
||||||
[Trait("Level", "L0")]
|
|
||||||
[Trait("Category", "Worker")]
|
|
||||||
public void SetOutputFileCommand_Heredoc_EqualBeforeMultilineIndicator()
|
|
||||||
{
|
|
||||||
base.TestHeredoc_EqualBeforeMultilineIndicator();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
[Fact]
|
[Fact]
|
||||||
@@ -163,7 +308,21 @@ namespace GitHub.Runner.Common.Tests.Worker
|
|||||||
[Trait("Category", "Worker")]
|
[Trait("Category", "Worker")]
|
||||||
public void SetOutputFileCommand_Heredoc_MissingNewLine()
|
public void SetOutputFileCommand_Heredoc_MissingNewLine()
|
||||||
{
|
{
|
||||||
base.TestHeredoc_MissingNewLine();
|
using (var hostContext = Setup())
|
||||||
|
{
|
||||||
|
var stateFile = Path.Combine(_rootDirectory, "heredoc");
|
||||||
|
var content = new List<string>
|
||||||
|
{
|
||||||
|
"MY_OUTPUT<<EOF",
|
||||||
|
"line one",
|
||||||
|
"line two",
|
||||||
|
"line three",
|
||||||
|
"EOF",
|
||||||
|
};
|
||||||
|
WriteContent(stateFile, content, " ");
|
||||||
|
var ex = Assert.Throws<Exception>(() => _setOutputFileCommand.ProcessCommand(_executionContext.Object, stateFile, null));
|
||||||
|
Assert.Contains("Matching delimiter not found", ex.Message);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
[Fact]
|
[Fact]
|
||||||
@@ -171,7 +330,21 @@ namespace GitHub.Runner.Common.Tests.Worker
|
|||||||
[Trait("Category", "Worker")]
|
[Trait("Category", "Worker")]
|
||||||
public void SetOutputFileCommand_Heredoc_MissingNewLineMultipleLines()
|
public void SetOutputFileCommand_Heredoc_MissingNewLineMultipleLines()
|
||||||
{
|
{
|
||||||
base.TestHeredoc_MissingNewLineMultipleLines();
|
using (var hostContext = Setup())
|
||||||
|
{
|
||||||
|
var stateFile = Path.Combine(_rootDirectory, "heredoc");
|
||||||
|
var content = new List<string>
|
||||||
|
{
|
||||||
|
"MY_OUTPUT<<EOF",
|
||||||
|
@"line one
|
||||||
|
line two
|
||||||
|
line three",
|
||||||
|
"EOF",
|
||||||
|
};
|
||||||
|
WriteContent(stateFile, content, " ");
|
||||||
|
var ex = Assert.Throws<Exception>(() => _setOutputFileCommand.ProcessCommand(_executionContext.Object, stateFile, null));
|
||||||
|
Assert.Contains("EOF marker missing new line", ex.Message);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#if OS_WINDOWS
|
#if OS_WINDOWS
|
||||||
@@ -180,9 +353,96 @@ namespace GitHub.Runner.Common.Tests.Worker
|
|||||||
[Trait("Category", "Worker")]
|
[Trait("Category", "Worker")]
|
||||||
public void SetOutputFileCommand_Heredoc_PreservesNewline()
|
public void SetOutputFileCommand_Heredoc_PreservesNewline()
|
||||||
{
|
{
|
||||||
base.TestHeredoc_PreservesNewline();
|
using (var hostContext = Setup())
|
||||||
|
{
|
||||||
|
var newline = "\n";
|
||||||
|
var stateFile = Path.Combine(_rootDirectory, "heredoc");
|
||||||
|
var content = new List<string>
|
||||||
|
{
|
||||||
|
"MY_OUTPUT<<EOF",
|
||||||
|
"hello",
|
||||||
|
"world",
|
||||||
|
"EOF",
|
||||||
|
};
|
||||||
|
WriteContent(stateFile, content, newline: newline);
|
||||||
|
_setOutputFileCommand.ProcessCommand(_executionContext.Object, stateFile, null);
|
||||||
|
Assert.Equal(0, _issues.Count);
|
||||||
|
Assert.Equal(1, _outputs.Count);
|
||||||
|
Assert.Equal($"hello{newline}world", _outputs["MY_OUTPUT"]);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
|
private void WriteContent(
|
||||||
|
string path,
|
||||||
|
List<string> content,
|
||||||
|
string newline = null)
|
||||||
|
{
|
||||||
|
if (string.IsNullOrEmpty(newline))
|
||||||
|
{
|
||||||
|
newline = Environment.NewLine;
|
||||||
|
}
|
||||||
|
|
||||||
|
var encoding = new UTF8Encoding(true); // Emit BOM
|
||||||
|
var contentStr = string.Join(newline, content);
|
||||||
|
File.WriteAllText(path, contentStr, encoding);
|
||||||
|
}
|
||||||
|
|
||||||
|
private TestHostContext Setup([CallerMemberName] string name = "")
|
||||||
|
{
|
||||||
|
_issues = new List<Tuple<DTWebApi.Issue, string>>();
|
||||||
|
_outputs = new Dictionary<string, string>();
|
||||||
|
|
||||||
|
var hostContext = new TestHostContext(this, name);
|
||||||
|
|
||||||
|
// Trace
|
||||||
|
_trace = hostContext.GetTrace();
|
||||||
|
|
||||||
|
// Directory for test data
|
||||||
|
var workDirectory = hostContext.GetDirectory(WellKnownDirectory.Work);
|
||||||
|
ArgUtil.NotNullOrEmpty(workDirectory, nameof(workDirectory));
|
||||||
|
Directory.CreateDirectory(workDirectory);
|
||||||
|
_rootDirectory = Path.Combine(workDirectory, nameof(SetOutputFileCommandL0));
|
||||||
|
Directory.CreateDirectory(_rootDirectory);
|
||||||
|
|
||||||
|
// Execution context
|
||||||
|
_executionContext = new Mock<IExecutionContext>();
|
||||||
|
_executionContext.Setup(x => x.Global)
|
||||||
|
.Returns(new GlobalContext
|
||||||
|
{
|
||||||
|
EnvironmentVariables = new Dictionary<string, string>(VarUtil.EnvironmentVariableKeyComparer),
|
||||||
|
WriteDebug = true,
|
||||||
|
});
|
||||||
|
_executionContext.Setup(x => x.AddIssue(It.IsAny<DTWebApi.Issue>(), It.IsAny<ExecutionContextLogOptions>()))
|
||||||
|
.Callback((DTWebApi.Issue issue, ExecutionContextLogOptions logOptions) =>
|
||||||
|
{
|
||||||
|
var resolvedMessage = issue.Message;
|
||||||
|
if (logOptions.WriteToLog && !string.IsNullOrEmpty(logOptions.LogMessageOverride))
|
||||||
|
{
|
||||||
|
resolvedMessage = logOptions.LogMessageOverride;
|
||||||
|
}
|
||||||
|
_issues.Add(new(issue, resolvedMessage));
|
||||||
|
_trace.Info($"Issue '{issue.Type}': {resolvedMessage}");
|
||||||
|
});
|
||||||
|
_executionContext.Setup(x => x.Write(It.IsAny<string>(), It.IsAny<string>()))
|
||||||
|
.Callback((string tag, string message) =>
|
||||||
|
{
|
||||||
|
_trace.Info($"{tag}{message}");
|
||||||
|
});
|
||||||
|
|
||||||
|
var reference = string.Empty;
|
||||||
|
_executionContext.Setup(x => x.SetOutput(It.IsAny<string>(), It.IsAny<string>(), out reference))
|
||||||
|
.Callback((string name, string value, out string reference) =>
|
||||||
|
{
|
||||||
|
reference = value;
|
||||||
|
_outputs[name] = value;
|
||||||
|
});
|
||||||
|
|
||||||
|
// SetOutputFileCommand
|
||||||
|
_setOutputFileCommand = new SetOutputFileCommand();
|
||||||
|
_setOutputFileCommand.Initialize(hostContext);
|
||||||
|
|
||||||
|
return hostContext;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
<Project Sdk="Microsoft.NET.Sdk">
|
<Project Sdk="Microsoft.NET.Sdk">
|
||||||
<PropertyGroup>
|
<PropertyGroup>
|
||||||
<TargetFramework>net6.0</TargetFramework>
|
<TargetFramework>net6.0</TargetFramework>
|
||||||
<RuntimeIdentifiers>win-x64;win-x86;linux-x64;linux-arm64;linux-arm;osx-x64;osx-arm64;win-arm64</RuntimeIdentifiers>
|
<RuntimeIdentifiers>win-x64;win-x86;linux-x64;linux-arm64;linux-arm;osx-x64;osx-arm64;win-arm64</RuntimeIdentifiers>
|
||||||
@@ -18,7 +18,7 @@
|
|||||||
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.2.0" />
|
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.2.0" />
|
||||||
<PackageReference Include="xunit" Version="2.4.1" />
|
<PackageReference Include="xunit" Version="2.4.1" />
|
||||||
<PackageReference Include="xunit.runner.visualstudio" Version="2.4.1" />
|
<PackageReference Include="xunit.runner.visualstudio" Version="2.4.1" />
|
||||||
<PackageReference Include="System.Buffers" Version="4.3.0" />
|
<PackageReference Include="System.Buffers" Version="4.5.1" />
|
||||||
<PackageReference Include="System.Reflection.TypeExtensions" Version="4.4.0" />
|
<PackageReference Include="System.Reflection.TypeExtensions" Version="4.4.0" />
|
||||||
<PackageReference Include="System.Threading.ThreadPool" Version="4.3.0" />
|
<PackageReference Include="System.Threading.ThreadPool" Version="4.3.0" />
|
||||||
<PackageReference Include="Moq" Version="4.11.0" />
|
<PackageReference Include="Moq" Version="4.11.0" />
|
||||||
|
|||||||
@@ -22,7 +22,7 @@ DOWNLOAD_DIR="$SCRIPT_DIR/../_downloads/netcore2x"
|
|||||||
PACKAGE_DIR="$SCRIPT_DIR/../_package"
|
PACKAGE_DIR="$SCRIPT_DIR/../_package"
|
||||||
PACKAGE_TRIMS_DIR="$SCRIPT_DIR/../_package_trims"
|
PACKAGE_TRIMS_DIR="$SCRIPT_DIR/../_package_trims"
|
||||||
DOTNETSDK_ROOT="$SCRIPT_DIR/../_dotnetsdk"
|
DOTNETSDK_ROOT="$SCRIPT_DIR/../_dotnetsdk"
|
||||||
DOTNETSDK_VERSION="6.0.412"
|
DOTNETSDK_VERSION="6.0.414"
|
||||||
DOTNETSDK_INSTALLDIR="$DOTNETSDK_ROOT/$DOTNETSDK_VERSION"
|
DOTNETSDK_INSTALLDIR="$DOTNETSDK_ROOT/$DOTNETSDK_VERSION"
|
||||||
RUNNER_VERSION=$(cat runnerversion)
|
RUNNER_VERSION=$(cat runnerversion)
|
||||||
|
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
{
|
{
|
||||||
"sdk": {
|
"sdk": {
|
||||||
"version": "6.0.412"
|
"version": "6.0.414"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -1 +1 @@
|
|||||||
2.308.0
|
2.309.0
|
||||||