diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 00000000..f676f1be --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1,31 @@ +# Each rule consists of a file pattern followed by one or more owners. +# These owners are the default owners for all files that match the pattern. +# The pattern generally follows the same syntax used for .gitignore files. +# The last matching rule wins: rules lower in the file take precedence over rules higher up. +# Specify owners by GitHub username (@username), team (@org/team-name), or email address. + +# Examples: + +# The user @john-smith and the user with the email "alex@mycompany.com" +# own any JavaScript file in the repository +# *.js @john-smith alex@mycompany.com + +# @bob owns all files under the "subdir" directory at the repository root and all its subdirectories +# /subdir/ @bob + +# Members of the docs team own any file under a docs/ directory anywhere +# in the repository, but not files in nested subdirectories +# docs/* @org/docs-team + +# This file itself is covered by the .github/ rule below. + +.circleci/ @tiulpin +.github/ @tiulpin +common/ @tiulpin +orb/ @tiulpin +vsts/ @tiulpin +scan/ @tiulpin +*.yml @tiulpin +*.yaml @tiulpin +gradle/ @JetBrains/qodana-developers +plugin/ @JetBrains/qodana-developers \ No newline at end of file diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 7577691b..4a3d1e35 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -14,15 +14,12 @@ updates: schedule: interval: weekly groups: - npm-development: - dependency-type: development + dependencies: + patterns: + - "*" update-types: - minor - patch - npm-production: - dependency-type: production - update-types: - - patch - package-ecosystem: gradle directory: gradle diff --git a/.github/linters/.eslintrc.yml b/.github/linters/.eslintrc.yml new file mode 100644 index 00000000..06bd49d9 --- /dev/null +++ b/.github/linters/.eslintrc.yml @@ -0,0 +1,68 @@ +env: + node: true + es6: true + jest: true + +globals: + Atomics: readonly + SharedArrayBuffer: readonly + +ignorePatterns: + - '!.*' + - 'common/update-cli.js' + - '**/node_modules/.*' + - '**/dist/.*' + - '**/lib/.*' + - '*.js' + - '*.d.ts' + - '*.test.ts' + - '**/coverage/.*' + - '*.json' + +parser: '@typescript-eslint/parser' + +parserOptions: + ecmaVersion: 2023 + sourceType: module + project: + - './tsconfig.json' + +plugins: + - jest + - '@typescript-eslint' + +extends: + - eslint:recommended + - plugin:@typescript-eslint/recommended-type-checked + - plugin:jest/recommended + +rules: + { + 'camelcase': 'off', + 'eslint-comments/no-use': 'off', + 'eslint-comments/no-unused-disable': 'off', + 'i18n-text/no-en': 'off', + 'import/no-namespace': 'off', + 'no-console': 'off', + 'semi': 'off', + '@typescript-eslint/array-type': 'error', + '@typescript-eslint/consistent-type-assertions': 'error', + '@typescript-eslint/explicit-member-accessibility': + ['error', { 'accessibility': 'no-public' }], + '@typescript-eslint/explicit-function-return-type': + ['error', { 'allowExpressions': true }], + '@typescript-eslint/no-empty-interface': 'error', + '@typescript-eslint/no-extraneous-class': 'error', + '@typescript-eslint/no-inferrable-types': 'error', + '@typescript-eslint/no-non-null-assertion': 'warn', + '@typescript-eslint/no-unnecessary-qualifier': 'error', + '@typescript-eslint/no-useless-constructor': 'error', + '@typescript-eslint/no-var-requires': 'error', +
'@typescript-eslint/prefer-for-of': 'warn', + '@typescript-eslint/prefer-function-type': 'warn', + '@typescript-eslint/prefer-includes': 'error', + '@typescript-eslint/prefer-string-starts-ends-with': 'error', + '@typescript-eslint/promise-function-async': 'error', + '@typescript-eslint/require-array-sort-compare': 'error', + '@typescript-eslint/space-before-function-paren': 'off' + } diff --git a/.github/workflows/node.yml b/.github/workflows/node.yml index d98b3bed..9ca35e49 100644 --- a/.github/workflows/node.yml +++ b/.github/workflows/node.yml @@ -34,10 +34,12 @@ jobs: - uses: actions/checkout@v4 with: ref: ${{ github.event.pull_request.head.ref }} - - name: Set Node.js 16.x - uses: actions/setup-node@v4.1.0 + - name: Setup Node.js + id: setup-node + uses: actions/setup-node@v4 with: - node-version: 16.x + node-version-file: .node-version + cache: npm - name: Install dependencies run: npm ci && npm run build - name: Rebuild scan/dist/ @@ -89,6 +91,7 @@ jobs: runs-on: ${{ matrix.os }} needs: [lint] strategy: + fail-fast: false matrix: os: [ubuntu-latest] directory: [ @@ -384,11 +387,12 @@ jobs: filters: | azure-dev: - "vsts/vss-extension.dev.json" - - name: Set Node.js 16.x + - name: Setup Node.js if: steps.filter.outputs.azure-dev == 'true' - uses: actions/setup-node@v4.1.0 + uses: actions/setup-node@v4 with: - node-version: 16.x + node-version-file: .node-version + cache: npm - name: Install dependencies if: steps.filter.outputs.azure-dev == 'true' run: cd vsts && npm install && cd QodanaScan && npm install && npm i -g tfx-cli diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 03e407da..64562d2c 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -45,10 +45,11 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - - name: Set Node.js 16.x - uses: actions/setup-node@v4.1.0 + - name: Setup Node.js + uses: actions/setup-node@v4 with: - node-version: 16.x + node-version-file: .node-version + cache: npm - name: Install dependencies run: cd vsts && npm install && cd QodanaScan && npm install && npm i -g tfx-cli - name: Package and publish diff --git a/.node-version b/.node-version new file mode 100644 index 00000000..9944ce63 --- /dev/null +++ b/.node-version @@ -0,0 +1 @@ +21.6.2 diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index a74a050f..6927a7af 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -10,7 +10,7 @@ By participating in this project, you agree to abide by our [Code of conduct](.g Prerequisites: -- [Node.js 12.x](https://nodejs.org/) +- [Node.js](https://nodejs.org/) (the version is specified in [.node-version](.node-version)) - Java 11 Other things you might need to develop: diff --git a/common/.eslintrc.json b/common/.eslintrc.json deleted file mode 100644 index 98d0bd73..00000000 --- a/common/.eslintrc.json +++ /dev/null @@ -1,55 +0,0 @@ -{ - "plugins": ["jest", "@typescript-eslint"], - "extends": ["plugin:github/recommended"], - "parser": "@typescript-eslint/parser", - "parserOptions": { - "ecmaVersion": 9, - "sourceType": "module", - "project": "tsconfig.json" - }, - "rules": { - "i18n-text/no-en": "off", - "eslint-comments/no-use": "off", - "import/no-namespace": "off", - "no-unused-vars": "off", - "@typescript-eslint/no-unused-vars": "error", - "@typescript-eslint/explicit-member-accessibility": ["error", {"accessibility": "no-public"}], - "@typescript-eslint/no-require-imports": "error", - "@typescript-eslint/array-type": "error", - "@typescript-eslint/await-thenable": "error", -
"@typescript-eslint/ban-ts-comment": "error", - "camelcase": "off", - "@typescript-eslint/consistent-type-assertions": "error", - "@typescript-eslint/explicit-function-return-type": ["error", {"allowExpressions": true}], - "@typescript-eslint/func-call-spacing": ["error", "never"], - "@typescript-eslint/no-array-constructor": "error", - "@typescript-eslint/no-empty-interface": "error", - "@typescript-eslint/no-explicit-any": "error", - "@typescript-eslint/no-extraneous-class": "error", - "@typescript-eslint/no-for-in-array": "error", - "@typescript-eslint/no-inferrable-types": "error", - "@typescript-eslint/no-misused-new": "error", - "@typescript-eslint/no-namespace": "error", - "@typescript-eslint/no-non-null-assertion": "warn", - "@typescript-eslint/no-unnecessary-qualifier": "error", - "@typescript-eslint/no-unnecessary-type-assertion": "error", - "@typescript-eslint/no-useless-constructor": "error", - "@typescript-eslint/no-var-requires": "error", - "@typescript-eslint/prefer-for-of": "warn", - "@typescript-eslint/prefer-function-type": "warn", - "@typescript-eslint/prefer-includes": "error", - "@typescript-eslint/prefer-string-starts-ends-with": "error", - "@typescript-eslint/promise-function-async": "error", - "@typescript-eslint/require-array-sort-compare": "error", - "@typescript-eslint/restrict-plus-operands": "error", - "semi": "off", - "@typescript-eslint/semi": ["error", "never"], - "@typescript-eslint/type-annotation-spacing": "error", - "@typescript-eslint/unbound-method": "error" - }, - "env": { - "node": true, - "es6": true, - "jest/globals": true - } - } \ No newline at end of file diff --git a/common/package.json b/common/package.json index 4bbefc34..e3b87b1b 100644 --- a/common/package.json +++ b/common/package.json @@ -7,22 +7,21 @@ "url": "git+https://github.com/JetBrains/qodana-cli.git" }, "scripts": { - "lint": "eslint --fix **/*.ts" + "lint": "eslint --fix **/*.ts -c ../.github/linters/.eslintrc.yml" }, "files": [ "qodana.ts", "cli.json" ], "devDependencies": { - "@types/node": "^22.5.2", - "@typescript-eslint/parser": "^7.18.0", + "@types/node": "^22.10.1", + "@typescript-eslint/parser": "^8.18.0", "eslint": "^8.57.1", - "eslint-plugin-github": "^5.0.1", - "eslint-plugin-jest": "^28.8.2", + "eslint-plugin-jest": "^28.9.0", "jest": "^29.7.0", "js-yaml": "^4.1.0", - "prettier": "3.3.3", + "prettier": "3.4.2", "ts-jest": "^29.2.5", - "typescript": "^5.5.4" + "typescript": "^5.7.2" } } diff --git a/common/qodana.ts b/common/qodana.ts index 4751280c..a7c0734a 100644 --- a/common/qodana.ts +++ b/common/qodana.ts @@ -95,7 +95,6 @@ export function getQodanaUrl( return `https://github.com/JetBrains/qodana-cli/releases/download/${cli_version}/qodana_${platform}_${arch}.${archive}` } -// eslint-disable-next-line no-shadow -- shadowing is intentional here (ESLint bug) export enum QodanaExitCode { Success = 0, FailThreshold = 255 @@ -225,6 +224,8 @@ export interface Coverage { freshCoverage: number freshLines: number freshCoveredLines: number + totalCoverageThreshold: number + freshCoverageThreshold: number } /** @@ -250,7 +251,16 @@ export function getCoverageFromSarif(sarifPath: string): Coverage { freshLines: sarifContents.runs[0].properties['coverage']['freshLines'] || 0, freshCoveredLines: - sarifContents.runs[0].properties['coverage']['freshCoveredLines'] || 0 + sarifContents.runs[0].properties['coverage']['freshCoveredLines'] || + 0, + totalCoverageThreshold: + sarifContents.runs[0].properties['qodanaFailureConditions']?.[ + 'testCoverageThresholds' + 
]?.['totalCoverage'] || COVERAGE_THRESHOLD, + freshCoverageThreshold: + sarifContents.runs[0].properties['qodanaFailureConditions']?.[ + 'testCoverageThresholds' + ]?.['freshCoverage'] || COVERAGE_THRESHOLD } } else { return { @@ -259,7 +269,9 @@ export function getCoverageFromSarif(sarifPath: string): Coverage { totalCoveredLines: 0, freshCoverage: 0, freshLines: 0, - freshCoveredLines: 0 + freshCoveredLines: 0, + totalCoverageThreshold: COVERAGE_THRESHOLD, + freshCoverageThreshold: COVERAGE_THRESHOLD } } } diff --git a/package-lock.json b/package-lock.json index ea07b5b7..9c7a1cdd 100644 --- a/package-lock.json +++ b/package-lock.json @@ -14,14 +14,14 @@ "vsts" ], "devDependencies": { + "@typescript-eslint/eslint-plugin": "^8.18.0", "eslint": "^8.57.1", - "eslint-plugin-github": "^5.0.1", - "eslint-plugin-jest": "^28.8.2", + "eslint-plugin-jest": "^28.9.0", "eslint-plugin-prettier": "^5.2.1", - "prettier": "3.3.3", + "prettier": "3.4.2", "prettier-eslint": "^16.3.0", "ts-node": "^10.9.2", - "typescript": "^5.5.4" + "typescript": "^5.7.2" } }, "common": { @@ -29,16 +29,15 @@ "version": "1.0.0", "license": "Apache-2.0", "devDependencies": { - "@types/node": "^22.5.2", - "@typescript-eslint/parser": "^7.18.0", + "@types/node": "^22.10.1", + "@typescript-eslint/parser": "^8.18.0", "eslint": "^8.57.1", - "eslint-plugin-github": "^5.0.1", - "eslint-plugin-jest": "^28.8.2", + "eslint-plugin-jest": "^28.9.0", "jest": "^29.7.0", "js-yaml": "^4.1.0", - "prettier": "3.3.3", + "prettier": "3.4.2", "ts-jest": "^29.2.5", - "typescript": "^5.5.4" + "typescript": "^5.7.2" } }, "node_modules/@aashutoshrathi/word-wrap": { @@ -191,11 +190,11 @@ "integrity": "sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ==" }, "node_modules/@actions/cache": { - "version": "3.2.4", - "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.2.4.tgz", - "integrity": "sha512-RuHnwfcDagtX+37s0ZWy7clbOfnZ7AlDJQ7k/9rzt2W4Gnwde3fa/qjSjVuz4vLcLIpc7fUob27CMrqiWZytYA==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-4.0.0.tgz", + "integrity": "sha512-WIuxjnZ44lNYtIS4fqSaYvF00hORdy3cSin+jx8xNgBVGWnNIAiCBHjlwusVQlcgExoQC9pHXGrDsZyZr7rCDQ==", "dependencies": { - "@actions/core": "^1.10.0", + "@actions/core": "^1.11.1", "@actions/exec": "^1.0.1", "@actions/glob": "^0.1.0", "@actions/http-client": "^2.1.1", @@ -203,8 +202,9 @@ "@azure/abort-controller": "^1.1.0", "@azure/ms-rest-js": "^2.6.0", "@azure/storage-blob": "^12.13.0", + "@protobuf-ts/plugin": "^2.9.4", "semver": "^6.3.1", - "uuid": "^3.3.3" + "twirp-ts": "^2.5.0" } }, "node_modules/@actions/cache/node_modules/semver": { @@ -216,20 +216,12 @@ } }, "node_modules/@actions/core": { - "version": "1.10.1", - "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.10.1.tgz", - "integrity": "sha512-3lBR9EDAY+iYIpTnTIXmWcNbX3T2kCkAEQGIQx4NVQ0575nk2k3GRZDTPQG+vVtS2izSLmINlxXf0uLtnrTP+g==", + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.11.1.tgz", + "integrity": "sha512-hXJCSrkwfA46Vd9Z3q4cpEpHB1rL5NG04+/rbqW9d3+CSvtB1tYe8UTpAlixa1vj0m/ULglfEK2UKxMGxCxv5A==", "dependencies": { - "@actions/http-client": "^2.0.1", - "uuid": "^8.3.2" - } - }, - "node_modules/@actions/core/node_modules/uuid": { - "version": "8.3.2", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", - "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", - "bin": { - "uuid": "dist/bin/uuid" + "@actions/exec": 
"^1.1.1", + "@actions/http-client": "^2.0.1" } }, "node_modules/@actions/exec": { @@ -1201,18 +1193,6 @@ "@babel/core": "^7.0.0-0" } }, - "node_modules/@babel/runtime": { - "version": "7.24.4", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.24.4.tgz", - "integrity": "sha512-dkxf7+hn8mFBwKjs9bvBlArzLVxVbS8usaPUDd5p2a9JCL9tB8OaOVN1isD4+Xyk4ns89/xeOmbQvgdK7IIVdA==", - "dev": true, - "dependencies": { - "regenerator-runtime": "^0.14.0" - }, - "engines": { - "node": ">=6.9.0" - } - }, "node_modules/@babel/template": { "version": "7.24.0", "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.24.0.tgz", @@ -1290,9 +1270,9 @@ } }, "node_modules/@esbuild/aix-ppc64": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.23.1.tgz", - "integrity": "sha512-6VhYk1diRqrhBAqpJEdjASR/+WVRtfjpqKuNw11cLiaWpAT/Uu+nokB+UJnevzy/P9C/ty6AOe0dwueMrGh/iQ==", + "version": "0.24.0", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.24.0.tgz", + "integrity": "sha512-WtKdFM7ls47zkKHFVzMz8opM7LkcsIp9amDUBIAWirg70RM71WRSjdILPsY5Uv1D42ZpUfaPILDlfactHgsRkw==", "cpu": [ "ppc64" ], @@ -1306,9 +1286,9 @@ } }, "node_modules/@esbuild/android-arm": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.23.1.tgz", - "integrity": "sha512-uz6/tEy2IFm9RYOyvKl88zdzZfwEfKZmnX9Cj1BHjeSGNuGLuMD1kR8y5bteYmwqKm1tj8m4cb/aKEorr6fHWQ==", + "version": "0.24.0", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.24.0.tgz", + "integrity": "sha512-arAtTPo76fJ/ICkXWetLCc9EwEHKaeya4vMrReVlEIUCAUncH7M4bhMQ+M9Vf+FFOZJdTNMXNBrWwW+OXWpSew==", "cpu": [ "arm" ], @@ -1322,9 +1302,9 @@ } }, "node_modules/@esbuild/android-arm64": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.23.1.tgz", - "integrity": "sha512-xw50ipykXcLstLeWH7WRdQuysJqejuAGPd30vd1i5zSyKK3WE+ijzHmLKxdiCMtH1pHz78rOg0BKSYOSB/2Khw==", + "version": "0.24.0", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.24.0.tgz", + "integrity": "sha512-Vsm497xFM7tTIPYK9bNTYJyF/lsP590Qc1WxJdlB6ljCbdZKU9SY8i7+Iin4kyhV/KV5J2rOKsBQbB77Ab7L/w==", "cpu": [ "arm64" ], @@ -1338,9 +1318,9 @@ } }, "node_modules/@esbuild/android-x64": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.23.1.tgz", - "integrity": "sha512-nlN9B69St9BwUoB+jkyU090bru8L0NA3yFvAd7k8dNsVH8bi9a8cUAUSEcEEgTp2z3dbEDGJGfP6VUnkQnlReg==", + "version": "0.24.0", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.24.0.tgz", + "integrity": "sha512-t8GrvnFkiIY7pa7mMgJd7p8p8qqYIz1NYiAoKc75Zyv73L3DZW++oYMSHPRarcotTKuSs6m3hTOa5CKHaS02TQ==", "cpu": [ "x64" ], @@ -1354,9 +1334,9 @@ } }, "node_modules/@esbuild/darwin-arm64": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.23.1.tgz", - "integrity": "sha512-YsS2e3Wtgnw7Wq53XXBLcV6JhRsEq8hkfg91ESVadIrzr9wO6jJDMZnCQbHm1Guc5t/CdDiFSSfWP58FNuvT3Q==", + "version": "0.24.0", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.24.0.tgz", + "integrity": "sha512-CKyDpRbK1hXwv79soeTJNHb5EiG6ct3efd/FTPdzOWdbZZfGhpbcqIpiD0+vwmpu0wTIL97ZRPZu8vUt46nBSw==", "cpu": [ "arm64" ], @@ -1370,9 +1350,9 @@ } }, "node_modules/@esbuild/darwin-x64": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.23.1.tgz", - "integrity": 
"sha512-aClqdgTDVPSEGgoCS8QDG37Gu8yc9lTHNAQlsztQ6ENetKEO//b8y31MMu2ZaPbn4kVsIABzVLXYLhCGekGDqw==", + "version": "0.24.0", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.24.0.tgz", + "integrity": "sha512-rgtz6flkVkh58od4PwTRqxbKH9cOjaXCMZgWD905JOzjFKW+7EiUObfd/Kav+A6Gyud6WZk9w+xu6QLytdi2OA==", "cpu": [ "x64" ], @@ -1386,9 +1366,9 @@ } }, "node_modules/@esbuild/freebsd-arm64": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.23.1.tgz", - "integrity": "sha512-h1k6yS8/pN/NHlMl5+v4XPfikhJulk4G+tKGFIOwURBSFzE8bixw1ebjluLOjfwtLqY0kewfjLSrO6tN2MgIhA==", + "version": "0.24.0", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.24.0.tgz", + "integrity": "sha512-6Mtdq5nHggwfDNLAHkPlyLBpE5L6hwsuXZX8XNmHno9JuL2+bg2BX5tRkwjyfn6sKbxZTq68suOjgWqCicvPXA==", "cpu": [ "arm64" ], @@ -1402,9 +1382,9 @@ } }, "node_modules/@esbuild/freebsd-x64": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.23.1.tgz", - "integrity": "sha512-lK1eJeyk1ZX8UklqFd/3A60UuZ/6UVfGT2LuGo3Wp4/z7eRTRYY+0xOu2kpClP+vMTi9wKOfXi2vjUpO1Ro76g==", + "version": "0.24.0", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.24.0.tgz", + "integrity": "sha512-D3H+xh3/zphoX8ck4S2RxKR6gHlHDXXzOf6f/9dbFt/NRBDIE33+cVa49Kil4WUjxMGW0ZIYBYtaGCa2+OsQwQ==", "cpu": [ "x64" ], @@ -1418,9 +1398,9 @@ } }, "node_modules/@esbuild/linux-arm": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.23.1.tgz", - "integrity": "sha512-CXXkzgn+dXAPs3WBwE+Kvnrf4WECwBdfjfeYHpMeVxWE0EceB6vhWGShs6wi0IYEqMSIzdOF1XjQ/Mkm5d7ZdQ==", + "version": "0.24.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.24.0.tgz", + "integrity": "sha512-gJKIi2IjRo5G6Glxb8d3DzYXlxdEj2NlkixPsqePSZMhLudqPhtZ4BUrpIuTjJYXxvF9njql+vRjB2oaC9XpBw==", "cpu": [ "arm" ], @@ -1434,9 +1414,9 @@ } }, "node_modules/@esbuild/linux-arm64": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.23.1.tgz", - "integrity": "sha512-/93bf2yxencYDnItMYV/v116zff6UyTjo4EtEQjUBeGiVpMmffDNUyD9UN2zV+V3LRV3/on4xdZ26NKzn6754g==", + "version": "0.24.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.24.0.tgz", + "integrity": "sha512-TDijPXTOeE3eaMkRYpcy3LarIg13dS9wWHRdwYRnzlwlA370rNdZqbcp0WTyyV/k2zSxfko52+C7jU5F9Tfj1g==", "cpu": [ "arm64" ], @@ -1450,9 +1430,9 @@ } }, "node_modules/@esbuild/linux-ia32": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.23.1.tgz", - "integrity": "sha512-VTN4EuOHwXEkXzX5nTvVY4s7E/Krz7COC8xkftbbKRYAl96vPiUssGkeMELQMOnLOJ8k3BY1+ZY52tttZnHcXQ==", + "version": "0.24.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.24.0.tgz", + "integrity": "sha512-K40ip1LAcA0byL05TbCQ4yJ4swvnbzHscRmUilrmP9Am7//0UjPreh4lpYzvThT2Quw66MhjG//20mrufm40mA==", "cpu": [ "ia32" ], @@ -1466,9 +1446,9 @@ } }, "node_modules/@esbuild/linux-loong64": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.23.1.tgz", - "integrity": "sha512-Vx09LzEoBa5zDnieH8LSMRToj7ir/Jeq0Gu6qJ/1GcBq9GkfoEAoXvLiW1U9J1qE/Y/Oyaq33w5p2ZWrNNHNEw==", + "version": "0.24.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.24.0.tgz", + "integrity": 
"sha512-0mswrYP/9ai+CU0BzBfPMZ8RVm3RGAN/lmOMgW4aFUSOQBjA31UP8Mr6DDhWSuMwj7jaWOT0p0WoZ6jeHhrD7g==", "cpu": [ "loong64" ], @@ -1482,9 +1462,9 @@ } }, "node_modules/@esbuild/linux-mips64el": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.23.1.tgz", - "integrity": "sha512-nrFzzMQ7W4WRLNUOU5dlWAqa6yVeI0P78WKGUo7lg2HShq/yx+UYkeNSE0SSfSure0SqgnsxPvmAUu/vu0E+3Q==", + "version": "0.24.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.24.0.tgz", + "integrity": "sha512-hIKvXm0/3w/5+RDtCJeXqMZGkI2s4oMUGj3/jM0QzhgIASWrGO5/RlzAzm5nNh/awHE0A19h/CvHQe6FaBNrRA==", "cpu": [ "mips64el" ], @@ -1498,9 +1478,9 @@ } }, "node_modules/@esbuild/linux-ppc64": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.23.1.tgz", - "integrity": "sha512-dKN8fgVqd0vUIjxuJI6P/9SSSe/mB9rvA98CSH2sJnlZ/OCZWO1DJvxj8jvKTfYUdGfcq2dDxoKaC6bHuTlgcw==", + "version": "0.24.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.24.0.tgz", + "integrity": "sha512-HcZh5BNq0aC52UoocJxaKORfFODWXZxtBaaZNuN3PUX3MoDsChsZqopzi5UupRhPHSEHotoiptqikjN/B77mYQ==", "cpu": [ "ppc64" ], @@ -1514,9 +1494,9 @@ } }, "node_modules/@esbuild/linux-riscv64": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.23.1.tgz", - "integrity": "sha512-5AV4Pzp80fhHL83JM6LoA6pTQVWgB1HovMBsLQ9OZWLDqVY8MVobBXNSmAJi//Csh6tcY7e7Lny2Hg1tElMjIA==", + "version": "0.24.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.24.0.tgz", + "integrity": "sha512-bEh7dMn/h3QxeR2KTy1DUszQjUrIHPZKyO6aN1X4BCnhfYhuQqedHaa5MxSQA/06j3GpiIlFGSsy1c7Gf9padw==", "cpu": [ "riscv64" ], @@ -1530,9 +1510,9 @@ } }, "node_modules/@esbuild/linux-s390x": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.23.1.tgz", - "integrity": "sha512-9ygs73tuFCe6f6m/Tb+9LtYxWR4c9yg7zjt2cYkjDbDpV/xVn+68cQxMXCjUpYwEkze2RcU/rMnfIXNRFmSoDw==", + "version": "0.24.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.24.0.tgz", + "integrity": "sha512-ZcQ6+qRkw1UcZGPyrCiHHkmBaj9SiCD8Oqd556HldP+QlpUIe2Wgn3ehQGVoPOvZvtHm8HPx+bH20c9pvbkX3g==", "cpu": [ "s390x" ], @@ -1546,9 +1526,9 @@ } }, "node_modules/@esbuild/linux-x64": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.23.1.tgz", - "integrity": "sha512-EV6+ovTsEXCPAp58g2dD68LxoP/wK5pRvgy0J/HxPGB009omFPv3Yet0HiaqvrIrgPTBuC6wCH1LTOY91EO5hQ==", + "version": "0.24.0", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.24.0.tgz", + "integrity": "sha512-vbutsFqQ+foy3wSSbmjBXXIJ6PL3scghJoM8zCL142cGaZKAdCZHyf+Bpu/MmX9zT9Q0zFBVKb36Ma5Fzfa8xA==", "cpu": [ "x64" ], @@ -1562,9 +1542,9 @@ } }, "node_modules/@esbuild/netbsd-x64": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.23.1.tgz", - "integrity": "sha512-aevEkCNu7KlPRpYLjwmdcuNz6bDFiE7Z8XC4CPqExjTvrHugh28QzUXVOZtiYghciKUacNktqxdpymplil1beA==", + "version": "0.24.0", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.24.0.tgz", + "integrity": "sha512-hjQ0R/ulkO8fCYFsG0FZoH+pWgTTDreqpqY7UnQntnaKv95uP5iW3+dChxnx7C3trQQU40S+OgWhUVwCjVFLvg==", "cpu": [ "x64" ], @@ -1578,9 +1558,9 @@ } }, "node_modules/@esbuild/openbsd-arm64": { - "version": "0.23.1", - "resolved": 
"https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.23.1.tgz", - "integrity": "sha512-3x37szhLexNA4bXhLrCC/LImN/YtWis6WXr1VESlfVtVeoFJBRINPJ3f0a/6LV8zpikqoUg4hyXw0sFBt5Cr+Q==", + "version": "0.24.0", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.24.0.tgz", + "integrity": "sha512-MD9uzzkPQbYehwcN583yx3Tu5M8EIoTD+tUgKF982WYL9Pf5rKy9ltgD0eUgs8pvKnmizxjXZyLt0z6DC3rRXg==", "cpu": [ "arm64" ], @@ -1594,9 +1574,9 @@ } }, "node_modules/@esbuild/openbsd-x64": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.23.1.tgz", - "integrity": "sha512-aY2gMmKmPhxfU+0EdnN+XNtGbjfQgwZj43k8G3fyrDM/UdZww6xrWxmDkuz2eCZchqVeABjV5BpildOrUbBTqA==", + "version": "0.24.0", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.24.0.tgz", + "integrity": "sha512-4ir0aY1NGUhIC1hdoCzr1+5b43mw99uNwVzhIq1OY3QcEwPDO3B7WNXBzaKY5Nsf1+N11i1eOfFcq+D/gOS15Q==", "cpu": [ "x64" ], @@ -1610,9 +1590,9 @@ } }, "node_modules/@esbuild/sunos-x64": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.23.1.tgz", - "integrity": "sha512-RBRT2gqEl0IKQABT4XTj78tpk9v7ehp+mazn2HbUeZl1YMdaGAQqhapjGTCe7uw7y0frDi4gS0uHzhvpFuI1sA==", + "version": "0.24.0", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.24.0.tgz", + "integrity": "sha512-jVzdzsbM5xrotH+W5f1s+JtUy1UWgjU0Cf4wMvffTB8m6wP5/kx0KiaLHlbJO+dMgtxKV8RQ/JvtlFcdZ1zCPA==", "cpu": [ "x64" ], @@ -1626,9 +1606,9 @@ } }, "node_modules/@esbuild/win32-arm64": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.23.1.tgz", - "integrity": "sha512-4O+gPR5rEBe2FpKOVyiJ7wNDPA8nGzDuJ6gN4okSA1gEOYZ67N8JPk58tkWtdtPeLz7lBnY6I5L3jdsr3S+A6A==", + "version": "0.24.0", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.24.0.tgz", + "integrity": "sha512-iKc8GAslzRpBytO2/aN3d2yb2z8XTVfNV0PjGlCxKo5SgWmNXx82I/Q3aG1tFfS+A2igVCY97TJ8tnYwpUWLCA==", "cpu": [ "arm64" ], @@ -1642,9 +1622,9 @@ } }, "node_modules/@esbuild/win32-ia32": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.23.1.tgz", - "integrity": "sha512-BcaL0Vn6QwCwre3Y717nVHZbAa4UBEigzFm6VdsVdT/MbZ38xoj1X9HPkZhbmaBGUD1W8vxAfffbDe8bA6AKnQ==", + "version": "0.24.0", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.24.0.tgz", + "integrity": "sha512-vQW36KZolfIudCcTnaTpmLQ24Ha1RjygBo39/aLkM2kmjkWmZGEJ5Gn9l5/7tzXA42QGIoWbICfg6KLLkIw6yw==", "cpu": [ "ia32" ], @@ -1658,9 +1638,9 @@ } }, "node_modules/@esbuild/win32-x64": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.23.1.tgz", - "integrity": "sha512-BHpFFeslkWrXWyUPnbKm+xYYVYruCinGcftSBaa8zoF9hZO4BcSCFUvHVTtzpIY6YzUnYtuEhZ+C9iEXjxnasg==", + "version": "0.24.0", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.24.0.tgz", + "integrity": "sha512-7IAFPrjSQIJrGsK6flwg7NFmwBoSTyF3rl7If0hNUFQU4ilTsEPL6GuMuU9BfIWVVGuRnuIidkSMC+c0Otu8IA==", "cpu": [ "x64" ], @@ -1737,12 +1717,6 @@ "node": ">=14" } }, - "node_modules/@github/browserslist-config": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@github/browserslist-config/-/browserslist-config-1.0.0.tgz", - "integrity": "sha512-gIhjdJp/c2beaIWWIlsXdqXVRUz3r2BxBCpfz/F3JXHvSAQ1paMYjLH+maEATtENg+k5eLV7gA+9yPp762ieuw==", - "dev": true - }, "node_modules/@humanwhocodes/config-array": { "version": "0.13.0", "resolved": 
"https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.13.0.tgz", @@ -2452,16 +2426,16 @@ "integrity": "sha512-QBhVjcUa9W7Wwhm6DBFu6ZZ+1/t/oYxqc2tp81Pi41YNuJinbFRx8B133qVOrAaBbF7D/m0Et6f9/pZt9Rc+tg==" }, "node_modules/@octokit/openapi-webhooks-types": { - "version": "8.3.0", - "resolved": "https://registry.npmjs.org/@octokit/openapi-webhooks-types/-/openapi-webhooks-types-8.3.0.tgz", - "integrity": "sha512-vKLsoR4xQxg4Z+6rU/F65ItTUz/EXbD+j/d4mlq2GW8TsA4Tc8Kdma2JTAAJ5hrKWUQzkR/Esn2fjsqiVRYaQg==" + "version": "8.5.1", + "resolved": "https://registry.npmjs.org/@octokit/openapi-webhooks-types/-/openapi-webhooks-types-8.5.1.tgz", + "integrity": "sha512-i3h1b5zpGSB39ffBbYdSGuAd0NhBAwPyA3QV3LYi/lx4lsbZiu7u2UHgXVUR6EpvOI8REOuVh1DZTRfHoJDvuQ==" }, "node_modules/@octokit/plugin-paginate-rest": { - "version": "11.3.5", - "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-11.3.5.tgz", - "integrity": "sha512-cgwIRtKrpwhLoBi0CUNuY83DPGRMaWVjqVI/bGKsLJ4PzyWZNaEmhHroI2xlrVXkk6nFv0IsZpOp+ZWSWUS2AQ==", + "version": "11.3.6", + "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-11.3.6.tgz", + "integrity": "sha512-zcvqqf/+TicbTCa/Z+3w4eBJcAxCFymtc0UAIsR3dEVoNilWld4oXdscQ3laXamTszUZdusw97K8+DrbFiOwjw==", "dependencies": { - "@octokit/types": "^13.6.0" + "@octokit/types": "^13.6.2" }, "engines": { "node": ">= 18" @@ -2679,19 +2653,19 @@ "integrity": "sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ==" }, "node_modules/@octokit/types": { - "version": "13.6.1", - "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.6.1.tgz", - "integrity": "sha512-PHZE9Z+kWXb23Ndik8MKPirBPziOc0D2/3KH1P+6jK5nGWe96kadZuE4jev2/Jq7FvIfTlT2Ltg8Fv2x1v0a5g==", + "version": "13.6.2", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.6.2.tgz", + "integrity": "sha512-WpbZfZUcZU77DrSW4wbsSgTPfKcp286q3ItaIgvSbBpZJlu6mnYXAkjZz6LVZPXkEvLIM8McanyZejKTYUHipA==", "dependencies": { "@octokit/openapi-types": "^22.2.0" } }, "node_modules/@octokit/webhooks": { - "version": "13.3.0", - "resolved": "https://registry.npmjs.org/@octokit/webhooks/-/webhooks-13.3.0.tgz", - "integrity": "sha512-TUkJLtI163Bz5+JK0O+zDkQpn4gKwN+BovclUvCj6pI/6RXrFqQvUMRS2M+Rt8Rv0qR3wjoMoOPmpJKeOh0nBg==", + "version": "13.4.1", + "resolved": "https://registry.npmjs.org/@octokit/webhooks/-/webhooks-13.4.1.tgz", + "integrity": "sha512-I5YPUtfWidh+OzyrlDahJsUpkpGK0kCTmDRbuqGmlCUzOtxdEkX3R4d6Cd08ijQYwkVXQJanPdbKuZBeV2NMaA==", "dependencies": { - "@octokit/openapi-webhooks-types": "8.3.0", + "@octokit/openapi-webhooks-types": "8.5.1", "@octokit/request-error": "^6.0.1", "@octokit/webhooks-methods": "^5.0.0" }, @@ -2970,33 +2944,29 @@ } }, "node_modules/@types/jest": { - "version": "29.5.12", - "resolved": "https://registry.npmjs.org/@types/jest/-/jest-29.5.12.tgz", - "integrity": "sha512-eDC8bTvT/QhYdxJAulQikueigY5AsdBRH2yDKW3yveW7svY3+DzN84/2NUgkw10RTiJbWqZrTtoGVdYlvFJdLw==", + "version": "29.5.14", + "resolved": "https://registry.npmjs.org/@types/jest/-/jest-29.5.14.tgz", + "integrity": "sha512-ZN+4sdnLUbo8EVvVc2ao0GFW6oVrQRPn4K2lglySj7APvSrgzxHiNNK99us4WDMi57xxA2yggblIAMNhXOotLQ==", "dev": true, "dependencies": { "expect": "^29.0.0", "pretty-format": "^29.0.0" } }, - "node_modules/@types/json-schema": { - "version": "7.0.15", - "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz", - "integrity": 
"sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==", - "dev": true - }, "node_modules/@types/json5": { "version": "0.0.29", "resolved": "https://registry.npmjs.org/@types/json5/-/json5-0.0.29.tgz", "integrity": "sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==", - "dev": true + "dev": true, + "optional": true, + "peer": true }, "node_modules/@types/node": { - "version": "22.5.2", - "resolved": "https://registry.npmjs.org/@types/node/-/node-22.5.2.tgz", - "integrity": "sha512-acJsPTEqYqulZS/Yp/S3GgeE6GZ0qYODUR8aVr/DkhHQ8l9nd4j5x1/ZJy9/gHrRlFMqkO6i0I3E27Alu4jjPg==", + "version": "22.10.1", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.10.1.tgz", + "integrity": "sha512-qKgsUwfHZV2WCWLAnVP1JqnpE6Im6h3Y0+fYgMTasNQ7V++CBX5OT1as0g0f+OyubbFqhf6XVNIsmN4IIhEgGQ==", "dependencies": { - "undici-types": "~6.19.2" + "undici-types": "~6.20.0" } }, "node_modules/@types/node-fetch": { @@ -3032,12 +3002,6 @@ "resolved": "https://registry.npmjs.org/@types/sarif/-/sarif-2.1.7.tgz", "integrity": "sha512-kRz0VEkJqWLf1LLVN4pT1cg1Z9wAuvI6L97V3m2f5B76Tg8d413ddvLBPTEHAZJlnn4XSvu0FkZtViCQGVyrXQ==" }, - "node_modules/@types/semver": { - "version": "7.5.8", - "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.5.8.tgz", - "integrity": "sha512-I8EUhyrgfLrcTkzV3TSsGyl1tSuPrEDzr0yd5m90UgNxQkyDXULk3b6MlQqTCpZpNtWe1K0hzclnZkTcLBe2UQ==", - "dev": true - }, "node_modules/@types/stack-utils": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/@types/stack-utils/-/stack-utils-2.0.3.tgz", @@ -3073,178 +3037,69 @@ "dev": true }, "node_modules/@typescript-eslint/eslint-plugin": { - "version": "7.7.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-7.7.1.tgz", - "integrity": "sha512-KwfdWXJBOviaBVhxO3p5TJiLpNuh2iyXyjmWN0f1nU87pwyvfS0EmjC6ukQVYVFJd/K1+0NWGPDXiyEyQorn0Q==", + "version": "8.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.18.0.tgz", + "integrity": "sha512-NR2yS7qUqCL7AIxdJUQf2MKKNDVNaig/dEB0GBLU7D+ZdHgK1NoH/3wsgO3OnPVipn51tG3MAwaODEGil70WEw==", "dev": true, "dependencies": { "@eslint-community/regexpp": "^4.10.0", - "@typescript-eslint/scope-manager": "7.7.1", - "@typescript-eslint/type-utils": "7.7.1", - "@typescript-eslint/utils": "7.7.1", - "@typescript-eslint/visitor-keys": "7.7.1", - "debug": "^4.3.4", + "@typescript-eslint/scope-manager": "8.18.0", + "@typescript-eslint/type-utils": "8.18.0", + "@typescript-eslint/utils": "8.18.0", + "@typescript-eslint/visitor-keys": "8.18.0", "graphemer": "^1.4.0", "ignore": "^5.3.1", "natural-compare": "^1.4.0", - "semver": "^7.6.0", "ts-api-utils": "^1.3.0" }, "engines": { - "node": "^18.18.0 || >=20.0.0" + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/typescript-eslint" }, "peerDependencies": { - "@typescript-eslint/parser": "^7.0.0", - "eslint": "^8.56.0" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } + "@typescript-eslint/parser": "^8.0.0 || ^8.0.0-alpha.0", + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <5.8.0" } }, "node_modules/@typescript-eslint/parser": { - "version": "7.18.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-7.18.0.tgz", - "integrity": "sha512-4Z+L8I2OqhZV8qA132M4wNL30ypZGYOQVBfMgxDH/K5UX0PNqTu1c6za9ST5r9+tavvHiTWmBnKzpCJ/GlVFtg==", + "version": "8.18.0", + "resolved": 
"https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.18.0.tgz", + "integrity": "sha512-hgUZ3kTEpVzKaK3uNibExUYm6SKKOmTU2BOxBSvOYwtJEPdVQ70kZJpPjstlnhCHcuc2WGfSbpKlb/69ttyN5Q==", "dev": true, "dependencies": { - "@typescript-eslint/scope-manager": "7.18.0", - "@typescript-eslint/types": "7.18.0", - "@typescript-eslint/typescript-estree": "7.18.0", - "@typescript-eslint/visitor-keys": "7.18.0", + "@typescript-eslint/scope-manager": "8.18.0", + "@typescript-eslint/types": "8.18.0", + "@typescript-eslint/typescript-estree": "8.18.0", + "@typescript-eslint/visitor-keys": "8.18.0", "debug": "^4.3.4" }, "engines": { - "node": "^18.18.0 || >=20.0.0" + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/typescript-eslint" }, "peerDependencies": { - "eslint": "^8.56.0" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/scope-manager": { - "version": "7.18.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.18.0.tgz", - "integrity": "sha512-jjhdIE/FPF2B7Z1uzc6i3oWKbGcHb87Qw7AWj6jmEqNOfDFbJWtjt/XfwCpvNkpGWlcJaog5vTR+VV8+w9JflA==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "7.18.0", - "@typescript-eslint/visitor-keys": "7.18.0" - }, - "engines": { - "node": "^18.18.0 || >=20.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/types": { - "version": "7.18.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-7.18.0.tgz", - "integrity": "sha512-iZqi+Ds1y4EDYUtlOOC+aUmxnE9xS/yCigkjA7XpTKV6nCBd3Hp/PRGGmdwnfkV2ThMyYldP1wRpm/id99spTQ==", - "dev": true, - "engines": { - "node": "^18.18.0 || >=20.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/typescript-estree": { - "version": "7.18.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-7.18.0.tgz", - "integrity": "sha512-aP1v/BSPnnyhMHts8cf1qQ6Q1IFwwRvAQGRvBFkWlo3/lH29OXA3Pts+c10nxRxIBrDnoMqzhgdwVe5f2D6OzA==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "7.18.0", - "@typescript-eslint/visitor-keys": "7.18.0", - "debug": "^4.3.4", - "globby": "^11.1.0", - "is-glob": "^4.0.3", - "minimatch": "^9.0.4", - "semver": "^7.6.0", - "ts-api-utils": "^1.3.0" - }, - "engines": { - "node": "^18.18.0 || >=20.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/visitor-keys": { - "version": "7.18.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-7.18.0.tgz", - "integrity": "sha512-cDF0/Gf81QpY3xYyJKDV14Zwdmid5+uuENhjH2EqFaF0ni+yAyq/LzMaIJdhNJXZI7uLzwIlA+V7oWoyn6Curg==", - "dev": true, - "dependencies": { - "@typescript-eslint/types": "7.18.0", - "eslint-visitor-keys": "^3.4.3" - }, - "engines": { - "node": "^18.18.0 || >=20.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - 
"node_modules/@typescript-eslint/parser/node_modules/brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", - "dev": true, - "dependencies": { - "balanced-match": "^1.0.0" - } - }, - "node_modules/@typescript-eslint/parser/node_modules/minimatch": { - "version": "9.0.5", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", - "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", - "dev": true, - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <5.8.0" } }, "node_modules/@typescript-eslint/scope-manager": { - "version": "7.7.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.7.1.tgz", - "integrity": "sha512-PytBif2SF+9SpEUKynYn5g1RHFddJUcyynGpztX3l/ik7KmZEv19WCMhUBkHXPU9es/VWGD3/zg3wg90+Dh2rA==", + "version": "8.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.18.0.tgz", + "integrity": "sha512-PNGcHop0jkK2WVYGotk/hxj+UFLhXtGPiGtiaWgVBVP1jhMoMCHlTyJA+hEj4rszoSdLTK3fN4oOatrL0Cp+Xw==", "dev": true, "dependencies": { - "@typescript-eslint/types": "7.7.1", - "@typescript-eslint/visitor-keys": "7.7.1" + "@typescript-eslint/types": "8.18.0", + "@typescript-eslint/visitor-keys": "8.18.0" }, "engines": { - "node": "^18.18.0 || >=20.0.0" + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" }, "funding": { "type": "opencollective", @@ -3252,39 +3107,35 @@ } }, "node_modules/@typescript-eslint/type-utils": { - "version": "7.7.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-7.7.1.tgz", - "integrity": "sha512-ZksJLW3WF7o75zaBPScdW1Gbkwhd/lyeXGf1kQCxJaOeITscoSl0MjynVvCzuV5boUz/3fOI06Lz8La55mu29Q==", + "version": "8.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.18.0.tgz", + "integrity": "sha512-er224jRepVAVLnMF2Q7MZJCq5CsdH2oqjP4dT7K6ij09Kyd+R21r7UVJrF0buMVdZS5QRhDzpvzAxHxabQadow==", "dev": true, "dependencies": { - "@typescript-eslint/typescript-estree": "7.7.1", - "@typescript-eslint/utils": "7.7.1", + "@typescript-eslint/typescript-estree": "8.18.0", + "@typescript-eslint/utils": "8.18.0", "debug": "^4.3.4", "ts-api-utils": "^1.3.0" }, "engines": { - "node": "^18.18.0 || >=20.0.0" + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/typescript-eslint" }, "peerDependencies": { - "eslint": "^8.56.0" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <5.8.0" } }, "node_modules/@typescript-eslint/types": { - "version": "7.7.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-7.7.1.tgz", - "integrity": "sha512-AmPmnGW1ZLTpWa+/2omPrPfR7BcbUU4oha5VIbSbS1a1Tv966bklvLNXxp3mrbc+P2j4MNOTfDffNsk4o0c6/w==", + "version": "8.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.18.0.tgz", + "integrity": "sha512-FNYxgyTCAnFwTrzpBGq+zrnoTO4x0c1CKYY5MuUTzpScqmY5fmsh2o3+57lqdI3NZucBDCzDgdEbIaNfAjAHQA==", "dev": true, "engines": { - "node": "^18.18.0 || >=20.0.0" + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" }, 
"funding": { "type": "opencollective", @@ -3292,31 +3143,29 @@ } }, "node_modules/@typescript-eslint/typescript-estree": { - "version": "7.7.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-7.7.1.tgz", - "integrity": "sha512-CXe0JHCXru8Fa36dteXqmH2YxngKJjkQLjxzoj6LYwzZ7qZvgsLSc+eqItCrqIop8Vl2UKoAi0StVWu97FQZIQ==", + "version": "8.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.18.0.tgz", + "integrity": "sha512-rqQgFRu6yPkauz+ms3nQpohwejS8bvgbPyIDq13cgEDbkXt4LH4OkDMT0/fN1RUtzG8e8AKJyDBoocuQh8qNeg==", "dev": true, "dependencies": { - "@typescript-eslint/types": "7.7.1", - "@typescript-eslint/visitor-keys": "7.7.1", + "@typescript-eslint/types": "8.18.0", + "@typescript-eslint/visitor-keys": "8.18.0", "debug": "^4.3.4", - "globby": "^11.1.0", + "fast-glob": "^3.3.2", "is-glob": "^4.0.3", "minimatch": "^9.0.4", "semver": "^7.6.0", "ts-api-utils": "^1.3.0" }, "engines": { - "node": "^18.18.0 || >=20.0.0" + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/typescript-eslint" }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } + "peerDependencies": { + "typescript": ">=4.8.4 <5.8.0" } }, "node_modules/@typescript-eslint/typescript-estree/node_modules/brace-expansion": { @@ -3329,9 +3178,9 @@ } }, "node_modules/@typescript-eslint/typescript-estree/node_modules/minimatch": { - "version": "9.0.4", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.4.tgz", - "integrity": "sha512-KqWh+VchfxcMNRAJjj2tnsSJdNbHsVgnkBhTNrW7AjVo6OvLtxw8zfT9oLw1JSohlFzJ8jCoTgaoXvJ+kHt6fw==", + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", "dev": true, "dependencies": { "brace-expansion": "^2.0.1" @@ -3344,47 +3193,57 @@ } }, "node_modules/@typescript-eslint/utils": { - "version": "7.7.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-7.7.1.tgz", - "integrity": "sha512-QUvBxPEaBXf41ZBbaidKICgVL8Hin0p6prQDu6bbetWo39BKbWJxRsErOzMNT1rXvTll+J7ChrbmMCXM9rsvOQ==", + "version": "8.18.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.18.0.tgz", + "integrity": "sha512-p6GLdY383i7h5b0Qrfbix3Vc3+J2k6QWw6UMUeY5JGfm3C5LbZ4QIZzJNoNOfgyRe0uuYKjvVOsO/jD4SJO+xg==", "dev": true, "dependencies": { "@eslint-community/eslint-utils": "^4.4.0", - "@types/json-schema": "^7.0.15", - "@types/semver": "^7.5.8", - "@typescript-eslint/scope-manager": "7.7.1", - "@typescript-eslint/types": "7.7.1", - "@typescript-eslint/typescript-estree": "7.7.1", - "semver": "^7.6.0" + "@typescript-eslint/scope-manager": "8.18.0", + "@typescript-eslint/types": "8.18.0", + "@typescript-eslint/typescript-estree": "8.18.0" }, "engines": { - "node": "^18.18.0 || >=20.0.0" + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/typescript-eslint" }, "peerDependencies": { - "eslint": "^8.56.0" + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <5.8.0" } }, "node_modules/@typescript-eslint/visitor-keys": { - "version": "7.7.1", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-7.7.1.tgz", - "integrity": "sha512-gBL3Eq25uADw1LQ9kVpf3hRM+DWzs0uZknHYK3hq4jcTPqVCClHGDnB6UUUV2SFeBeA4KWHWbbLqmbGcZ4FYbw==", + "version": "8.18.0", + 
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.18.0.tgz", + "integrity": "sha512-pCh/qEA8Lb1wVIqNvBke8UaRjJ6wrAWkJO5yyIbs8Yx6TNGYyfNjOo61tLv+WwLvoLPp4BQ8B7AHKijl8NGUfw==", "dev": true, "dependencies": { - "@typescript-eslint/types": "7.7.1", - "eslint-visitor-keys": "^3.4.3" + "@typescript-eslint/types": "8.18.0", + "eslint-visitor-keys": "^4.2.0" }, "engines": { - "node": "^18.18.0 || >=20.0.0" + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/typescript-eslint" } }, + "node_modules/@typescript-eslint/visitor-keys/node_modules/eslint-visitor-keys": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.0.tgz", + "integrity": "sha512-UyLnSehNt62FFhSwjZlHmeokpRK59rcz29j+F1/aDgbkbRTk7wIc9XzdoasMUbRNKDM0qQt/+BJ4BrpFeABemw==", + "dev": true, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, "node_modules/@ungap/structured-clone": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.2.0.tgz", @@ -3403,9 +3262,9 @@ } }, "node_modules/acorn": { - "version": "8.11.3", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.11.3.tgz", - "integrity": "sha512-Y9rRfJG5jcKOE0CLisYbojUjIrIEE7AGMzA/Sm4BslANhbS+cDMpgBdcPT91oJ7OuJ9hYJBx59RjbhxVnrF8Xg==", + "version": "8.14.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.14.0.tgz", + "integrity": "sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA==", "dev": true, "bin": { "acorn": "bin/acorn" @@ -3618,20 +3477,13 @@ "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", "dev": true }, - "node_modules/aria-query": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/aria-query/-/aria-query-5.3.0.tgz", - "integrity": "sha512-b0P0sZPKtyu8HkeRAfCq0IfURZK+SuwMjY1UXGBU27wpAiTwQAIlq56IbIO+ytk/JjS1fMR14ee5WBBfKi5J6A==", - "dev": true, - "dependencies": { - "dequal": "^2.0.3" - } - }, "node_modules/array-buffer-byte-length": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.1.tgz", "integrity": "sha512-ahC5W1xgou+KTXix4sAO8Ki12Q+jf4i0+tmk3sC+zgcynshkHxzpXdImBehiUYKKKDwvfFiJl1tZt6ewscS1Mg==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "call-bind": "^1.0.5", "is-array-buffer": "^3.0.4" @@ -3648,6 +3500,8 @@ "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.8.tgz", "integrity": "sha512-itaWrbYbqpGXkGhZPGUulwnhVf5Hpy1xiCFsGqyIGglbBxmG5vSjxQen3/WGOjPpNEv1RtBLKxbmVXm8HpJStQ==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "call-bind": "^1.0.7", "define-properties": "^1.2.1", @@ -3677,6 +3531,8 @@ "resolved": "https://registry.npmjs.org/array.prototype.findlastindex/-/array.prototype.findlastindex-1.2.5.tgz", "integrity": "sha512-zfETvRFA8o7EiNn++N5f/kaCw221hrpGsDmcpndVupkPzEc1Wuf3VgC0qby1BbHs7f5DVYjgtEU2LLh5bqeGfQ==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "call-bind": "^1.0.7", "define-properties": "^1.2.1", @@ -3697,6 +3553,8 @@ "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.3.2.tgz", "integrity": "sha512-djYB+Zx2vLewY8RWlNCUdHjDXs2XOgm602S9E7P/UpHgfeHL00cRiIF+IN/G/aUJ7kGPb6yO/ErDI5V2s8iycA==", "dev": true, + "optional": true, + "peer": 
true, "dependencies": { "call-bind": "^1.0.2", "define-properties": "^1.2.0", @@ -3715,6 +3573,8 @@ "resolved": "https://registry.npmjs.org/array.prototype.flatmap/-/array.prototype.flatmap-1.3.2.tgz", "integrity": "sha512-Ewyx0c9PmpcsByhSW4r+9zDU7sGjFc86qf/kKtuSCRdhfbk0SNLLkaT5qvcHnRGgc5NP/ly/y+qkXkqONX54CQ==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "call-bind": "^1.0.2", "define-properties": "^1.2.0", @@ -3733,6 +3593,8 @@ "resolved": "https://registry.npmjs.org/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.3.tgz", "integrity": "sha512-bMxMKAjg13EBSVscxTaYA4mRc5t1UAXa2kXiGTNfZ079HIWXEkKmkgFrh/nJqamaLSrXO5H4WFFkPEaLJWbs3A==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "array-buffer-byte-length": "^1.0.1", "call-bind": "^1.0.5", @@ -3756,12 +3618,6 @@ "integrity": "sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA==", "dev": true }, - "node_modules/ast-types-flow": { - "version": "0.0.8", - "resolved": "https://registry.npmjs.org/ast-types-flow/-/ast-types-flow-0.0.8.tgz", - "integrity": "sha512-OH/2E5Fg20h2aPrbe+QL8JZQFko0YZaF+j4mnQ7BGhfavO7OpSLa8a0y9sBwomHdSbkhTS8TQNayBfnW5DwbvQ==", - "dev": true - }, "node_modules/async": { "version": "3.2.5", "resolved": "https://registry.npmjs.org/async/-/async-3.2.5.tgz", @@ -3777,6 +3633,8 @@ "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.7.tgz", "integrity": "sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "possible-typed-array-names": "^1.0.0" }, @@ -3787,19 +3645,10 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/axe-core": { - "version": "4.7.0", - "resolved": "https://registry.npmjs.org/axe-core/-/axe-core-4.7.0.tgz", - "integrity": "sha512-M0JtH+hlOL5pLQwHOLNYZaXuhqmvS8oExsqB1SBYgA4Dk7u/xx+YdGHXaK5pyUfed5mYXdlYiphWq3G8cRi5JQ==", - "dev": true, - "engines": { - "node": ">=4" - } - }, "node_modules/axios": { - "version": "1.7.7", - "resolved": "https://registry.npmjs.org/axios/-/axios-1.7.7.tgz", - "integrity": "sha512-S4kL7XrjgBmvdGut0sN3yJxqYzrDOnivkBiN0OFs6hLiUam3UPvswUo0kqGyhqUZGEOytHyumEdXsAkgCOUf3Q==", + "version": "1.7.9", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.7.9.tgz", + "integrity": "sha512-LhLcE7Hbiryz8oMDdDptSrWowmB4Bl6RCt6sIJKpRB4XtVf0iEgewX3au/pJqm+Py1kCASkb/FFKjxQaLtxJvw==", "dev": true, "dependencies": { "follow-redirects": "^1.15.6", @@ -3821,15 +3670,6 @@ "node": ">= 6" } }, - "node_modules/axobject-query": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/axobject-query/-/axobject-query-3.2.1.tgz", - "integrity": "sha512-jsyHu61e6N4Vbz/v18DHwWYKK0bSWLqn47eeDSKPB7m8tqMHF9YJ+mhIk2lVteyZrY8tnSj/jHOv4YiTCuCJgg==", - "dev": true, - "dependencies": { - "dequal": "^2.0.3" - } - }, "node_modules/azure-pipelines-task-lib": { "version": "4.17.3", "resolved": "https://registry.npmjs.org/azure-pipelines-task-lib/-/azure-pipelines-task-lib-4.17.3.tgz", @@ -4147,9 +3987,9 @@ } }, "node_modules/browserslist": { - "version": "4.23.0", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.23.0.tgz", - "integrity": "sha512-QW8HiM1shhT2GuzkvklfjcKDiWFXHOeFCIA/huJPwHsslwcydgk7X+z2zXpEijP98UCY7HbubZt5J2Zgvf0CaQ==", + "version": "4.24.2", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.24.2.tgz", + "integrity": 
"sha512-ZIc+Q62revdMcqC6aChtW4jz3My3klmCO1fEmINZY/8J3EpBg5/A/D0AKmBveUh6pgoeycoMkVMko84tuYS+Gg==", "dev": true, "funding": [ { @@ -4166,10 +4006,10 @@ } ], "dependencies": { - "caniuse-lite": "^1.0.30001587", - "electron-to-chromium": "^1.4.668", - "node-releases": "^2.0.14", - "update-browserslist-db": "^1.0.13" + "caniuse-lite": "^1.0.30001669", + "electron-to-chromium": "^1.5.41", + "node-releases": "^2.0.18", + "update-browserslist-db": "^1.1.1" }, "bin": { "browserslist": "cli.js" @@ -4290,9 +4130,9 @@ } }, "node_modules/caniuse-lite": { - "version": "1.0.30001612", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001612.tgz", - "integrity": "sha512-lFgnZ07UhaCcsSZgWW0K5j4e69dK1u/ltrL9lTUiFOwNHs12S3UMIEYgBV0Z6C6hRDev7iRnMzzYmKabYdXF9g==", + "version": "1.0.30001684", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001684.tgz", + "integrity": "sha512-G1LRwLIQjBQoyq0ZJGqGIJUXzJ8irpbjHLpVRXDvBEScFJ9b17sgK6vlx0GAJFE21okD7zXl08rRRUfq6HdoEQ==", "dev": true, "funding": [ { @@ -4591,17 +4431,13 @@ "node": ">= 8" } }, - "node_modules/damerau-levenshtein": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/damerau-levenshtein/-/damerau-levenshtein-1.0.8.tgz", - "integrity": "sha512-sdQSFB7+llfUcQHUQO3+B8ERRj0Oa4w9POWMI/puGtuf7gFywGmkaLCElnudfTiKZV+NvHqL0ifzdrI8Ro7ESA==", - "dev": true - }, "node_modules/data-view-buffer": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/data-view-buffer/-/data-view-buffer-1.0.1.tgz", "integrity": "sha512-0lht7OugA5x3iJLOWFhWK/5ehONdprk0ISXqVFn/NFrDu+cuc8iADFrGQz5BnRK7LLU3JmkbXSxaqX+/mXYtUA==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "call-bind": "^1.0.6", "es-errors": "^1.3.0", @@ -4619,6 +4455,8 @@ "resolved": "https://registry.npmjs.org/data-view-byte-length/-/data-view-byte-length-1.0.1.tgz", "integrity": "sha512-4J7wRJD3ABAzr8wP+OcIcqq2dlUKp4DVflx++hs5h5ZKydWMI6/D/fAot+yh6g2tHh8fLFTvNOaVN357NvSrOQ==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "call-bind": "^1.0.7", "es-errors": "^1.3.0", @@ -4636,6 +4474,8 @@ "resolved": "https://registry.npmjs.org/data-view-byte-offset/-/data-view-byte-offset-1.0.0.tgz", "integrity": "sha512-t/Ygsytq+R995EJ5PZlD4Cu56sWa8InXySaViRzw9apusqsOO2bQP+SbYzAhR0pFKoB+43lYy8rWban9JSuXnA==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "call-bind": "^1.0.6", "es-errors": "^1.3.0", @@ -4649,11 +4489,11 @@ } }, "node_modules/debug": { - "version": "4.3.6", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.6.tgz", - "integrity": "sha512-O/09Bd4Z1fBrU4VzkhFqVgpPzaGbw6Sm9FEkBT1A/YBXQFGuuSxa1dN2nxgxS34JmKXqYx8CZAwEVoJFImUXIg==", + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz", + "integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==", "dependencies": { - "ms": "2.1.2" + "ms": "^2.1.3" }, "engines": { "node": ">=6.0" @@ -4714,6 +4554,8 @@ "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.2.1.tgz", "integrity": "sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "define-data-property": "^1.0.1", "has-property-descriptors": "^1.0.0", @@ -4739,15 +4581,6 @@ "resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz", "integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==" 
}, - "node_modules/dequal": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz", - "integrity": "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==", - "dev": true, - "engines": { - "node": ">=6" - } - }, "node_modules/des.js": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/des.js/-/des.js-1.1.0.tgz", @@ -4847,9 +4680,9 @@ } }, "node_modules/electron-to-chromium": { - "version": "1.4.746", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.746.tgz", - "integrity": "sha512-jeWaIta2rIG2FzHaYIhSuVWqC6KJYo7oSBX4Jv7g+aVujKztfvdpf+n6MGwZdC5hQXbax4nntykLH2juIQrfPg==", + "version": "1.5.67", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.67.tgz", + "integrity": "sha512-nz88NNBsD7kQSAGGJyp8hS6xSPtWwqNogA0mjtc2nUYeEf3nURK9qpV18TuBdDmEDgVWotS8Wkzf+V52dSQ/LQ==", "dev": true }, "node_modules/emittery": { @@ -4896,6 +4729,8 @@ "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.23.3.tgz", "integrity": "sha512-e+HfNH61Bj1X9/jLc5v1owaLYuHdeHHSQlkhCBiTK8rBvKaULl/beGMxwrMXjpYrv4pz22BlY570vVePA2ho4A==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "array-buffer-byte-length": "^1.0.1", "arraybuffer.prototype.slice": "^1.0.3", @@ -4970,36 +4805,13 @@ "node": ">= 0.4" } }, - "node_modules/es-iterator-helpers": { - "version": "1.0.18", - "resolved": "https://registry.npmjs.org/es-iterator-helpers/-/es-iterator-helpers-1.0.18.tgz", - "integrity": "sha512-scxAJaewsahbqTYrGKJihhViaM6DDZDDoucfvzNbK0pOren1g/daDQ3IAhzn+1G14rBG7w+i5N+qul60++zlKA==", - "dev": true, - "dependencies": { - "call-bind": "^1.0.7", - "define-properties": "^1.2.1", - "es-abstract": "^1.23.0", - "es-errors": "^1.3.0", - "es-set-tostringtag": "^2.0.3", - "function-bind": "^1.1.2", - "get-intrinsic": "^1.2.4", - "globalthis": "^1.0.3", - "has-property-descriptors": "^1.0.2", - "has-proto": "^1.0.3", - "has-symbols": "^1.0.3", - "internal-slot": "^1.0.7", - "iterator.prototype": "^1.1.2", - "safe-array-concat": "^1.1.2" - }, - "engines": { - "node": ">= 0.4" - } - }, "node_modules/es-object-atoms": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.0.0.tgz", "integrity": "sha512-MZ4iQ6JwHOBQjahnjwaC1ZtIBH+2ohjamzAO3oaHcXYup7qxjF2fixyH+Q71voWHeOkI2q/TnJao/KfXYIZWbw==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "es-errors": "^1.3.0" }, @@ -5012,6 +4824,8 @@ "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.0.3.tgz", "integrity": "sha512-3T8uNMC3OQTHkFUsFq8r/BwAXLHvU/9O9mE0fBc/MY5iq/8H7ncvO947LmYA6ldWw9Uh8Yhf25zu6n7nML5QWQ==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "get-intrinsic": "^1.2.4", "has-tostringtag": "^1.0.2", @@ -5026,6 +4840,8 @@ "resolved": "https://registry.npmjs.org/es-shim-unscopables/-/es-shim-unscopables-1.0.2.tgz", "integrity": "sha512-J3yBRXCzDu4ULnQwxyToo/OjdMx6akgVC7K6few0a7F/0wLtmKKN7I73AH5T2836UuXRqN7Qg+IIUw/+YJksRw==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "hasown": "^2.0.0" } @@ -5035,6 +4851,8 @@ "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "is-callable": "^1.1.4", "is-date-object": "^1.0.1", @@ -5048,9 +4866,9 @@ } }, 
"node_modules/esbuild": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.23.1.tgz", - "integrity": "sha512-VVNz/9Sa0bs5SELtn3f7qhJCDPCF5oMEl5cO9/SSinpE9hbPVvxbd572HH5AKiP7WD8INO53GgfDDhRjkylHEg==", + "version": "0.24.0", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.24.0.tgz", + "integrity": "sha512-FuLPevChGDshgSicjisSooU0cemp/sGXR841D5LHMB7mTVOmsEHcAxaH3irL53+8YDIeVNQEySh4DaYU/iuPqQ==", "dev": true, "hasInstallScript": true, "bin": { @@ -5060,36 +4878,36 @@ "node": ">=18" }, "optionalDependencies": { - "@esbuild/aix-ppc64": "0.23.1", - "@esbuild/android-arm": "0.23.1", - "@esbuild/android-arm64": "0.23.1", - "@esbuild/android-x64": "0.23.1", - "@esbuild/darwin-arm64": "0.23.1", - "@esbuild/darwin-x64": "0.23.1", - "@esbuild/freebsd-arm64": "0.23.1", - "@esbuild/freebsd-x64": "0.23.1", - "@esbuild/linux-arm": "0.23.1", - "@esbuild/linux-arm64": "0.23.1", - "@esbuild/linux-ia32": "0.23.1", - "@esbuild/linux-loong64": "0.23.1", - "@esbuild/linux-mips64el": "0.23.1", - "@esbuild/linux-ppc64": "0.23.1", - "@esbuild/linux-riscv64": "0.23.1", - "@esbuild/linux-s390x": "0.23.1", - "@esbuild/linux-x64": "0.23.1", - "@esbuild/netbsd-x64": "0.23.1", - "@esbuild/openbsd-arm64": "0.23.1", - "@esbuild/openbsd-x64": "0.23.1", - "@esbuild/sunos-x64": "0.23.1", - "@esbuild/win32-arm64": "0.23.1", - "@esbuild/win32-ia32": "0.23.1", - "@esbuild/win32-x64": "0.23.1" + "@esbuild/aix-ppc64": "0.24.0", + "@esbuild/android-arm": "0.24.0", + "@esbuild/android-arm64": "0.24.0", + "@esbuild/android-x64": "0.24.0", + "@esbuild/darwin-arm64": "0.24.0", + "@esbuild/darwin-x64": "0.24.0", + "@esbuild/freebsd-arm64": "0.24.0", + "@esbuild/freebsd-x64": "0.24.0", + "@esbuild/linux-arm": "0.24.0", + "@esbuild/linux-arm64": "0.24.0", + "@esbuild/linux-ia32": "0.24.0", + "@esbuild/linux-loong64": "0.24.0", + "@esbuild/linux-mips64el": "0.24.0", + "@esbuild/linux-ppc64": "0.24.0", + "@esbuild/linux-riscv64": "0.24.0", + "@esbuild/linux-s390x": "0.24.0", + "@esbuild/linux-x64": "0.24.0", + "@esbuild/netbsd-x64": "0.24.0", + "@esbuild/openbsd-arm64": "0.24.0", + "@esbuild/openbsd-x64": "0.24.0", + "@esbuild/sunos-x64": "0.24.0", + "@esbuild/win32-arm64": "0.24.0", + "@esbuild/win32-ia32": "0.24.0", + "@esbuild/win32-x64": "0.24.0" } }, "node_modules/escalade": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.2.tgz", - "integrity": "sha512-ErCHMCae19vR8vQGe50xIsVomy19rg6gFu3+r3jkEO46suLMWBksvVyoGgQV+jOfl84ZSOSlmv6Gxa89PmTGmA==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", + "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", "dev": true, "engines": { "node": ">=6" @@ -5167,6 +4985,8 @@ "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-9.1.0.tgz", "integrity": "sha512-NSWl5BFQWEPi1j4TjVNItzYV7dZXZ+wP6I6ZhrBGpChQhZRUaElihE9uRRkcbRnNb76UMKDF3r+WTmNcGPKsqw==", "dev": true, + "optional": true, + "peer": true, "bin": { "eslint-config-prettier": "bin/cli.js" }, @@ -5179,6 +4999,8 @@ "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.9.tgz", "integrity": "sha512-WFj2isz22JahUv+B788TlO3N6zL3nNJGU8CcZbPZvVEkBPaJdCV4vy5wyghty5ROFbCRnm132v8BScu5/1BQ8g==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "debug": "^3.2.7", "is-core-module": "^2.13.0", @@ -5190,24 +5012,26 @@ "resolved": 
"https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "ms": "^2.1.1" } }, "node_modules/eslint-import-resolver-typescript": { - "version": "3.6.3", - "resolved": "https://registry.npmjs.org/eslint-import-resolver-typescript/-/eslint-import-resolver-typescript-3.6.3.tgz", - "integrity": "sha512-ud9aw4szY9cCT1EWWdGv1L1XR6hh2PaRWif0j2QjQ0pgTY/69iw+W0Z4qZv5wHahOl8isEr+k/JnyAqNQkLkIA==", + "version": "3.7.0", + "resolved": "https://registry.npmjs.org/eslint-import-resolver-typescript/-/eslint-import-resolver-typescript-3.7.0.tgz", + "integrity": "sha512-Vrwyi8HHxY97K5ebydMtffsWAn1SCR9eol49eCd5fJS4O1WV7PaAjbcjmbfJJSMz/t4Mal212Uz/fQZrOB8mow==", "dev": true, "dependencies": { "@nolyfill/is-core-module": "1.0.39", - "debug": "^4.3.5", + "debug": "^4.3.7", "enhanced-resolve": "^5.15.0", - "eslint-module-utils": "^2.8.1", "fast-glob": "^3.3.2", "get-tsconfig": "^4.7.5", "is-bun-module": "^1.0.2", - "is-glob": "^4.0.3" + "is-glob": "^4.0.3", + "stable-hash": "^0.0.4" }, "engines": { "node": "^14.18.0 || >=16.0.0" @@ -5234,6 +5058,8 @@ "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.8.1.tgz", "integrity": "sha512-rXDXR3h7cs7dy9RNpUlQf80nX31XWJEyGq1tRMo+6GsO5VmTe4UTwtmonAD4ZkAsrfMVDA2wlGJ3790Ys+D49Q==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "debug": "^3.2.7" }, @@ -5251,128 +5077,37 @@ "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "ms": "^2.1.1" } }, - "node_modules/eslint-plugin-escompat": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-escompat/-/eslint-plugin-escompat-3.4.0.tgz", - "integrity": "sha512-ufTPv8cwCxTNoLnTZBFTQ5SxU2w7E7wiMIS7PSxsgP1eAxFjtSaoZ80LRn64hI8iYziE6kJG6gX/ZCJVxh48Bg==", + "node_modules/eslint-plugin-import": { + "version": "2.29.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.29.1.tgz", + "integrity": "sha512-BbPC0cuExzhiMo4Ff1BTVwHpjjv28C5R+btTOGaCRC7UEz801up0JadwkeSk5Ued6TG34uaczuVuH6qyy5YUxw==", "dev": true, + "optional": true, + "peer": true, "dependencies": { - "browserslist": "^4.21.0" - }, - "peerDependencies": { - "eslint": ">=5.14.1" - } - }, - "node_modules/eslint-plugin-eslint-comments": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-eslint-comments/-/eslint-plugin-eslint-comments-3.2.0.tgz", - "integrity": "sha512-0jkOl0hfojIHHmEHgmNdqv4fmh7300NdpA9FFpF7zaoLvB/QeXOGNLIo86oAveJFrfB1p05kC8hpEMHM8DwWVQ==", - "dev": true, - "dependencies": { - "escape-string-regexp": "^1.0.5", - "ignore": "^5.0.5" - }, - "engines": { - "node": ">=6.5.0" - }, - "funding": { - "url": "https://github.com/sponsors/mysticatea" - }, - "peerDependencies": { - "eslint": ">=4.19.1" - } - }, - "node_modules/eslint-plugin-eslint-comments/node_modules/escape-string-regexp": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", - "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", - "dev": true, - "engines": { - "node": ">=0.8.0" - } - }, - "node_modules/eslint-plugin-filenames": { - "version": "1.3.2", - "resolved": 
"https://registry.npmjs.org/eslint-plugin-filenames/-/eslint-plugin-filenames-1.3.2.tgz", - "integrity": "sha512-tqxJTiEM5a0JmRCUYQmxw23vtTxrb2+a3Q2mMOPhFxvt7ZQQJmdiuMby9B/vUAuVMghyP7oET+nIf6EO6CBd/w==", - "dev": true, - "dependencies": { - "lodash.camelcase": "4.3.0", - "lodash.kebabcase": "4.1.1", - "lodash.snakecase": "4.1.1", - "lodash.upperfirst": "4.3.1" - }, - "peerDependencies": { - "eslint": "*" - } - }, - "node_modules/eslint-plugin-github": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-github/-/eslint-plugin-github-5.0.1.tgz", - "integrity": "sha512-qbXG3wL5Uh2JB92EKeX2hPtO9c/t75qVxQjVLYuTFfhHifLZzv9CBvLCvoaBhLrAC/xTMVht7DK/NofYK8X4Dg==", - "dev": true, - "dependencies": { - "@github/browserslist-config": "^1.0.0", - "@typescript-eslint/eslint-plugin": "^7.0.1", - "@typescript-eslint/parser": "^7.0.1", - "aria-query": "^5.3.0", - "eslint-config-prettier": ">=8.0.0", - "eslint-plugin-escompat": "^3.3.3", - "eslint-plugin-eslint-comments": "^3.2.0", - "eslint-plugin-filenames": "^1.3.2", - "eslint-plugin-i18n-text": "^1.0.1", - "eslint-plugin-import": "^2.25.2", - "eslint-plugin-jsx-a11y": "^6.7.1", - "eslint-plugin-no-only-tests": "^3.0.0", - "eslint-plugin-prettier": "^5.0.0", - "eslint-rule-documentation": ">=1.0.0", - "jsx-ast-utils": "^3.3.2", - "prettier": "^3.0.0", - "svg-element-attributes": "^1.3.1" - }, - "bin": { - "eslint-ignore-errors": "bin/eslint-ignore-errors.js" - }, - "peerDependencies": { - "eslint": "^8.0.1" - } - }, - "node_modules/eslint-plugin-i18n-text": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-i18n-text/-/eslint-plugin-i18n-text-1.0.1.tgz", - "integrity": "sha512-3G3UetST6rdqhqW9SfcfzNYMpQXS7wNkJvp6dsXnjzGiku6Iu5hl3B0kmk6lIcFPwYjhQIY+tXVRtK9TlGT7RA==", - "dev": true, - "peerDependencies": { - "eslint": ">=5.0.0" - } - }, - "node_modules/eslint-plugin-import": { - "version": "2.29.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.29.1.tgz", - "integrity": "sha512-BbPC0cuExzhiMo4Ff1BTVwHpjjv28C5R+btTOGaCRC7UEz801up0JadwkeSk5Ued6TG34uaczuVuH6qyy5YUxw==", - "dev": true, - "dependencies": { - "array-includes": "^3.1.7", - "array.prototype.findlastindex": "^1.2.3", - "array.prototype.flat": "^1.3.2", - "array.prototype.flatmap": "^1.3.2", - "debug": "^3.2.7", - "doctrine": "^2.1.0", - "eslint-import-resolver-node": "^0.3.9", - "eslint-module-utils": "^2.8.0", - "hasown": "^2.0.0", - "is-core-module": "^2.13.1", - "is-glob": "^4.0.3", - "minimatch": "^3.1.2", - "object.fromentries": "^2.0.7", - "object.groupby": "^1.0.1", - "object.values": "^1.1.7", - "semver": "^6.3.1", - "tsconfig-paths": "^3.15.0" + "array-includes": "^3.1.7", + "array.prototype.findlastindex": "^1.2.3", + "array.prototype.flat": "^1.3.2", + "array.prototype.flatmap": "^1.3.2", + "debug": "^3.2.7", + "doctrine": "^2.1.0", + "eslint-import-resolver-node": "^0.3.9", + "eslint-module-utils": "^2.8.0", + "hasown": "^2.0.0", + "is-core-module": "^2.13.1", + "is-glob": "^4.0.3", + "minimatch": "^3.1.2", + "object.fromentries": "^2.0.7", + "object.groupby": "^1.0.1", + "object.values": "^1.1.7", + "semver": "^6.3.1", + "tsconfig-paths": "^3.15.0" }, "engines": { "node": ">=4" @@ -5386,6 +5121,8 @@ "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "ms": "^2.1.1" } @@ -5395,6 +5132,8 @@ 
"resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz", "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "esutils": "^2.0.2" }, @@ -5407,14 +5146,16 @@ "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "dev": true, + "optional": true, + "peer": true, "bin": { "semver": "bin/semver.js" } }, "node_modules/eslint-plugin-jest": { - "version": "28.8.2", - "resolved": "https://registry.npmjs.org/eslint-plugin-jest/-/eslint-plugin-jest-28.8.2.tgz", - "integrity": "sha512-mC3OyklHmS5i7wYU1rGId9EnxRI8TVlnFG56AE+8U9iRy6zwaNygZR+DsdZuCL0gRG0wVeyzq+uWcPt6yJrrMA==", + "version": "28.9.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-jest/-/eslint-plugin-jest-28.9.0.tgz", + "integrity": "sha512-rLu1s1Wf96TgUUxSw6loVIkNtUjq1Re7A9QdCCHSohnvXEBAjuL420h0T/fMmkQlNsQP2GhQzEUpYHPfxBkvYQ==", "dev": true, "dependencies": { "@typescript-eslint/utils": "^6.0.0 || ^7.0.0 || ^8.0.0" @@ -5436,45 +5177,6 @@ } } }, - "node_modules/eslint-plugin-jsx-a11y": { - "version": "6.8.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-jsx-a11y/-/eslint-plugin-jsx-a11y-6.8.0.tgz", - "integrity": "sha512-Hdh937BS3KdwwbBaKd5+PLCOmYY6U4f2h9Z2ktwtNKvIdIEu137rjYbcb9ApSbVJfWxANNuiKTD/9tOKjK9qOA==", - "dev": true, - "dependencies": { - "@babel/runtime": "^7.23.2", - "aria-query": "^5.3.0", - "array-includes": "^3.1.7", - "array.prototype.flatmap": "^1.3.2", - "ast-types-flow": "^0.0.8", - "axe-core": "=4.7.0", - "axobject-query": "^3.2.1", - "damerau-levenshtein": "^1.0.8", - "emoji-regex": "^9.2.2", - "es-iterator-helpers": "^1.0.15", - "hasown": "^2.0.0", - "jsx-ast-utils": "^3.3.5", - "language-tags": "^1.0.9", - "minimatch": "^3.1.2", - "object.entries": "^1.1.7", - "object.fromentries": "^2.0.7" - }, - "engines": { - "node": ">=4.0" - }, - "peerDependencies": { - "eslint": "^3 || ^4 || ^5 || ^6 || ^7 || ^8" - } - }, - "node_modules/eslint-plugin-no-only-tests": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-no-only-tests/-/eslint-plugin-no-only-tests-3.1.0.tgz", - "integrity": "sha512-Lf4YW/bL6Un1R6A76pRZyE1dl1vr31G/ev8UzIc/geCgFWyrKil8hVjYqWVKGB/UIGmb6Slzs9T0wNezdSVegw==", - "dev": true, - "engines": { - "node": ">=5.0.0" - } - }, "node_modules/eslint-plugin-prettier": { "version": "5.2.1", "resolved": "https://registry.npmjs.org/eslint-plugin-prettier/-/eslint-plugin-prettier-5.2.1.tgz", @@ -5505,15 +5207,6 @@ } } }, - "node_modules/eslint-rule-documentation": { - "version": "1.0.23", - "resolved": "https://registry.npmjs.org/eslint-rule-documentation/-/eslint-rule-documentation-1.0.23.tgz", - "integrity": "sha512-pWReu3fkohwyvztx/oQWWgld2iad25TfUdi6wvhhaDPIQjHU/pyvlKgXFw1kX31SQK2Nq9MH+vRDWB0ZLy8fYw==", - "dev": true, - "engines": { - "node": ">=4.0.0" - } - }, "node_modules/eslint-scope": { "version": "7.2.2", "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.2.2.tgz", @@ -5866,6 +5559,8 @@ "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.3.tgz", "integrity": "sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "is-callable": "^1.1.3" } @@ -5942,6 +5637,8 @@ "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.6.tgz", 
"integrity": "sha512-Z5kx79swU5P27WEayXM1tBi5Ze/lbIyiNgU3qyXUOf9b2rgXYyF9Dy9Cx+IQv/Lc8WCG6L82zwUPpSS9hGehIg==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "call-bind": "^1.0.2", "define-properties": "^1.2.0", @@ -5960,6 +5657,8 @@ "resolved": "https://registry.npmjs.org/functions-have-names/-/functions-have-names-1.2.3.tgz", "integrity": "sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==", "dev": true, + "optional": true, + "peer": true, "funding": { "url": "https://github.com/sponsors/ljharb" } @@ -6035,6 +5734,8 @@ "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.2.tgz", "integrity": "sha512-g0QYk1dZBxGwk+Ngc+ltRH2IBp2f7zBkBMBJZCDerh6EhlhSR6+9irMCuT/09zD6qkarHUSn529sK/yL4S27mg==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "call-bind": "^1.0.5", "es-errors": "^1.3.0", @@ -6110,6 +5811,8 @@ "resolved": "https://registry.npmjs.org/globalthis/-/globalthis-1.0.3.tgz", "integrity": "sha512-sFdI5LyBiNTHjRd7cGPWapiHWMOXKyuBNX/cWJ3NfzrZQVa8GI/8cofCl74AOVqq9W5kNmguTIzJ/1s2gyI9wA==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "define-properties": "^1.1.3" }, @@ -6188,6 +5891,8 @@ "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.2.tgz", "integrity": "sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ==", "dev": true, + "optional": true, + "peer": true, "funding": { "url": "https://github.com/sponsors/ljharb" } @@ -6239,6 +5944,8 @@ "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "has-symbols": "^1.0.3" }, @@ -6422,6 +6129,8 @@ "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.7.tgz", "integrity": "sha512-NGnrKwXzSms2qUUih/ILZ5JBqNTSa1+ZmP6flaIp6KmSElgE9qdndzS3cqjrDovwFdmwsGsLdeFgB6suw+1e9g==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "es-errors": "^1.3.0", "hasown": "^2.0.0", @@ -6444,6 +6153,8 @@ "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.4.tgz", "integrity": "sha512-wcjaerHw0ydZwfhiKbXJWLDY8A7yV7KhjQOpb83hGgGfId/aQa4TOvwyzn2PuswW2gPCYEL/nEAiSVpdOj1lXw==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "call-bind": "^1.0.2", "get-intrinsic": "^1.2.1" @@ -6461,26 +6172,13 @@ "integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==", "dev": true }, - "node_modules/is-async-function": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-async-function/-/is-async-function-2.0.0.tgz", - "integrity": "sha512-Y1JXKrfykRJGdlDwdKlLpLyMIiWqWvuSd17TvZk68PLAOGOoF4Xyav1z0Xhoi+gCYjZVeC5SI+hYFOfvXmGRCA==", - "dev": true, - "dependencies": { - "has-tostringtag": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/is-bigint": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.4.tgz", "integrity": "sha512-zB9CruMamjym81i2JZ3UMn54PKGsQzsJeo6xvN3HJJ4CAsQNB6iRutp2To77OfCNuoxspsIhzaPoO1zyCEhFOg==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "has-bigints": "^1.0.1" }, @@ -6493,6 +6191,8 @@ "resolved": 
"https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.1.2.tgz", "integrity": "sha512-gDYaKHJmnj4aWxyj6YHyXVpdQawtVLHU5cb+eztPGczf6cjuTdwve5ZIEfgXqH4e57An1D1AKf8CZ3kYrQRqYA==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "call-bind": "^1.0.2", "has-tostringtag": "^1.0.0" @@ -6518,6 +6218,8 @@ "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz", "integrity": "sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==", "dev": true, + "optional": true, + "peer": true, "engines": { "node": ">= 0.4" }, @@ -6541,6 +6243,8 @@ "resolved": "https://registry.npmjs.org/is-data-view/-/is-data-view-1.0.1.tgz", "integrity": "sha512-AHkaJrsUVW6wq6JS8y3JnM/GJF/9cf+k20+iDzlSaJrinEo5+7vRiteOSwBhHRiAyQATN1AmY4hwzxJKPmYf+w==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "is-typed-array": "^1.1.13" }, @@ -6556,6 +6260,8 @@ "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.5.tgz", "integrity": "sha512-9YQaSxsAiSwcvS33MBk3wTCVnWK+HhF8VZR2jRxehM16QcVOdHqPn4VPHmRK4lSr38n9JriurInLcP90xsYNfQ==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "has-tostringtag": "^1.0.0" }, @@ -6575,18 +6281,6 @@ "node": ">=0.10.0" } }, - "node_modules/is-finalizationregistry": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-finalizationregistry/-/is-finalizationregistry-1.0.2.tgz", - "integrity": "sha512-0by5vtUJs8iFQb5TYUHHPudOR+qXYIMKtiUzvLIZITZUjknFmziyBJuLhVRc+Ds0dREFlskDNJKYIdIzu/9pfw==", - "dev": true, - "dependencies": { - "call-bind": "^1.0.2" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/is-fullwidth-code-point": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", @@ -6604,21 +6298,6 @@ "node": ">=6" } }, - "node_modules/is-generator-function": { - "version": "1.0.10", - "resolved": "https://registry.npmjs.org/is-generator-function/-/is-generator-function-1.0.10.tgz", - "integrity": "sha512-jsEjy9l3yiXEQ+PsXdmBwEPcOxaXWLspKdplFUVI9vq1iZgIekeC0L167qeu86czQaxed3q/Uzuw0swL0irL8A==", - "dev": true, - "dependencies": { - "has-tostringtag": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/is-glob": { "version": "4.0.3", "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", @@ -6631,23 +6310,13 @@ "node": ">=0.10.0" } }, - "node_modules/is-map": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/is-map/-/is-map-2.0.3.tgz", - "integrity": "sha512-1Qed0/Hr2m+YqxnM09CjA2d/i6YZNfF6R2oRAOj36eUdS6qIV/huPJNSEpKbupewFs+ZsJlxsjjPbc0/afW6Lw==", - "dev": true, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/is-negative-zero": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.3.tgz", "integrity": "sha512-5KoIu2Ngpyek75jXodFvnafB6DJgr3u8uuK0LEZJjrU19DrMD3EVERaR8sjz8CCGgpZvxPl9SuE1GMVPFHx1mw==", "dev": true, + "optional": true, + "peer": true, "engines": { "node": ">= 0.4" }, @@ -6669,6 +6338,8 @@ "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.7.tgz", "integrity": "sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "has-tostringtag": "^1.0.0" }, @@ -6701,6 
+6372,8 @@ "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", "integrity": "sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "call-bind": "^1.0.2", "has-tostringtag": "^1.0.0" @@ -6712,23 +6385,13 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/is-set": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/is-set/-/is-set-2.0.3.tgz", - "integrity": "sha512-iPAjerrse27/ygGLxw+EBR9agv9Y6uLeYVJMu+QNCoouJ1/1ri0mGrcWpfCqFZuzzx3WjtwxG098X+n4OuRkPg==", - "dev": true, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/is-shared-array-buffer": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.3.tgz", "integrity": "sha512-nA2hv5XIhLR3uVzDDfCIknerhx8XUKnstuOERPNNIinXG7v9u+ohXF67vxm4TPTEPU6lm61ZkwP3c9PCB97rhg==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "call-bind": "^1.0.7" }, @@ -6755,6 +6418,8 @@ "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.7.tgz", "integrity": "sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "has-tostringtag": "^1.0.0" }, @@ -6770,6 +6435,8 @@ "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.4.tgz", "integrity": "sha512-C/CPBqKWnvdcxqIARxyOh4v1UUEOCHpgDa0WYgpKDFMszcrPcffg5uhwSgPCLD2WWxmq6isisz87tzT01tuGhg==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "has-symbols": "^1.0.2" }, @@ -6785,6 +6452,8 @@ "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.13.tgz", "integrity": "sha512-uZ25/bUAlUY5fR4OKT4rZQEBrzQWYV9ZJYGGsUmEJ6thodVJ1HX64ePQ6Z0qPWP+m+Uq6e9UugrE38jeYsDSMw==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "which-typed-array": "^1.1.14" }, @@ -6795,23 +6464,13 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/is-weakmap": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/is-weakmap/-/is-weakmap-2.0.2.tgz", - "integrity": "sha512-K5pXYOm9wqY1RgjpL3YTkF39tni1XajUIkawTLUo9EZEVUFga5gSQJF8nNS7ZwJQ02y+1YCNYcMh+HIf1ZqE+w==", - "dev": true, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/is-weakref": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.0.2.tgz", "integrity": "sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "call-bind": "^1.0.2" }, @@ -6819,27 +6478,13 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/is-weakset": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/is-weakset/-/is-weakset-2.0.3.tgz", - "integrity": "sha512-LvIm3/KWzS9oRFHugab7d+M/GcBXuXX5xZkzPmN+NxihdQlZUQ4dWuSV1xR/sq6upL1TJEDrfBgRepHFdBtSNQ==", - "dev": true, - "dependencies": { - "call-bind": "^1.0.7", - "get-intrinsic": "^1.2.4" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/isarray": { "version": "2.0.5", "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz", "integrity": "sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==", 
- "dev": true + "dev": true, + "optional": true, + "peer": true }, "node_modules/isexe": { "version": "2.0.0", @@ -6912,19 +6557,6 @@ "node": ">=8" } }, - "node_modules/iterator.prototype": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/iterator.prototype/-/iterator.prototype-1.1.2.tgz", - "integrity": "sha512-DR33HMMr8EzwuRL8Y9D3u2BMj8+RqSE850jfGu59kS7tbmPLzGkZmVSfyCFSDxuZiEY6Rzt3T2NA/qU+NwVj1w==", - "dev": true, - "dependencies": { - "define-properties": "^1.2.1", - "get-intrinsic": "^1.2.1", - "has-symbols": "^1.0.3", - "reflect.getprototypeof": "^1.0.4", - "set-function-name": "^2.0.1" - } - }, "node_modules/jackspeak": { "version": "2.3.6", "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-2.3.6.tgz", @@ -7594,6 +7226,8 @@ "resolved": "https://registry.npmjs.org/json5/-/json5-1.0.2.tgz", "integrity": "sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "minimist": "^1.2.0" }, @@ -7601,21 +7235,6 @@ "json5": "lib/cli.js" } }, - "node_modules/jsx-ast-utils": { - "version": "3.3.5", - "resolved": "https://registry.npmjs.org/jsx-ast-utils/-/jsx-ast-utils-3.3.5.tgz", - "integrity": "sha512-ZZow9HBI5O6EPgSJLUb8n2NKgmVWTwCvHGwFuJlMjvLFqlGG6pjirPhtdsseaLZjSibD8eegzmYpUZwoIlj2cQ==", - "dev": true, - "dependencies": { - "array-includes": "^3.1.6", - "array.prototype.flat": "^1.3.1", - "object.assign": "^4.1.4", - "object.values": "^1.1.6" - }, - "engines": { - "node": ">=4.0" - } - }, "node_modules/jszip": { "version": "3.10.1", "resolved": "https://registry.npmjs.org/jszip/-/jszip-3.10.1.tgz", @@ -7682,24 +7301,6 @@ "node": ">=6" } }, - "node_modules/language-subtag-registry": { - "version": "0.3.22", - "resolved": "https://registry.npmjs.org/language-subtag-registry/-/language-subtag-registry-0.3.22.tgz", - "integrity": "sha512-tN0MCzyWnoz/4nHS6uxdlFWoUZT7ABptwKPQ52Ea7URk6vll88bWBVhodtnlfEuCcKWNGoc+uGbw1cwa9IKh/w==", - "dev": true - }, - "node_modules/language-tags": { - "version": "1.0.9", - "resolved": "https://registry.npmjs.org/language-tags/-/language-tags-1.0.9.tgz", - "integrity": "sha512-MbjN408fEndfiQXbFQ1vnd+1NoLDsnQW41410oQBXiyXDMYH5z505juWa4KUE1LqxRC7DgOgZDbKLxHIwm27hA==", - "dev": true, - "dependencies": { - "language-subtag-registry": "^0.3.20" - }, - "engines": { - "node": ">=0.10" - } - }, "node_modules/lazystream": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/lazystream/-/lazystream-1.0.1.tgz", @@ -7799,18 +7400,6 @@ "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" }, - "node_modules/lodash.camelcase": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz", - "integrity": "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==", - "dev": true - }, - "node_modules/lodash.kebabcase": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/lodash.kebabcase/-/lodash.kebabcase-4.1.1.tgz", - "integrity": "sha512-N8XRTIMMqqDgSy4VLKPnJ/+hpGZN+PHQiJnSenYqPaVV/NCqEogTnAdZLQiGKhxX+JCs8waWq2t1XHWKOmlY8g==", - "dev": true - }, "node_modules/lodash.memoize": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz", @@ -7823,18 +7412,6 @@ "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", 
"dev": true }, - "node_modules/lodash.snakecase": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/lodash.snakecase/-/lodash.snakecase-4.1.1.tgz", - "integrity": "sha512-QZ1d4xoBHYUeuouhEq3lk3Uq7ldgyFXGBhg04+oRLnIz8o9T65Eh+8YdroUwn846zchkA9yDsDl5CVVaV2nqYw==", - "dev": true - }, - "node_modules/lodash.upperfirst": { - "version": "4.3.1", - "resolved": "https://registry.npmjs.org/lodash.upperfirst/-/lodash.upperfirst-4.3.1.tgz", - "integrity": "sha512-sReKOYJIJf74dhJONhU4e0/shzi1trVbSWDOhKYE5XV2O+H7Sb2Dihwuc7xWxVl+DgFPyTqIN3zMfT9cq5iWDg==", - "dev": true - }, "node_modules/loglevel": { "version": "1.9.1", "resolved": "https://registry.npmjs.org/loglevel/-/loglevel-1.9.1.tgz", @@ -8060,9 +7637,9 @@ } }, "node_modules/ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" }, "node_modules/natural-compare": { "version": "1.4.0", @@ -8080,9 +7657,9 @@ } }, "node_modules/nock": { - "version": "13.5.5", - "resolved": "https://registry.npmjs.org/nock/-/nock-13.5.5.tgz", - "integrity": "sha512-XKYnqUrCwXC8DGG1xX4YH5yNIrlh9c065uaMZZHUoeUUINTOyt+x/G+ezYk0Ft6ExSREVIs+qBJDK503viTfFA==", + "version": "13.5.6", + "resolved": "https://registry.npmjs.org/nock/-/nock-13.5.6.tgz", + "integrity": "sha512-o2zOYiCpzRqSzPj0Zt/dQ/DqZeYoaQ7TUonc/xUPjCGl9WeHpNbxgVvOquXYAaJzI0M9BXV3HTzG0p8IUAbBTQ==", "dev": true, "dependencies": { "debug": "^4.1.0", @@ -8119,9 +7696,9 @@ "dev": true }, "node_modules/node-releases": { - "version": "2.0.14", - "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.14.tgz", - "integrity": "sha512-y10wOWt8yZpqXmOgRo77WaHEmhYQYGNA6y421PKsKYWEK8aW+cqAphborZDhqfyKrbZEN92CN1X2KbafY2s7Yw==", + "version": "2.0.18", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.18.tgz", + "integrity": "sha512-d9VeXT4SJ7ZeOqGX6R5EM022wpL+eWPooLI+5UpWn2jCT1aosUQEhQP214x33Wkwx3JQMvIm+tIoVOdodFS40g==", "dev": true }, "node_modules/nodejs-file-downloader": { @@ -8168,6 +7745,8 @@ "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", "dev": true, + "optional": true, + "peer": true, "engines": { "node": ">= 0.4" } @@ -8177,6 +7756,8 @@ "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.5.tgz", "integrity": "sha512-byy+U7gp+FVwmyzKPYhW2h5l3crpmGsxl7X2s8y43IgxvG4g3QZ6CffDtsNQy1WsmZpQbO+ybo0AlW7TY6DcBQ==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "call-bind": "^1.0.5", "define-properties": "^1.2.1", @@ -8190,25 +7771,13 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/object.entries": { - "version": "1.1.8", - "resolved": "https://registry.npmjs.org/object.entries/-/object.entries-1.1.8.tgz", - "integrity": "sha512-cmopxi8VwRIAw/fkijJohSfpef5PdN0pMQJN6VC/ZKvn0LIknWD8KtgY6KlQdEc4tIjcQ3HxSMmnvtzIscdaYQ==", - "dev": true, - "dependencies": { - "call-bind": "^1.0.7", - "define-properties": "^1.2.1", - "es-object-atoms": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - } - }, "node_modules/object.fromentries": { "version": "2.0.8", "resolved": 
"https://registry.npmjs.org/object.fromentries/-/object.fromentries-2.0.8.tgz", "integrity": "sha512-k6E21FzySsSK5a21KRADBd/NGneRegFO5pLHfdQLpRDETUNJueLXs3WCzyQ3tFRDYgbq3KHGXfTbi2bs8WQ6rQ==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "call-bind": "^1.0.7", "define-properties": "^1.2.1", @@ -8227,6 +7796,8 @@ "resolved": "https://registry.npmjs.org/object.groupby/-/object.groupby-1.0.3.tgz", "integrity": "sha512-+Lhy3TQTuzXI5hevh8sBGqbmurHbbIjAi0Z4S63nthVLmLxfbj4T54a4CfZrXIrt9iP4mVAPYMo/v99taj3wjQ==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "call-bind": "^1.0.7", "define-properties": "^1.2.1", @@ -8241,6 +7812,8 @@ "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.2.0.tgz", "integrity": "sha512-yBYjY9QX2hnRmZHAjG/f13MzmBzxzYgQhFrke06TTyKY5zSTEqkOeukBzIdVA3j3ulu8Qa3MbVFShV7T2RmGtQ==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "call-bind": "^1.0.7", "define-properties": "^1.2.1", @@ -8450,9 +8023,9 @@ } }, "node_modules/picocolors": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz", - "integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", "dev": true }, "node_modules/picomatch": { @@ -8545,6 +8118,8 @@ "resolved": "https://registry.npmjs.org/possible-typed-array-names/-/possible-typed-array-names-1.0.0.tgz", "integrity": "sha512-d7Uw+eZoloe0EHDIYoe+bQ5WXnGMOpmiZFTuMWCwpjzzkL2nTjcKiAk4hh8TjnGye2TwWOk3UXucZ+3rbmBa8Q==", "dev": true, + "optional": true, + "peer": true, "engines": { "node": ">= 0.4" } @@ -8559,9 +8134,9 @@ } }, "node_modules/prettier": { - "version": "3.3.3", - "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.3.3.tgz", - "integrity": "sha512-i2tDNA0O5IrMO757lfrdQZCc2jPNDVntV0m/+4whiDfWaTKfMNgR7Qz0NAeGz/nRqF4m5/6CLzbP4/liHt12Ew==", + "version": "3.4.2", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.4.2.tgz", + "integrity": "sha512-e9MewbtFo+Fevyuxn/4rrcDAaq0IYxPGLvObpQjiZBMAzB9IGmzlnG9RZy3FFas+eBMu2vA0CszMeduow5dIuQ==", "dev": true, "bin": { "prettier": "bin/prettier.cjs" @@ -8955,38 +8530,13 @@ "node": ">= 0.10" } }, - "node_modules/reflect.getprototypeof": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/reflect.getprototypeof/-/reflect.getprototypeof-1.0.6.tgz", - "integrity": "sha512-fmfw4XgoDke3kdI6h4xcUz1dG8uaiv5q9gcEwLS4Pnth2kxT+GZ7YehS1JTMGBQmtV7Y4GFGbs2re2NqhdozUg==", - "dev": true, - "dependencies": { - "call-bind": "^1.0.7", - "define-properties": "^1.2.1", - "es-abstract": "^1.23.1", - "es-errors": "^1.3.0", - "get-intrinsic": "^1.2.4", - "globalthis": "^1.0.3", - "which-builtin-type": "^1.1.3" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/regenerator-runtime": { - "version": "0.14.1", - "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.14.1.tgz", - "integrity": "sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw==", - "dev": true - }, "node_modules/regexp.prototype.flags": { "version": "1.5.2", "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.2.tgz", "integrity": 
"sha512-NcDiDkTLuPR+++OCKB0nWafEmhg/Da8aUPLPMQbK+bxKKCm1/S5he+AqYa4PlMCVBalb4/yxIRub6qkEx5yJbw==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "call-bind": "^1.0.6", "define-properties": "^1.2.1", @@ -9132,6 +8682,8 @@ "resolved": "https://registry.npmjs.org/safe-array-concat/-/safe-array-concat-1.1.2.tgz", "integrity": "sha512-vj6RsCsWBCf19jIeHEfkRMw8DPiBb+DMXklQ/1SGDHOMlHdPUkZXFQ2YdplS23zESTijAcurb1aSgJA3AgMu1Q==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "call-bind": "^1.0.7", "get-intrinsic": "^1.2.4", @@ -9169,6 +8721,8 @@ "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.0.3.tgz", "integrity": "sha512-CdASjNJPvRa7roO6Ra/gLYBTzYzzPyyBXxIMdGW3USQLyjWEls2RgW5UBTXaQVp+OrpeCK3bLem8smtmheoRuw==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "call-bind": "^1.0.6", "es-errors": "^1.3.0", @@ -9232,6 +8786,8 @@ "resolved": "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.2.tgz", "integrity": "sha512-7PGFlmtwsEADb0WYyvCMa1t+yke6daIG4Wirafur5kcf+MhUnPms1UeR0CKQdTZD81yESwMHbtn+TR+dMviakQ==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "define-data-property": "^1.1.4", "es-errors": "^1.3.0", @@ -9344,6 +8900,12 @@ "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==" }, + "node_modules/stable-hash": { + "version": "0.0.4", + "resolved": "https://registry.npmjs.org/stable-hash/-/stable-hash-0.0.4.tgz", + "integrity": "sha512-LjdcbuBeLcdETCrPn9i8AYAZ1eCtu4ECAWtP7UleOiZ9LzVxRzzUZEoZ8zB24nhkQnDWyET0I+3sWokSDS3E7g==", + "dev": true + }, "node_modules/stack-utils": { "version": "2.0.6", "resolved": "https://registry.npmjs.org/stack-utils/-/stack-utils-2.0.6.tgz", @@ -9440,6 +9002,8 @@ "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.9.tgz", "integrity": "sha512-klHuCNxiMZ8MlsOihJhJEBJAiMVqU3Z2nEXWfWnIqjN0gEFS9J9+IxKozWWtQGcgoa1WUZzLjKPTr4ZHNFTFxw==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "call-bind": "^1.0.7", "define-properties": "^1.2.1", @@ -9458,6 +9022,8 @@ "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.8.tgz", "integrity": "sha512-p73uL5VCHCO2BZZ6krwwQE3kCzM7NKmis8S//xEC6fQonchbum4eP6kR4DLEjQFO3Wnj3Fuo8NM0kOSjVdHjZQ==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "call-bind": "^1.0.7", "define-properties": "^1.2.1", @@ -9472,6 +9038,8 @@ "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.8.tgz", "integrity": "sha512-UXSH262CSZY1tfu3G3Secr6uGLCFVPMhIqHjlgCUtCCcgihYc/xKs9djMTMUOb2j1mVSeU8EU6NWc/iQKU6Gfg==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "call-bind": "^1.0.7", "define-properties": "^1.2.1", @@ -9512,6 +9080,8 @@ "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", "integrity": "sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==", "dev": true, + "optional": true, + "peer": true, "engines": { "node": ">=4" } @@ -9560,16 +9130,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/svg-element-attributes": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/svg-element-attributes/-/svg-element-attributes-1.3.1.tgz", - "integrity": 
"sha512-Bh05dSOnJBf3miNMqpsormfNtfidA/GxQVakhtn0T4DECWKeXQRQUceYjJ+OxYiiLdGe4Jo9iFV8wICFapFeIA==", - "dev": true, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, "node_modules/sync-request": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/sync-request/-/sync-request-6.1.0.tgz", @@ -9867,6 +9427,8 @@ "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.15.0.tgz", "integrity": "sha512-2Ac2RgzDe/cn48GvOe3M+o82pEFewD3UPbyoUHHdKasHwJKjds4fLXWf/Ux5kATBKN20oaFGu+jbElp1pos0mg==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "@types/json5": "^0.0.29", "json5": "^1.0.2", @@ -9953,6 +9515,8 @@ "resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.2.tgz", "integrity": "sha512-gEymJYKZtKXzzBzM4jqa9w6Q1Jjm7x2d+sh19AdsD4wqnMPDYyvwpsIc2Q/835kHuo3BEQ7CjelGhfTsoBb2MQ==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "call-bind": "^1.0.7", "es-errors": "^1.3.0", @@ -9967,6 +9531,8 @@ "resolved": "https://registry.npmjs.org/typed-array-byte-length/-/typed-array-byte-length-1.0.1.tgz", "integrity": "sha512-3iMJ9q0ao7WE9tWcaYKIptkNBuOIcZCCT0d4MRvuuH88fEoEH62IuQe0OtraD3ebQEoTRk8XCBoknUNc1Y67pw==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "call-bind": "^1.0.7", "for-each": "^0.3.3", @@ -9986,6 +9552,8 @@ "resolved": "https://registry.npmjs.org/typed-array-byte-offset/-/typed-array-byte-offset-1.0.2.tgz", "integrity": "sha512-Ous0vodHa56FviZucS2E63zkgtgrACj7omjwd/8lTEMEPFFyjfixMZ1ZXenpgCFBBt4EC1J2XsyVS2gkG0eTFA==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "available-typed-arrays": "^1.0.7", "call-bind": "^1.0.7", @@ -10006,6 +9574,8 @@ "resolved": "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.6.tgz", "integrity": "sha512-/OxDN6OtAk5KBpGb28T+HZc2M+ADtvRxXrKKbUwtsLgdoxgX13hyy7ek6bFRl5+aBs2yZzB0c4CnQfAtVypW/g==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "call-bind": "^1.0.7", "for-each": "^0.3.3", @@ -10038,9 +9608,9 @@ "dev": true }, "node_modules/typescript": { - "version": "5.5.4", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.5.4.tgz", - "integrity": "sha512-Mtq29sKDAEYP7aljRgtPOpTvOfbwRWlS6dPRzwjdE+C0R4brX/GUyhHSecbHMFLNBLcJIPt9nl9yG5TZ1weH+Q==", + "version": "5.7.2", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.7.2.tgz", + "integrity": "sha512-i5t66RHxDvVN40HfDd1PsEThGNnlMCMT3jMUuoh9/0TaqWevNontacunWyN02LA9/fIbEWlcHZcgTKb9QoaLfg==", "dev": true, "bin": { "tsc": "bin/tsc", @@ -10055,6 +9625,8 @@ "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.2.tgz", "integrity": "sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "call-bind": "^1.0.2", "has-bigints": "^1.0.2", @@ -10082,9 +9654,9 @@ } }, "node_modules/undici-types": { - "version": "6.19.8", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.19.8.tgz", - "integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==" + "version": "6.20.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.20.0.tgz", + "integrity": "sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg==" }, "node_modules/universal-user-agent": { "version": "7.0.2", @@ -10102,9 +9674,9 @@ } }, "node_modules/update-browserslist-db": { 
- "version": "1.0.13", - "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.0.13.tgz", - "integrity": "sha512-xebP81SNcPuNpPP3uzeW1NYXxI3rxyJzF3pD6sH4jE7o/IX+WtSpwnVU+qIsDPyk0d3hmFQ7mjqc6AtV604hbg==", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.1.tgz", + "integrity": "sha512-R8UzCaa9Az+38REPiJ1tXlImTJXlVfgHZsglwBD/k6nj76ctsH1E3q4doGrukiLQd3sGQYu56r5+lo5r94l29A==", "dev": true, "funding": [ { @@ -10121,8 +9693,8 @@ } ], "dependencies": { - "escalade": "^3.1.1", - "picocolors": "^1.0.0" + "escalade": "^3.2.0", + "picocolors": "^1.1.0" }, "bin": { "update-browserslist-db": "cli.js" @@ -10255,6 +9827,8 @@ "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz", "integrity": "sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "is-bigint": "^1.0.1", "is-boolean-object": "^1.1.0", @@ -10266,55 +9840,13 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/which-builtin-type": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/which-builtin-type/-/which-builtin-type-1.1.3.tgz", - "integrity": "sha512-YmjsSMDBYsM1CaFiayOVT06+KJeXf0o5M/CAd4o1lTadFAtacTUM49zoYxr/oroopFDfhvN6iEcBxUyc3gvKmw==", - "dev": true, - "dependencies": { - "function.prototype.name": "^1.1.5", - "has-tostringtag": "^1.0.0", - "is-async-function": "^2.0.0", - "is-date-object": "^1.0.5", - "is-finalizationregistry": "^1.0.2", - "is-generator-function": "^1.0.10", - "is-regex": "^1.1.4", - "is-weakref": "^1.0.2", - "isarray": "^2.0.5", - "which-boxed-primitive": "^1.0.2", - "which-collection": "^1.0.1", - "which-typed-array": "^1.1.9" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/which-collection": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/which-collection/-/which-collection-1.0.2.tgz", - "integrity": "sha512-K4jVyjnBdgvc86Y6BkaLZEN933SwYOuBFkdmBu9ZfkcAbdVbpITnDmjvZ/aQjRXQrv5EPkTnD1s39GiiqbngCw==", - "dev": true, - "dependencies": { - "is-map": "^2.0.3", - "is-set": "^2.0.3", - "is-weakmap": "^2.0.2", - "is-weakset": "^2.0.3" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/which-typed-array": { "version": "1.1.15", "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.15.tgz", "integrity": "sha512-oV0jmFtUky6CXfkqehVvBP/LSWJ2sy4vWMioiENyJLePrBO/yKyV9OyJySfAKosh+RYkIl5zJCNZ8/4JncrpdA==", "dev": true, + "optional": true, + "peer": true, "dependencies": { "available-typed-arrays": "^1.0.7", "call-bind": "^1.0.7", @@ -10485,8 +10017,8 @@ "license": "Apache-2.0", "dependencies": { "@actions/artifact": "^2.1.11", - "@actions/cache": "^3.2.4", - "@actions/core": "^1.10.1", + "@actions/cache": "^4.0.0", + "@actions/core": "^1.11.1", "@actions/exec": "^1.1.0", "@actions/github": "^6.0.0", "@actions/tool-cache": "^2.0.1", @@ -10498,21 +10030,20 @@ "jszip": "^3.10.1" }, "devDependencies": { - "@types/jest": "^29.5.12", - "@types/node": "^22.5.2", - "@typescript-eslint/parser": "^7.18.0", - "axios": "^1.7.7", - "esbuild": "0.23.1", + "@types/jest": "^29.5.14", + "@types/node": "^22.10.1", + "@typescript-eslint/parser": "^8.18.0", + "axios": "^1.7.9", + "esbuild": "0.24.0", "eslint": "^8.57.1", - "eslint-import-resolver-typescript": "^3.6.3", 
- "eslint-plugin-github": "^5.0.1", - "eslint-plugin-jest": "^28.8.2", + "eslint-import-resolver-typescript": "^3.7.0", + "eslint-plugin-jest": "^28.9.0", "jest": "*", "js-yaml": "^4.1.0", - "nock": "^13.5.5", - "prettier": "3.3.3", + "nock": "^13.5.6", + "prettier": "3.4.2", "ts-jest": "*", - "typescript": "^5.5.4" + "typescript": "^5.7.2" } }, "vsts": { @@ -10525,18 +10056,17 @@ "azure-pipelines-tool-lib": "^2.0.8" }, "devDependencies": { - "@types/node": "^22.5.2", - "@typescript-eslint/parser": "^7.18.0", - "esbuild": "0.23.1", + "@types/node": "^22.10.1", + "@typescript-eslint/parser": "^8.18.0", + "esbuild": "0.24.0", "eslint": "^8.57.1", - "eslint-plugin-github": "^5.0.1", - "eslint-plugin-jest": "^28.8.2", + "eslint-plugin-jest": "^28.9.0", "jest": "^29.7.0", "js-yaml": "^4.1.0", - "prettier": "3.3.3", + "prettier": "3.4.2", "sync-request": "^6.1.0", "ts-jest": "^29.2.5", - "typescript": "^5.5.4" + "typescript": "^5.7.2" } } } diff --git a/package.json b/package.json index 5a06c5cd..73686d32 100644 --- a/package.json +++ b/package.json @@ -35,12 +35,12 @@ ], "devDependencies": { "eslint": "^8.57.1", - "eslint-plugin-github": "^5.0.1", - "eslint-plugin-jest": "^28.8.2", + "eslint-plugin-jest": "^28.9.0", + "@typescript-eslint/eslint-plugin": "^8.18.0", "eslint-plugin-prettier": "^5.2.1", - "prettier": "3.3.3", + "prettier": "3.4.2", "prettier-eslint": "^16.3.0", "ts-node": "^10.9.2", - "typescript": "^5.5.4" + "typescript": "^5.7.2" } } diff --git a/scan/.eslintignore b/scan/.eslintignore deleted file mode 100644 index 3c492bd2..00000000 --- a/scan/.eslintignore +++ /dev/null @@ -1,5 +0,0 @@ -dist/ -lib/ -node_modules/ -jest.config.js -__tests__ \ No newline at end of file diff --git a/scan/.eslintrc.json b/scan/.eslintrc.json deleted file mode 100644 index 98d0bd73..00000000 --- a/scan/.eslintrc.json +++ /dev/null @@ -1,55 +0,0 @@ -{ - "plugins": ["jest", "@typescript-eslint"], - "extends": ["plugin:github/recommended"], - "parser": "@typescript-eslint/parser", - "parserOptions": { - "ecmaVersion": 9, - "sourceType": "module", - "project": "tsconfig.json" - }, - "rules": { - "i18n-text/no-en": "off", - "eslint-comments/no-use": "off", - "import/no-namespace": "off", - "no-unused-vars": "off", - "@typescript-eslint/no-unused-vars": "error", - "@typescript-eslint/explicit-member-accessibility": ["error", {"accessibility": "no-public"}], - "@typescript-eslint/no-require-imports": "error", - "@typescript-eslint/array-type": "error", - "@typescript-eslint/await-thenable": "error", - "@typescript-eslint/ban-ts-comment": "error", - "camelcase": "off", - "@typescript-eslint/consistent-type-assertions": "error", - "@typescript-eslint/explicit-function-return-type": ["error", {"allowExpressions": true}], - "@typescript-eslint/func-call-spacing": ["error", "never"], - "@typescript-eslint/no-array-constructor": "error", - "@typescript-eslint/no-empty-interface": "error", - "@typescript-eslint/no-explicit-any": "error", - "@typescript-eslint/no-extraneous-class": "error", - "@typescript-eslint/no-for-in-array": "error", - "@typescript-eslint/no-inferrable-types": "error", - "@typescript-eslint/no-misused-new": "error", - "@typescript-eslint/no-namespace": "error", - "@typescript-eslint/no-non-null-assertion": "warn", - "@typescript-eslint/no-unnecessary-qualifier": "error", - "@typescript-eslint/no-unnecessary-type-assertion": "error", - "@typescript-eslint/no-useless-constructor": "error", - "@typescript-eslint/no-var-requires": "error", - "@typescript-eslint/prefer-for-of": "warn", - 
"@typescript-eslint/prefer-function-type": "warn", - "@typescript-eslint/prefer-includes": "error", - "@typescript-eslint/prefer-string-starts-ends-with": "error", - "@typescript-eslint/promise-function-async": "error", - "@typescript-eslint/require-array-sort-compare": "error", - "@typescript-eslint/restrict-plus-operands": "error", - "semi": "off", - "@typescript-eslint/semi": ["error", "never"], - "@typescript-eslint/type-annotation-spacing": "error", - "@typescript-eslint/unbound-method": "error" - }, - "env": { - "node": true, - "es6": true, - "jest/globals": true - } - } \ No newline at end of file diff --git a/scan/__tests__/data/some.sarif.json b/scan/__tests__/data/some.sarif.json index 38233785..296000c6 100644 --- a/scan/__tests__/data/some.sarif.json +++ b/scan/__tests__/data/some.sarif.json @@ -8258,10 +8258,16 @@ "\n" ], "properties": { + "qodanaFailureConditions": { + "testCoverageThresholds": { + "totalCoverage": 40, + "freshCoverage": 40 + } + }, "coverage": { - "totalCoverage": 70.0, + "totalCoverage": 45.0, "totalLines": 124.0, - "totalCoveredLines": 87.0 + "totalCoveredLines": 56.0 } } } diff --git a/scan/__tests__/main.test.ts b/scan/__tests__/main.test.ts index 599fe17e..20dea478 100644 --- a/scan/__tests__/main.test.ts +++ b/scan/__tests__/main.test.ts @@ -61,13 +61,18 @@ test('qodana scan command args', () => { test('test sarif with problems to output annotations', () => { const output = annotationsDefaultFixture() - const result = parseSarif('__tests__/data/some.sarif.json') + const defaultProjectDir = '' + const result = parseSarif('__tests__/data/some.sarif.json', defaultProjectDir) expect(result.annotations).toEqual(output) }) test('test sarif with no problems to output annotations', () => { const output = outputEmptyFixture() - const result = parseSarif('__tests__/data/empty.sarif.json') + const defaultProjectDir = '' + const result = parseSarif( + '__tests__/data/empty.sarif.json', + defaultProjectDir + ) expect(result.annotations).toEqual(output) }) @@ -101,16 +106,14 @@ test('test empty summary output', () => { test('test passed coverage output', () => { const result = getCoverageStats( - getCoverageFromSarif('__tests__/data/some.sarif.json'), - 50 + getCoverageFromSarif('__tests__/data/some.sarif.json') ) expect(result).toEqual(passedCoverageFixture()) }) test('test failed coverage output', () => { const result = getCoverageStats( - getCoverageFromSarif('__tests__/data/empty.sarif.json'), - 50 + getCoverageFromSarif('__tests__/data/empty.sarif.json') ) expect(result).toEqual(failedCoverageFixture()) }) @@ -353,8 +356,8 @@ Contact us at [qodana-support@jetbrains.com](mailto:qodana-support@jetbrains.com function passedCoverageFixture(): string { return `\`\`\`diff @@ Code coverage @@ -+ 70% total lines covered -124 lines analyzed, 87 lines covered ++ 45% total lines covered +124 lines analyzed, 56 lines covered # Calculated according to the filters of your coverage tool \`\`\`` } diff --git a/scan/dist/index.js b/scan/dist/index.js index 0c498154..9dd37c09 100644 --- a/scan/dist/index.js +++ b/scan/dist/index.js @@ -74,9 +74,13 @@ var require_command = __commonJS({ "use strict"; var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? function(o, m, k, k2) { if (k2 === void 0) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: /* @__PURE__ */ __name(function() { - return m[k]; - }, "get") }); + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: /* @__PURE__ */ __name(function() { + return m[k]; + }, "get") }; + } + Object.defineProperty(o, k2, desc); } : function(o, m, k, k2) { if (k2 === void 0) k2 = k; o[k2] = m[k]; @@ -90,7 +94,7 @@ var require_command = __commonJS({ if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) { - for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding3(result, mod, k); + for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding3(result, mod, k); } __setModuleDefault3(result, mod); return result; @@ -147,370 +151,29 @@ var require_command = __commonJS({ } }; function escapeData(s) { - return utils_12.toCommandValue(s).replace(/%/g, "%25").replace(/\r/g, "%0D").replace(/\n/g, "%0A"); + return (0, utils_12.toCommandValue)(s).replace(/%/g, "%25").replace(/\r/g, "%0D").replace(/\n/g, "%0A"); } __name(escapeData, "escapeData"); function escapeProperty(s) { - return utils_12.toCommandValue(s).replace(/%/g, "%25").replace(/\r/g, "%0D").replace(/\n/g, "%0A").replace(/:/g, "%3A").replace(/,/g, "%2C"); + return (0, utils_12.toCommandValue)(s).replace(/%/g, "%25").replace(/\r/g, "%0D").replace(/\n/g, "%0A").replace(/:/g, "%3A").replace(/,/g, "%2C"); } __name(escapeProperty, "escapeProperty"); } }); -// ../node_modules/@actions/core/node_modules/uuid/dist/esm-node/rng.js -function rng() { - if (poolPtr > rnds8Pool.length - 16) { - import_crypto.default.randomFillSync(rnds8Pool); - poolPtr = 0; - } - return rnds8Pool.slice(poolPtr, poolPtr += 16); -} -var import_crypto, rnds8Pool, poolPtr; -var init_rng = __esm({ - "../node_modules/@actions/core/node_modules/uuid/dist/esm-node/rng.js"() { - import_crypto = __toESM(require("crypto")); - rnds8Pool = new Uint8Array(256); - poolPtr = rnds8Pool.length; - __name(rng, "rng"); - } -}); - -// ../node_modules/@actions/core/node_modules/uuid/dist/esm-node/regex.js -var regex_default; -var init_regex = __esm({ - "../node_modules/@actions/core/node_modules/uuid/dist/esm-node/regex.js"() { - regex_default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; - } -}); - -// ../node_modules/@actions/core/node_modules/uuid/dist/esm-node/validate.js -function validate(uuid) { - return typeof uuid === "string" && regex_default.test(uuid); -} -var validate_default; -var init_validate = __esm({ - "../node_modules/@actions/core/node_modules/uuid/dist/esm-node/validate.js"() { - init_regex(); - __name(validate, "validate"); - validate_default = validate; - } -}); - -// ../node_modules/@actions/core/node_modules/uuid/dist/esm-node/stringify.js -function stringify(arr, offset = 0) { - const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + "-" + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + "-" + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + "-" + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + "-" + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); - if (!validate_default(uuid)) { - throw TypeError("Stringified UUID is invalid"); - } - return uuid; -} -var byteToHex, stringify_default; -var init_stringify = __esm({ - 
"../node_modules/@actions/core/node_modules/uuid/dist/esm-node/stringify.js"() { - init_validate(); - byteToHex = []; - for (let i = 0; i < 256; ++i) { - byteToHex.push((i + 256).toString(16).substr(1)); - } - __name(stringify, "stringify"); - stringify_default = stringify; - } -}); - -// ../node_modules/@actions/core/node_modules/uuid/dist/esm-node/v1.js -function v1(options, buf, offset) { - let i = buf && offset || 0; - const b = buf || new Array(16); - options = options || {}; - let node = options.node || _nodeId; - let clockseq = options.clockseq !== void 0 ? options.clockseq : _clockseq; - if (node == null || clockseq == null) { - const seedBytes = options.random || (options.rng || rng)(); - if (node == null) { - node = _nodeId = [seedBytes[0] | 1, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; - } - if (clockseq == null) { - clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 16383; - } - } - let msecs = options.msecs !== void 0 ? options.msecs : Date.now(); - let nsecs = options.nsecs !== void 0 ? options.nsecs : _lastNSecs + 1; - const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 1e4; - if (dt < 0 && options.clockseq === void 0) { - clockseq = clockseq + 1 & 16383; - } - if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === void 0) { - nsecs = 0; - } - if (nsecs >= 1e4) { - throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); - } - _lastMSecs = msecs; - _lastNSecs = nsecs; - _clockseq = clockseq; - msecs += 122192928e5; - const tl = ((msecs & 268435455) * 1e4 + nsecs) % 4294967296; - b[i++] = tl >>> 24 & 255; - b[i++] = tl >>> 16 & 255; - b[i++] = tl >>> 8 & 255; - b[i++] = tl & 255; - const tmh = msecs / 4294967296 * 1e4 & 268435455; - b[i++] = tmh >>> 8 & 255; - b[i++] = tmh & 255; - b[i++] = tmh >>> 24 & 15 | 16; - b[i++] = tmh >>> 16 & 255; - b[i++] = clockseq >>> 8 | 128; - b[i++] = clockseq & 255; - for (let n = 0; n < 6; ++n) { - b[i + n] = node[n]; - } - return buf || stringify_default(b); -} -var _nodeId, _clockseq, _lastMSecs, _lastNSecs, v1_default; -var init_v1 = __esm({ - "../node_modules/@actions/core/node_modules/uuid/dist/esm-node/v1.js"() { - init_rng(); - init_stringify(); - _lastMSecs = 0; - _lastNSecs = 0; - __name(v1, "v1"); - v1_default = v1; - } -}); - -// ../node_modules/@actions/core/node_modules/uuid/dist/esm-node/parse.js -function parse(uuid) { - if (!validate_default(uuid)) { - throw TypeError("Invalid UUID"); - } - let v; - const arr = new Uint8Array(16); - arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; - arr[1] = v >>> 16 & 255; - arr[2] = v >>> 8 & 255; - arr[3] = v & 255; - arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; - arr[5] = v & 255; - arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; - arr[7] = v & 255; - arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; - arr[9] = v & 255; - arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 1099511627776 & 255; - arr[11] = v / 4294967296 & 255; - arr[12] = v >>> 24 & 255; - arr[13] = v >>> 16 & 255; - arr[14] = v >>> 8 & 255; - arr[15] = v & 255; - return arr; -} -var parse_default; -var init_parse = __esm({ - "../node_modules/@actions/core/node_modules/uuid/dist/esm-node/parse.js"() { - init_validate(); - __name(parse, "parse"); - parse_default = parse; - } -}); - -// ../node_modules/@actions/core/node_modules/uuid/dist/esm-node/v35.js -function stringToBytes(str) { - str = unescape(encodeURIComponent(str)); - const bytes = []; - for (let i = 0; i < str.length; ++i) { - bytes.push(str.charCodeAt(i)); - } - return bytes; -} 
-function v35_default(name, version4, hashfunc) { - function generateUUID(value, namespace, buf, offset) { - if (typeof value === "string") { - value = stringToBytes(value); - } - if (typeof namespace === "string") { - namespace = parse_default(namespace); - } - if (namespace.length !== 16) { - throw TypeError("Namespace must be array-like (16 iterable integer values, 0-255)"); - } - let bytes = new Uint8Array(16 + value.length); - bytes.set(namespace); - bytes.set(value, namespace.length); - bytes = hashfunc(bytes); - bytes[6] = bytes[6] & 15 | version4; - bytes[8] = bytes[8] & 63 | 128; - if (buf) { - offset = offset || 0; - for (let i = 0; i < 16; ++i) { - buf[offset + i] = bytes[i]; - } - return buf; - } - return stringify_default(bytes); - } - __name(generateUUID, "generateUUID"); - try { - generateUUID.name = name; - } catch (err) { - } - generateUUID.DNS = DNS; - generateUUID.URL = URL2; - return generateUUID; -} -var DNS, URL2; -var init_v35 = __esm({ - "../node_modules/@actions/core/node_modules/uuid/dist/esm-node/v35.js"() { - init_stringify(); - init_parse(); - __name(stringToBytes, "stringToBytes"); - DNS = "6ba7b810-9dad-11d1-80b4-00c04fd430c8"; - URL2 = "6ba7b811-9dad-11d1-80b4-00c04fd430c8"; - __name(v35_default, "default"); - } -}); - -// ../node_modules/@actions/core/node_modules/uuid/dist/esm-node/md5.js -function md5(bytes) { - if (Array.isArray(bytes)) { - bytes = Buffer.from(bytes); - } else if (typeof bytes === "string") { - bytes = Buffer.from(bytes, "utf8"); - } - return import_crypto2.default.createHash("md5").update(bytes).digest(); -} -var import_crypto2, md5_default; -var init_md5 = __esm({ - "../node_modules/@actions/core/node_modules/uuid/dist/esm-node/md5.js"() { - import_crypto2 = __toESM(require("crypto")); - __name(md5, "md5"); - md5_default = md5; - } -}); - -// ../node_modules/@actions/core/node_modules/uuid/dist/esm-node/v3.js -var v3, v3_default; -var init_v3 = __esm({ - "../node_modules/@actions/core/node_modules/uuid/dist/esm-node/v3.js"() { - init_v35(); - init_md5(); - v3 = v35_default("v3", 48, md5_default); - v3_default = v3; - } -}); - -// ../node_modules/@actions/core/node_modules/uuid/dist/esm-node/v4.js -function v4(options, buf, offset) { - options = options || {}; - const rnds = options.random || (options.rng || rng)(); - rnds[6] = rnds[6] & 15 | 64; - rnds[8] = rnds[8] & 63 | 128; - if (buf) { - offset = offset || 0; - for (let i = 0; i < 16; ++i) { - buf[offset + i] = rnds[i]; - } - return buf; - } - return stringify_default(rnds); -} -var v4_default; -var init_v4 = __esm({ - "../node_modules/@actions/core/node_modules/uuid/dist/esm-node/v4.js"() { - init_rng(); - init_stringify(); - __name(v4, "v4"); - v4_default = v4; - } -}); - -// ../node_modules/@actions/core/node_modules/uuid/dist/esm-node/sha1.js -function sha1(bytes) { - if (Array.isArray(bytes)) { - bytes = Buffer.from(bytes); - } else if (typeof bytes === "string") { - bytes = Buffer.from(bytes, "utf8"); - } - return import_crypto3.default.createHash("sha1").update(bytes).digest(); -} -var import_crypto3, sha1_default; -var init_sha1 = __esm({ - "../node_modules/@actions/core/node_modules/uuid/dist/esm-node/sha1.js"() { - import_crypto3 = __toESM(require("crypto")); - __name(sha1, "sha1"); - sha1_default = sha1; - } -}); - -// ../node_modules/@actions/core/node_modules/uuid/dist/esm-node/v5.js -var v5, v5_default; -var init_v5 = __esm({ - "../node_modules/@actions/core/node_modules/uuid/dist/esm-node/v5.js"() { - init_v35(); - init_sha1(); - v5 = v35_default("v5", 80, 
sha1_default); - v5_default = v5; - } -}); - -// ../node_modules/@actions/core/node_modules/uuid/dist/esm-node/nil.js -var nil_default; -var init_nil = __esm({ - "../node_modules/@actions/core/node_modules/uuid/dist/esm-node/nil.js"() { - nil_default = "00000000-0000-0000-0000-000000000000"; - } -}); - -// ../node_modules/@actions/core/node_modules/uuid/dist/esm-node/version.js -function version(uuid) { - if (!validate_default(uuid)) { - throw TypeError("Invalid UUID"); - } - return parseInt(uuid.substr(14, 1), 16); -} -var version_default; -var init_version = __esm({ - "../node_modules/@actions/core/node_modules/uuid/dist/esm-node/version.js"() { - init_validate(); - __name(version, "version"); - version_default = version; - } -}); - -// ../node_modules/@actions/core/node_modules/uuid/dist/esm-node/index.js -var esm_node_exports = {}; -__export(esm_node_exports, { - NIL: () => nil_default, - parse: () => parse_default, - stringify: () => stringify_default, - v1: () => v1_default, - v3: () => v3_default, - v4: () => v4_default, - v5: () => v5_default, - validate: () => validate_default, - version: () => version_default -}); -var init_esm_node = __esm({ - "../node_modules/@actions/core/node_modules/uuid/dist/esm-node/index.js"() { - init_v1(); - init_v3(); - init_v4(); - init_v5(); - init_nil(); - init_version(); - init_validate(); - init_stringify(); - init_parse(); - } -}); - // ../node_modules/@actions/core/lib/file-command.js var require_file_command = __commonJS({ "../node_modules/@actions/core/lib/file-command.js"(exports2) { "use strict"; var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? function(o, m, k, k2) { if (k2 === void 0) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: /* @__PURE__ */ __name(function() { - return m[k]; - }, "get") }); + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: /* @__PURE__ */ __name(function() { + return m[k]; + }, "get") }; + } + Object.defineProperty(o, k2, desc); } : function(o, m, k, k2) { if (k2 === void 0) k2 = k; o[k2] = m[k]; @@ -524,16 +187,16 @@ var require_file_command = __commonJS({ if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) { - for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding3(result, mod, k); + for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding3(result, mod, k); } __setModuleDefault3(result, mod); return result; }; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.prepareKeyValueMessage = exports2.issueFileCommand = void 0; + var crypto4 = __importStar3(require("crypto")); var fs2 = __importStar3(require("fs")); var os = __importStar3(require("os")); - var uuid_1 = (init_esm_node(), __toCommonJS(esm_node_exports)); var utils_12 = require_utils(); function issueFileCommand(command, message) { const filePath = process.env[`GITHUB_${command}`]; @@ -543,15 +206,15 @@ var require_file_command = __commonJS({ if (!fs2.existsSync(filePath)) { throw new Error(`Missing file at path: ${filePath}`); } - fs2.appendFileSync(filePath, `${utils_12.toCommandValue(message)}${os.EOL}`, { + fs2.appendFileSync(filePath, `${(0, utils_12.toCommandValue)(message)}${os.EOL}`, { encoding: "utf8" }); } __name(issueFileCommand, "issueFileCommand"); exports2.issueFileCommand = issueFileCommand; function prepareKeyValueMessage(key, value) { - const delimiter = `ghadelimiter_${uuid_1.v4()}`; - const convertedValue = utils_12.toCommandValue(value); + const delimiter = `ghadelimiter_${crypto4.randomUUID()}`; + const convertedValue = (0, utils_12.toCommandValue)(value); if (key.includes(delimiter)) { throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`); } @@ -1359,7 +1022,7 @@ var require_util = __commonJS({ var { InvalidArgumentError } = require_errors(); var { Blob: Blob2 } = require("buffer"); var nodeUtil = require("util"); - var { stringify: stringify3 } = require("querystring"); + var { stringify: stringify2 } = require("querystring"); var { headerNameLowerCasedRecord } = require_constants(); var [nodeMajor, nodeMinor] = process.versions.node.split(".").map((v) => Number(v)); function nop() { @@ -1377,7 +1040,7 @@ var require_util = __commonJS({ if (url.includes("?") || url.includes("#")) { throw new Error('Query params cannot be passed when url already contains "?" or "#".'); } - const stringified = stringify3(queryParams); + const stringified = stringify2(queryParams); if (stringified) { url += "?" 
+ stringified; } @@ -4104,11 +3767,11 @@ var require_util2 = __commonJS({ var assert = require("assert"); var { isUint8Array } = require("util/types"); var supportedHashes = []; - var crypto7; + var crypto4; try { - crypto7 = require("crypto"); + crypto4 = require("crypto"); const possibleRelevantHashes = ["sha256", "sha384", "sha512"]; - supportedHashes = crypto7.getHashes().filter((hash) => possibleRelevantHashes.includes(hash)); + supportedHashes = crypto4.getHashes().filter((hash) => possibleRelevantHashes.includes(hash)); } catch { } function responseURL(response) { @@ -4409,7 +4072,7 @@ var require_util2 = __commonJS({ } __name(isURLPotentiallyTrustworthy, "isURLPotentiallyTrustworthy"); function bytesMatch(bytes, metadataList) { - if (crypto7 === void 0) { + if (crypto4 === void 0) { return true; } const parsedMetadata = parseMetadata(metadataList); @@ -4424,7 +4087,7 @@ var require_util2 = __commonJS({ for (const item of metadata) { const algorithm = item.algo; const expectedValue = item.hash; - let actualValue = crypto7.createHash(algorithm).update(bytes).digest("base64"); + let actualValue = crypto4.createHash(algorithm).update(bytes).digest("base64"); if (actualValue[actualValue.length - 1] === "=") { if (actualValue[actualValue.length - 2] === "=") { actualValue = actualValue.slice(0, -2); @@ -11792,7 +11455,7 @@ var require_proxy_agent = __commonJS({ "../node_modules/undici/lib/proxy-agent.js"(exports2, module2) { "use strict"; var { kProxy, kClose, kDestroy, kInterceptors } = require_symbols(); - var { URL: URL4 } = require("url"); + var { URL: URL3 } = require("url"); var Agent = require_agent(); var Pool = require_pool(); var DispatcherBase = require_dispatcher_base(); @@ -11847,7 +11510,7 @@ var require_proxy_agent = __commonJS({ this[kRequestTls] = opts.requestTls; this[kProxyTls] = opts.proxyTls; this[kProxyHeaders] = opts.headers || {}; - const resolvedUrl = new URL4(opts.uri); + const resolvedUrl = new URL3(opts.uri); const { origin, port, host, username, password } = resolvedUrl; if (opts.auth && opts.token) { throw new InvalidArgumentError("opts.auth cannot be used in combination with opts.token"); @@ -11902,7 +11565,7 @@ var require_proxy_agent = __commonJS({ }); } dispatch(opts, handler) { - const { host } = new URL4(opts.origin); + const { host } = new URL3(opts.origin); const headers = buildHeaders(opts.headers); throwIfProxyAuthIsSent(headers); return this[kAgent].dispatch( @@ -16395,7 +16058,7 @@ var require_util6 = __commonJS({ } } __name(validateCookieMaxAge, "validateCookieMaxAge"); - function stringify3(cookie) { + function stringify2(cookie) { if (cookie.name.length === 0) { return null; } @@ -16443,7 +16106,7 @@ var require_util6 = __commonJS({ } return out.join("; "); } - __name(stringify3, "stringify"); + __name(stringify2, "stringify"); var kHeadersListNode; function getHeadersList(headers) { if (headers[kHeadersList]) { @@ -16462,7 +16125,7 @@ var require_util6 = __commonJS({ __name(getHeadersList, "getHeadersList"); module2.exports = { isCTLExcludingHtab, - stringify: stringify3, + stringify: stringify2, getHeadersList }; } @@ -16615,7 +16278,7 @@ var require_cookies = __commonJS({ "../node_modules/undici/lib/cookies/index.js"(exports2, module2) { "use strict"; var { parseSetCookie } = require_parse(); - var { stringify: stringify3, getHeadersList } = require_util6(); + var { stringify: stringify2, getHeadersList } = require_util6(); var { webidl } = require_webidl(); var { Headers: Headers2 } = require_headers(); function getCookies(headers) { @@ 
-16660,9 +16323,9 @@ var require_cookies = __commonJS({ webidl.argumentLengthCheck(arguments, 2, { header: "setCookie" }); webidl.brandCheck(headers, Headers2, { strict: false }); cookie = webidl.converters.Cookie(cookie); - const str = stringify3(cookie); + const str = stringify2(cookie); if (str) { - headers.append("Set-Cookie", stringify3(cookie)); + headers.append("Set-Cookie", stringify2(cookie)); } } __name(setCookie, "setCookie"); @@ -17176,9 +16839,9 @@ var require_connection = __commonJS({ channels.open = diagnosticsChannel.channel("undici:websocket:open"); channels.close = diagnosticsChannel.channel("undici:websocket:close"); channels.socketError = diagnosticsChannel.channel("undici:websocket:socket_error"); - var crypto7; + var crypto4; try { - crypto7 = require("crypto"); + crypto4 = require("crypto"); } catch { } function establishWebSocketConnection(url, protocols, ws, onEstablish, options) { @@ -17197,7 +16860,7 @@ var require_connection = __commonJS({ const headersList = new Headers2(options.headers)[kHeadersList]; request.headersList = headersList; } - const keyValue = crypto7.randomBytes(16).toString("base64"); + const keyValue = crypto4.randomBytes(16).toString("base64"); request.headersList.append("sec-websocket-key", keyValue); request.headersList.append("sec-websocket-version", "13"); for (const protocol of protocols) { @@ -17226,7 +16889,7 @@ var require_connection = __commonJS({ return; } const secWSAccept = response.headersList.get("Sec-WebSocket-Accept"); - const digest = crypto7.createHash("sha1").update(keyValue + uid).digest("base64"); + const digest = crypto4.createHash("sha1").update(keyValue + uid).digest("base64"); if (secWSAccept !== digest) { failWebsocketConnection(ws, "Incorrect hash received in Sec-WebSocket-Accept header."); return; @@ -17310,9 +16973,9 @@ var require_frame = __commonJS({ "../node_modules/undici/lib/websocket/frame.js"(exports2, module2) { "use strict"; var { maxUnsigned16Bit } = require_constants5(); - var crypto7; + var crypto4; try { - crypto7 = require("crypto"); + crypto4 = require("crypto"); } catch { } var WebsocketFrameSend = class { @@ -17324,7 +16987,7 @@ var require_frame = __commonJS({ */ constructor(data) { this.frameData = data; - this.maskKey = crypto7.randomBytes(4); + this.maskKey = crypto4.randomBytes(4); } createFrame(opcode) { const bodyLength = this.frameData?.byteLength ?? 0; @@ -18997,9 +18660,9 @@ var require_oidc_utils = __commonJS({ const encodedAudience = encodeURIComponent(audience); id_token_url = `${id_token_url}&audience=${encodedAudience}`; } - core_1.debug(`ID token url is ${id_token_url}`); + (0, core_1.debug)(`ID token url is ${id_token_url}`); const id_token = yield _OidcClient.getCall(id_token_url); - core_1.setSecret(id_token); + (0, core_1.setSecret)(id_token); return id_token; } catch (error) { throw new Error(`Error message: ${error.message}`); @@ -19315,364 +18978,6 @@ var require_summary = __commonJS({ // ../node_modules/@actions/core/lib/path-utils.js var require_path_utils = __commonJS({ "../node_modules/@actions/core/lib/path-utils.js"(exports2) { - "use strict"; - var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: /* @__PURE__ */ __name(function() { - return m[k]; - }, "get") }); - } : function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - o[k2] = m[k]; - }); - var __setModuleDefault3 = exports2 && exports2.__setModuleDefault || (Object.create ? 
function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - } : function(o, v) { - o["default"] = v; - }); - var __importStar3 = exports2 && exports2.__importStar || function(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding3(result, mod, k); - } - __setModuleDefault3(result, mod); - return result; - }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.toPlatformPath = exports2.toWin32Path = exports2.toPosixPath = void 0; - var path2 = __importStar3(require("path")); - function toPosixPath(pth) { - return pth.replace(/[\\]/g, "/"); - } - __name(toPosixPath, "toPosixPath"); - exports2.toPosixPath = toPosixPath; - function toWin32Path(pth) { - return pth.replace(/[/]/g, "\\"); - } - __name(toWin32Path, "toWin32Path"); - exports2.toWin32Path = toWin32Path; - function toPlatformPath(pth) { - return pth.replace(/[/\\]/g, path2.sep); - } - __name(toPlatformPath, "toPlatformPath"); - exports2.toPlatformPath = toPlatformPath; - } -}); - -// ../node_modules/@actions/core/lib/core.js -var require_core = __commonJS({ - "../node_modules/@actions/core/lib/core.js"(exports2) { - "use strict"; - var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: /* @__PURE__ */ __name(function() { - return m[k]; - }, "get") }); - } : function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - o[k2] = m[k]; - }); - var __setModuleDefault3 = exports2 && exports2.__setModuleDefault || (Object.create ? function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - } : function(o, v) { - o["default"] = v; - }); - var __importStar3 = exports2 && exports2.__importStar || function(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding3(result, mod, k); - } - __setModuleDefault3(result, mod); - return result; - }; - var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? value : new P(function(resolve) { - resolve(value); - }); - } - __name(adopt, "adopt"); - return new (P || (P = Promise))(function(resolve, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); - } - } - __name(fulfilled, "fulfilled"); - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); - } - } - __name(rejected, "rejected"); - function step(result) { - result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); - } - __name(step, "step"); - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); - }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.getIDToken = exports2.getState = exports2.saveState = exports2.group = exports2.endGroup = exports2.startGroup = exports2.info = exports2.notice = exports2.warning = exports2.error = exports2.debug = exports2.isDebug = exports2.setFailed = exports2.setCommandEcho = exports2.setOutput = exports2.getBooleanInput = exports2.getMultilineInput = exports2.getInput = exports2.addPath = exports2.setSecret = exports2.exportVariable = exports2.ExitCode = void 0; - var command_1 = require_command(); - var file_command_1 = require_file_command(); - var utils_12 = require_utils(); - var os = __importStar3(require("os")); - var path2 = __importStar3(require("path")); - var oidc_utils_1 = require_oidc_utils(); - var ExitCode; - (function(ExitCode2) { - ExitCode2[ExitCode2["Success"] = 0] = "Success"; - ExitCode2[ExitCode2["Failure"] = 1] = "Failure"; - })(ExitCode = exports2.ExitCode || (exports2.ExitCode = {})); - function exportVariable(name, val) { - const convertedVal = utils_12.toCommandValue(val); - process.env[name] = convertedVal; - const filePath = process.env["GITHUB_ENV"] || ""; - if (filePath) { - return file_command_1.issueFileCommand("ENV", file_command_1.prepareKeyValueMessage(name, val)); - } - command_1.issueCommand("set-env", { name }, convertedVal); - } - __name(exportVariable, "exportVariable"); - exports2.exportVariable = exportVariable; - function setSecret(secret) { - command_1.issueCommand("add-mask", {}, secret); - } - __name(setSecret, "setSecret"); - exports2.setSecret = setSecret; - function addPath(inputPath) { - const filePath = process.env["GITHUB_PATH"] || ""; - if (filePath) { - file_command_1.issueFileCommand("PATH", inputPath); - } else { - command_1.issueCommand("add-path", {}, inputPath); - } - process.env["PATH"] = `${inputPath}${path2.delimiter}${process.env["PATH"]}`; - } - __name(addPath, "addPath"); - exports2.addPath = addPath; - function getInput(name, options) { - const val = process.env[`INPUT_${name.replace(/ /g, "_").toUpperCase()}`] || ""; - if (options && options.required && !val) { - throw new Error(`Input required and not supplied: ${name}`); - } - if (options && options.trimWhitespace === false) { - return val; - } - return val.trim(); - } - __name(getInput, "getInput"); - exports2.getInput = getInput; - function getMultilineInput(name, options) { - const inputs = getInput(name, options).split("\n").filter((x) => x !== ""); - if (options && options.trimWhitespace === false) { - return inputs; - } - return inputs.map((input) => input.trim()); - } - __name(getMultilineInput, "getMultilineInput"); - exports2.getMultilineInput = getMultilineInput; - function getBooleanInput(name, options) { - const trueValue = ["true", "True", "TRUE"]; - const falseValue = ["false", "False", "FALSE"]; - const val = getInput(name, options); - if (trueValue.includes(val)) - return true; - if (falseValue.includes(val)) - return false; - throw new TypeError(`Input does not meet YAML 1.2 "Core Schema" specification: ${name} -Support boolean input list: \`true | True | TRUE | false | False | FALSE\``); - } - __name(getBooleanInput, "getBooleanInput"); - exports2.getBooleanInput = getBooleanInput; - function setOutput(name, value) { - const filePath = process.env["GITHUB_OUTPUT"] || ""; - if (filePath) { - return 
file_command_1.issueFileCommand("OUTPUT", file_command_1.prepareKeyValueMessage(name, value)); - } - process.stdout.write(os.EOL); - command_1.issueCommand("set-output", { name }, utils_12.toCommandValue(value)); - } - __name(setOutput, "setOutput"); - exports2.setOutput = setOutput; - function setCommandEcho(enabled) { - command_1.issue("echo", enabled ? "on" : "off"); - } - __name(setCommandEcho, "setCommandEcho"); - exports2.setCommandEcho = setCommandEcho; - function setFailed2(message) { - process.exitCode = ExitCode.Failure; - error(message); - } - __name(setFailed2, "setFailed"); - exports2.setFailed = setFailed2; - function isDebug() { - return process.env["RUNNER_DEBUG"] === "1"; - } - __name(isDebug, "isDebug"); - exports2.isDebug = isDebug; - function debug(message) { - command_1.issueCommand("debug", {}, message); - } - __name(debug, "debug"); - exports2.debug = debug; - function error(message, properties = {}) { - command_1.issueCommand("error", utils_12.toCommandProperties(properties), message instanceof Error ? message.toString() : message); - } - __name(error, "error"); - exports2.error = error; - function warning(message, properties = {}) { - command_1.issueCommand("warning", utils_12.toCommandProperties(properties), message instanceof Error ? message.toString() : message); - } - __name(warning, "warning"); - exports2.warning = warning; - function notice(message, properties = {}) { - command_1.issueCommand("notice", utils_12.toCommandProperties(properties), message instanceof Error ? message.toString() : message); - } - __name(notice, "notice"); - exports2.notice = notice; - function info(message) { - process.stdout.write(message + os.EOL); - } - __name(info, "info"); - exports2.info = info; - function startGroup(name) { - command_1.issue("group", name); - } - __name(startGroup, "startGroup"); - exports2.startGroup = startGroup; - function endGroup() { - command_1.issue("endgroup"); - } - __name(endGroup, "endGroup"); - exports2.endGroup = endGroup; - function group(name, fn) { - return __awaiter3(this, void 0, void 0, function* () { - startGroup(name); - let result; - try { - result = yield fn(); - } finally { - endGroup(); - } - return result; - }); - } - __name(group, "group"); - exports2.group = group; - function saveState(name, value) { - const filePath = process.env["GITHUB_STATE"] || ""; - if (filePath) { - return file_command_1.issueFileCommand("STATE", file_command_1.prepareKeyValueMessage(name, value)); - } - command_1.issueCommand("save-state", { name }, utils_12.toCommandValue(value)); - } - __name(saveState, "saveState"); - exports2.saveState = saveState; - function getState(name) { - return process.env[`STATE_${name}`] || ""; - } - __name(getState, "getState"); - exports2.getState = getState; - function getIDToken(aud) { - return __awaiter3(this, void 0, void 0, function* () { - return yield oidc_utils_1.OidcClient.getIDToken(aud); - }); - } - __name(getIDToken, "getIDToken"); - exports2.getIDToken = getIDToken; - var summary_1 = require_summary(); - Object.defineProperty(exports2, "summary", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return summary_1.summary; - }, "get") }); - var summary_2 = require_summary(); - Object.defineProperty(exports2, "markdownSummary", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return summary_2.markdownSummary; - }, "get") }); - var path_utils_1 = require_path_utils(); - Object.defineProperty(exports2, "toPosixPath", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return 
path_utils_1.toPosixPath; - }, "get") }); - Object.defineProperty(exports2, "toWin32Path", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return path_utils_1.toWin32Path; - }, "get") }); - Object.defineProperty(exports2, "toPlatformPath", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return path_utils_1.toPlatformPath; - }, "get") }); - } -}); - -// ../node_modules/@actions/github/lib/context.js -var require_context = __commonJS({ - "../node_modules/@actions/github/lib/context.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.Context = void 0; - var fs_1 = require("fs"); - var os_1 = require("os"); - var Context = class { - static { - __name(this, "Context"); - } - /** - * Hydrate the context from the environment - */ - constructor() { - var _a, _b, _c; - this.payload = {}; - if (process.env.GITHUB_EVENT_PATH) { - if ((0, fs_1.existsSync)(process.env.GITHUB_EVENT_PATH)) { - this.payload = JSON.parse((0, fs_1.readFileSync)(process.env.GITHUB_EVENT_PATH, { encoding: "utf8" })); - } else { - const path2 = process.env.GITHUB_EVENT_PATH; - process.stdout.write(`GITHUB_EVENT_PATH ${path2} does not exist${os_1.EOL}`); - } - } - this.eventName = process.env.GITHUB_EVENT_NAME; - this.sha = process.env.GITHUB_SHA; - this.ref = process.env.GITHUB_REF; - this.workflow = process.env.GITHUB_WORKFLOW; - this.action = process.env.GITHUB_ACTION; - this.actor = process.env.GITHUB_ACTOR; - this.job = process.env.GITHUB_JOB; - this.runNumber = parseInt(process.env.GITHUB_RUN_NUMBER, 10); - this.runId = parseInt(process.env.GITHUB_RUN_ID, 10); - this.apiUrl = (_a = process.env.GITHUB_API_URL) !== null && _a !== void 0 ? _a : `https://api.github.com`; - this.serverUrl = (_b = process.env.GITHUB_SERVER_URL) !== null && _b !== void 0 ? _b : `https://github.com`; - this.graphqlUrl = (_c = process.env.GITHUB_GRAPHQL_URL) !== null && _c !== void 0 ? _c : `https://api.github.com/graphql`; - } - get issue() { - const payload = this.payload; - return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number }); - } - get repo() { - if (process.env.GITHUB_REPOSITORY) { - const [owner, repo] = process.env.GITHUB_REPOSITORY.split("/"); - return { owner, repo }; - } - if (this.payload.repository) { - return { - owner: this.payload.repository.owner.login, - repo: this.payload.repository.name - }; - } - throw new Error("context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'"); - } - }; - exports2.Context = Context; - } -}); - -// ../node_modules/@actions/github/lib/internal/utils.js -var require_utils3 = __commonJS({ - "../node_modules/@actions/github/lib/internal/utils.js"(exports2) { "use strict"; var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? 
function(o, m, k, k2) { if (k2 === void 0) k2 = k; @@ -19701,6 +19006,54 @@ var require_utils3 = __commonJS({ __setModuleDefault3(result, mod); return result; }; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.toPlatformPath = exports2.toWin32Path = exports2.toPosixPath = void 0; + var path2 = __importStar3(require("path")); + function toPosixPath(pth) { + return pth.replace(/[\\]/g, "/"); + } + __name(toPosixPath, "toPosixPath"); + exports2.toPosixPath = toPosixPath; + function toWin32Path(pth) { + return pth.replace(/[/]/g, "\\"); + } + __name(toWin32Path, "toWin32Path"); + exports2.toWin32Path = toWin32Path; + function toPlatformPath(pth) { + return pth.replace(/[/\\]/g, path2.sep); + } + __name(toPlatformPath, "toPlatformPath"); + exports2.toPlatformPath = toPlatformPath; + } +}); + +// ../node_modules/@actions/io/lib/io-util.js +var require_io_util = __commonJS({ + "../node_modules/@actions/io/lib/io-util.js"(exports2) { + "use strict"; + var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: /* @__PURE__ */ __name(function() { + return m[k]; + }, "get") }); + } : function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; + }); + var __setModuleDefault3 = exports2 && exports2.__setModuleDefault || (Object.create ? function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + } : function(o, v) { + o["default"] = v; + }); + var __importStar3 = exports2 && exports2.__importStar || function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding3(result, mod, k); + } + __setModuleDefault3(result, mod); + return result; + }; var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function(resolve) { @@ -19732,1384 +19085,1718 @@ var require_utils3 = __commonJS({ step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; + var _a; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.getApiBaseUrl = exports2.getProxyFetch = exports2.getProxyAgentDispatcher = exports2.getProxyAgent = exports2.getAuthString = void 0; - var httpClient = __importStar3(require_lib()); - var undici_1 = require_undici(); - function getAuthString(token, options) { - if (!token && !options.auth) { - throw new Error("Parameter token or opts.auth is required"); - } else if (token && options.auth) { - throw new Error("Parameters token and opts.auth may not both be specified"); - } - return typeof options.auth === "string" ? 
options.auth : `token ${token}`; - } - __name(getAuthString, "getAuthString"); - exports2.getAuthString = getAuthString; - function getProxyAgent(destinationUrl) { - const hc = new httpClient.HttpClient(); - return hc.getAgent(destinationUrl); - } - __name(getProxyAgent, "getProxyAgent"); - exports2.getProxyAgent = getProxyAgent; - function getProxyAgentDispatcher(destinationUrl) { - const hc = new httpClient.HttpClient(); - return hc.getAgentDispatcher(destinationUrl); - } - __name(getProxyAgentDispatcher, "getProxyAgentDispatcher"); - exports2.getProxyAgentDispatcher = getProxyAgentDispatcher; - function getProxyFetch(destinationUrl) { - const httpDispatcher = getProxyAgentDispatcher(destinationUrl); - const proxyFetch = /* @__PURE__ */ __name((url, opts) => __awaiter3(this, void 0, void 0, function* () { - return (0, undici_1.fetch)(url, Object.assign(Object.assign({}, opts), { dispatcher: httpDispatcher })); - }), "proxyFetch"); - return proxyFetch; - } - __name(getProxyFetch, "getProxyFetch"); - exports2.getProxyFetch = getProxyFetch; - function getApiBaseUrl() { - return process.env["GITHUB_API_URL"] || "https://api.github.com"; - } - __name(getApiBaseUrl, "getApiBaseUrl"); - exports2.getApiBaseUrl = getApiBaseUrl; - } -}); - -// ../node_modules/@actions/github/node_modules/universal-user-agent/dist-node/index.js -var require_dist_node = __commonJS({ - "../node_modules/@actions/github/node_modules/universal-user-agent/dist-node/index.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - function getUserAgent() { - if (typeof navigator === "object" && "userAgent" in navigator) { - return navigator.userAgent; - } - if (typeof process === "object" && process.version !== void 0) { - return `Node.js/${process.version.substr(1)} (${process.platform}; ${process.arch})`; - } - return ""; - } - __name(getUserAgent, "getUserAgent"); - exports2.getUserAgent = getUserAgent; - } -}); - -// ../node_modules/@actions/github/node_modules/before-after-hook/lib/register.js -var require_register = __commonJS({ - "../node_modules/@actions/github/node_modules/before-after-hook/lib/register.js"(exports2, module2) { - module2.exports = register; - function register(state, name, method, options) { - if (typeof method !== "function") { - throw new Error("method for before hook must be a function"); - } - if (!options) { - options = {}; - } - if (Array.isArray(name)) { - return name.reverse().reduce(function(callback, name2) { - return register.bind(null, state, name2, callback, options); - }, method)(); - } - return Promise.resolve().then(function() { - if (!state.registry[name]) { - return method(options); + exports2.getCmdPath = exports2.tryGetExecutablePath = exports2.isRooted = exports2.isDirectory = exports2.exists = exports2.READONLY = exports2.UV_FS_O_EXLOCK = exports2.IS_WINDOWS = exports2.unlink = exports2.symlink = exports2.stat = exports2.rmdir = exports2.rm = exports2.rename = exports2.readlink = exports2.readdir = exports2.open = exports2.mkdir = exports2.lstat = exports2.copyFile = exports2.chmod = void 0; + var fs2 = __importStar3(require("fs")); + var path2 = __importStar3(require("path")); + _a = fs2.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.readlink = _a.readlink, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, 
exports2.unlink = _a.unlink; + exports2.IS_WINDOWS = process.platform === "win32"; + exports2.UV_FS_O_EXLOCK = 268435456; + exports2.READONLY = fs2.constants.O_RDONLY; + function exists(fsPath) { + return __awaiter3(this, void 0, void 0, function* () { + try { + yield exports2.stat(fsPath); + } catch (err) { + if (err.code === "ENOENT") { + return false; + } + throw err; } - return state.registry[name].reduce(function(method2, registered) { - return registered.hook.bind(null, method2, options); - }, method)(); + return true; }); } - __name(register, "register"); - } -}); - -// ../node_modules/@actions/github/node_modules/before-after-hook/lib/add.js -var require_add = __commonJS({ - "../node_modules/@actions/github/node_modules/before-after-hook/lib/add.js"(exports2, module2) { - module2.exports = addHook; - function addHook(state, kind, name, hook) { - var orig = hook; - if (!state.registry[name]) { - state.registry[name] = []; - } - if (kind === "before") { - hook = /* @__PURE__ */ __name(function(method, options) { - return Promise.resolve().then(orig.bind(null, options)).then(method.bind(null, options)); - }, "hook"); - } - if (kind === "after") { - hook = /* @__PURE__ */ __name(function(method, options) { - var result; - return Promise.resolve().then(method.bind(null, options)).then(function(result_) { - result = result_; - return orig(result, options); - }).then(function() { - return result; - }); - }, "hook"); - } - if (kind === "error") { - hook = /* @__PURE__ */ __name(function(method, options) { - return Promise.resolve().then(method.bind(null, options)).catch(function(error) { - return orig(error, options); - }); - }, "hook"); - } - state.registry[name].push({ - hook, - orig + __name(exists, "exists"); + exports2.exists = exists; + function isDirectory(fsPath, useStat = false) { + return __awaiter3(this, void 0, void 0, function* () { + const stats = useStat ? yield exports2.stat(fsPath) : yield exports2.lstat(fsPath); + return stats.isDirectory(); }); } - __name(addHook, "addHook"); - } -}); - -// ../node_modules/@actions/github/node_modules/before-after-hook/lib/remove.js -var require_remove = __commonJS({ - "../node_modules/@actions/github/node_modules/before-after-hook/lib/remove.js"(exports2, module2) { - module2.exports = removeHook; - function removeHook(state, name, method) { - if (!state.registry[name]) { - return; + __name(isDirectory, "isDirectory"); + exports2.isDirectory = isDirectory; + function isRooted(p) { + p = normalizeSeparators(p); + if (!p) { + throw new Error('isRooted() parameter "p" cannot be empty'); } - var index = state.registry[name].map(function(registered) { - return registered.orig; - }).indexOf(method); - if (index === -1) { - return; + if (exports2.IS_WINDOWS) { + return p.startsWith("\\") || /^[A-Z]:/i.test(p); } - state.registry[name].splice(index, 1); + return p.startsWith("/"); } - __name(removeHook, "removeHook"); - } -}); - -// ../node_modules/@actions/github/node_modules/before-after-hook/index.js -var require_before_after_hook = __commonJS({ - "../node_modules/@actions/github/node_modules/before-after-hook/index.js"(exports2, module2) { - var register = require_register(); - var addHook = require_add(); - var removeHook = require_remove(); - var bind = Function.bind; - var bindable = bind.bind(bind); - function bindApi(hook, state, name) { - var removeHookRef = bindable(removeHook, null).apply( - null, - name ? 
[state, name] : [state] - ); - hook.api = { remove: removeHookRef }; - hook.remove = removeHookRef; - ["before", "error", "after", "wrap"].forEach(function(kind) { - var args = name ? [state, kind, name] : [state, kind]; - hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args); + __name(isRooted, "isRooted"); + exports2.isRooted = isRooted; + function tryGetExecutablePath(filePath, extensions) { + return __awaiter3(this, void 0, void 0, function* () { + let stats = void 0; + try { + stats = yield exports2.stat(filePath); + } catch (err) { + if (err.code !== "ENOENT") { + console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`); + } + } + if (stats && stats.isFile()) { + if (exports2.IS_WINDOWS) { + const upperExt = path2.extname(filePath).toUpperCase(); + if (extensions.some((validExt) => validExt.toUpperCase() === upperExt)) { + return filePath; + } + } else { + if (isUnixExecutable(stats)) { + return filePath; + } + } + } + const originalFilePath = filePath; + for (const extension of extensions) { + filePath = originalFilePath + extension; + stats = void 0; + try { + stats = yield exports2.stat(filePath); + } catch (err) { + if (err.code !== "ENOENT") { + console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`); + } + } + if (stats && stats.isFile()) { + if (exports2.IS_WINDOWS) { + try { + const directory = path2.dirname(filePath); + const upperName = path2.basename(filePath).toUpperCase(); + for (const actualName of yield exports2.readdir(directory)) { + if (upperName === actualName.toUpperCase()) { + filePath = path2.join(directory, actualName); + break; + } + } + } catch (err) { + console.log(`Unexpected error attempting to determine the actual case of the file '${filePath}': ${err}`); + } + return filePath; + } else { + if (isUnixExecutable(stats)) { + return filePath; + } + } + } + } + return ""; }); } - __name(bindApi, "bindApi"); - function HookSingular() { - var singularHookName = "h"; - var singularHookState = { - registry: {} - }; - var singularHook = register.bind(null, singularHookState, singularHookName); - bindApi(singularHook, singularHookState, singularHookName); - return singularHook; + __name(tryGetExecutablePath, "tryGetExecutablePath"); + exports2.tryGetExecutablePath = tryGetExecutablePath; + function normalizeSeparators(p) { + p = p || ""; + if (exports2.IS_WINDOWS) { + p = p.replace(/\//g, "\\"); + return p.replace(/\\\\+/g, "\\"); + } + return p.replace(/\/\/+/g, "/"); } - __name(HookSingular, "HookSingular"); - function HookCollection() { - var state = { - registry: {} - }; - var hook = register.bind(null, state); - bindApi(hook, state); - return hook; + __name(normalizeSeparators, "normalizeSeparators"); + function isUnixExecutable(stats) { + return (stats.mode & 1) > 0 || (stats.mode & 8) > 0 && stats.gid === process.getgid() || (stats.mode & 64) > 0 && stats.uid === process.getuid(); } - __name(HookCollection, "HookCollection"); - var collectionHookDeprecationMessageDisplayed = false; - function Hook() { - if (!collectionHookDeprecationMessageDisplayed) { - console.warn( - '[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". Read more: https://git.io/upgrade-before-after-hook-to-1.4' - ); - collectionHookDeprecationMessageDisplayed = true; - } - return HookCollection(); + __name(isUnixExecutable, "isUnixExecutable"); + function getCmdPath() { + var _a2; + return (_a2 = process.env["COMSPEC"]) !== null && _a2 !== void 0 ? 
_a2 : `cmd.exe`; } - __name(Hook, "Hook"); - Hook.Singular = HookSingular.bind(); - Hook.Collection = HookCollection.bind(); - module2.exports = Hook; - module2.exports.Hook = Hook; - module2.exports.Singular = Hook.Singular; - module2.exports.Collection = Hook.Collection; + __name(getCmdPath, "getCmdPath"); + exports2.getCmdPath = getCmdPath; } }); -// ../node_modules/@actions/github/node_modules/@octokit/endpoint/dist-node/index.js -var require_dist_node2 = __commonJS({ - "../node_modules/@actions/github/node_modules/@octokit/endpoint/dist-node/index.js"(exports2, module2) { +// ../node_modules/@actions/io/lib/io.js +var require_io = __commonJS({ + "../node_modules/@actions/io/lib/io.js"(exports2) { "use strict"; - var __defProp2 = Object.defineProperty; - var __getOwnPropDesc2 = Object.getOwnPropertyDescriptor; - var __getOwnPropNames2 = Object.getOwnPropertyNames; - var __hasOwnProp2 = Object.prototype.hasOwnProperty; - var __export2 = /* @__PURE__ */ __name((target, all) => { - for (var name in all) - __defProp2(target, name, { get: all[name], enumerable: true }); - }, "__export"); - var __copyProps2 = /* @__PURE__ */ __name((to, from, except, desc) => { - if (from && typeof from === "object" || typeof from === "function") { - for (let key of __getOwnPropNames2(from)) - if (!__hasOwnProp2.call(to, key) && key !== except) - __defProp2(to, key, { get: /* @__PURE__ */ __name(() => from[key], "get"), enumerable: !(desc = __getOwnPropDesc2(from, key)) || desc.enumerable }); - } - return to; - }, "__copyProps"); - var __toCommonJS2 = /* @__PURE__ */ __name((mod) => __copyProps2(__defProp2({}, "__esModule", { value: true }), mod), "__toCommonJS"); - var dist_src_exports = {}; - __export2(dist_src_exports, { - endpoint: /* @__PURE__ */ __name(() => endpoint, "endpoint") + var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: /* @__PURE__ */ __name(function() { + return m[k]; + }, "get") }); + } : function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; }); - module2.exports = __toCommonJS2(dist_src_exports); - var import_universal_user_agent = require_dist_node(); - var VERSION3 = "9.0.5"; - var userAgent = `octokit-endpoint.js/${VERSION3} ${(0, import_universal_user_agent.getUserAgent)()}`; - var DEFAULTS = { - method: "GET", - baseUrl: "https://api.github.com", - headers: { - accept: "application/vnd.github.v3+json", - "user-agent": userAgent - }, - mediaType: { - format: "" + var __setModuleDefault3 = exports2 && exports2.__setModuleDefault || (Object.create ? function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + } : function(o, v) { + o["default"] = v; + }); + var __importStar3 = exports2 && exports2.__importStar || function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding3(result, mod, k); } + __setModuleDefault3(result, mod); + return result; }; - function lowercaseKeys(object) { - if (!object) { - return {}; + var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? 
value : new P(function(resolve) { + resolve(value); + }); } - return Object.keys(object).reduce((newObj, key) => { - newObj[key.toLowerCase()] = object[key]; - return newObj; - }, {}); - } - __name(lowercaseKeys, "lowercaseKeys"); - function isPlainObject(value) { - if (typeof value !== "object" || value === null) - return false; - if (Object.prototype.toString.call(value) !== "[object Object]") - return false; - const proto = Object.getPrototypeOf(value); - if (proto === null) - return true; - const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor; - return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value); - } - __name(isPlainObject, "isPlainObject"); - function mergeDeep(defaults, options) { - const result = Object.assign({}, defaults); - Object.keys(options).forEach((key) => { - if (isPlainObject(options[key])) { - if (!(key in defaults)) - Object.assign(result, { [key]: options[key] }); - else - result[key] = mergeDeep(defaults[key], options[key]); - } else { - Object.assign(result, { [key]: options[key] }); + __name(adopt, "adopt"); + return new (P || (P = Promise))(function(resolve, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } } - }); - return result; - } - __name(mergeDeep, "mergeDeep"); - function removeUndefinedProperties(obj) { - for (const key in obj) { - if (obj[key] === void 0) { - delete obj[key]; + __name(fulfilled, "fulfilled"); + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); + } } - } - return obj; - } - __name(removeUndefinedProperties, "removeUndefinedProperties"); - function merge(defaults, route, options) { - if (typeof route === "string") { - let [method, url] = route.split(" "); - options = Object.assign(url ? { method, url } : { url: method }, options); - } else { - options = Object.assign({}, route); - } - options.headers = lowercaseKeys(options.headers); - removeUndefinedProperties(options); - removeUndefinedProperties(options.headers); - const mergedOptions = mergeDeep(defaults || {}, options); - if (options.url === "/graphql") { - if (defaults && defaults.mediaType.previews?.length) { - mergedOptions.mediaType.previews = defaults.mediaType.previews.filter( - (preview) => !mergedOptions.mediaType.previews.includes(preview) - ).concat(mergedOptions.mediaType.previews); + __name(rejected, "rejected"); + function step(result) { + result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - mergedOptions.mediaType.previews = (mergedOptions.mediaType.previews || []).map((preview) => preview.replace(/-preview/, "")); - } - return mergedOptions; - } - __name(merge, "merge"); - function addQueryParameters(url, parameters) { - const separator = /\?/.test(url) ? 
"&" : "?"; - const names = Object.keys(parameters); - if (names.length === 0) { - return url; - } - return url + separator + names.map((name) => { - if (name === "q") { - return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+"); + __name(step, "step"); + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.findInPath = exports2.which = exports2.mkdirP = exports2.rmRF = exports2.mv = exports2.cp = void 0; + var assert_1 = require("assert"); + var path2 = __importStar3(require("path")); + var ioUtil = __importStar3(require_io_util()); + function cp(source, dest, options = {}) { + return __awaiter3(this, void 0, void 0, function* () { + const { force, recursive, copySourceDirectory } = readCopyOptions(options); + const destStat = (yield ioUtil.exists(dest)) ? yield ioUtil.stat(dest) : null; + if (destStat && destStat.isFile() && !force) { + return; } - return `${name}=${encodeURIComponent(parameters[name])}`; - }).join("&"); - } - __name(addQueryParameters, "addQueryParameters"); - var urlVariableRegex = /\{[^}]+\}/g; - function removeNonChars(variableName) { - return variableName.replace(/^\W+|\W+$/g, "").split(/,/); - } - __name(removeNonChars, "removeNonChars"); - function extractUrlVariableNames(url) { - const matches = url.match(urlVariableRegex); - if (!matches) { - return []; - } - return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []); - } - __name(extractUrlVariableNames, "extractUrlVariableNames"); - function omit(object, keysToOmit) { - const result = { __proto__: null }; - for (const key of Object.keys(object)) { - if (keysToOmit.indexOf(key) === -1) { - result[key] = object[key]; + const newDest = destStat && destStat.isDirectory() && copySourceDirectory ? path2.join(dest, path2.basename(source)) : dest; + if (!(yield ioUtil.exists(source))) { + throw new Error(`no such file or directory: ${source}`); } - } - return result; - } - __name(omit, "omit"); - function encodeReserved(str) { - return str.split(/(%[0-9A-Fa-f]{2})/g).map(function(part) { - if (!/%[0-9A-Fa-f]/.test(part)) { - part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]"); + const sourceStat = yield ioUtil.stat(source); + if (sourceStat.isDirectory()) { + if (!recursive) { + throw new Error(`Failed to copy. ${source} is a directory, but tried to copy without recursive flag.`); + } else { + yield cpDirRecursive(source, newDest, 0, force); + } + } else { + if (path2.relative(source, newDest) === "") { + throw new Error(`'${newDest}' and '${source}' are the same file`); + } + yield copyFile(source, newDest, force); } - return part; - }).join(""); - } - __name(encodeReserved, "encodeReserved"); - function encodeUnreserved(str) { - return encodeURIComponent(str).replace(/[!'()*]/g, function(c) { - return "%" + c.charCodeAt(0).toString(16).toUpperCase(); }); } - __name(encodeUnreserved, "encodeUnreserved"); - function encodeValue(operator, value, key) { - value = operator === "+" || operator === "#" ? 
encodeReserved(value) : encodeUnreserved(value); - if (key) { - return encodeUnreserved(key) + "=" + value; - } else { - return value; - } - } - __name(encodeValue, "encodeValue"); - function isDefined(value) { - return value !== void 0 && value !== null; - } - __name(isDefined, "isDefined"); - function isKeyOperator(operator) { - return operator === ";" || operator === "&" || operator === "?"; - } - __name(isKeyOperator, "isKeyOperator"); - function getValues(context2, operator, key, modifier) { - var value = context2[key], result = []; - if (isDefined(value) && value !== "") { - if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { - value = value.toString(); - if (modifier && modifier !== "*") { - value = value.substring(0, parseInt(modifier, 10)); + __name(cp, "cp"); + exports2.cp = cp; + function mv(source, dest, options = {}) { + return __awaiter3(this, void 0, void 0, function* () { + if (yield ioUtil.exists(dest)) { + let destExists = true; + if (yield ioUtil.isDirectory(dest)) { + dest = path2.join(dest, path2.basename(source)); + destExists = yield ioUtil.exists(dest); } - result.push( - encodeValue(operator, value, isKeyOperator(operator) ? key : "") - ); - } else { - if (modifier === "*") { - if (Array.isArray(value)) { - value.filter(isDefined).forEach(function(value2) { - result.push( - encodeValue(operator, value2, isKeyOperator(operator) ? key : "") - ); - }); - } else { - Object.keys(value).forEach(function(k) { - if (isDefined(value[k])) { - result.push(encodeValue(operator, value[k], k)); - } - }); - } - } else { - const tmp = []; - if (Array.isArray(value)) { - value.filter(isDefined).forEach(function(value2) { - tmp.push(encodeValue(operator, value2)); - }); + if (destExists) { + if (options.force == null || options.force) { + yield rmRF(dest); } else { - Object.keys(value).forEach(function(k) { - if (isDefined(value[k])) { - tmp.push(encodeUnreserved(k)); - tmp.push(encodeValue(operator, value[k].toString())); - } - }); - } - if (isKeyOperator(operator)) { - result.push(encodeUnreserved(key) + "=" + tmp.join(",")); - } else if (tmp.length !== 0) { - result.push(tmp.join(",")); + throw new Error("Destination already exists"); } } } - } else { - if (operator === ";") { - if (isDefined(value)) { - result.push(encodeUnreserved(key)); + yield mkdirP(path2.dirname(dest)); + yield ioUtil.rename(source, dest); + }); + } + __name(mv, "mv"); + exports2.mv = mv; + function rmRF(inputPath) { + return __awaiter3(this, void 0, void 0, function* () { + if (ioUtil.IS_WINDOWS) { + if (/[*"<>|]/.test(inputPath)) { + throw new Error('File path must not contain `*`, `"`, `<`, `>` or `|` on Windows'); } - } else if (value === "" && (operator === "&" || operator === "?")) { - result.push(encodeUnreserved(key) + "="); - } else if (value === "") { - result.push(""); } - } - return result; + try { + yield ioUtil.rm(inputPath, { + force: true, + maxRetries: 3, + recursive: true, + retryDelay: 300 + }); + } catch (err) { + throw new Error(`File was unable to be removed ${err}`); + } + }); } - __name(getValues, "getValues"); - function parseUrl(template) { - return { - expand: expand.bind(null, template) - }; + __name(rmRF, "rmRF"); + exports2.rmRF = rmRF; + function mkdirP(fsPath) { + return __awaiter3(this, void 0, void 0, function* () { + assert_1.ok(fsPath, "a path argument must be provided"); + yield ioUtil.mkdir(fsPath, { recursive: true }); + }); } - __name(parseUrl, "parseUrl"); - function expand(template, context2) { - var operators = ["+", "#", 
".", "/", ";", "?", "&"]; - template = template.replace( - /\{([^\{\}]+)\}|([^\{\}]+)/g, - function(_2, expression, literal) { - if (expression) { - let operator = ""; - const values = []; - if (operators.indexOf(expression.charAt(0)) !== -1) { - operator = expression.charAt(0); - expression = expression.substr(1); - } - expression.split(/,/g).forEach(function(variable) { - var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); - values.push(getValues(context2, operator, tmp[1], tmp[2] || tmp[3])); - }); - if (operator && operator !== "+") { - var separator = ","; - if (operator === "?") { - separator = "&"; - } else if (operator !== "#") { - separator = operator; - } - return (values.length !== 0 ? operator : "") + values.join(separator); + __name(mkdirP, "mkdirP"); + exports2.mkdirP = mkdirP; + function which(tool, check) { + return __awaiter3(this, void 0, void 0, function* () { + if (!tool) { + throw new Error("parameter 'tool' is required"); + } + if (check) { + const result = yield which(tool, false); + if (!result) { + if (ioUtil.IS_WINDOWS) { + throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also verify the file has a valid extension for an executable file.`); } else { - return values.join(","); + throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also check the file mode to verify the file is executable.`); } - } else { - return encodeReserved(literal); } + return result; } - ); - if (template === "/") { - return template; - } else { - return template.replace(/\/$/, ""); - } + const matches = yield findInPath(tool); + if (matches && matches.length > 0) { + return matches[0]; + } + return ""; + }); } - __name(expand, "expand"); - function parse3(options) { - let method = options.method.toUpperCase(); - let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}"); - let headers = Object.assign({}, options.headers); - let body; - let parameters = omit(options, [ - "method", - "baseUrl", - "url", - "headers", - "request", - "mediaType" - ]); - const urlVariableNames = extractUrlVariableNames(url); - url = parseUrl(url).expand(parameters); - if (!/^http/.test(url)) { - url = options.baseUrl + url; - } - const omittedParameters = Object.keys(options).filter((option) => urlVariableNames.includes(option)).concat("baseUrl"); - const remainingParameters = omit(parameters, omittedParameters); - const isBinaryRequest = /application\/octet-stream/i.test(headers.accept); - if (!isBinaryRequest) { - if (options.mediaType.format) { - headers.accept = headers.accept.split(/,/).map( - (format) => format.replace( - /application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, - `application/vnd$1$2.${options.mediaType.format}` - ) - ).join(","); + __name(which, "which"); + exports2.which = which; + function findInPath(tool) { + return __awaiter3(this, void 0, void 0, function* () { + if (!tool) { + throw new Error("parameter 'tool' is required"); } - if (url.endsWith("/graphql")) { - if (options.mediaType.previews?.length) { - const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || []; - headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map((preview) => { - const format = options.mediaType.format ? 
`.${options.mediaType.format}` : "+json"; - return `application/vnd.github.${preview}-preview${format}`; - }).join(","); + const extensions = []; + if (ioUtil.IS_WINDOWS && process.env["PATHEXT"]) { + for (const extension of process.env["PATHEXT"].split(path2.delimiter)) { + if (extension) { + extensions.push(extension); + } } } - } - if (["GET", "HEAD"].includes(method)) { - url = addQueryParameters(url, remainingParameters); - } else { - if ("data" in remainingParameters) { - body = remainingParameters.data; - } else { - if (Object.keys(remainingParameters).length) { - body = remainingParameters; + if (ioUtil.isRooted(tool)) { + const filePath = yield ioUtil.tryGetExecutablePath(tool, extensions); + if (filePath) { + return [filePath]; } + return []; } - } - if (!headers["content-type"] && typeof body !== "undefined") { - headers["content-type"] = "application/json; charset=utf-8"; - } - if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") { - body = ""; - } - return Object.assign( - { method, url, headers }, - typeof body !== "undefined" ? { body } : null, - options.request ? { request: options.request } : null - ); + if (tool.includes(path2.sep)) { + return []; + } + const directories = []; + if (process.env.PATH) { + for (const p of process.env.PATH.split(path2.delimiter)) { + if (p) { + directories.push(p); + } + } + } + const matches = []; + for (const directory of directories) { + const filePath = yield ioUtil.tryGetExecutablePath(path2.join(directory, tool), extensions); + if (filePath) { + matches.push(filePath); + } + } + return matches; + }); } - __name(parse3, "parse"); - function endpointWithDefaults(defaults, route, options) { - return parse3(merge(defaults, route, options)); + __name(findInPath, "findInPath"); + exports2.findInPath = findInPath; + function readCopyOptions(options) { + const force = options.force == null ? true : options.force; + const recursive = Boolean(options.recursive); + const copySourceDirectory = options.copySourceDirectory == null ? 
true : Boolean(options.copySourceDirectory); + return { force, recursive, copySourceDirectory }; } - __name(endpointWithDefaults, "endpointWithDefaults"); - function withDefaults(oldDefaults, newDefaults) { - const DEFAULTS2 = merge(oldDefaults, newDefaults); - const endpoint2 = endpointWithDefaults.bind(null, DEFAULTS2); - return Object.assign(endpoint2, { - DEFAULTS: DEFAULTS2, - defaults: withDefaults.bind(null, DEFAULTS2), - merge: merge.bind(null, DEFAULTS2), - parse: parse3 + __name(readCopyOptions, "readCopyOptions"); + function cpDirRecursive(sourceDir, destDir, currentDepth, force) { + return __awaiter3(this, void 0, void 0, function* () { + if (currentDepth >= 255) + return; + currentDepth++; + yield mkdirP(destDir); + const files = yield ioUtil.readdir(sourceDir); + for (const fileName of files) { + const srcFile = `${sourceDir}/${fileName}`; + const destFile = `${destDir}/${fileName}`; + const srcFileStat = yield ioUtil.lstat(srcFile); + if (srcFileStat.isDirectory()) { + yield cpDirRecursive(srcFile, destFile, currentDepth, force); + } else { + yield copyFile(srcFile, destFile, force); + } + } + yield ioUtil.chmod(destDir, (yield ioUtil.stat(sourceDir)).mode); }); } - __name(withDefaults, "withDefaults"); - var endpoint = withDefaults(null, DEFAULTS); + __name(cpDirRecursive, "cpDirRecursive"); + function copyFile(srcFile, destFile, force) { + return __awaiter3(this, void 0, void 0, function* () { + if ((yield ioUtil.lstat(srcFile)).isSymbolicLink()) { + try { + yield ioUtil.lstat(destFile); + yield ioUtil.unlink(destFile); + } catch (e) { + if (e.code === "EPERM") { + yield ioUtil.chmod(destFile, "0666"); + yield ioUtil.unlink(destFile); + } + } + const symlinkFull = yield ioUtil.readlink(srcFile); + yield ioUtil.symlink(symlinkFull, destFile, ioUtil.IS_WINDOWS ? "junction" : null); + } else if (!(yield ioUtil.exists(destFile)) || force) { + yield ioUtil.copyFile(srcFile, destFile); + } + }); + } + __name(copyFile, "copyFile"); } }); -// ../node_modules/deprecation/dist-node/index.js -var require_dist_node3 = __commonJS({ - "../node_modules/deprecation/dist-node/index.js"(exports2) { +// ../node_modules/@actions/exec/lib/toolrunner.js +var require_toolrunner = __commonJS({ + "../node_modules/@actions/exec/lib/toolrunner.js"(exports2) { "use strict"; + var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: /* @__PURE__ */ __name(function() { + return m[k]; + }, "get") }); + } : function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; + }); + var __setModuleDefault3 = exports2 && exports2.__setModuleDefault || (Object.create ? function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + } : function(o, v) { + o["default"] = v; + }); + var __importStar3 = exports2 && exports2.__importStar || function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding3(result, mod, k); + } + __setModuleDefault3(result, mod); + return result; + }; + var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? 
value : new P(function(resolve) { + resolve(value); + }); + } + __name(adopt, "adopt"); + return new (P || (P = Promise))(function(resolve, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } + } + __name(fulfilled, "fulfilled"); + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); + } + } + __name(rejected, "rejected"); + function step(result) { + result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); + } + __name(step, "step"); + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; Object.defineProperty(exports2, "__esModule", { value: true }); - var Deprecation = class extends Error { + exports2.argStringToArray = exports2.ToolRunner = void 0; + var os = __importStar3(require("os")); + var events = __importStar3(require("events")); + var child = __importStar3(require("child_process")); + var path2 = __importStar3(require("path")); + var io2 = __importStar3(require_io()); + var ioUtil = __importStar3(require_io_util()); + var timers_1 = require("timers"); + var IS_WINDOWS = process.platform === "win32"; + var ToolRunner = class extends events.EventEmitter { static { - __name(this, "Deprecation"); + __name(this, "ToolRunner"); } - constructor(message) { - super(message); - if (Error.captureStackTrace) { - Error.captureStackTrace(this, this.constructor); + constructor(toolPath, args, options) { + super(); + if (!toolPath) { + throw new Error("Parameter 'toolPath' cannot be null or empty."); } - this.name = "Deprecation"; + this.toolPath = toolPath; + this.args = args || []; + this.options = options || {}; } - }; - exports2.Deprecation = Deprecation; - } -}); - -// ../node_modules/wrappy/wrappy.js -var require_wrappy = __commonJS({ - "../node_modules/wrappy/wrappy.js"(exports2, module2) { - module2.exports = wrappy; - function wrappy(fn, cb) { - if (fn && cb) return wrappy(fn)(cb); - if (typeof fn !== "function") - throw new TypeError("need wrapper function"); - Object.keys(fn).forEach(function(k) { - wrapper[k] = fn[k]; - }); - return wrapper; - function wrapper() { - var args = new Array(arguments.length); - for (var i = 0; i < args.length; i++) { - args[i] = arguments[i]; - } - var ret = fn.apply(this, args); - var cb2 = args[args.length - 1]; - if (typeof ret === "function" && ret !== cb2) { - Object.keys(cb2).forEach(function(k) { - ret[k] = cb2[k]; - }); + _debug(message) { + if (this.options.listeners && this.options.listeners.debug) { + this.options.listeners.debug(message); } - return ret; } - __name(wrapper, "wrapper"); - } - __name(wrappy, "wrappy"); - } -}); - -// ../node_modules/once/once.js -var require_once = __commonJS({ - "../node_modules/once/once.js"(exports2, module2) { - var wrappy = require_wrappy(); - module2.exports = wrappy(once); - module2.exports.strict = wrappy(onceStrict); - once.proto = once(function() { - Object.defineProperty(Function.prototype, "once", { - value: /* @__PURE__ */ __name(function() { - return once(this); - }, "value"), - configurable: true - }); - Object.defineProperty(Function.prototype, "onceStrict", { - value: /* @__PURE__ */ __name(function() { - return onceStrict(this); - }, "value"), - configurable: true - }); - }); - function once(fn) { - var f = /* @__PURE__ */ __name(function() { - if (f.called) return f.value; - f.called = true; - return f.value = fn.apply(this, arguments); - }, "f"); - f.called = false; - return f; - } - __name(once, "once"); - function onceStrict(fn) 
{ - var f = /* @__PURE__ */ __name(function() { - if (f.called) - throw new Error(f.onceError); - f.called = true; - return f.value = fn.apply(this, arguments); - }, "f"); - var name = fn.name || "Function wrapped with `once`"; - f.onceError = name + " shouldn't be called more than once"; - f.called = false; - return f; - } - __name(onceStrict, "onceStrict"); - } -}); - -// ../node_modules/@octokit/request-error/dist-node/index.js -var require_dist_node4 = __commonJS({ - "../node_modules/@octokit/request-error/dist-node/index.js"(exports2, module2) { - "use strict"; - var __create2 = Object.create; - var __defProp2 = Object.defineProperty; - var __getOwnPropDesc2 = Object.getOwnPropertyDescriptor; - var __getOwnPropNames2 = Object.getOwnPropertyNames; - var __getProtoOf2 = Object.getPrototypeOf; - var __hasOwnProp2 = Object.prototype.hasOwnProperty; - var __export2 = /* @__PURE__ */ __name((target, all) => { - for (var name in all) - __defProp2(target, name, { get: all[name], enumerable: true }); - }, "__export"); - var __copyProps2 = /* @__PURE__ */ __name((to, from, except, desc) => { - if (from && typeof from === "object" || typeof from === "function") { - for (let key of __getOwnPropNames2(from)) - if (!__hasOwnProp2.call(to, key) && key !== except) - __defProp2(to, key, { get: /* @__PURE__ */ __name(() => from[key], "get"), enumerable: !(desc = __getOwnPropDesc2(from, key)) || desc.enumerable }); + _getCommandString(options, noPrefix) { + const toolPath = this._getSpawnFileName(); + const args = this._getSpawnArgs(options); + let cmd = noPrefix ? "" : "[command]"; + if (IS_WINDOWS) { + if (this._isCmdFile()) { + cmd += toolPath; + for (const a of args) { + cmd += ` ${a}`; + } + } else if (options.windowsVerbatimArguments) { + cmd += `"${toolPath}"`; + for (const a of args) { + cmd += ` ${a}`; + } + } else { + cmd += this._windowsQuoteCmdArg(toolPath); + for (const a of args) { + cmd += ` ${this._windowsQuoteCmdArg(a)}`; + } + } + } else { + cmd += toolPath; + for (const a of args) { + cmd += ` ${a}`; + } + } + return cmd; } - return to; - }, "__copyProps"); - var __toESM2 = /* @__PURE__ */ __name((mod, isNodeMode, target) => (target = mod != null ? __create2(__getProtoOf2(mod)) : {}, __copyProps2( - // If the importer is in node compatibility mode or this is not an ESM - // file that has been converted to a CommonJS file using a Babel- - // compatible transform (i.e. "__esModule" has not been set), then set - // "default" to the CommonJS "module.exports" for node compatibility. - isNodeMode || !mod || !mod.__esModule ? 
__defProp2(target, "default", { value: mod, enumerable: true }) : target, - mod - )), "__toESM"); - var __toCommonJS2 = /* @__PURE__ */ __name((mod) => __copyProps2(__defProp2({}, "__esModule", { value: true }), mod), "__toCommonJS"); - var dist_src_exports = {}; - __export2(dist_src_exports, { - RequestError: /* @__PURE__ */ __name(() => RequestError, "RequestError") - }); - module2.exports = __toCommonJS2(dist_src_exports); - var import_deprecation = require_dist_node3(); - var import_once = __toESM2(require_once()); - var logOnceCode = (0, import_once.default)((deprecation) => console.warn(deprecation)); - var logOnceHeaders = (0, import_once.default)((deprecation) => console.warn(deprecation)); - var RequestError = class extends Error { - static { - __name(this, "RequestError"); + _processLineBuffer(data, strBuffer, onLine) { + try { + let s = strBuffer + data.toString(); + let n = s.indexOf(os.EOL); + while (n > -1) { + const line = s.substring(0, n); + onLine(line); + s = s.substring(n + os.EOL.length); + n = s.indexOf(os.EOL); + } + return s; + } catch (err) { + this._debug(`error processing line. Failed with error ${err}`); + return ""; + } } - constructor(message, statusCode, options) { - super(message); - if (Error.captureStackTrace) { - Error.captureStackTrace(this, this.constructor); + _getSpawnFileName() { + if (IS_WINDOWS) { + if (this._isCmdFile()) { + return process.env["COMSPEC"] || "cmd.exe"; + } } - this.name = "HttpError"; - this.status = statusCode; - let headers; - if ("headers" in options && typeof options.headers !== "undefined") { - headers = options.headers; + return this.toolPath; + } + _getSpawnArgs(options) { + if (IS_WINDOWS) { + if (this._isCmdFile()) { + let argline = `/D /S /C "${this._windowsQuoteCmdArg(this.toolPath)}`; + for (const a of this.args) { + argline += " "; + argline += options.windowsVerbatimArguments ? a : this._windowsQuoteCmdArg(a); + } + argline += '"'; + return [argline]; + } } - if ("response" in options) { - this.response = options.response; - headers = options.response.headers; + return this.args; + } + _endsWith(str, end) { + return str.endsWith(end); + } + _isCmdFile() { + const upperToolPath = this.toolPath.toUpperCase(); + return this._endsWith(upperToolPath, ".CMD") || this._endsWith(upperToolPath, ".BAT"); + } + _windowsQuoteCmdArg(arg) { + if (!this._isCmdFile()) { + return this._uvQuoteCmdArg(arg); } - const requestCopy = Object.assign({}, options.request); - if (options.request.headers.authorization) { - requestCopy.headers = Object.assign({}, options.request.headers, { - authorization: options.request.headers.authorization.replace( - / .*$/, - " [REDACTED]" - ) - }); + if (!arg) { + return '""'; } - requestCopy.url = requestCopy.url.replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]").replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); - this.request = requestCopy; - Object.defineProperty(this, "code", { - get() { - logOnceCode( - new import_deprecation.Deprecation( - "[@octokit/request-error] `error.code` is deprecated, use `error.status`." 
- )
- );
- return statusCode;
+ const cmdSpecialChars = [
+ " ",
+ "\t",
+ "&",
+ "(",
+ ")",
+ "[",
+ "]",
+ "{",
+ "}",
+ "^",
+ "=",
+ ";",
+ "!",
+ "'",
+ "+",
+ ",",
+ "`",
+ "~",
+ "|",
+ "<",
+ ">",
+ '"'
+ ];
+ let needsQuotes = false;
+ for (const char of arg) {
+ if (cmdSpecialChars.some((x) => x === char)) {
+ needsQuotes = true;
+ break;
 }
- });
- Object.defineProperty(this, "headers", {
- get() {
- logOnceHeaders(
- new import_deprecation.Deprecation(
- "[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`."
- )
- );
- return headers || {};
+ }
+ if (!needsQuotes) {
+ return arg;
+ }
+ let reverse = '"';
+ let quoteHit = true;
+ for (let i = arg.length; i > 0; i--) {
+ reverse += arg[i - 1];
+ if (quoteHit && arg[i - 1] === "\\") {
+ reverse += "\\";
+ } else if (arg[i - 1] === '"') {
+ quoteHit = true;
+ reverse += '"';
+ } else {
+ quoteHit = false;
 }
- });
+ }
+ reverse += '"';
+ return reverse.split("").reverse().join("");
 }
- };
- }
-});
-
-// ../node_modules/@actions/github/node_modules/@octokit/request/dist-node/index.js
-var require_dist_node5 = __commonJS({
- "../node_modules/@actions/github/node_modules/@octokit/request/dist-node/index.js"(exports2, module2) {
- "use strict";
- var __defProp2 = Object.defineProperty;
- var __getOwnPropDesc2 = Object.getOwnPropertyDescriptor;
- var __getOwnPropNames2 = Object.getOwnPropertyNames;
- var __hasOwnProp2 = Object.prototype.hasOwnProperty;
- var __export2 = /* @__PURE__ */ __name((target, all) => {
- for (var name in all)
- __defProp2(target, name, { get: all[name], enumerable: true });
- }, "__export");
- var __copyProps2 = /* @__PURE__ */ __name((to, from, except, desc) => {
- if (from && typeof from === "object" || typeof from === "function") {
- for (let key of __getOwnPropNames2(from))
- if (!__hasOwnProp2.call(to, key) && key !== except)
- __defProp2(to, key, { get: /* @__PURE__ */ __name(() => from[key], "get"), enumerable: !(desc = __getOwnPropDesc2(from, key)) || desc.enumerable });
+ _uvQuoteCmdArg(arg) {
+ if (!arg) {
+ return '""';
+ }
+ if (!arg.includes(" ") && !arg.includes("\t") && !arg.includes('"')) {
+ return arg;
+ }
+ if (!arg.includes('"') && !arg.includes("\\")) {
+ return `"${arg}"`;
+ }
+ let reverse = '"';
+ let quoteHit = true;
+ for (let i = arg.length; i > 0; i--) {
+ reverse += arg[i - 1];
+ if (quoteHit && arg[i - 1] === "\\") {
+ reverse += "\\";
+ } else if (arg[i - 1] === '"') {
+ quoteHit = true;
+ reverse += "\\";
+ } else {
+ quoteHit = false;
+ }
+ }
+ reverse += '"';
+ return reverse.split("").reverse().join("");
 }
- return to;
- }, "__copyProps");
- var __toCommonJS2 = /* @__PURE__ */ __name((mod) => __copyProps2(__defProp2({}, "__esModule", { value: true }), mod), "__toCommonJS");
- var dist_src_exports = {};
- __export2(dist_src_exports, {
- request: /* @__PURE__ */ __name(() => request, "request")
- });
- module2.exports = __toCommonJS2(dist_src_exports);
- var import_endpoint = require_dist_node2();
- var import_universal_user_agent = require_dist_node();
- var VERSION3 = "8.4.0";
- function isPlainObject(value) {
- if (typeof value !== "object" || value === null)
- return false;
- if (Object.prototype.toString.call(value) !== "[object Object]")
- return false;
- const proto = Object.getPrototypeOf(value);
- if (proto === null)
- return true;
- const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor;
- return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === 
Function.prototype.call(value); - } - __name(isPlainObject, "isPlainObject"); - var import_request_error = require_dist_node4(); - function getBufferResponse(response) { - return response.arrayBuffer(); - } - __name(getBufferResponse, "getBufferResponse"); - function fetchWrapper(requestOptions) { - var _a, _b, _c, _d; - const log = requestOptions.request && requestOptions.request.log ? requestOptions.request.log : console; - const parseSuccessResponseBody = ((_a = requestOptions.request) == null ? void 0 : _a.parseSuccessResponseBody) !== false; - if (isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) { - requestOptions.body = JSON.stringify(requestOptions.body); + _cloneExecOptions(options) { + options = options || {}; + const result = { + cwd: options.cwd || process.cwd(), + env: options.env || process.env, + silent: options.silent || false, + windowsVerbatimArguments: options.windowsVerbatimArguments || false, + failOnStdErr: options.failOnStdErr || false, + ignoreReturnCode: options.ignoreReturnCode || false, + delay: options.delay || 1e4 + }; + result.outStream = options.outStream || process.stdout; + result.errStream = options.errStream || process.stderr; + return result; } - let headers = {}; - let status; - let url; - let { fetch: fetch2 } = globalThis; - if ((_b = requestOptions.request) == null ? void 0 : _b.fetch) { - fetch2 = requestOptions.request.fetch; + _getSpawnOptions(options, toolPath) { + options = options || {}; + const result = {}; + result.cwd = options.cwd; + result.env = options.env; + result["windowsVerbatimArguments"] = options.windowsVerbatimArguments || this._isCmdFile(); + if (options.windowsVerbatimArguments) { + result.argv0 = `"${toolPath}"`; + } + return result; } - if (!fetch2) { - throw new Error( - "fetch is not set. Please pass a fetch implementation as new Octokit({ request: { fetch }}). Learn more at https://github.com/octokit/octokit.js/#fetch-missing" - ); + /** + * Exec a tool. + * Output will be streamed to the live console. + * Returns promise with return code + * + * @param tool path to tool to exec + * @param options optional exec options. 
See ExecOptions + * @returns number + */ + exec() { + return __awaiter3(this, void 0, void 0, function* () { + if (!ioUtil.isRooted(this.toolPath) && (this.toolPath.includes("/") || IS_WINDOWS && this.toolPath.includes("\\"))) { + this.toolPath = path2.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath); + } + this.toolPath = yield io2.which(this.toolPath, true); + return new Promise((resolve, reject) => __awaiter3(this, void 0, void 0, function* () { + this._debug(`exec tool: ${this.toolPath}`); + this._debug("arguments:"); + for (const arg of this.args) { + this._debug(` ${arg}`); + } + const optionsNonNull = this._cloneExecOptions(this.options); + if (!optionsNonNull.silent && optionsNonNull.outStream) { + optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + os.EOL); + } + const state = new ExecState(optionsNonNull, this.toolPath); + state.on("debug", (message) => { + this._debug(message); + }); + if (this.options.cwd && !(yield ioUtil.exists(this.options.cwd))) { + return reject(new Error(`The cwd: ${this.options.cwd} does not exist!`)); + } + const fileName = this._getSpawnFileName(); + const cp = child.spawn(fileName, this._getSpawnArgs(optionsNonNull), this._getSpawnOptions(this.options, fileName)); + let stdbuffer = ""; + if (cp.stdout) { + cp.stdout.on("data", (data) => { + if (this.options.listeners && this.options.listeners.stdout) { + this.options.listeners.stdout(data); + } + if (!optionsNonNull.silent && optionsNonNull.outStream) { + optionsNonNull.outStream.write(data); + } + stdbuffer = this._processLineBuffer(data, stdbuffer, (line) => { + if (this.options.listeners && this.options.listeners.stdline) { + this.options.listeners.stdline(line); + } + }); + }); + } + let errbuffer = ""; + if (cp.stderr) { + cp.stderr.on("data", (data) => { + state.processStderr = true; + if (this.options.listeners && this.options.listeners.stderr) { + this.options.listeners.stderr(data); + } + if (!optionsNonNull.silent && optionsNonNull.errStream && optionsNonNull.outStream) { + const s = optionsNonNull.failOnStdErr ? optionsNonNull.errStream : optionsNonNull.outStream; + s.write(data); + } + errbuffer = this._processLineBuffer(data, errbuffer, (line) => { + if (this.options.listeners && this.options.listeners.errline) { + this.options.listeners.errline(line); + } + }); + }); + } + cp.on("error", (err) => { + state.processError = err.message; + state.processExited = true; + state.processClosed = true; + state.CheckComplete(); + }); + cp.on("exit", (code) => { + state.processExitCode = code; + state.processExited = true; + this._debug(`Exit code ${code} received from tool '${this.toolPath}'`); + state.CheckComplete(); + }); + cp.on("close", (code) => { + state.processExitCode = code; + state.processExited = true; + state.processClosed = true; + this._debug(`STDIO streams have closed for tool '${this.toolPath}'`); + state.CheckComplete(); + }); + state.on("done", (error, exitCode) => { + if (stdbuffer.length > 0) { + this.emit("stdline", stdbuffer); + } + if (errbuffer.length > 0) { + this.emit("errline", errbuffer); + } + cp.removeAllListeners(); + if (error) { + reject(error); + } else { + resolve(exitCode); + } + }); + if (this.options.input) { + if (!cp.stdin) { + throw new Error("child process missing stdin"); + } + cp.stdin.end(this.options.input); + } + })); + }); } - return fetch2(requestOptions.url, { - method: requestOptions.method, - body: requestOptions.body, - redirect: (_c = requestOptions.request) == null ? 
void 0 : _c.redirect, - headers: requestOptions.headers, - signal: (_d = requestOptions.request) == null ? void 0 : _d.signal, - // duplex must be set if request.body is ReadableStream or Async Iterables. - // See https://fetch.spec.whatwg.org/#dom-requestinit-duplex. - ...requestOptions.body && { duplex: "half" } - }).then(async (response) => { - url = response.url; - status = response.status; - for (const keyAndValue of response.headers) { - headers[keyAndValue[0]] = keyAndValue[1]; - } - if ("deprecation" in headers) { - const matches = headers.link && headers.link.match(/<([^>]+)>; rel="deprecation"/); - const deprecationLink = matches && matches.pop(); - log.warn( - `[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : ""}` - ); - } - if (status === 204 || status === 205) { - return; + }; + exports2.ToolRunner = ToolRunner; + function argStringToArray(argString) { + const args = []; + let inQuotes = false; + let escaped = false; + let arg = ""; + function append(c) { + if (escaped && c !== '"') { + arg += "\\"; } - if (requestOptions.method === "HEAD") { - if (status < 400) { - return; + arg += c; + escaped = false; + } + __name(append, "append"); + for (let i = 0; i < argString.length; i++) { + const c = argString.charAt(i); + if (c === '"') { + if (!escaped) { + inQuotes = !inQuotes; + } else { + append(c); } - throw new import_request_error.RequestError(response.statusText, status, { - response: { - url, - status, - headers, - data: void 0 - }, - request: requestOptions - }); + continue; } - if (status === 304) { - throw new import_request_error.RequestError("Not modified", status, { - response: { - url, - status, - headers, - data: await getResponseData(response) - }, - request: requestOptions - }); + if (c === "\\" && escaped) { + append(c); + continue; } - if (status >= 400) { - const data = await getResponseData(response); - const error = new import_request_error.RequestError(toErrorMessage(data), status, { - response: { - url, - status, - headers, - data - }, - request: requestOptions - }); - throw error; + if (c === "\\" && inQuotes) { + escaped = true; + continue; } - return parseSuccessResponseBody ? 
await getResponseData(response) : response.body; - }).then((data) => { - return { - status, - url, - headers, - data - }; - }).catch((error) => { - if (error instanceof import_request_error.RequestError) - throw error; - else if (error.name === "AbortError") - throw error; - let message = error.message; - if (error.name === "TypeError" && "cause" in error) { - if (error.cause instanceof Error) { - message = error.cause.message; - } else if (typeof error.cause === "string") { - message = error.cause; + if (c === " " && !inQuotes) { + if (arg.length > 0) { + args.push(arg); + arg = ""; } + continue; } - throw new import_request_error.RequestError(message, 500, { - request: requestOptions - }); - }); - } - __name(fetchWrapper, "fetchWrapper"); - async function getResponseData(response) { - const contentType = response.headers.get("content-type"); - if (/application\/json/.test(contentType)) { - return response.json().catch(() => response.text()).catch(() => ""); + append(c); } - if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { - return response.text(); + if (arg.length > 0) { + args.push(arg.trim()); } - return getBufferResponse(response); + return args; } - __name(getResponseData, "getResponseData"); - function toErrorMessage(data) { - if (typeof data === "string") - return data; - let suffix; - if ("documentation_url" in data) { - suffix = ` - ${data.documentation_url}`; - } else { - suffix = ""; + __name(argStringToArray, "argStringToArray"); + exports2.argStringToArray = argStringToArray; + var ExecState = class _ExecState extends events.EventEmitter { + static { + __name(this, "ExecState"); } - if ("message" in data) { - if (Array.isArray(data.errors)) { - return `${data.message}: ${data.errors.map(JSON.stringify).join(", ")}${suffix}`; + constructor(options, toolPath) { + super(); + this.processClosed = false; + this.processError = ""; + this.processExitCode = 0; + this.processExited = false; + this.processStderr = false; + this.delay = 1e4; + this.done = false; + this.timeout = null; + if (!toolPath) { + throw new Error("toolPath must not be empty"); + } + this.options = options; + this.toolPath = toolPath; + if (options.delay) { + this.delay = options.delay; } - return `${data.message}${suffix}`; } - return `Unknown error: ${JSON.stringify(data)}`; - } - __name(toErrorMessage, "toErrorMessage"); - function withDefaults(oldEndpoint, newDefaults) { - const endpoint2 = oldEndpoint.defaults(newDefaults); - const newApi = /* @__PURE__ */ __name(function(route, parameters) { - const endpointOptions = endpoint2.merge(route, parameters); - if (!endpointOptions.request || !endpointOptions.request.hook) { - return fetchWrapper(endpoint2.parse(endpointOptions)); + CheckComplete() { + if (this.done) { + return; + } + if (this.processClosed) { + this._setResult(); + } else if (this.processExited) { + this.timeout = timers_1.setTimeout(_ExecState.HandleTimeout, this.delay, this); } - const request2 = /* @__PURE__ */ __name((route2, parameters2) => { - return fetchWrapper( - endpoint2.parse(endpoint2.merge(route2, parameters2)) - ); - }, "request2"); - Object.assign(request2, { - endpoint: endpoint2, - defaults: withDefaults.bind(null, endpoint2) - }); - return endpointOptions.request.hook(request2, endpointOptions); - }, "newApi"); - return Object.assign(newApi, { - endpoint: endpoint2, - defaults: withDefaults.bind(null, endpoint2) - }); - } - __name(withDefaults, "withDefaults"); - var request = withDefaults(import_endpoint.endpoint, { - headers: { - "user-agent": 
`octokit-request.js/${VERSION3} ${(0, import_universal_user_agent.getUserAgent)()}` } - }); + _debug(message) { + this.emit("debug", message); + } + _setResult() { + let error; + if (this.processExited) { + if (this.processError) { + error = new Error(`There was an error when attempting to execute the process '${this.toolPath}'. This may indicate the process failed to start. Error: ${this.processError}`); + } else if (this.processExitCode !== 0 && !this.options.ignoreReturnCode) { + error = new Error(`The process '${this.toolPath}' failed with exit code ${this.processExitCode}`); + } else if (this.processStderr && this.options.failOnStdErr) { + error = new Error(`The process '${this.toolPath}' failed because one or more lines were written to the STDERR stream`); + } + } + if (this.timeout) { + clearTimeout(this.timeout); + this.timeout = null; + } + this.done = true; + this.emit("done", error, this.processExitCode); + } + static HandleTimeout(state) { + if (state.done) { + return; + } + if (!state.processClosed && state.processExited) { + const message = `The STDIO streams did not close within ${state.delay / 1e3} seconds of the exit event from process '${state.toolPath}'. This may indicate a child process inherited the STDIO streams and has not yet exited.`; + state._debug(message); + } + state._setResult(); + } + }; } }); -// ../node_modules/@actions/github/node_modules/@octokit/graphql/dist-node/index.js -var require_dist_node6 = __commonJS({ - "../node_modules/@actions/github/node_modules/@octokit/graphql/dist-node/index.js"(exports2, module2) { +// ../node_modules/@actions/exec/lib/exec.js +var require_exec = __commonJS({ + "../node_modules/@actions/exec/lib/exec.js"(exports2) { "use strict"; - var __defProp2 = Object.defineProperty; - var __getOwnPropDesc2 = Object.getOwnPropertyDescriptor; - var __getOwnPropNames2 = Object.getOwnPropertyNames; - var __hasOwnProp2 = Object.prototype.hasOwnProperty; - var __export2 = /* @__PURE__ */ __name((target, all) => { - for (var name in all) - __defProp2(target, name, { get: all[name], enumerable: true }); - }, "__export"); - var __copyProps2 = /* @__PURE__ */ __name((to, from, except, desc) => { - if (from && typeof from === "object" || typeof from === "function") { - for (let key of __getOwnPropNames2(from)) - if (!__hasOwnProp2.call(to, key) && key !== except) - __defProp2(to, key, { get: /* @__PURE__ */ __name(() => from[key], "get"), enumerable: !(desc = __getOwnPropDesc2(from, key)) || desc.enumerable }); - } - return to; - }, "__copyProps"); - var __toCommonJS2 = /* @__PURE__ */ __name((mod) => __copyProps2(__defProp2({}, "__esModule", { value: true }), mod), "__toCommonJS"); - var dist_src_exports = {}; - __export2(dist_src_exports, { - GraphqlResponseError: /* @__PURE__ */ __name(() => GraphqlResponseError, "GraphqlResponseError"), - graphql: /* @__PURE__ */ __name(() => graphql2, "graphql"), - withCustomRequest: /* @__PURE__ */ __name(() => withCustomRequest, "withCustomRequest") + var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? 
function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: /* @__PURE__ */ __name(function() { + return m[k]; + }, "get") }); + } : function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; }); - module2.exports = __toCommonJS2(dist_src_exports); - var import_request3 = require_dist_node5(); - var import_universal_user_agent = require_dist_node(); - var VERSION3 = "7.1.0"; - var import_request2 = require_dist_node5(); - var import_request = require_dist_node5(); - function _buildMessageForResponseErrors(data) { - return `Request failed due to following response errors: -` + data.errors.map((e) => ` - ${e.message}`).join("\n"); - } - __name(_buildMessageForResponseErrors, "_buildMessageForResponseErrors"); - var GraphqlResponseError = class extends Error { - static { - __name(this, "GraphqlResponseError"); - } - constructor(request2, headers, response) { - super(_buildMessageForResponseErrors(response)); - this.request = request2; - this.headers = headers; - this.response = response; - this.name = "GraphqlResponseError"; - this.errors = response.errors; - this.data = response.data; - if (Error.captureStackTrace) { - Error.captureStackTrace(this, this.constructor); - } + var __setModuleDefault3 = exports2 && exports2.__setModuleDefault || (Object.create ? function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + } : function(o, v) { + o["default"] = v; + }); + var __importStar3 = exports2 && exports2.__importStar || function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding3(result, mod, k); } + __setModuleDefault3(result, mod); + return result; }; - var NON_VARIABLE_OPTIONS = [ - "method", - "baseUrl", - "url", - "headers", - "request", - "query", - "mediaType" - ]; - var FORBIDDEN_VARIABLE_OPTIONS = ["query", "method", "url"]; - var GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/; - function graphql(request2, query, options) { - if (options) { - if (typeof query === "string" && "query" in options) { - return Promise.reject( - new Error(`[@octokit/graphql] "query" cannot be used as variable name`) - ); - } - for (const key in options) { - if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key)) - continue; - return Promise.reject( - new Error( - `[@octokit/graphql] "${key}" cannot be used as variable name` - ) - ); - } + var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? value : new P(function(resolve) { + resolve(value); + }); } - const parsedOptions = typeof query === "string" ? 
Object.assign({ query }, options) : query; - const requestOptions = Object.keys( - parsedOptions - ).reduce((result, key) => { - if (NON_VARIABLE_OPTIONS.includes(key)) { - result[key] = parsedOptions[key]; - return result; - } - if (!result.variables) { - result.variables = {}; + __name(adopt, "adopt"); + return new (P || (P = Promise))(function(resolve, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } } - result.variables[key] = parsedOptions[key]; - return result; - }, {}); - const baseUrl = parsedOptions.baseUrl || request2.endpoint.DEFAULTS.baseUrl; - if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) { - requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql"); - } - return request2(requestOptions).then((response) => { - if (response.data.errors) { - const headers = {}; - for (const key of Object.keys(response.headers)) { - headers[key] = response.headers[key]; + __name(fulfilled, "fulfilled"); + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); } - throw new GraphqlResponseError( - requestOptions, - headers, - response.data - ); } - return response.data.data; + __name(rejected, "rejected"); + function step(result) { + result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); + } + __name(step, "step"); + step((generator = generator.apply(thisArg, _arguments || [])).next()); }); - } - __name(graphql, "graphql"); - function withDefaults(request2, newDefaults) { - const newRequest = request2.defaults(newDefaults); - const newApi = /* @__PURE__ */ __name((query, options) => { - return graphql(newRequest, query, options); - }, "newApi"); - return Object.assign(newApi, { - defaults: withDefaults.bind(null, newRequest), - endpoint: newRequest.endpoint + }; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.getExecOutput = exports2.exec = void 0; + var string_decoder_1 = require("string_decoder"); + var tr = __importStar3(require_toolrunner()); + function exec(commandLine, args, options) { + return __awaiter3(this, void 0, void 0, function* () { + const commandArgs = tr.argStringToArray(commandLine); + if (commandArgs.length === 0) { + throw new Error(`Parameter 'commandLine' cannot be null or empty.`); + } + const toolPath = commandArgs[0]; + args = commandArgs.slice(1).concat(args || []); + const runner = new tr.ToolRunner(toolPath, args, options); + return runner.exec(); }); } - __name(withDefaults, "withDefaults"); - var graphql2 = withDefaults(import_request3.request, { - headers: { - "user-agent": `octokit-graphql.js/${VERSION3} ${(0, import_universal_user_agent.getUserAgent)()}` - }, - method: "POST", - url: "/graphql" - }); - function withCustomRequest(customRequest) { - return withDefaults(customRequest, { - method: "POST", - url: "/graphql" + __name(exec, "exec"); + exports2.exec = exec; + function getExecOutput(commandLine, args, options) { + var _a, _b; + return __awaiter3(this, void 0, void 0, function* () { + let stdout = ""; + let stderr = ""; + const stdoutDecoder = new string_decoder_1.StringDecoder("utf8"); + const stderrDecoder = new string_decoder_1.StringDecoder("utf8"); + const originalStdoutListener = (_a = options === null || options === void 0 ? void 0 : options.listeners) === null || _a === void 0 ? void 0 : _a.stdout; + const originalStdErrListener = (_b = options === null || options === void 0 ? void 0 : options.listeners) === null || _b === void 0 ? 
void 0 : _b.stderr; + const stdErrListener = /* @__PURE__ */ __name((data) => { + stderr += stderrDecoder.write(data); + if (originalStdErrListener) { + originalStdErrListener(data); + } + }, "stdErrListener"); + const stdOutListener = /* @__PURE__ */ __name((data) => { + stdout += stdoutDecoder.write(data); + if (originalStdoutListener) { + originalStdoutListener(data); + } + }, "stdOutListener"); + const listeners = Object.assign(Object.assign({}, options === null || options === void 0 ? void 0 : options.listeners), { stdout: stdOutListener, stderr: stdErrListener }); + const exitCode = yield exec(commandLine, args, Object.assign(Object.assign({}, options), { listeners })); + stdout += stdoutDecoder.end(); + stderr += stderrDecoder.end(); + return { + exitCode, + stdout, + stderr + }; }); } - __name(withCustomRequest, "withCustomRequest"); + __name(getExecOutput, "getExecOutput"); + exports2.getExecOutput = getExecOutput; } }); -// ../node_modules/@actions/github/node_modules/@octokit/auth-token/dist-node/index.js -var require_dist_node7 = __commonJS({ - "../node_modules/@actions/github/node_modules/@octokit/auth-token/dist-node/index.js"(exports2, module2) { +// ../node_modules/@actions/core/lib/platform.js +var require_platform = __commonJS({ + "../node_modules/@actions/core/lib/platform.js"(exports2) { "use strict"; - var __defProp2 = Object.defineProperty; - var __getOwnPropDesc2 = Object.getOwnPropertyDescriptor; - var __getOwnPropNames2 = Object.getOwnPropertyNames; - var __hasOwnProp2 = Object.prototype.hasOwnProperty; - var __export2 = /* @__PURE__ */ __name((target, all) => { - for (var name in all) - __defProp2(target, name, { get: all[name], enumerable: true }); - }, "__export"); - var __copyProps2 = /* @__PURE__ */ __name((to, from, except, desc) => { - if (from && typeof from === "object" || typeof from === "function") { - for (let key of __getOwnPropNames2(from)) - if (!__hasOwnProp2.call(to, key) && key !== except) - __defProp2(to, key, { get: /* @__PURE__ */ __name(() => from[key], "get"), enumerable: !(desc = __getOwnPropDesc2(from, key)) || desc.enumerable }); + var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: /* @__PURE__ */ __name(function() { + return m[k]; + }, "get") }; } - return to; - }, "__copyProps"); - var __toCommonJS2 = /* @__PURE__ */ __name((mod) => __copyProps2(__defProp2({}, "__esModule", { value: true }), mod), "__toCommonJS"); - var dist_src_exports = {}; - __export2(dist_src_exports, { - createTokenAuth: /* @__PURE__ */ __name(() => createTokenAuth, "createTokenAuth") + Object.defineProperty(o, k2, desc); + } : function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; }); - module2.exports = __toCommonJS2(dist_src_exports); - var REGEX_IS_INSTALLATION_LEGACY = /^v1\./; - var REGEX_IS_INSTALLATION = /^ghs_/; - var REGEX_IS_USER_TO_SERVER = /^ghu_/; - async function auth(token) { - const isApp = token.split(/\./).length === 3; - const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token); - const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token); - const tokenType = isApp ? "app" : isInstallation ? "installation" : isUserToServer ? 
"user-to-server" : "oauth"; - return { - type: "token", - token, - tokenType - }; - } - __name(auth, "auth"); - function withAuthorizationPrefix(token) { - if (token.split(/\./).length === 3) { - return `bearer ${token}`; - } - return `token ${token}`; - } - __name(withAuthorizationPrefix, "withAuthorizationPrefix"); - async function hook(token, request, route, parameters) { - const endpoint = request.endpoint.merge( - route, - parameters - ); - endpoint.headers.authorization = withAuthorizationPrefix(token); - return request(endpoint); - } - __name(hook, "hook"); - var createTokenAuth = /* @__PURE__ */ __name(function createTokenAuth2(token) { - if (!token) { - throw new Error("[@octokit/auth-token] No token passed to createTokenAuth"); + var __setModuleDefault3 = exports2 && exports2.__setModuleDefault || (Object.create ? function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + } : function(o, v) { + o["default"] = v; + }); + var __importStar3 = exports2 && exports2.__importStar || function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding3(result, mod, k); } - if (typeof token !== "string") { - throw new Error( - "[@octokit/auth-token] Token passed to createTokenAuth is not a string" - ); + __setModuleDefault3(result, mod); + return result; + }; + var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? value : new P(function(resolve) { + resolve(value); + }); } - token = token.replace(/^(token|bearer) +/i, ""); - return Object.assign(auth.bind(null, token), { - hook: hook.bind(null, token) - }); - }, "createTokenAuth2"); - } -}); + __name(adopt, "adopt"); + return new (P || (P = Promise))(function(resolve, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } + } + __name(fulfilled, "fulfilled"); + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); + } + } + __name(rejected, "rejected"); + function step(result) { + result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); + } + __name(step, "step"); + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + var __importDefault2 = exports2 && exports2.__importDefault || function(mod) { + return mod && mod.__esModule ? 
mod : { "default": mod }; + }; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.getDetails = exports2.isLinux = exports2.isMacOS = exports2.isWindows = exports2.arch = exports2.platform = void 0; + var os_1 = __importDefault2(require("os")); + var exec = __importStar3(require_exec()); + var getWindowsInfo = /* @__PURE__ */ __name(() => __awaiter3(void 0, void 0, void 0, function* () { + const { stdout: version3 } = yield exec.getExecOutput('powershell -command "(Get-CimInstance -ClassName Win32_OperatingSystem).Version"', void 0, { + silent: true + }); + const { stdout: name } = yield exec.getExecOutput('powershell -command "(Get-CimInstance -ClassName Win32_OperatingSystem).Caption"', void 0, { + silent: true + }); + return { + name: name.trim(), + version: version3.trim() + }; + }), "getWindowsInfo"); + var getMacOsInfo = /* @__PURE__ */ __name(() => __awaiter3(void 0, void 0, void 0, function* () { + var _a, _b, _c, _d; + const { stdout } = yield exec.getExecOutput("sw_vers", void 0, { + silent: true + }); + const version3 = (_b = (_a = stdout.match(/ProductVersion:\s*(.+)/)) === null || _a === void 0 ? void 0 : _a[1]) !== null && _b !== void 0 ? _b : ""; + const name = (_d = (_c = stdout.match(/ProductName:\s*(.+)/)) === null || _c === void 0 ? void 0 : _c[1]) !== null && _d !== void 0 ? _d : ""; + return { + name, + version: version3 + }; + }), "getMacOsInfo"); + var getLinuxInfo = /* @__PURE__ */ __name(() => __awaiter3(void 0, void 0, void 0, function* () { + const { stdout } = yield exec.getExecOutput("lsb_release", ["-i", "-r", "-s"], { + silent: true + }); + const [name, version3] = stdout.trim().split("\n"); + return { + name, + version: version3 + }; + }), "getLinuxInfo"); + exports2.platform = os_1.default.platform(); + exports2.arch = os_1.default.arch(); + exports2.isWindows = exports2.platform === "win32"; + exports2.isMacOS = exports2.platform === "darwin"; + exports2.isLinux = exports2.platform === "linux"; + function getDetails() { + return __awaiter3(this, void 0, void 0, function* () { + return Object.assign(Object.assign({}, yield exports2.isWindows ? getWindowsInfo() : exports2.isMacOS ? 
getMacOsInfo() : getLinuxInfo()), { + platform: exports2.platform, + arch: exports2.arch, + isWindows: exports2.isWindows, + isMacOS: exports2.isMacOS, + isLinux: exports2.isLinux + }); + }); + } + __name(getDetails, "getDetails"); + exports2.getDetails = getDetails; + } +}); -// ../node_modules/@actions/github/node_modules/@octokit/core/dist-node/index.js -var require_dist_node8 = __commonJS({ - "../node_modules/@actions/github/node_modules/@octokit/core/dist-node/index.js"(exports2, module2) { +// ../node_modules/@actions/core/lib/core.js +var require_core = __commonJS({ + "../node_modules/@actions/core/lib/core.js"(exports2) { "use strict"; - var __defProp2 = Object.defineProperty; - var __getOwnPropDesc2 = Object.getOwnPropertyDescriptor; - var __getOwnPropNames2 = Object.getOwnPropertyNames; - var __hasOwnProp2 = Object.prototype.hasOwnProperty; - var __export2 = /* @__PURE__ */ __name((target, all) => { - for (var name in all) - __defProp2(target, name, { get: all[name], enumerable: true }); - }, "__export"); - var __copyProps2 = /* @__PURE__ */ __name((to, from, except, desc) => { - if (from && typeof from === "object" || typeof from === "function") { - for (let key of __getOwnPropNames2(from)) - if (!__hasOwnProp2.call(to, key) && key !== except) - __defProp2(to, key, { get: /* @__PURE__ */ __name(() => from[key], "get"), enumerable: !(desc = __getOwnPropDesc2(from, key)) || desc.enumerable }); + var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: /* @__PURE__ */ __name(function() { + return m[k]; + }, "get") }; } - return to; - }, "__copyProps"); - var __toCommonJS2 = /* @__PURE__ */ __name((mod) => __copyProps2(__defProp2({}, "__esModule", { value: true }), mod), "__toCommonJS"); - var dist_src_exports = {}; - __export2(dist_src_exports, { - Octokit: /* @__PURE__ */ __name(() => Octokit, "Octokit") + Object.defineProperty(o, k2, desc); + } : function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; }); - module2.exports = __toCommonJS2(dist_src_exports); - var import_universal_user_agent = require_dist_node(); - var import_before_after_hook = require_before_after_hook(); - var import_request = require_dist_node5(); - var import_graphql = require_dist_node6(); - var import_auth_token = require_dist_node7(); - var VERSION3 = "5.2.0"; - var noop = /* @__PURE__ */ __name(() => { - }, "noop"); - var consoleWarn = console.warn.bind(console); - var consoleError = console.error.bind(console); - var userAgentTrail = `octokit-core.js/${VERSION3} ${(0, import_universal_user_agent.getUserAgent)()}`; - var Octokit = class { - static { - __name(this, "Octokit"); + var __setModuleDefault3 = exports2 && exports2.__setModuleDefault || (Object.create ? 
function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + } : function(o, v) { + o["default"] = v; + }); + var __importStar3 = exports2 && exports2.__importStar || function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding3(result, mod, k); } - static { - this.VERSION = VERSION3; + __setModuleDefault3(result, mod); + return result; + }; + var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? value : new P(function(resolve) { + resolve(value); + }); } - static defaults(defaults) { - const OctokitWithDefaults = class extends this { - static { - __name(this, "OctokitWithDefaults"); + __name(adopt, "adopt"); + return new (P || (P = Promise))(function(resolve, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); } - constructor(...args) { - const options = args[0] || {}; - if (typeof defaults === "function") { - super(defaults(options)); - return; - } - super( - Object.assign( - {}, - defaults, - options, - options.userAgent && defaults.userAgent ? { - userAgent: `${options.userAgent} ${defaults.userAgent}` - } : null - ) - ); + } + __name(fulfilled, "fulfilled"); + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); } - }; - return OctokitWithDefaults; + } + __name(rejected, "rejected"); + function step(result) { + result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); + } + __name(step, "step"); + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.platform = exports2.toPlatformPath = exports2.toWin32Path = exports2.toPosixPath = exports2.markdownSummary = exports2.summary = exports2.getIDToken = exports2.getState = exports2.saveState = exports2.group = exports2.endGroup = exports2.startGroup = exports2.info = exports2.notice = exports2.warning = exports2.error = exports2.debug = exports2.isDebug = exports2.setFailed = exports2.setCommandEcho = exports2.setOutput = exports2.getBooleanInput = exports2.getMultilineInput = exports2.getInput = exports2.addPath = exports2.setSecret = exports2.exportVariable = exports2.ExitCode = void 0; + var command_1 = require_command(); + var file_command_1 = require_file_command(); + var utils_12 = require_utils(); + var os = __importStar3(require("os")); + var path2 = __importStar3(require("path")); + var oidc_utils_1 = require_oidc_utils(); + var ExitCode; + (function(ExitCode2) { + ExitCode2[ExitCode2["Success"] = 0] = "Success"; + ExitCode2[ExitCode2["Failure"] = 1] = "Failure"; + })(ExitCode || (exports2.ExitCode = ExitCode = {})); + function exportVariable(name, val) { + const convertedVal = (0, utils_12.toCommandValue)(val); + process.env[name] = convertedVal; + const filePath = process.env["GITHUB_ENV"] || ""; + if (filePath) { + return (0, file_command_1.issueFileCommand)("ENV", (0, file_command_1.prepareKeyValueMessage)(name, val)); + } + (0, command_1.issueCommand)("set-env", { name }, convertedVal); + } + __name(exportVariable, "exportVariable"); + exports2.exportVariable = exportVariable; + function setSecret(secret) { + (0, command_1.issueCommand)("add-mask", {}, secret); + } + __name(setSecret, "setSecret"); + exports2.setSecret = setSecret; 
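// Usage sketch (illustrative, not part of the vendored bundle): the
// @actions/core helpers defined above are typically consumed from action
// source code roughly like this, assuming the standard published
// @actions/core API; the input and variable names are hypothetical:
//
//   const core = require("@actions/core");
//   const token = core.getInput("qodana-token", { required: true }); // hypothetical input name
//   core.setSecret(token);                      // mask the value in workflow logs
//   core.exportVariable("QODANA_TOKEN", token); // persist via $GITHUB_ENV for later steps
//   core.setOutput("results-dir", "./results"); // hypothetical output, written to $GITHUB_OUTPUT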
+ function addPath(inputPath) { + const filePath = process.env["GITHUB_PATH"] || ""; + if (filePath) { + (0, file_command_1.issueFileCommand)("PATH", inputPath); + } else { + (0, command_1.issueCommand)("add-path", {}, inputPath); + } + process.env["PATH"] = `${inputPath}${path2.delimiter}${process.env["PATH"]}`; + } + __name(addPath, "addPath"); + exports2.addPath = addPath; + function getInput(name, options) { + const val = process.env[`INPUT_${name.replace(/ /g, "_").toUpperCase()}`] || ""; + if (options && options.required && !val) { + throw new Error(`Input required and not supplied: ${name}`); + } + if (options && options.trimWhitespace === false) { + return val; + } + return val.trim(); + } + __name(getInput, "getInput"); + exports2.getInput = getInput; + function getMultilineInput(name, options) { + const inputs = getInput(name, options).split("\n").filter((x) => x !== ""); + if (options && options.trimWhitespace === false) { + return inputs; + } + return inputs.map((input) => input.trim()); + } + __name(getMultilineInput, "getMultilineInput"); + exports2.getMultilineInput = getMultilineInput; + function getBooleanInput(name, options) { + const trueValue = ["true", "True", "TRUE"]; + const falseValue = ["false", "False", "FALSE"]; + const val = getInput(name, options); + if (trueValue.includes(val)) + return true; + if (falseValue.includes(val)) + return false; + throw new TypeError(`Input does not meet YAML 1.2 "Core Schema" specification: ${name} +Support boolean input list: \`true | True | TRUE | false | False | FALSE\``); + } + __name(getBooleanInput, "getBooleanInput"); + exports2.getBooleanInput = getBooleanInput; + function setOutput(name, value) { + const filePath = process.env["GITHUB_OUTPUT"] || ""; + if (filePath) { + return (0, file_command_1.issueFileCommand)("OUTPUT", (0, file_command_1.prepareKeyValueMessage)(name, value)); + } + process.stdout.write(os.EOL); + (0, command_1.issueCommand)("set-output", { name }, (0, utils_12.toCommandValue)(value)); + } + __name(setOutput, "setOutput"); + exports2.setOutput = setOutput; + function setCommandEcho(enabled) { + (0, command_1.issue)("echo", enabled ? "on" : "off"); + } + __name(setCommandEcho, "setCommandEcho"); + exports2.setCommandEcho = setCommandEcho; + function setFailed2(message) { + process.exitCode = ExitCode.Failure; + error(message); + } + __name(setFailed2, "setFailed"); + exports2.setFailed = setFailed2; + function isDebug() { + return process.env["RUNNER_DEBUG"] === "1"; + } + __name(isDebug, "isDebug"); + exports2.isDebug = isDebug; + function debug(message) { + (0, command_1.issueCommand)("debug", {}, message); + } + __name(debug, "debug"); + exports2.debug = debug; + function error(message, properties = {}) { + (0, command_1.issueCommand)("error", (0, utils_12.toCommandProperties)(properties), message instanceof Error ? message.toString() : message); + } + __name(error, "error"); + exports2.error = error; + function warning(message, properties = {}) { + (0, command_1.issueCommand)("warning", (0, utils_12.toCommandProperties)(properties), message instanceof Error ? message.toString() : message); + } + __name(warning, "warning"); + exports2.warning = warning; + function notice(message, properties = {}) { + (0, command_1.issueCommand)("notice", (0, utils_12.toCommandProperties)(properties), message instanceof Error ? 
message.toString() : message); + } + __name(notice, "notice"); + exports2.notice = notice; + function info(message) { + process.stdout.write(message + os.EOL); + } + __name(info, "info"); + exports2.info = info; + function startGroup(name) { + (0, command_1.issue)("group", name); + } + __name(startGroup, "startGroup"); + exports2.startGroup = startGroup; + function endGroup() { + (0, command_1.issue)("endgroup"); + } + __name(endGroup, "endGroup"); + exports2.endGroup = endGroup; + function group(name, fn) { + return __awaiter3(this, void 0, void 0, function* () { + startGroup(name); + let result; + try { + result = yield fn(); + } finally { + endGroup(); + } + return result; + }); + } + __name(group, "group"); + exports2.group = group; + function saveState(name, value) { + const filePath = process.env["GITHUB_STATE"] || ""; + if (filePath) { + return (0, file_command_1.issueFileCommand)("STATE", (0, file_command_1.prepareKeyValueMessage)(name, value)); } + (0, command_1.issueCommand)("save-state", { name }, (0, utils_12.toCommandValue)(value)); + } + __name(saveState, "saveState"); + exports2.saveState = saveState; + function getState(name) { + return process.env[`STATE_${name}`] || ""; + } + __name(getState, "getState"); + exports2.getState = getState; + function getIDToken(aud) { + return __awaiter3(this, void 0, void 0, function* () { + return yield oidc_utils_1.OidcClient.getIDToken(aud); + }); + } + __name(getIDToken, "getIDToken"); + exports2.getIDToken = getIDToken; + var summary_1 = require_summary(); + Object.defineProperty(exports2, "summary", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return summary_1.summary; + }, "get") }); + var summary_2 = require_summary(); + Object.defineProperty(exports2, "markdownSummary", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return summary_2.markdownSummary; + }, "get") }); + var path_utils_1 = require_path_utils(); + Object.defineProperty(exports2, "toPosixPath", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return path_utils_1.toPosixPath; + }, "get") }); + Object.defineProperty(exports2, "toWin32Path", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return path_utils_1.toWin32Path; + }, "get") }); + Object.defineProperty(exports2, "toPlatformPath", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return path_utils_1.toPlatformPath; + }, "get") }); + exports2.platform = __importStar3(require_platform()); + } +}); + +// ../node_modules/@actions/github/lib/context.js +var require_context = __commonJS({ + "../node_modules/@actions/github/lib/context.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.Context = void 0; + var fs_1 = require("fs"); + var os_1 = require("os"); + var Context = class { static { - this.plugins = []; + __name(this, "Context"); } /** - * Attach a plugin (or many) to your Octokit instance. - * - * @example - * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...) 
+ * Hydrate the context from the environment */ - static plugin(...newPlugins) { - const currentPlugins = this.plugins; - const NewOctokit = class extends this { - static { - __name(this, "NewOctokit"); - } - static { - this.plugins = currentPlugins.concat( - newPlugins.filter((plugin) => !currentPlugins.includes(plugin)) - ); + constructor() { + var _a, _b, _c; + this.payload = {}; + if (process.env.GITHUB_EVENT_PATH) { + if ((0, fs_1.existsSync)(process.env.GITHUB_EVENT_PATH)) { + this.payload = JSON.parse((0, fs_1.readFileSync)(process.env.GITHUB_EVENT_PATH, { encoding: "utf8" })); + } else { + const path2 = process.env.GITHUB_EVENT_PATH; + process.stdout.write(`GITHUB_EVENT_PATH ${path2} does not exist${os_1.EOL}`); } - }; - return NewOctokit; + } + this.eventName = process.env.GITHUB_EVENT_NAME; + this.sha = process.env.GITHUB_SHA; + this.ref = process.env.GITHUB_REF; + this.workflow = process.env.GITHUB_WORKFLOW; + this.action = process.env.GITHUB_ACTION; + this.actor = process.env.GITHUB_ACTOR; + this.job = process.env.GITHUB_JOB; + this.runNumber = parseInt(process.env.GITHUB_RUN_NUMBER, 10); + this.runId = parseInt(process.env.GITHUB_RUN_ID, 10); + this.apiUrl = (_a = process.env.GITHUB_API_URL) !== null && _a !== void 0 ? _a : `https://api.github.com`; + this.serverUrl = (_b = process.env.GITHUB_SERVER_URL) !== null && _b !== void 0 ? _b : `https://github.com`; + this.graphqlUrl = (_c = process.env.GITHUB_GRAPHQL_URL) !== null && _c !== void 0 ? _c : `https://api.github.com/graphql`; } - constructor(options = {}) { - const hook = new import_before_after_hook.Collection(); - const requestDefaults = { - baseUrl: import_request.request.endpoint.DEFAULTS.baseUrl, - headers: {}, - request: Object.assign({}, options.request, { - // @ts-ignore internal usage only, no need to type - hook: hook.bind(null, "request") - }), - mediaType: { - previews: [], - format: "" - } - }; - requestDefaults.headers["user-agent"] = options.userAgent ? `${options.userAgent} ${userAgentTrail}` : userAgentTrail; - if (options.baseUrl) { - requestDefaults.baseUrl = options.baseUrl; + get issue() { + const payload = this.payload; + return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number }); + } + get repo() { + if (process.env.GITHUB_REPOSITORY) { + const [owner, repo] = process.env.GITHUB_REPOSITORY.split("/"); + return { owner, repo }; } - if (options.previews) { - requestDefaults.mediaType.previews = options.previews; + if (this.payload.repository) { + return { + owner: this.payload.repository.owner.login, + repo: this.payload.repository.name + }; } - if (options.timeZone) { - requestDefaults.headers["time-zone"] = options.timeZone; + throw new Error("context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'"); + } + }; + exports2.Context = Context; + } +}); + +// ../node_modules/@actions/github/lib/internal/utils.js +var require_utils3 = __commonJS({ + "../node_modules/@actions/github/lib/internal/utils.js"(exports2) { + "use strict"; + var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: /* @__PURE__ */ __name(function() { + return m[k]; + }, "get") }; + } + Object.defineProperty(o, k2, desc); + } : function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; + }); + var __setModuleDefault3 = exports2 && exports2.__setModuleDefault || (Object.create ? function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + } : function(o, v) { + o["default"] = v; + }); + var __importStar3 = exports2 && exports2.__importStar || function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding3(result, mod, k); + } + __setModuleDefault3(result, mod); + return result; + }; + var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? value : new P(function(resolve) { + resolve(value); + }); + } + __name(adopt, "adopt"); + return new (P || (P = Promise))(function(resolve, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } } - this.request = import_request.request.defaults(requestDefaults); - this.graphql = (0, import_graphql.withCustomRequest)(this.request).defaults(requestDefaults); - this.log = Object.assign( - { - debug: noop, - info: noop, - warn: consoleWarn, - error: consoleError - }, - options.log - ); - this.hook = hook; - if (!options.authStrategy) { - if (!options.auth) { - this.auth = async () => ({ - type: "unauthenticated" - }); - } else { - const auth = (0, import_auth_token.createTokenAuth)(options.auth); - hook.wrap("request", auth.hook); - this.auth = auth; + __name(fulfilled, "fulfilled"); + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); } - } else { - const { authStrategy, ...otherOptions } = options; - const auth = authStrategy( - Object.assign( - { - request: this.request, - log: this.log, - // we pass the current octokit instance as well as its constructor options - // to allow for authentication strategies that return a new octokit instance - // that shares the same internal state as the current one. The original - // requirement for this was the "event-octokit" authentication strategy - // of https://github.com/probot/octokit-auth-probot. - octokit: this, - octokitOptions: otherOptions - }, - options.auth - ) - ); - hook.wrap("request", auth.hook); - this.auth = auth; } - const classConstructor = this.constructor; - for (let i = 0; i < classConstructor.plugins.length; ++i) { - Object.assign(this, classConstructor.plugins[i](this, options)); + __name(rejected, "rejected"); + function step(result) { + result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - } + __name(step, "step"); + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); }; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.getApiBaseUrl = exports2.getProxyFetch = exports2.getProxyAgentDispatcher = exports2.getProxyAgent = exports2.getAuthString = void 0; + var httpClient = __importStar3(require_lib()); + var undici_1 = require_undici(); + function getAuthString(token, options) { + if (!token && !options.auth) { + throw new Error("Parameter token or opts.auth is required"); + } else if (token && options.auth) { + throw new Error("Parameters token and opts.auth may not both be specified"); + } + return typeof options.auth === "string" ? options.auth : `token ${token}`; + } + __name(getAuthString, "getAuthString"); + exports2.getAuthString = getAuthString; + function getProxyAgent(destinationUrl) { + const hc = new httpClient.HttpClient(); + return hc.getAgent(destinationUrl); + } + __name(getProxyAgent, "getProxyAgent"); + exports2.getProxyAgent = getProxyAgent; + function getProxyAgentDispatcher(destinationUrl) { + const hc = new httpClient.HttpClient(); + return hc.getAgentDispatcher(destinationUrl); + } + __name(getProxyAgentDispatcher, "getProxyAgentDispatcher"); + exports2.getProxyAgentDispatcher = getProxyAgentDispatcher; + function getProxyFetch(destinationUrl) { + const httpDispatcher = getProxyAgentDispatcher(destinationUrl); + const proxyFetch = /* @__PURE__ */ __name((url, opts) => __awaiter3(this, void 0, void 0, function* () { + return (0, undici_1.fetch)(url, Object.assign(Object.assign({}, opts), { dispatcher: httpDispatcher })); + }), "proxyFetch"); + return proxyFetch; + } + __name(getProxyFetch, "getProxyFetch"); + exports2.getProxyFetch = getProxyFetch; + function getApiBaseUrl() { + return process.env["GITHUB_API_URL"] || "https://api.github.com"; + } + __name(getApiBaseUrl, "getApiBaseUrl"); + exports2.getApiBaseUrl = getApiBaseUrl; } }); -// ../node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-node/index.js -var require_dist_node9 = __commonJS({ - "../node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-node/index.js"(exports2, module2) { +// ../node_modules/@actions/github/node_modules/universal-user-agent/dist-node/index.js +var require_dist_node = __commonJS({ + "../node_modules/@actions/github/node_modules/universal-user-agent/dist-node/index.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + function getUserAgent() { + if (typeof navigator === "object" && "userAgent" in navigator) { + return navigator.userAgent; + } + if (typeof process === "object" && process.version !== void 0) { + return `Node.js/${process.version.substr(1)} (${process.platform}; ${process.arch})`; + } + return ""; + } + __name(getUserAgent, "getUserAgent"); + exports2.getUserAgent = getUserAgent; + } +}); + +// ../node_modules/@actions/github/node_modules/before-after-hook/lib/register.js +var require_register = __commonJS({ + "../node_modules/@actions/github/node_modules/before-after-hook/lib/register.js"(exports2, module2) { + module2.exports = register; + function register(state, name, method, options) { + if (typeof method !== "function") { + throw new Error("method for before hook must be a function"); + } + if (!options) { + options = {}; + } + if (Array.isArray(name)) { + return name.reverse().reduce(function(callback, name2) { + return 
register.bind(null, state, name2, callback, options); + }, method)(); + } + return Promise.resolve().then(function() { + if (!state.registry[name]) { + return method(options); + } + return state.registry[name].reduce(function(method2, registered) { + return registered.hook.bind(null, method2, options); + }, method)(); + }); + } + __name(register, "register"); + } +}); + +// ../node_modules/@actions/github/node_modules/before-after-hook/lib/add.js +var require_add = __commonJS({ + "../node_modules/@actions/github/node_modules/before-after-hook/lib/add.js"(exports2, module2) { + module2.exports = addHook; + function addHook(state, kind, name, hook) { + var orig = hook; + if (!state.registry[name]) { + state.registry[name] = []; + } + if (kind === "before") { + hook = /* @__PURE__ */ __name(function(method, options) { + return Promise.resolve().then(orig.bind(null, options)).then(method.bind(null, options)); + }, "hook"); + } + if (kind === "after") { + hook = /* @__PURE__ */ __name(function(method, options) { + var result; + return Promise.resolve().then(method.bind(null, options)).then(function(result_) { + result = result_; + return orig(result, options); + }).then(function() { + return result; + }); + }, "hook"); + } + if (kind === "error") { + hook = /* @__PURE__ */ __name(function(method, options) { + return Promise.resolve().then(method.bind(null, options)).catch(function(error) { + return orig(error, options); + }); + }, "hook"); + } + state.registry[name].push({ + hook, + orig + }); + } + __name(addHook, "addHook"); + } +}); + +// ../node_modules/@actions/github/node_modules/before-after-hook/lib/remove.js +var require_remove = __commonJS({ + "../node_modules/@actions/github/node_modules/before-after-hook/lib/remove.js"(exports2, module2) { + module2.exports = removeHook; + function removeHook(state, name, method) { + if (!state.registry[name]) { + return; + } + var index = state.registry[name].map(function(registered) { + return registered.orig; + }).indexOf(method); + if (index === -1) { + return; + } + state.registry[name].splice(index, 1); + } + __name(removeHook, "removeHook"); + } +}); + +// ../node_modules/@actions/github/node_modules/before-after-hook/index.js +var require_before_after_hook = __commonJS({ + "../node_modules/@actions/github/node_modules/before-after-hook/index.js"(exports2, module2) { + var register = require_register(); + var addHook = require_add(); + var removeHook = require_remove(); + var bind = Function.bind; + var bindable = bind.bind(bind); + function bindApi(hook, state, name) { + var removeHookRef = bindable(removeHook, null).apply( + null, + name ? [state, name] : [state] + ); + hook.api = { remove: removeHookRef }; + hook.remove = removeHookRef; + ["before", "error", "after", "wrap"].forEach(function(kind) { + var args = name ? 
[state, kind, name] : [state, kind]; + hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args); + }); + } + __name(bindApi, "bindApi"); + function HookSingular() { + var singularHookName = "h"; + var singularHookState = { + registry: {} + }; + var singularHook = register.bind(null, singularHookState, singularHookName); + bindApi(singularHook, singularHookState, singularHookName); + return singularHook; + } + __name(HookSingular, "HookSingular"); + function HookCollection() { + var state = { + registry: {} + }; + var hook = register.bind(null, state); + bindApi(hook, state); + return hook; + } + __name(HookCollection, "HookCollection"); + var collectionHookDeprecationMessageDisplayed = false; + function Hook() { + if (!collectionHookDeprecationMessageDisplayed) { + console.warn( + '[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". Read more: https://git.io/upgrade-before-after-hook-to-1.4' + ); + collectionHookDeprecationMessageDisplayed = true; + } + return HookCollection(); + } + __name(Hook, "Hook"); + Hook.Singular = HookSingular.bind(); + Hook.Collection = HookCollection.bind(); + module2.exports = Hook; + module2.exports.Hook = Hook; + module2.exports.Singular = Hook.Singular; + module2.exports.Collection = Hook.Collection; + } +}); + +// ../node_modules/@actions/github/node_modules/@octokit/endpoint/dist-node/index.js +var require_dist_node2 = __commonJS({ + "../node_modules/@actions/github/node_modules/@octokit/endpoint/dist-node/index.js"(exports2, module2) { "use strict"; var __defProp2 = Object.defineProperty; var __getOwnPropDesc2 = Object.getOwnPropertyDescriptor; @@ -21130,286 +20817,1449 @@ var require_dist_node9 = __commonJS({ var __toCommonJS2 = /* @__PURE__ */ __name((mod) => __copyProps2(__defProp2({}, "__esModule", { value: true }), mod), "__toCommonJS"); var dist_src_exports = {}; __export2(dist_src_exports, { - legacyRestEndpointMethods: /* @__PURE__ */ __name(() => legacyRestEndpointMethods, "legacyRestEndpointMethods"), - restEndpointMethods: /* @__PURE__ */ __name(() => restEndpointMethods, "restEndpointMethods") + endpoint: /* @__PURE__ */ __name(() => endpoint, "endpoint") }); module2.exports = __toCommonJS2(dist_src_exports); - var VERSION3 = "10.4.1"; - var Endpoints = { - actions: { - addCustomLabelsToSelfHostedRunnerForOrg: [ - "POST /orgs/{org}/actions/runners/{runner_id}/labels" - ], - addCustomLabelsToSelfHostedRunnerForRepo: [ - "POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" - ], - addSelectedRepoToOrgSecret: [ - "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}" - ], - addSelectedRepoToOrgVariable: [ - "PUT /orgs/{org}/actions/variables/{name}/repositories/{repository_id}" - ], - approveWorkflowRun: [ - "POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve" - ], - cancelWorkflowRun: [ - "POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel" - ], - createEnvironmentVariable: [ - "POST /repositories/{repository_id}/environments/{environment_name}/variables" - ], - createOrUpdateEnvironmentSecret: [ - "PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" - ], - createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"], - createOrUpdateRepoSecret: [ - "PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}" - ], - createOrgVariable: ["POST /orgs/{org}/actions/variables"], - createRegistrationTokenForOrg: [ - "POST /orgs/{org}/actions/runners/registration-token" - ], - createRegistrationTokenForRepo: [ - "POST 
/repos/{owner}/{repo}/actions/runners/registration-token" - ], - createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"], - createRemoveTokenForRepo: [ - "POST /repos/{owner}/{repo}/actions/runners/remove-token" - ], - createRepoVariable: ["POST /repos/{owner}/{repo}/actions/variables"], - createWorkflowDispatch: [ - "POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches" - ], - deleteActionsCacheById: [ - "DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}" - ], - deleteActionsCacheByKey: [ - "DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}" - ], - deleteArtifact: [ - "DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}" - ], - deleteEnvironmentSecret: [ - "DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" - ], - deleteEnvironmentVariable: [ - "DELETE /repositories/{repository_id}/environments/{environment_name}/variables/{name}" - ], - deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"], - deleteOrgVariable: ["DELETE /orgs/{org}/actions/variables/{name}"], - deleteRepoSecret: [ - "DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}" - ], - deleteRepoVariable: [ - "DELETE /repos/{owner}/{repo}/actions/variables/{name}" - ], - deleteSelfHostedRunnerFromOrg: [ - "DELETE /orgs/{org}/actions/runners/{runner_id}" - ], - deleteSelfHostedRunnerFromRepo: [ - "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}" - ], - deleteWorkflowRun: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}"], - deleteWorkflowRunLogs: [ - "DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs" - ], - disableSelectedRepositoryGithubActionsOrganization: [ - "DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}" - ], - disableWorkflow: [ - "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable" - ], - downloadArtifact: [ - "GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}" - ], - downloadJobLogsForWorkflowRun: [ - "GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs" - ], - downloadWorkflowRunAttemptLogs: [ - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs" - ], - downloadWorkflowRunLogs: [ - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs" - ], - enableSelectedRepositoryGithubActionsOrganization: [ - "PUT /orgs/{org}/actions/permissions/repositories/{repository_id}" - ], - enableWorkflow: [ - "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable" - ], - forceCancelWorkflowRun: [ - "POST /repos/{owner}/{repo}/actions/runs/{run_id}/force-cancel" - ], - generateRunnerJitconfigForOrg: [ - "POST /orgs/{org}/actions/runners/generate-jitconfig" - ], - generateRunnerJitconfigForRepo: [ - "POST /repos/{owner}/{repo}/actions/runners/generate-jitconfig" - ], - getActionsCacheList: ["GET /repos/{owner}/{repo}/actions/caches"], - getActionsCacheUsage: ["GET /repos/{owner}/{repo}/actions/cache/usage"], - getActionsCacheUsageByRepoForOrg: [ - "GET /orgs/{org}/actions/cache/usage-by-repository" - ], - getActionsCacheUsageForOrg: ["GET /orgs/{org}/actions/cache/usage"], - getAllowedActionsOrganization: [ - "GET /orgs/{org}/actions/permissions/selected-actions" - ], - getAllowedActionsRepository: [ - "GET /repos/{owner}/{repo}/actions/permissions/selected-actions" - ], - getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], - getCustomOidcSubClaimForRepo: [ - "GET /repos/{owner}/{repo}/actions/oidc/customization/sub" - ], - getEnvironmentPublicKey: [ - "GET 
/repositories/{repository_id}/environments/{environment_name}/secrets/public-key" - ], - getEnvironmentSecret: [ - "GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" - ], - getEnvironmentVariable: [ - "GET /repositories/{repository_id}/environments/{environment_name}/variables/{name}" - ], - getGithubActionsDefaultWorkflowPermissionsOrganization: [ - "GET /orgs/{org}/actions/permissions/workflow" - ], - getGithubActionsDefaultWorkflowPermissionsRepository: [ - "GET /repos/{owner}/{repo}/actions/permissions/workflow" - ], - getGithubActionsPermissionsOrganization: [ - "GET /orgs/{org}/actions/permissions" - ], - getGithubActionsPermissionsRepository: [ - "GET /repos/{owner}/{repo}/actions/permissions" - ], - getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"], - getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"], - getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"], - getOrgVariable: ["GET /orgs/{org}/actions/variables/{name}"], - getPendingDeploymentsForRun: [ - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments" - ], - getRepoPermissions: [ - "GET /repos/{owner}/{repo}/actions/permissions", - {}, - { renamed: ["actions", "getGithubActionsPermissionsRepository"] } - ], - getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"], - getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"], - getRepoVariable: ["GET /repos/{owner}/{repo}/actions/variables/{name}"], - getReviewsForRun: [ - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals" - ], - getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"], - getSelfHostedRunnerForRepo: [ - "GET /repos/{owner}/{repo}/actions/runners/{runner_id}" - ], - getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"], - getWorkflowAccessToRepository: [ - "GET /repos/{owner}/{repo}/actions/permissions/access" - ], - getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"], - getWorkflowRunAttempt: [ - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}" - ], - getWorkflowRunUsage: [ - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing" - ], - getWorkflowUsage: [ - "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing" - ], - listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"], - listEnvironmentSecrets: [ - "GET /repositories/{repository_id}/environments/{environment_name}/secrets" - ], - listEnvironmentVariables: [ - "GET /repositories/{repository_id}/environments/{environment_name}/variables" - ], - listJobsForWorkflowRun: [ - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs" - ], - listJobsForWorkflowRunAttempt: [ - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs" - ], - listLabelsForSelfHostedRunnerForOrg: [ - "GET /orgs/{org}/actions/runners/{runner_id}/labels" - ], - listLabelsForSelfHostedRunnerForRepo: [ - "GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" - ], - listOrgSecrets: ["GET /orgs/{org}/actions/secrets"], - listOrgVariables: ["GET /orgs/{org}/actions/variables"], - listRepoOrganizationSecrets: [ - "GET /repos/{owner}/{repo}/actions/organization-secrets" - ], - listRepoOrganizationVariables: [ - "GET /repos/{owner}/{repo}/actions/organization-variables" - ], - listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"], - listRepoVariables: ["GET /repos/{owner}/{repo}/actions/variables"], - listRepoWorkflows: ["GET 
/repos/{owner}/{repo}/actions/workflows"], - listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"], - listRunnerApplicationsForRepo: [ - "GET /repos/{owner}/{repo}/actions/runners/downloads" - ], - listSelectedReposForOrgSecret: [ - "GET /orgs/{org}/actions/secrets/{secret_name}/repositories" - ], - listSelectedReposForOrgVariable: [ - "GET /orgs/{org}/actions/variables/{name}/repositories" - ], - listSelectedRepositoriesEnabledGithubActionsOrganization: [ - "GET /orgs/{org}/actions/permissions/repositories" - ], - listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"], - listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"], - listWorkflowRunArtifacts: [ - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts" - ], - listWorkflowRuns: [ - "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs" - ], - listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"], - reRunJobForWorkflowRun: [ - "POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun" - ], - reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"], - reRunWorkflowFailedJobs: [ - "POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs" - ], - removeAllCustomLabelsFromSelfHostedRunnerForOrg: [ - "DELETE /orgs/{org}/actions/runners/{runner_id}/labels" - ], - removeAllCustomLabelsFromSelfHostedRunnerForRepo: [ - "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" - ], - removeCustomLabelFromSelfHostedRunnerForOrg: [ - "DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}" - ], - removeCustomLabelFromSelfHostedRunnerForRepo: [ - "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}" - ], - removeSelectedRepoFromOrgSecret: [ - "DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}" - ], - removeSelectedRepoFromOrgVariable: [ - "DELETE /orgs/{org}/actions/variables/{name}/repositories/{repository_id}" - ], - reviewCustomGatesForRun: [ - "POST /repos/{owner}/{repo}/actions/runs/{run_id}/deployment_protection_rule" - ], - reviewPendingDeploymentsForRun: [ - "POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments" - ], - setAllowedActionsOrganization: [ - "PUT /orgs/{org}/actions/permissions/selected-actions" - ], - setAllowedActionsRepository: [ - "PUT /repos/{owner}/{repo}/actions/permissions/selected-actions" - ], - setCustomLabelsForSelfHostedRunnerForOrg: [ - "PUT /orgs/{org}/actions/runners/{runner_id}/labels" + var import_universal_user_agent = require_dist_node(); + var VERSION3 = "9.0.5"; + var userAgent = `octokit-endpoint.js/${VERSION3} ${(0, import_universal_user_agent.getUserAgent)()}`; + var DEFAULTS = { + method: "GET", + baseUrl: "https://api.github.com", + headers: { + accept: "application/vnd.github.v3+json", + "user-agent": userAgent + }, + mediaType: { + format: "" + } + }; + function lowercaseKeys(object) { + if (!object) { + return {}; + } + return Object.keys(object).reduce((newObj, key) => { + newObj[key.toLowerCase()] = object[key]; + return newObj; + }, {}); + } + __name(lowercaseKeys, "lowercaseKeys"); + function isPlainObject(value) { + if (typeof value !== "object" || value === null) + return false; + if (Object.prototype.toString.call(value) !== "[object Object]") + return false; + const proto = Object.getPrototypeOf(value); + if (proto === null) + return true; + const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor; + return typeof Ctor === "function" && Ctor instanceof Ctor && 
Function.prototype.call(Ctor) === Function.prototype.call(value); + } + __name(isPlainObject, "isPlainObject"); + function mergeDeep(defaults, options) { + const result = Object.assign({}, defaults); + Object.keys(options).forEach((key) => { + if (isPlainObject(options[key])) { + if (!(key in defaults)) + Object.assign(result, { [key]: options[key] }); + else + result[key] = mergeDeep(defaults[key], options[key]); + } else { + Object.assign(result, { [key]: options[key] }); + } + }); + return result; + } + __name(mergeDeep, "mergeDeep"); + function removeUndefinedProperties(obj) { + for (const key in obj) { + if (obj[key] === void 0) { + delete obj[key]; + } + } + return obj; + } + __name(removeUndefinedProperties, "removeUndefinedProperties"); + function merge(defaults, route, options) { + if (typeof route === "string") { + let [method, url] = route.split(" "); + options = Object.assign(url ? { method, url } : { url: method }, options); + } else { + options = Object.assign({}, route); + } + options.headers = lowercaseKeys(options.headers); + removeUndefinedProperties(options); + removeUndefinedProperties(options.headers); + const mergedOptions = mergeDeep(defaults || {}, options); + if (options.url === "/graphql") { + if (defaults && defaults.mediaType.previews?.length) { + mergedOptions.mediaType.previews = defaults.mediaType.previews.filter( + (preview) => !mergedOptions.mediaType.previews.includes(preview) + ).concat(mergedOptions.mediaType.previews); + } + mergedOptions.mediaType.previews = (mergedOptions.mediaType.previews || []).map((preview) => preview.replace(/-preview/, "")); + } + return mergedOptions; + } + __name(merge, "merge"); + function addQueryParameters(url, parameters) { + const separator = /\?/.test(url) ? "&" : "?"; + const names = Object.keys(parameters); + if (names.length === 0) { + return url; + } + return url + separator + names.map((name) => { + if (name === "q") { + return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+"); + } + return `${name}=${encodeURIComponent(parameters[name])}`; + }).join("&"); + } + __name(addQueryParameters, "addQueryParameters"); + var urlVariableRegex = /\{[^}]+\}/g; + function removeNonChars(variableName) { + return variableName.replace(/^\W+|\W+$/g, "").split(/,/); + } + __name(removeNonChars, "removeNonChars"); + function extractUrlVariableNames(url) { + const matches = url.match(urlVariableRegex); + if (!matches) { + return []; + } + return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []); + } + __name(extractUrlVariableNames, "extractUrlVariableNames"); + function omit(object, keysToOmit) { + const result = { __proto__: null }; + for (const key of Object.keys(object)) { + if (keysToOmit.indexOf(key) === -1) { + result[key] = object[key]; + } + } + return result; + } + __name(omit, "omit"); + function encodeReserved(str) { + return str.split(/(%[0-9A-Fa-f]{2})/g).map(function(part) { + if (!/%[0-9A-Fa-f]/.test(part)) { + part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]"); + } + return part; + }).join(""); + } + __name(encodeReserved, "encodeReserved"); + function encodeUnreserved(str) { + return encodeURIComponent(str).replace(/[!'()*]/g, function(c) { + return "%" + c.charCodeAt(0).toString(16).toUpperCase(); + }); + } + __name(encodeUnreserved, "encodeUnreserved"); + function encodeValue(operator, value, key) { + value = operator === "+" || operator === "#" ? 
encodeReserved(value) : encodeUnreserved(value); + if (key) { + return encodeUnreserved(key) + "=" + value; + } else { + return value; + } + } + __name(encodeValue, "encodeValue"); + function isDefined(value) { + return value !== void 0 && value !== null; + } + __name(isDefined, "isDefined"); + function isKeyOperator(operator) { + return operator === ";" || operator === "&" || operator === "?"; + } + __name(isKeyOperator, "isKeyOperator"); + function getValues(context2, operator, key, modifier) { + var value = context2[key], result = []; + if (isDefined(value) && value !== "") { + if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { + value = value.toString(); + if (modifier && modifier !== "*") { + value = value.substring(0, parseInt(modifier, 10)); + } + result.push( + encodeValue(operator, value, isKeyOperator(operator) ? key : "") + ); + } else { + if (modifier === "*") { + if (Array.isArray(value)) { + value.filter(isDefined).forEach(function(value2) { + result.push( + encodeValue(operator, value2, isKeyOperator(operator) ? key : "") + ); + }); + } else { + Object.keys(value).forEach(function(k) { + if (isDefined(value[k])) { + result.push(encodeValue(operator, value[k], k)); + } + }); + } + } else { + const tmp = []; + if (Array.isArray(value)) { + value.filter(isDefined).forEach(function(value2) { + tmp.push(encodeValue(operator, value2)); + }); + } else { + Object.keys(value).forEach(function(k) { + if (isDefined(value[k])) { + tmp.push(encodeUnreserved(k)); + tmp.push(encodeValue(operator, value[k].toString())); + } + }); + } + if (isKeyOperator(operator)) { + result.push(encodeUnreserved(key) + "=" + tmp.join(",")); + } else if (tmp.length !== 0) { + result.push(tmp.join(",")); + } + } + } + } else { + if (operator === ";") { + if (isDefined(value)) { + result.push(encodeUnreserved(key)); + } + } else if (value === "" && (operator === "&" || operator === "?")) { + result.push(encodeUnreserved(key) + "="); + } else if (value === "") { + result.push(""); + } + } + return result; + } + __name(getValues, "getValues"); + function parseUrl(template) { + return { + expand: expand.bind(null, template) + }; + } + __name(parseUrl, "parseUrl"); + function expand(template, context2) { + var operators = ["+", "#", ".", "/", ";", "?", "&"]; + template = template.replace( + /\{([^\{\}]+)\}|([^\{\}]+)/g, + function(_2, expression, literal) { + if (expression) { + let operator = ""; + const values = []; + if (operators.indexOf(expression.charAt(0)) !== -1) { + operator = expression.charAt(0); + expression = expression.substr(1); + } + expression.split(/,/g).forEach(function(variable) { + var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); + values.push(getValues(context2, operator, tmp[1], tmp[2] || tmp[3])); + }); + if (operator && operator !== "+") { + var separator = ","; + if (operator === "?") { + separator = "&"; + } else if (operator !== "#") { + separator = operator; + } + return (values.length !== 0 ? 
operator : "") + values.join(separator); + } else { + return values.join(","); + } + } else { + return encodeReserved(literal); + } + } + ); + if (template === "/") { + return template; + } else { + return template.replace(/\/$/, ""); + } + } + __name(expand, "expand"); + function parse2(options) { + let method = options.method.toUpperCase(); + let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}"); + let headers = Object.assign({}, options.headers); + let body; + let parameters = omit(options, [ + "method", + "baseUrl", + "url", + "headers", + "request", + "mediaType" + ]); + const urlVariableNames = extractUrlVariableNames(url); + url = parseUrl(url).expand(parameters); + if (!/^http/.test(url)) { + url = options.baseUrl + url; + } + const omittedParameters = Object.keys(options).filter((option) => urlVariableNames.includes(option)).concat("baseUrl"); + const remainingParameters = omit(parameters, omittedParameters); + const isBinaryRequest = /application\/octet-stream/i.test(headers.accept); + if (!isBinaryRequest) { + if (options.mediaType.format) { + headers.accept = headers.accept.split(/,/).map( + (format) => format.replace( + /application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, + `application/vnd$1$2.${options.mediaType.format}` + ) + ).join(","); + } + if (url.endsWith("/graphql")) { + if (options.mediaType.previews?.length) { + const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || []; + headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map((preview) => { + const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json"; + return `application/vnd.github.${preview}-preview${format}`; + }).join(","); + } + } + } + if (["GET", "HEAD"].includes(method)) { + url = addQueryParameters(url, remainingParameters); + } else { + if ("data" in remainingParameters) { + body = remainingParameters.data; + } else { + if (Object.keys(remainingParameters).length) { + body = remainingParameters; + } + } + } + if (!headers["content-type"] && typeof body !== "undefined") { + headers["content-type"] = "application/json; charset=utf-8"; + } + if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") { + body = ""; + } + return Object.assign( + { method, url, headers }, + typeof body !== "undefined" ? { body } : null, + options.request ? 
{ request: options.request } : null + ); + } + __name(parse2, "parse"); + function endpointWithDefaults(defaults, route, options) { + return parse2(merge(defaults, route, options)); + } + __name(endpointWithDefaults, "endpointWithDefaults"); + function withDefaults(oldDefaults, newDefaults) { + const DEFAULTS2 = merge(oldDefaults, newDefaults); + const endpoint2 = endpointWithDefaults.bind(null, DEFAULTS2); + return Object.assign(endpoint2, { + DEFAULTS: DEFAULTS2, + defaults: withDefaults.bind(null, DEFAULTS2), + merge: merge.bind(null, DEFAULTS2), + parse: parse2 + }); + } + __name(withDefaults, "withDefaults"); + var endpoint = withDefaults(null, DEFAULTS); + } +}); + +// ../node_modules/deprecation/dist-node/index.js +var require_dist_node3 = __commonJS({ + "../node_modules/deprecation/dist-node/index.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + var Deprecation = class extends Error { + static { + __name(this, "Deprecation"); + } + constructor(message) { + super(message); + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + this.name = "Deprecation"; + } + }; + exports2.Deprecation = Deprecation; + } +}); + +// ../node_modules/wrappy/wrappy.js +var require_wrappy = __commonJS({ + "../node_modules/wrappy/wrappy.js"(exports2, module2) { + module2.exports = wrappy; + function wrappy(fn, cb) { + if (fn && cb) return wrappy(fn)(cb); + if (typeof fn !== "function") + throw new TypeError("need wrapper function"); + Object.keys(fn).forEach(function(k) { + wrapper[k] = fn[k]; + }); + return wrapper; + function wrapper() { + var args = new Array(arguments.length); + for (var i = 0; i < args.length; i++) { + args[i] = arguments[i]; + } + var ret = fn.apply(this, args); + var cb2 = args[args.length - 1]; + if (typeof ret === "function" && ret !== cb2) { + Object.keys(cb2).forEach(function(k) { + ret[k] = cb2[k]; + }); + } + return ret; + } + __name(wrapper, "wrapper"); + } + __name(wrappy, "wrappy"); + } +}); + +// ../node_modules/once/once.js +var require_once = __commonJS({ + "../node_modules/once/once.js"(exports2, module2) { + var wrappy = require_wrappy(); + module2.exports = wrappy(once); + module2.exports.strict = wrappy(onceStrict); + once.proto = once(function() { + Object.defineProperty(Function.prototype, "once", { + value: /* @__PURE__ */ __name(function() { + return once(this); + }, "value"), + configurable: true + }); + Object.defineProperty(Function.prototype, "onceStrict", { + value: /* @__PURE__ */ __name(function() { + return onceStrict(this); + }, "value"), + configurable: true + }); + }); + function once(fn) { + var f = /* @__PURE__ */ __name(function() { + if (f.called) return f.value; + f.called = true; + return f.value = fn.apply(this, arguments); + }, "f"); + f.called = false; + return f; + } + __name(once, "once"); + function onceStrict(fn) { + var f = /* @__PURE__ */ __name(function() { + if (f.called) + throw new Error(f.onceError); + f.called = true; + return f.value = fn.apply(this, arguments); + }, "f"); + var name = fn.name || "Function wrapped with `once`"; + f.onceError = name + " shouldn't be called more than once"; + f.called = false; + return f; + } + __name(onceStrict, "onceStrict"); + } +}); + +// ../node_modules/@octokit/request-error/dist-node/index.js +var require_dist_node4 = __commonJS({ + "../node_modules/@octokit/request-error/dist-node/index.js"(exports2, module2) { + "use strict"; + var __create2 = Object.create; + var __defProp2 = Object.defineProperty; + var 
__getOwnPropDesc2 = Object.getOwnPropertyDescriptor; + var __getOwnPropNames2 = Object.getOwnPropertyNames; + var __getProtoOf2 = Object.getPrototypeOf; + var __hasOwnProp2 = Object.prototype.hasOwnProperty; + var __export2 = /* @__PURE__ */ __name((target, all) => { + for (var name in all) + __defProp2(target, name, { get: all[name], enumerable: true }); + }, "__export"); + var __copyProps2 = /* @__PURE__ */ __name((to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames2(from)) + if (!__hasOwnProp2.call(to, key) && key !== except) + __defProp2(to, key, { get: /* @__PURE__ */ __name(() => from[key], "get"), enumerable: !(desc = __getOwnPropDesc2(from, key)) || desc.enumerable }); + } + return to; + }, "__copyProps"); + var __toESM2 = /* @__PURE__ */ __name((mod, isNodeMode, target) => (target = mod != null ? __create2(__getProtoOf2(mod)) : {}, __copyProps2( + // If the importer is in node compatibility mode or this is not an ESM + // file that has been converted to a CommonJS file using a Babel- + // compatible transform (i.e. "__esModule" has not been set), then set + // "default" to the CommonJS "module.exports" for node compatibility. + isNodeMode || !mod || !mod.__esModule ? __defProp2(target, "default", { value: mod, enumerable: true }) : target, + mod + )), "__toESM"); + var __toCommonJS2 = /* @__PURE__ */ __name((mod) => __copyProps2(__defProp2({}, "__esModule", { value: true }), mod), "__toCommonJS"); + var dist_src_exports = {}; + __export2(dist_src_exports, { + RequestError: /* @__PURE__ */ __name(() => RequestError, "RequestError") + }); + module2.exports = __toCommonJS2(dist_src_exports); + var import_deprecation = require_dist_node3(); + var import_once = __toESM2(require_once()); + var logOnceCode = (0, import_once.default)((deprecation) => console.warn(deprecation)); + var logOnceHeaders = (0, import_once.default)((deprecation) => console.warn(deprecation)); + var RequestError = class extends Error { + static { + __name(this, "RequestError"); + } + constructor(message, statusCode, options) { + super(message); + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + this.name = "HttpError"; + this.status = statusCode; + let headers; + if ("headers" in options && typeof options.headers !== "undefined") { + headers = options.headers; + } + if ("response" in options) { + this.response = options.response; + headers = options.response.headers; + } + const requestCopy = Object.assign({}, options.request); + if (options.request.headers.authorization) { + requestCopy.headers = Object.assign({}, options.request.headers, { + authorization: options.request.headers.authorization.replace( + / .*$/, + " [REDACTED]" + ) + }); + } + requestCopy.url = requestCopy.url.replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]").replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); + this.request = requestCopy; + Object.defineProperty(this, "code", { + get() { + logOnceCode( + new import_deprecation.Deprecation( + "[@octokit/request-error] `error.code` is deprecated, use `error.status`." + ) + ); + return statusCode; + } + }); + Object.defineProperty(this, "headers", { + get() { + logOnceHeaders( + new import_deprecation.Deprecation( + "[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`." 
+ ) + ); + return headers || {}; + } + }); + } + }; + } +}); + +// ../node_modules/@actions/github/node_modules/@octokit/request/dist-node/index.js +var require_dist_node5 = __commonJS({ + "../node_modules/@actions/github/node_modules/@octokit/request/dist-node/index.js"(exports2, module2) { + "use strict"; + var __defProp2 = Object.defineProperty; + var __getOwnPropDesc2 = Object.getOwnPropertyDescriptor; + var __getOwnPropNames2 = Object.getOwnPropertyNames; + var __hasOwnProp2 = Object.prototype.hasOwnProperty; + var __export2 = /* @__PURE__ */ __name((target, all) => { + for (var name in all) + __defProp2(target, name, { get: all[name], enumerable: true }); + }, "__export"); + var __copyProps2 = /* @__PURE__ */ __name((to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames2(from)) + if (!__hasOwnProp2.call(to, key) && key !== except) + __defProp2(to, key, { get: /* @__PURE__ */ __name(() => from[key], "get"), enumerable: !(desc = __getOwnPropDesc2(from, key)) || desc.enumerable }); + } + return to; + }, "__copyProps"); + var __toCommonJS2 = /* @__PURE__ */ __name((mod) => __copyProps2(__defProp2({}, "__esModule", { value: true }), mod), "__toCommonJS"); + var dist_src_exports = {}; + __export2(dist_src_exports, { + request: /* @__PURE__ */ __name(() => request, "request") + }); + module2.exports = __toCommonJS2(dist_src_exports); + var import_endpoint = require_dist_node2(); + var import_universal_user_agent = require_dist_node(); + var VERSION3 = "8.4.0"; + function isPlainObject(value) { + if (typeof value !== "object" || value === null) + return false; + if (Object.prototype.toString.call(value) !== "[object Object]") + return false; + const proto = Object.getPrototypeOf(value); + if (proto === null) + return true; + const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor; + return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value); + } + __name(isPlainObject, "isPlainObject"); + var import_request_error = require_dist_node4(); + function getBufferResponse(response) { + return response.arrayBuffer(); + } + __name(getBufferResponse, "getBufferResponse"); + function fetchWrapper(requestOptions) { + var _a, _b, _c, _d; + const log = requestOptions.request && requestOptions.request.log ? requestOptions.request.log : console; + const parseSuccessResponseBody = ((_a = requestOptions.request) == null ? void 0 : _a.parseSuccessResponseBody) !== false; + if (isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) { + requestOptions.body = JSON.stringify(requestOptions.body); + } + let headers = {}; + let status; + let url; + let { fetch: fetch2 } = globalThis; + if ((_b = requestOptions.request) == null ? void 0 : _b.fetch) { + fetch2 = requestOptions.request.fetch; + } + if (!fetch2) { + throw new Error( + "fetch is not set. Please pass a fetch implementation as new Octokit({ request: { fetch }}). Learn more at https://github.com/octokit/octokit.js/#fetch-missing" + ); + } + return fetch2(requestOptions.url, { + method: requestOptions.method, + body: requestOptions.body, + redirect: (_c = requestOptions.request) == null ? void 0 : _c.redirect, + headers: requestOptions.headers, + signal: (_d = requestOptions.request) == null ? void 0 : _d.signal, + // duplex must be set if request.body is ReadableStream or Async Iterables. + // See https://fetch.spec.whatwg.org/#dom-requestinit-duplex. 
+ ...requestOptions.body && { duplex: "half" } + }).then(async (response) => { + url = response.url; + status = response.status; + for (const keyAndValue of response.headers) { + headers[keyAndValue[0]] = keyAndValue[1]; + } + if ("deprecation" in headers) { + const matches = headers.link && headers.link.match(/<([^>]+)>; rel="deprecation"/); + const deprecationLink = matches && matches.pop(); + log.warn( + `[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : ""}` + ); + } + if (status === 204 || status === 205) { + return; + } + if (requestOptions.method === "HEAD") { + if (status < 400) { + return; + } + throw new import_request_error.RequestError(response.statusText, status, { + response: { + url, + status, + headers, + data: void 0 + }, + request: requestOptions + }); + } + if (status === 304) { + throw new import_request_error.RequestError("Not modified", status, { + response: { + url, + status, + headers, + data: await getResponseData(response) + }, + request: requestOptions + }); + } + if (status >= 400) { + const data = await getResponseData(response); + const error = new import_request_error.RequestError(toErrorMessage(data), status, { + response: { + url, + status, + headers, + data + }, + request: requestOptions + }); + throw error; + } + return parseSuccessResponseBody ? await getResponseData(response) : response.body; + }).then((data) => { + return { + status, + url, + headers, + data + }; + }).catch((error) => { + if (error instanceof import_request_error.RequestError) + throw error; + else if (error.name === "AbortError") + throw error; + let message = error.message; + if (error.name === "TypeError" && "cause" in error) { + if (error.cause instanceof Error) { + message = error.cause.message; + } else if (typeof error.cause === "string") { + message = error.cause; + } + } + throw new import_request_error.RequestError(message, 500, { + request: requestOptions + }); + }); + } + __name(fetchWrapper, "fetchWrapper"); + async function getResponseData(response) { + const contentType = response.headers.get("content-type"); + if (/application\/json/.test(contentType)) { + return response.json().catch(() => response.text()).catch(() => ""); + } + if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { + return response.text(); + } + return getBufferResponse(response); + } + __name(getResponseData, "getResponseData"); + function toErrorMessage(data) { + if (typeof data === "string") + return data; + let suffix; + if ("documentation_url" in data) { + suffix = ` - ${data.documentation_url}`; + } else { + suffix = ""; + } + if ("message" in data) { + if (Array.isArray(data.errors)) { + return `${data.message}: ${data.errors.map(JSON.stringify).join(", ")}${suffix}`; + } + return `${data.message}${suffix}`; + } + return `Unknown error: ${JSON.stringify(data)}`; + } + __name(toErrorMessage, "toErrorMessage"); + function withDefaults(oldEndpoint, newDefaults) { + const endpoint2 = oldEndpoint.defaults(newDefaults); + const newApi = /* @__PURE__ */ __name(function(route, parameters) { + const endpointOptions = endpoint2.merge(route, parameters); + if (!endpointOptions.request || !endpointOptions.request.hook) { + return fetchWrapper(endpoint2.parse(endpointOptions)); + } + const request2 = /* @__PURE__ */ __name((route2, parameters2) => { + return fetchWrapper( + endpoint2.parse(endpoint2.merge(route2, parameters2)) + ); + }, "request2"); + 
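// Usage sketch (illustrative aside, not from the bundled source): request.defaults()
// layers new defaults over the previous endpoint, and every invocation re-merges
// route + parameters before fetchWrapper runs, e.g.
//   const qodanaRequest = request.defaults({ headers: { "user-agent": "qodana-scan" } });
//   const { status, data } = await qodanaRequest("GET /repos/{owner}/{repo}", {
//     owner: "JetBrains",
//     repo: "qodana-action"
//   });
// The resolved response shape is { status, url, headers, data }, per fetchWrapper above;
// the "qodana-scan" user-agent value is hypothetical.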
Object.assign(request2, { + endpoint: endpoint2, + defaults: withDefaults.bind(null, endpoint2) + }); + return endpointOptions.request.hook(request2, endpointOptions); + }, "newApi"); + return Object.assign(newApi, { + endpoint: endpoint2, + defaults: withDefaults.bind(null, endpoint2) + }); + } + __name(withDefaults, "withDefaults"); + var request = withDefaults(import_endpoint.endpoint, { + headers: { + "user-agent": `octokit-request.js/${VERSION3} ${(0, import_universal_user_agent.getUserAgent)()}` + } + }); + } +}); + +// ../node_modules/@actions/github/node_modules/@octokit/graphql/dist-node/index.js +var require_dist_node6 = __commonJS({ + "../node_modules/@actions/github/node_modules/@octokit/graphql/dist-node/index.js"(exports2, module2) { + "use strict"; + var __defProp2 = Object.defineProperty; + var __getOwnPropDesc2 = Object.getOwnPropertyDescriptor; + var __getOwnPropNames2 = Object.getOwnPropertyNames; + var __hasOwnProp2 = Object.prototype.hasOwnProperty; + var __export2 = /* @__PURE__ */ __name((target, all) => { + for (var name in all) + __defProp2(target, name, { get: all[name], enumerable: true }); + }, "__export"); + var __copyProps2 = /* @__PURE__ */ __name((to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames2(from)) + if (!__hasOwnProp2.call(to, key) && key !== except) + __defProp2(to, key, { get: /* @__PURE__ */ __name(() => from[key], "get"), enumerable: !(desc = __getOwnPropDesc2(from, key)) || desc.enumerable }); + } + return to; + }, "__copyProps"); + var __toCommonJS2 = /* @__PURE__ */ __name((mod) => __copyProps2(__defProp2({}, "__esModule", { value: true }), mod), "__toCommonJS"); + var dist_src_exports = {}; + __export2(dist_src_exports, { + GraphqlResponseError: /* @__PURE__ */ __name(() => GraphqlResponseError, "GraphqlResponseError"), + graphql: /* @__PURE__ */ __name(() => graphql2, "graphql"), + withCustomRequest: /* @__PURE__ */ __name(() => withCustomRequest, "withCustomRequest") + }); + module2.exports = __toCommonJS2(dist_src_exports); + var import_request3 = require_dist_node5(); + var import_universal_user_agent = require_dist_node(); + var VERSION3 = "7.1.0"; + var import_request2 = require_dist_node5(); + var import_request = require_dist_node5(); + function _buildMessageForResponseErrors(data) { + return `Request failed due to following response errors: +` + data.errors.map((e) => ` - ${e.message}`).join("\n"); + } + __name(_buildMessageForResponseErrors, "_buildMessageForResponseErrors"); + var GraphqlResponseError = class extends Error { + static { + __name(this, "GraphqlResponseError"); + } + constructor(request2, headers, response) { + super(_buildMessageForResponseErrors(response)); + this.request = request2; + this.headers = headers; + this.response = response; + this.name = "GraphqlResponseError"; + this.errors = response.errors; + this.data = response.data; + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + } + }; + var NON_VARIABLE_OPTIONS = [ + "method", + "baseUrl", + "url", + "headers", + "request", + "query", + "mediaType" + ]; + var FORBIDDEN_VARIABLE_OPTIONS = ["query", "method", "url"]; + var GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/; + function graphql(request2, query, options) { + if (options) { + if (typeof query === "string" && "query" in options) { + return Promise.reject( + new Error(`[@octokit/graphql] "query" cannot be used as variable name`) + ); + } + for (const key in options) { + if 
(!FORBIDDEN_VARIABLE_OPTIONS.includes(key)) + continue; + return Promise.reject( + new Error( + `[@octokit/graphql] "${key}" cannot be used as variable name` + ) + ); + } + } + const parsedOptions = typeof query === "string" ? Object.assign({ query }, options) : query; + const requestOptions = Object.keys( + parsedOptions + ).reduce((result, key) => { + if (NON_VARIABLE_OPTIONS.includes(key)) { + result[key] = parsedOptions[key]; + return result; + } + if (!result.variables) { + result.variables = {}; + } + result.variables[key] = parsedOptions[key]; + return result; + }, {}); + const baseUrl = parsedOptions.baseUrl || request2.endpoint.DEFAULTS.baseUrl; + if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) { + requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql"); + } + return request2(requestOptions).then((response) => { + if (response.data.errors) { + const headers = {}; + for (const key of Object.keys(response.headers)) { + headers[key] = response.headers[key]; + } + throw new GraphqlResponseError( + requestOptions, + headers, + response.data + ); + } + return response.data.data; + }); + } + __name(graphql, "graphql"); + function withDefaults(request2, newDefaults) { + const newRequest = request2.defaults(newDefaults); + const newApi = /* @__PURE__ */ __name((query, options) => { + return graphql(newRequest, query, options); + }, "newApi"); + return Object.assign(newApi, { + defaults: withDefaults.bind(null, newRequest), + endpoint: newRequest.endpoint + }); + } + __name(withDefaults, "withDefaults"); + var graphql2 = withDefaults(import_request3.request, { + headers: { + "user-agent": `octokit-graphql.js/${VERSION3} ${(0, import_universal_user_agent.getUserAgent)()}` + }, + method: "POST", + url: "/graphql" + }); + function withCustomRequest(customRequest) { + return withDefaults(customRequest, { + method: "POST", + url: "/graphql" + }); + } + __name(withCustomRequest, "withCustomRequest"); + } +}); + +// ../node_modules/@actions/github/node_modules/@octokit/auth-token/dist-node/index.js +var require_dist_node7 = __commonJS({ + "../node_modules/@actions/github/node_modules/@octokit/auth-token/dist-node/index.js"(exports2, module2) { + "use strict"; + var __defProp2 = Object.defineProperty; + var __getOwnPropDesc2 = Object.getOwnPropertyDescriptor; + var __getOwnPropNames2 = Object.getOwnPropertyNames; + var __hasOwnProp2 = Object.prototype.hasOwnProperty; + var __export2 = /* @__PURE__ */ __name((target, all) => { + for (var name in all) + __defProp2(target, name, { get: all[name], enumerable: true }); + }, "__export"); + var __copyProps2 = /* @__PURE__ */ __name((to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames2(from)) + if (!__hasOwnProp2.call(to, key) && key !== except) + __defProp2(to, key, { get: /* @__PURE__ */ __name(() => from[key], "get"), enumerable: !(desc = __getOwnPropDesc2(from, key)) || desc.enumerable }); + } + return to; + }, "__copyProps"); + var __toCommonJS2 = /* @__PURE__ */ __name((mod) => __copyProps2(__defProp2({}, "__esModule", { value: true }), mod), "__toCommonJS"); + var dist_src_exports = {}; + __export2(dist_src_exports, { + createTokenAuth: /* @__PURE__ */ __name(() => createTokenAuth, "createTokenAuth") + }); + module2.exports = __toCommonJS2(dist_src_exports); + var REGEX_IS_INSTALLATION_LEGACY = /^v1\./; + var REGEX_IS_INSTALLATION = /^ghs_/; + var REGEX_IS_USER_TO_SERVER = /^ghu_/; + async function auth(token) { + const isApp = token.split(/\./).length 
=== 3; + const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token); + const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token); + const tokenType = isApp ? "app" : isInstallation ? "installation" : isUserToServer ? "user-to-server" : "oauth"; + return { + type: "token", + token, + tokenType + }; + } + __name(auth, "auth"); + function withAuthorizationPrefix(token) { + if (token.split(/\./).length === 3) { + return `bearer ${token}`; + } + return `token ${token}`; + } + __name(withAuthorizationPrefix, "withAuthorizationPrefix"); + async function hook(token, request, route, parameters) { + const endpoint = request.endpoint.merge( + route, + parameters + ); + endpoint.headers.authorization = withAuthorizationPrefix(token); + return request(endpoint); + } + __name(hook, "hook"); + var createTokenAuth = /* @__PURE__ */ __name(function createTokenAuth2(token) { + if (!token) { + throw new Error("[@octokit/auth-token] No token passed to createTokenAuth"); + } + if (typeof token !== "string") { + throw new Error( + "[@octokit/auth-token] Token passed to createTokenAuth is not a string" + ); + } + token = token.replace(/^(token|bearer) +/i, ""); + return Object.assign(auth.bind(null, token), { + hook: hook.bind(null, token) + }); + }, "createTokenAuth2"); + } +}); + +// ../node_modules/@actions/github/node_modules/@octokit/core/dist-node/index.js +var require_dist_node8 = __commonJS({ + "../node_modules/@actions/github/node_modules/@octokit/core/dist-node/index.js"(exports2, module2) { + "use strict"; + var __defProp2 = Object.defineProperty; + var __getOwnPropDesc2 = Object.getOwnPropertyDescriptor; + var __getOwnPropNames2 = Object.getOwnPropertyNames; + var __hasOwnProp2 = Object.prototype.hasOwnProperty; + var __export2 = /* @__PURE__ */ __name((target, all) => { + for (var name in all) + __defProp2(target, name, { get: all[name], enumerable: true }); + }, "__export"); + var __copyProps2 = /* @__PURE__ */ __name((to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames2(from)) + if (!__hasOwnProp2.call(to, key) && key !== except) + __defProp2(to, key, { get: /* @__PURE__ */ __name(() => from[key], "get"), enumerable: !(desc = __getOwnPropDesc2(from, key)) || desc.enumerable }); + } + return to; + }, "__copyProps"); + var __toCommonJS2 = /* @__PURE__ */ __name((mod) => __copyProps2(__defProp2({}, "__esModule", { value: true }), mod), "__toCommonJS"); + var dist_src_exports = {}; + __export2(dist_src_exports, { + Octokit: /* @__PURE__ */ __name(() => Octokit, "Octokit") + }); + module2.exports = __toCommonJS2(dist_src_exports); + var import_universal_user_agent = require_dist_node(); + var import_before_after_hook = require_before_after_hook(); + var import_request = require_dist_node5(); + var import_graphql = require_dist_node6(); + var import_auth_token = require_dist_node7(); + var VERSION3 = "5.2.0"; + var noop = /* @__PURE__ */ __name(() => { + }, "noop"); + var consoleWarn = console.warn.bind(console); + var consoleError = console.error.bind(console); + var userAgentTrail = `octokit-core.js/${VERSION3} ${(0, import_universal_user_agent.getUserAgent)()}`; + var Octokit = class { + static { + __name(this, "Octokit"); + } + static { + this.VERSION = VERSION3; + } + static defaults(defaults) { + const OctokitWithDefaults = class extends this { + static { + __name(this, "OctokitWithDefaults"); + } + constructor(...args) { + const options = args[0] || {}; + if (typeof 
defaults === "function") { + super(defaults(options)); + return; + } + super( + Object.assign( + {}, + defaults, + options, + options.userAgent && defaults.userAgent ? { + userAgent: `${options.userAgent} ${defaults.userAgent}` + } : null + ) + ); + } + }; + return OctokitWithDefaults; + } + static { + this.plugins = []; + } + /** + * Attach a plugin (or many) to your Octokit instance. + * + * @example + * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...) + */ + static plugin(...newPlugins) { + const currentPlugins = this.plugins; + const NewOctokit = class extends this { + static { + __name(this, "NewOctokit"); + } + static { + this.plugins = currentPlugins.concat( + newPlugins.filter((plugin) => !currentPlugins.includes(plugin)) + ); + } + }; + return NewOctokit; + } + constructor(options = {}) { + const hook = new import_before_after_hook.Collection(); + const requestDefaults = { + baseUrl: import_request.request.endpoint.DEFAULTS.baseUrl, + headers: {}, + request: Object.assign({}, options.request, { + // @ts-ignore internal usage only, no need to type + hook: hook.bind(null, "request") + }), + mediaType: { + previews: [], + format: "" + } + }; + requestDefaults.headers["user-agent"] = options.userAgent ? `${options.userAgent} ${userAgentTrail}` : userAgentTrail; + if (options.baseUrl) { + requestDefaults.baseUrl = options.baseUrl; + } + if (options.previews) { + requestDefaults.mediaType.previews = options.previews; + } + if (options.timeZone) { + requestDefaults.headers["time-zone"] = options.timeZone; + } + this.request = import_request.request.defaults(requestDefaults); + this.graphql = (0, import_graphql.withCustomRequest)(this.request).defaults(requestDefaults); + this.log = Object.assign( + { + debug: noop, + info: noop, + warn: consoleWarn, + error: consoleError + }, + options.log + ); + this.hook = hook; + if (!options.authStrategy) { + if (!options.auth) { + this.auth = async () => ({ + type: "unauthenticated" + }); + } else { + const auth = (0, import_auth_token.createTokenAuth)(options.auth); + hook.wrap("request", auth.hook); + this.auth = auth; + } + } else { + const { authStrategy, ...otherOptions } = options; + const auth = authStrategy( + Object.assign( + { + request: this.request, + log: this.log, + // we pass the current octokit instance as well as its constructor options + // to allow for authentication strategies that return a new octokit instance + // that shares the same internal state as the current one. The original + // requirement for this was the "event-octokit" authentication strategy + // of https://github.com/probot/octokit-auth-probot. 
+ octokit: this, + octokitOptions: otherOptions + }, + options.auth + ) + ); + hook.wrap("request", auth.hook); + this.auth = auth; + } + const classConstructor = this.constructor; + for (let i = 0; i < classConstructor.plugins.length; ++i) { + Object.assign(this, classConstructor.plugins[i](this, options)); + } + } + }; + } +}); + +// ../node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-node/index.js +var require_dist_node9 = __commonJS({ + "../node_modules/@actions/github/node_modules/@octokit/plugin-rest-endpoint-methods/dist-node/index.js"(exports2, module2) { + "use strict"; + var __defProp2 = Object.defineProperty; + var __getOwnPropDesc2 = Object.getOwnPropertyDescriptor; + var __getOwnPropNames2 = Object.getOwnPropertyNames; + var __hasOwnProp2 = Object.prototype.hasOwnProperty; + var __export2 = /* @__PURE__ */ __name((target, all) => { + for (var name in all) + __defProp2(target, name, { get: all[name], enumerable: true }); + }, "__export"); + var __copyProps2 = /* @__PURE__ */ __name((to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames2(from)) + if (!__hasOwnProp2.call(to, key) && key !== except) + __defProp2(to, key, { get: /* @__PURE__ */ __name(() => from[key], "get"), enumerable: !(desc = __getOwnPropDesc2(from, key)) || desc.enumerable }); + } + return to; + }, "__copyProps"); + var __toCommonJS2 = /* @__PURE__ */ __name((mod) => __copyProps2(__defProp2({}, "__esModule", { value: true }), mod), "__toCommonJS"); + var dist_src_exports = {}; + __export2(dist_src_exports, { + legacyRestEndpointMethods: /* @__PURE__ */ __name(() => legacyRestEndpointMethods, "legacyRestEndpointMethods"), + restEndpointMethods: /* @__PURE__ */ __name(() => restEndpointMethods, "restEndpointMethods") + }); + module2.exports = __toCommonJS2(dist_src_exports); + var VERSION3 = "10.4.1"; + var Endpoints = { + actions: { + addCustomLabelsToSelfHostedRunnerForOrg: [ + "POST /orgs/{org}/actions/runners/{runner_id}/labels" + ], + addCustomLabelsToSelfHostedRunnerForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], + addSelectedRepoToOrgSecret: [ + "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}" + ], + addSelectedRepoToOrgVariable: [ + "PUT /orgs/{org}/actions/variables/{name}/repositories/{repository_id}" + ], + approveWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve" + ], + cancelWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel" + ], + createEnvironmentVariable: [ + "POST /repositories/{repository_id}/environments/{environment_name}/variables" + ], + createOrUpdateEnvironmentSecret: [ + "PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" + ], + createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"], + createOrUpdateRepoSecret: [ + "PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}" + ], + createOrgVariable: ["POST /orgs/{org}/actions/variables"], + createRegistrationTokenForOrg: [ + "POST /orgs/{org}/actions/runners/registration-token" + ], + createRegistrationTokenForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/registration-token" + ], + createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"], + createRemoveTokenForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/remove-token" + ], + createRepoVariable: ["POST /repos/{owner}/{repo}/actions/variables"], + createWorkflowDispatch: [ + 
"POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches" + ], + deleteActionsCacheById: [ + "DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}" + ], + deleteActionsCacheByKey: [ + "DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}" + ], + deleteArtifact: [ + "DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}" + ], + deleteEnvironmentSecret: [ + "DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" + ], + deleteEnvironmentVariable: [ + "DELETE /repositories/{repository_id}/environments/{environment_name}/variables/{name}" + ], + deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"], + deleteOrgVariable: ["DELETE /orgs/{org}/actions/variables/{name}"], + deleteRepoSecret: [ + "DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}" + ], + deleteRepoVariable: [ + "DELETE /repos/{owner}/{repo}/actions/variables/{name}" + ], + deleteSelfHostedRunnerFromOrg: [ + "DELETE /orgs/{org}/actions/runners/{runner_id}" + ], + deleteSelfHostedRunnerFromRepo: [ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}" + ], + deleteWorkflowRun: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}"], + deleteWorkflowRunLogs: [ + "DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs" + ], + disableSelectedRepositoryGithubActionsOrganization: [ + "DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}" + ], + disableWorkflow: [ + "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable" + ], + downloadArtifact: [ + "GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}" + ], + downloadJobLogsForWorkflowRun: [ + "GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs" + ], + downloadWorkflowRunAttemptLogs: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs" + ], + downloadWorkflowRunLogs: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs" + ], + enableSelectedRepositoryGithubActionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/repositories/{repository_id}" + ], + enableWorkflow: [ + "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable" + ], + forceCancelWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/force-cancel" + ], + generateRunnerJitconfigForOrg: [ + "POST /orgs/{org}/actions/runners/generate-jitconfig" + ], + generateRunnerJitconfigForRepo: [ + "POST /repos/{owner}/{repo}/actions/runners/generate-jitconfig" + ], + getActionsCacheList: ["GET /repos/{owner}/{repo}/actions/caches"], + getActionsCacheUsage: ["GET /repos/{owner}/{repo}/actions/cache/usage"], + getActionsCacheUsageByRepoForOrg: [ + "GET /orgs/{org}/actions/cache/usage-by-repository" + ], + getActionsCacheUsageForOrg: ["GET /orgs/{org}/actions/cache/usage"], + getAllowedActionsOrganization: [ + "GET /orgs/{org}/actions/permissions/selected-actions" + ], + getAllowedActionsRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions/selected-actions" + ], + getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], + getCustomOidcSubClaimForRepo: [ + "GET /repos/{owner}/{repo}/actions/oidc/customization/sub" + ], + getEnvironmentPublicKey: [ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key" + ], + getEnvironmentSecret: [ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" + ], + getEnvironmentVariable: [ + "GET /repositories/{repository_id}/environments/{environment_name}/variables/{name}" + ], + 
getGithubActionsDefaultWorkflowPermissionsOrganization: [ + "GET /orgs/{org}/actions/permissions/workflow" + ], + getGithubActionsDefaultWorkflowPermissionsRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions/workflow" + ], + getGithubActionsPermissionsOrganization: [ + "GET /orgs/{org}/actions/permissions" + ], + getGithubActionsPermissionsRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions" + ], + getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"], + getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"], + getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"], + getOrgVariable: ["GET /orgs/{org}/actions/variables/{name}"], + getPendingDeploymentsForRun: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments" + ], + getRepoPermissions: [ + "GET /repos/{owner}/{repo}/actions/permissions", + {}, + { renamed: ["actions", "getGithubActionsPermissionsRepository"] } + ], + getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"], + getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"], + getRepoVariable: ["GET /repos/{owner}/{repo}/actions/variables/{name}"], + getReviewsForRun: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals" + ], + getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"], + getSelfHostedRunnerForRepo: [ + "GET /repos/{owner}/{repo}/actions/runners/{runner_id}" + ], + getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"], + getWorkflowAccessToRepository: [ + "GET /repos/{owner}/{repo}/actions/permissions/access" + ], + getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"], + getWorkflowRunAttempt: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}" + ], + getWorkflowRunUsage: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing" + ], + getWorkflowUsage: [ + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing" + ], + listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"], + listEnvironmentSecrets: [ + "GET /repositories/{repository_id}/environments/{environment_name}/secrets" + ], + listEnvironmentVariables: [ + "GET /repositories/{repository_id}/environments/{environment_name}/variables" + ], + listJobsForWorkflowRun: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs" + ], + listJobsForWorkflowRunAttempt: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs" + ], + listLabelsForSelfHostedRunnerForOrg: [ + "GET /orgs/{org}/actions/runners/{runner_id}/labels" + ], + listLabelsForSelfHostedRunnerForRepo: [ + "GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], + listOrgSecrets: ["GET /orgs/{org}/actions/secrets"], + listOrgVariables: ["GET /orgs/{org}/actions/variables"], + listRepoOrganizationSecrets: [ + "GET /repos/{owner}/{repo}/actions/organization-secrets" + ], + listRepoOrganizationVariables: [ + "GET /repos/{owner}/{repo}/actions/organization-variables" + ], + listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"], + listRepoVariables: ["GET /repos/{owner}/{repo}/actions/variables"], + listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"], + listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"], + listRunnerApplicationsForRepo: [ + "GET /repos/{owner}/{repo}/actions/runners/downloads" + ], + listSelectedReposForOrgSecret: [ + "GET /orgs/{org}/actions/secrets/{secret_name}/repositories" + ], + listSelectedReposForOrgVariable: [ + 
"GET /orgs/{org}/actions/variables/{name}/repositories" + ], + listSelectedRepositoriesEnabledGithubActionsOrganization: [ + "GET /orgs/{org}/actions/permissions/repositories" + ], + listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"], + listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"], + listWorkflowRunArtifacts: [ + "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts" + ], + listWorkflowRuns: [ + "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs" + ], + listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"], + reRunJobForWorkflowRun: [ + "POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun" + ], + reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"], + reRunWorkflowFailedJobs: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs" + ], + removeAllCustomLabelsFromSelfHostedRunnerForOrg: [ + "DELETE /orgs/{org}/actions/runners/{runner_id}/labels" + ], + removeAllCustomLabelsFromSelfHostedRunnerForRepo: [ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" + ], + removeCustomLabelFromSelfHostedRunnerForOrg: [ + "DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}" + ], + removeCustomLabelFromSelfHostedRunnerForRepo: [ + "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}" + ], + removeSelectedRepoFromOrgSecret: [ + "DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}" + ], + removeSelectedRepoFromOrgVariable: [ + "DELETE /orgs/{org}/actions/variables/{name}/repositories/{repository_id}" + ], + reviewCustomGatesForRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/deployment_protection_rule" + ], + reviewPendingDeploymentsForRun: [ + "POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments" + ], + setAllowedActionsOrganization: [ + "PUT /orgs/{org}/actions/permissions/selected-actions" + ], + setAllowedActionsRepository: [ + "PUT /repos/{owner}/{repo}/actions/permissions/selected-actions" + ], + setCustomLabelsForSelfHostedRunnerForOrg: [ + "PUT /orgs/{org}/actions/runners/{runner_id}/labels" ], setCustomLabelsForSelfHostedRunnerForRepo: [ "PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" @@ -23760,456 +24610,11 @@ var require_github = __commonJS({ } }); -// ../node_modules/@actions/io/lib/io-util.js -var require_io_util = __commonJS({ - "../node_modules/@actions/io/lib/io-util.js"(exports2) { - "use strict"; - var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: /* @__PURE__ */ __name(function() { - return m[k]; - }, "get") }); - } : function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - o[k2] = m[k]; - }); - var __setModuleDefault3 = exports2 && exports2.__setModuleDefault || (Object.create ? function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - } : function(o, v) { - o["default"] = v; - }); - var __importStar3 = exports2 && exports2.__importStar || function(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding3(result, mod, k); - } - __setModuleDefault3(result, mod); - return result; - }; - var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? 
value : new P(function(resolve) { - resolve(value); - }); - } - __name(adopt, "adopt"); - return new (P || (P = Promise))(function(resolve, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); - } - } - __name(fulfilled, "fulfilled"); - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); - } - } - __name(rejected, "rejected"); - function step(result) { - result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); - } - __name(step, "step"); - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); - }; - var _a; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.getCmdPath = exports2.tryGetExecutablePath = exports2.isRooted = exports2.isDirectory = exports2.exists = exports2.READONLY = exports2.UV_FS_O_EXLOCK = exports2.IS_WINDOWS = exports2.unlink = exports2.symlink = exports2.stat = exports2.rmdir = exports2.rm = exports2.rename = exports2.readlink = exports2.readdir = exports2.open = exports2.mkdir = exports2.lstat = exports2.copyFile = exports2.chmod = void 0; - var fs2 = __importStar3(require("fs")); - var path2 = __importStar3(require("path")); - _a = fs2.promises, exports2.chmod = _a.chmod, exports2.copyFile = _a.copyFile, exports2.lstat = _a.lstat, exports2.mkdir = _a.mkdir, exports2.open = _a.open, exports2.readdir = _a.readdir, exports2.readlink = _a.readlink, exports2.rename = _a.rename, exports2.rm = _a.rm, exports2.rmdir = _a.rmdir, exports2.stat = _a.stat, exports2.symlink = _a.symlink, exports2.unlink = _a.unlink; - exports2.IS_WINDOWS = process.platform === "win32"; - exports2.UV_FS_O_EXLOCK = 268435456; - exports2.READONLY = fs2.constants.O_RDONLY; - function exists(fsPath) { - return __awaiter3(this, void 0, void 0, function* () { - try { - yield exports2.stat(fsPath); - } catch (err) { - if (err.code === "ENOENT") { - return false; - } - throw err; - } - return true; - }); - } - __name(exists, "exists"); - exports2.exists = exists; - function isDirectory(fsPath, useStat = false) { - return __awaiter3(this, void 0, void 0, function* () { - const stats = useStat ? 
yield exports2.stat(fsPath) : yield exports2.lstat(fsPath); - return stats.isDirectory(); - }); - } - __name(isDirectory, "isDirectory"); - exports2.isDirectory = isDirectory; - function isRooted(p) { - p = normalizeSeparators(p); - if (!p) { - throw new Error('isRooted() parameter "p" cannot be empty'); - } - if (exports2.IS_WINDOWS) { - return p.startsWith("\\") || /^[A-Z]:/i.test(p); - } - return p.startsWith("/"); - } - __name(isRooted, "isRooted"); - exports2.isRooted = isRooted; - function tryGetExecutablePath(filePath, extensions) { - return __awaiter3(this, void 0, void 0, function* () { - let stats = void 0; - try { - stats = yield exports2.stat(filePath); - } catch (err) { - if (err.code !== "ENOENT") { - console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`); - } - } - if (stats && stats.isFile()) { - if (exports2.IS_WINDOWS) { - const upperExt = path2.extname(filePath).toUpperCase(); - if (extensions.some((validExt) => validExt.toUpperCase() === upperExt)) { - return filePath; - } - } else { - if (isUnixExecutable(stats)) { - return filePath; - } - } - } - const originalFilePath = filePath; - for (const extension of extensions) { - filePath = originalFilePath + extension; - stats = void 0; - try { - stats = yield exports2.stat(filePath); - } catch (err) { - if (err.code !== "ENOENT") { - console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`); - } - } - if (stats && stats.isFile()) { - if (exports2.IS_WINDOWS) { - try { - const directory = path2.dirname(filePath); - const upperName = path2.basename(filePath).toUpperCase(); - for (const actualName of yield exports2.readdir(directory)) { - if (upperName === actualName.toUpperCase()) { - filePath = path2.join(directory, actualName); - break; - } - } - } catch (err) { - console.log(`Unexpected error attempting to determine the actual case of the file '${filePath}': ${err}`); - } - return filePath; - } else { - if (isUnixExecutable(stats)) { - return filePath; - } - } - } - } - return ""; - }); - } - __name(tryGetExecutablePath, "tryGetExecutablePath"); - exports2.tryGetExecutablePath = tryGetExecutablePath; - function normalizeSeparators(p) { - p = p || ""; - if (exports2.IS_WINDOWS) { - p = p.replace(/\//g, "\\"); - return p.replace(/\\\\+/g, "\\"); - } - return p.replace(/\/\/+/g, "/"); - } - __name(normalizeSeparators, "normalizeSeparators"); - function isUnixExecutable(stats) { - return (stats.mode & 1) > 0 || (stats.mode & 8) > 0 && stats.gid === process.getgid() || (stats.mode & 64) > 0 && stats.uid === process.getuid(); - } - __name(isUnixExecutable, "isUnixExecutable"); - function getCmdPath() { - var _a2; - return (_a2 = process.env["COMSPEC"]) !== null && _a2 !== void 0 ? _a2 : `cmd.exe`; - } - __name(getCmdPath, "getCmdPath"); - exports2.getCmdPath = getCmdPath; - } -}); - -// ../node_modules/@actions/io/lib/io.js -var require_io = __commonJS({ - "../node_modules/@actions/io/lib/io.js"(exports2) { - "use strict"; - var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: /* @__PURE__ */ __name(function() { - return m[k]; - }, "get") }); - } : function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - o[k2] = m[k]; - }); - var __setModuleDefault3 = exports2 && exports2.__setModuleDefault || (Object.create ? 
function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - } : function(o, v) { - o["default"] = v; - }); - var __importStar3 = exports2 && exports2.__importStar || function(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding3(result, mod, k); - } - __setModuleDefault3(result, mod); - return result; - }; - var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? value : new P(function(resolve) { - resolve(value); - }); - } - __name(adopt, "adopt"); - return new (P || (P = Promise))(function(resolve, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); - } - } - __name(fulfilled, "fulfilled"); - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); - } - } - __name(rejected, "rejected"); - function step(result) { - result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); - } - __name(step, "step"); - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); - }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.findInPath = exports2.which = exports2.mkdirP = exports2.rmRF = exports2.mv = exports2.cp = void 0; - var assert_1 = require("assert"); - var path2 = __importStar3(require("path")); - var ioUtil = __importStar3(require_io_util()); - function cp(source, dest, options = {}) { - return __awaiter3(this, void 0, void 0, function* () { - const { force, recursive, copySourceDirectory } = readCopyOptions(options); - const destStat = (yield ioUtil.exists(dest)) ? yield ioUtil.stat(dest) : null; - if (destStat && destStat.isFile() && !force) { - return; - } - const newDest = destStat && destStat.isDirectory() && copySourceDirectory ? path2.join(dest, path2.basename(source)) : dest; - if (!(yield ioUtil.exists(source))) { - throw new Error(`no such file or directory: ${source}`); - } - const sourceStat = yield ioUtil.stat(source); - if (sourceStat.isDirectory()) { - if (!recursive) { - throw new Error(`Failed to copy. 
${source} is a directory, but tried to copy without recursive flag.`); - } else { - yield cpDirRecursive(source, newDest, 0, force); - } - } else { - if (path2.relative(source, newDest) === "") { - throw new Error(`'${newDest}' and '${source}' are the same file`); - } - yield copyFile(source, newDest, force); - } - }); - } - __name(cp, "cp"); - exports2.cp = cp; - function mv(source, dest, options = {}) { - return __awaiter3(this, void 0, void 0, function* () { - if (yield ioUtil.exists(dest)) { - let destExists = true; - if (yield ioUtil.isDirectory(dest)) { - dest = path2.join(dest, path2.basename(source)); - destExists = yield ioUtil.exists(dest); - } - if (destExists) { - if (options.force == null || options.force) { - yield rmRF(dest); - } else { - throw new Error("Destination already exists"); - } - } - } - yield mkdirP(path2.dirname(dest)); - yield ioUtil.rename(source, dest); - }); - } - __name(mv, "mv"); - exports2.mv = mv; - function rmRF(inputPath) { - return __awaiter3(this, void 0, void 0, function* () { - if (ioUtil.IS_WINDOWS) { - if (/[*"<>|]/.test(inputPath)) { - throw new Error('File path must not contain `*`, `"`, `<`, `>` or `|` on Windows'); - } - } - try { - yield ioUtil.rm(inputPath, { - force: true, - maxRetries: 3, - recursive: true, - retryDelay: 300 - }); - } catch (err) { - throw new Error(`File was unable to be removed ${err}`); - } - }); - } - __name(rmRF, "rmRF"); - exports2.rmRF = rmRF; - function mkdirP(fsPath) { - return __awaiter3(this, void 0, void 0, function* () { - assert_1.ok(fsPath, "a path argument must be provided"); - yield ioUtil.mkdir(fsPath, { recursive: true }); - }); - } - __name(mkdirP, "mkdirP"); - exports2.mkdirP = mkdirP; - function which(tool, check) { - return __awaiter3(this, void 0, void 0, function* () { - if (!tool) { - throw new Error("parameter 'tool' is required"); - } - if (check) { - const result = yield which(tool, false); - if (!result) { - if (ioUtil.IS_WINDOWS) { - throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also verify the file has a valid extension for an executable file.`); - } else { - throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. 
Also check the file mode to verify the file is executable.`); - } - } - return result; - } - const matches = yield findInPath(tool); - if (matches && matches.length > 0) { - return matches[0]; - } - return ""; - }); - } - __name(which, "which"); - exports2.which = which; - function findInPath(tool) { - return __awaiter3(this, void 0, void 0, function* () { - if (!tool) { - throw new Error("parameter 'tool' is required"); - } - const extensions = []; - if (ioUtil.IS_WINDOWS && process.env["PATHEXT"]) { - for (const extension of process.env["PATHEXT"].split(path2.delimiter)) { - if (extension) { - extensions.push(extension); - } - } - } - if (ioUtil.isRooted(tool)) { - const filePath = yield ioUtil.tryGetExecutablePath(tool, extensions); - if (filePath) { - return [filePath]; - } - return []; - } - if (tool.includes(path2.sep)) { - return []; - } - const directories = []; - if (process.env.PATH) { - for (const p of process.env.PATH.split(path2.delimiter)) { - if (p) { - directories.push(p); - } - } - } - const matches = []; - for (const directory of directories) { - const filePath = yield ioUtil.tryGetExecutablePath(path2.join(directory, tool), extensions); - if (filePath) { - matches.push(filePath); - } - } - return matches; - }); - } - __name(findInPath, "findInPath"); - exports2.findInPath = findInPath; - function readCopyOptions(options) { - const force = options.force == null ? true : options.force; - const recursive = Boolean(options.recursive); - const copySourceDirectory = options.copySourceDirectory == null ? true : Boolean(options.copySourceDirectory); - return { force, recursive, copySourceDirectory }; - } - __name(readCopyOptions, "readCopyOptions"); - function cpDirRecursive(sourceDir, destDir, currentDepth, force) { - return __awaiter3(this, void 0, void 0, function* () { - if (currentDepth >= 255) - return; - currentDepth++; - yield mkdirP(destDir); - const files = yield ioUtil.readdir(sourceDir); - for (const fileName of files) { - const srcFile = `${sourceDir}/${fileName}`; - const destFile = `${destDir}/${fileName}`; - const srcFileStat = yield ioUtil.lstat(srcFile); - if (srcFileStat.isDirectory()) { - yield cpDirRecursive(srcFile, destFile, currentDepth, force); - } else { - yield copyFile(srcFile, destFile, force); - } - } - yield ioUtil.chmod(destDir, (yield ioUtil.stat(sourceDir)).mode); - }); - } - __name(cpDirRecursive, "cpDirRecursive"); - function copyFile(srcFile, destFile, force) { - return __awaiter3(this, void 0, void 0, function* () { - if ((yield ioUtil.lstat(srcFile)).isSymbolicLink()) { - try { - yield ioUtil.lstat(destFile); - yield ioUtil.unlink(destFile); - } catch (e) { - if (e.code === "EPERM") { - yield ioUtil.chmod(destFile, "0666"); - yield ioUtil.unlink(destFile); - } - } - const symlinkFull = yield ioUtil.readlink(srcFile); - yield ioUtil.symlink(symlinkFull, destFile, ioUtil.IS_WINDOWS ? 
"junction" : null); - } else if (!(yield ioUtil.exists(destFile)) || force) { - yield ioUtil.copyFile(srcFile, destFile); - } - }); - } - __name(copyFile, "copyFile"); - } -}); - // ../common/cli.json -var version2, checksum; +var version, checksum; var init_cli = __esm({ "../common/cli.json"() { - version2 = "2024.2.6"; + version = "2024.2.6"; checksum = { windows_x86_64: "a63c618048aaab42c7448e5307f53cc8046d744f27cc769074529daf4bb09a8f", linux_arm64: "77189751f3c04205613dde5890bae7c47143cb68c1cf542524d8c5b59aeccf8b", @@ -34274,7 +34679,7 @@ function getQodanaUrl(arch, platform, nightly = false) { throw new Error(`Unsupported architecture: ${arch}`); } const archive = platform === "windows" ? "zip" : "tar.gz"; - const cli_version = nightly ? "nightly" : `v${version2}`; + const cli_version = nightly ? "nightly" : `v${version}`; return `https://github.com/JetBrains/qodana-cli/releases/download/${cli_version}/qodana_${platform}_${arch}.${archive}`; } function isExecutionSuccessful(exitCode) { @@ -34337,7 +34742,9 @@ function getCoverageFromSarif(sarifPath) { totalCoveredLines: sarifContents.runs[0].properties["coverage"]["totalCoveredLines"] || 0, freshCoverage: sarifContents.runs[0].properties["coverage"]["freshCoverage"] || 0, freshLines: sarifContents.runs[0].properties["coverage"]["freshLines"] || 0, - freshCoveredLines: sarifContents.runs[0].properties["coverage"]["freshCoveredLines"] || 0 + freshCoveredLines: sarifContents.runs[0].properties["coverage"]["freshCoveredLines"] || 0, + totalCoverageThreshold: sarifContents.runs[0].properties["qodanaFailureConditions"]?.["testCoverageThresholds"]?.["totalCoverage"] || COVERAGE_THRESHOLD, + freshCoverageThreshold: sarifContents.runs[0].properties["qodanaFailureConditions"]?.["testCoverageThresholds"]?.["freshCoverage"] || COVERAGE_THRESHOLD }; } else { return { @@ -34346,14 +34753,16 @@ function getCoverageFromSarif(sarifPath) { totalCoveredLines: 0, freshCoverage: 0, freshLines: 0, - freshCoveredLines: 0 + freshCoveredLines: 0, + totalCoverageThreshold: COVERAGE_THRESHOLD, + freshCoverageThreshold: COVERAGE_THRESHOLD }; } } throw new Error(`SARIF file not found: ${sarifPath}`); } function sha256sum(file) { - const hash = (0, import_crypto4.createHash)("sha256"); + const hash = (0, import_crypto.createHash)("sha256"); hash.update(fs.readFileSync(file)); return hash.digest("hex"); } @@ -34403,12 +34812,12 @@ async function compressFolder(srcDir, destFile) { zip.generateNodeStream({ streamFiles: true, compression: "DEFLATE" }).pipe(fs.createWriteStream(destFile)).on("error", (err) => reject(err)).on("finish", resolve); }); } -var import_crypto4, fs, import_path, import_jszip, import_util, readdir2, stat2, mkdir2, SUPPORTED_PLATFORMS, SUPPORTED_ARCHS, FAIL_THRESHOLD_OUTPUT, QODANA_SARIF_NAME, QODANA_SHORT_SARIF_NAME, QODANA_REPORT_URL_NAME, QODANA_OPEN_IN_IDE_NAME, QODANA_LICENSES_MD, QODANA_LICENSES_JSON, EXECUTABLE, VERSION, COVERAGE_THRESHOLD, QodanaExitCode, NONE, BRANCH, PULL_REQUEST; +var import_crypto, fs, import_path, import_jszip, import_util, readdir2, stat2, mkdir2, SUPPORTED_PLATFORMS, SUPPORTED_ARCHS, FAIL_THRESHOLD_OUTPUT, QODANA_SARIF_NAME, QODANA_SHORT_SARIF_NAME, QODANA_REPORT_URL_NAME, QODANA_OPEN_IN_IDE_NAME, QODANA_LICENSES_MD, QODANA_LICENSES_JSON, EXECUTABLE, VERSION, COVERAGE_THRESHOLD, QodanaExitCode, NONE, BRANCH, PULL_REQUEST; var init_qodana = __esm({ "../common/qodana.ts"() { "use strict"; init_cli(); - import_crypto4 = require("crypto"); + import_crypto = require("crypto"); fs = __toESM(require("fs")); import_path = 
__toESM(require("path")); import_jszip = __toESM(require_lib4()); @@ -34426,7 +34835,7 @@ var init_qodana = __esm({ QODANA_LICENSES_MD = "thirdPartySoftwareList.md"; QODANA_LICENSES_JSON = "third-party-libraries.json"; EXECUTABLE = "qodana"; - VERSION = version2; + VERSION = version; COVERAGE_THRESHOLD = 50; __name(getQodanaSha256, "getQodanaSha256"); __name(getProcessArchName, "getProcessArchName"); @@ -34455,615 +34864,6 @@ var init_qodana = __esm({ } }); -// ../node_modules/@actions/exec/lib/toolrunner.js -var require_toolrunner = __commonJS({ - "../node_modules/@actions/exec/lib/toolrunner.js"(exports2) { - "use strict"; - var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: /* @__PURE__ */ __name(function() { - return m[k]; - }, "get") }); - } : function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - o[k2] = m[k]; - }); - var __setModuleDefault3 = exports2 && exports2.__setModuleDefault || (Object.create ? function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - } : function(o, v) { - o["default"] = v; - }); - var __importStar3 = exports2 && exports2.__importStar || function(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding3(result, mod, k); - } - __setModuleDefault3(result, mod); - return result; - }; - var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? value : new P(function(resolve) { - resolve(value); - }); - } - __name(adopt, "adopt"); - return new (P || (P = Promise))(function(resolve, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); - } - } - __name(fulfilled, "fulfilled"); - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); - } - } - __name(rejected, "rejected"); - function step(result) { - result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); - } - __name(step, "step"); - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); - }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.argStringToArray = exports2.ToolRunner = void 0; - var os = __importStar3(require("os")); - var events = __importStar3(require("events")); - var child = __importStar3(require("child_process")); - var path2 = __importStar3(require("path")); - var io2 = __importStar3(require_io()); - var ioUtil = __importStar3(require_io_util()); - var timers_1 = require("timers"); - var IS_WINDOWS = process.platform === "win32"; - var ToolRunner = class extends events.EventEmitter { - static { - __name(this, "ToolRunner"); - } - constructor(toolPath, args, options) { - super(); - if (!toolPath) { - throw new Error("Parameter 'toolPath' cannot be null or empty."); - } - this.toolPath = toolPath; - this.args = args || []; - this.options = options || {}; - } - _debug(message) { - if (this.options.listeners && this.options.listeners.debug) { - this.options.listeners.debug(message); - } - } - _getCommandString(options, noPrefix) { - const toolPath = this._getSpawnFileName(); - const args = this._getSpawnArgs(options); - let cmd = noPrefix ? 
"" : "[command]"; - if (IS_WINDOWS) { - if (this._isCmdFile()) { - cmd += toolPath; - for (const a of args) { - cmd += ` ${a}`; - } - } else if (options.windowsVerbatimArguments) { - cmd += `"${toolPath}"`; - for (const a of args) { - cmd += ` ${a}`; - } - } else { - cmd += this._windowsQuoteCmdArg(toolPath); - for (const a of args) { - cmd += ` ${this._windowsQuoteCmdArg(a)}`; - } - } - } else { - cmd += toolPath; - for (const a of args) { - cmd += ` ${a}`; - } - } - return cmd; - } - _processLineBuffer(data, strBuffer, onLine) { - try { - let s = strBuffer + data.toString(); - let n = s.indexOf(os.EOL); - while (n > -1) { - const line = s.substring(0, n); - onLine(line); - s = s.substring(n + os.EOL.length); - n = s.indexOf(os.EOL); - } - return s; - } catch (err) { - this._debug(`error processing line. Failed with error ${err}`); - return ""; - } - } - _getSpawnFileName() { - if (IS_WINDOWS) { - if (this._isCmdFile()) { - return process.env["COMSPEC"] || "cmd.exe"; - } - } - return this.toolPath; - } - _getSpawnArgs(options) { - if (IS_WINDOWS) { - if (this._isCmdFile()) { - let argline = `/D /S /C "${this._windowsQuoteCmdArg(this.toolPath)}`; - for (const a of this.args) { - argline += " "; - argline += options.windowsVerbatimArguments ? a : this._windowsQuoteCmdArg(a); - } - argline += '"'; - return [argline]; - } - } - return this.args; - } - _endsWith(str, end) { - return str.endsWith(end); - } - _isCmdFile() { - const upperToolPath = this.toolPath.toUpperCase(); - return this._endsWith(upperToolPath, ".CMD") || this._endsWith(upperToolPath, ".BAT"); - } - _windowsQuoteCmdArg(arg) { - if (!this._isCmdFile()) { - return this._uvQuoteCmdArg(arg); - } - if (!arg) { - return '""'; - } - const cmdSpecialChars = [ - " ", - " ", - "&", - "(", - ")", - "[", - "]", - "{", - "}", - "^", - "=", - ";", - "!", - "'", - "+", - ",", - "`", - "~", - "|", - "<", - ">", - '"' - ]; - let needsQuotes = false; - for (const char of arg) { - if (cmdSpecialChars.some((x) => x === char)) { - needsQuotes = true; - break; - } - } - if (!needsQuotes) { - return arg; - } - let reverse = '"'; - let quoteHit = true; - for (let i = arg.length; i > 0; i--) { - reverse += arg[i - 1]; - if (quoteHit && arg[i - 1] === "\\") { - reverse += "\\"; - } else if (arg[i - 1] === '"') { - quoteHit = true; - reverse += '"'; - } else { - quoteHit = false; - } - } - reverse += '"'; - return reverse.split("").reverse().join(""); - } - _uvQuoteCmdArg(arg) { - if (!arg) { - return '""'; - } - if (!arg.includes(" ") && !arg.includes(" ") && !arg.includes('"')) { - return arg; - } - if (!arg.includes('"') && !arg.includes("\\")) { - return `"${arg}"`; - } - let reverse = '"'; - let quoteHit = true; - for (let i = arg.length; i > 0; i--) { - reverse += arg[i - 1]; - if (quoteHit && arg[i - 1] === "\\") { - reverse += "\\"; - } else if (arg[i - 1] === '"') { - quoteHit = true; - reverse += "\\"; - } else { - quoteHit = false; - } - } - reverse += '"'; - return reverse.split("").reverse().join(""); - } - _cloneExecOptions(options) { - options = options || {}; - const result = { - cwd: options.cwd || process.cwd(), - env: options.env || process.env, - silent: options.silent || false, - windowsVerbatimArguments: options.windowsVerbatimArguments || false, - failOnStdErr: options.failOnStdErr || false, - ignoreReturnCode: options.ignoreReturnCode || false, - delay: options.delay || 1e4 - }; - result.outStream = options.outStream || process.stdout; - result.errStream = options.errStream || process.stderr; - return result; - } - 
_getSpawnOptions(options, toolPath) { - options = options || {}; - const result = {}; - result.cwd = options.cwd; - result.env = options.env; - result["windowsVerbatimArguments"] = options.windowsVerbatimArguments || this._isCmdFile(); - if (options.windowsVerbatimArguments) { - result.argv0 = `"${toolPath}"`; - } - return result; - } - /** - * Exec a tool. - * Output will be streamed to the live console. - * Returns promise with return code - * - * @param tool path to tool to exec - * @param options optional exec options. See ExecOptions - * @returns number - */ - exec() { - return __awaiter3(this, void 0, void 0, function* () { - if (!ioUtil.isRooted(this.toolPath) && (this.toolPath.includes("/") || IS_WINDOWS && this.toolPath.includes("\\"))) { - this.toolPath = path2.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath); - } - this.toolPath = yield io2.which(this.toolPath, true); - return new Promise((resolve, reject) => __awaiter3(this, void 0, void 0, function* () { - this._debug(`exec tool: ${this.toolPath}`); - this._debug("arguments:"); - for (const arg of this.args) { - this._debug(` ${arg}`); - } - const optionsNonNull = this._cloneExecOptions(this.options); - if (!optionsNonNull.silent && optionsNonNull.outStream) { - optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + os.EOL); - } - const state = new ExecState(optionsNonNull, this.toolPath); - state.on("debug", (message) => { - this._debug(message); - }); - if (this.options.cwd && !(yield ioUtil.exists(this.options.cwd))) { - return reject(new Error(`The cwd: ${this.options.cwd} does not exist!`)); - } - const fileName = this._getSpawnFileName(); - const cp = child.spawn(fileName, this._getSpawnArgs(optionsNonNull), this._getSpawnOptions(this.options, fileName)); - let stdbuffer = ""; - if (cp.stdout) { - cp.stdout.on("data", (data) => { - if (this.options.listeners && this.options.listeners.stdout) { - this.options.listeners.stdout(data); - } - if (!optionsNonNull.silent && optionsNonNull.outStream) { - optionsNonNull.outStream.write(data); - } - stdbuffer = this._processLineBuffer(data, stdbuffer, (line) => { - if (this.options.listeners && this.options.listeners.stdline) { - this.options.listeners.stdline(line); - } - }); - }); - } - let errbuffer = ""; - if (cp.stderr) { - cp.stderr.on("data", (data) => { - state.processStderr = true; - if (this.options.listeners && this.options.listeners.stderr) { - this.options.listeners.stderr(data); - } - if (!optionsNonNull.silent && optionsNonNull.errStream && optionsNonNull.outStream) { - const s = optionsNonNull.failOnStdErr ? 
optionsNonNull.errStream : optionsNonNull.outStream; - s.write(data); - } - errbuffer = this._processLineBuffer(data, errbuffer, (line) => { - if (this.options.listeners && this.options.listeners.errline) { - this.options.listeners.errline(line); - } - }); - }); - } - cp.on("error", (err) => { - state.processError = err.message; - state.processExited = true; - state.processClosed = true; - state.CheckComplete(); - }); - cp.on("exit", (code) => { - state.processExitCode = code; - state.processExited = true; - this._debug(`Exit code ${code} received from tool '${this.toolPath}'`); - state.CheckComplete(); - }); - cp.on("close", (code) => { - state.processExitCode = code; - state.processExited = true; - state.processClosed = true; - this._debug(`STDIO streams have closed for tool '${this.toolPath}'`); - state.CheckComplete(); - }); - state.on("done", (error, exitCode) => { - if (stdbuffer.length > 0) { - this.emit("stdline", stdbuffer); - } - if (errbuffer.length > 0) { - this.emit("errline", errbuffer); - } - cp.removeAllListeners(); - if (error) { - reject(error); - } else { - resolve(exitCode); - } - }); - if (this.options.input) { - if (!cp.stdin) { - throw new Error("child process missing stdin"); - } - cp.stdin.end(this.options.input); - } - })); - }); - } - }; - exports2.ToolRunner = ToolRunner; - function argStringToArray(argString) { - const args = []; - let inQuotes = false; - let escaped = false; - let arg = ""; - function append(c) { - if (escaped && c !== '"') { - arg += "\\"; - } - arg += c; - escaped = false; - } - __name(append, "append"); - for (let i = 0; i < argString.length; i++) { - const c = argString.charAt(i); - if (c === '"') { - if (!escaped) { - inQuotes = !inQuotes; - } else { - append(c); - } - continue; - } - if (c === "\\" && escaped) { - append(c); - continue; - } - if (c === "\\" && inQuotes) { - escaped = true; - continue; - } - if (c === " " && !inQuotes) { - if (arg.length > 0) { - args.push(arg); - arg = ""; - } - continue; - } - append(c); - } - if (arg.length > 0) { - args.push(arg.trim()); - } - return args; - } - __name(argStringToArray, "argStringToArray"); - exports2.argStringToArray = argStringToArray; - var ExecState = class _ExecState extends events.EventEmitter { - static { - __name(this, "ExecState"); - } - constructor(options, toolPath) { - super(); - this.processClosed = false; - this.processError = ""; - this.processExitCode = 0; - this.processExited = false; - this.processStderr = false; - this.delay = 1e4; - this.done = false; - this.timeout = null; - if (!toolPath) { - throw new Error("toolPath must not be empty"); - } - this.options = options; - this.toolPath = toolPath; - if (options.delay) { - this.delay = options.delay; - } - } - CheckComplete() { - if (this.done) { - return; - } - if (this.processClosed) { - this._setResult(); - } else if (this.processExited) { - this.timeout = timers_1.setTimeout(_ExecState.HandleTimeout, this.delay, this); - } - } - _debug(message) { - this.emit("debug", message); - } - _setResult() { - let error; - if (this.processExited) { - if (this.processError) { - error = new Error(`There was an error when attempting to execute the process '${this.toolPath}'. This may indicate the process failed to start. 
Error: ${this.processError}`); - } else if (this.processExitCode !== 0 && !this.options.ignoreReturnCode) { - error = new Error(`The process '${this.toolPath}' failed with exit code ${this.processExitCode}`); - } else if (this.processStderr && this.options.failOnStdErr) { - error = new Error(`The process '${this.toolPath}' failed because one or more lines were written to the STDERR stream`); - } - } - if (this.timeout) { - clearTimeout(this.timeout); - this.timeout = null; - } - this.done = true; - this.emit("done", error, this.processExitCode); - } - static HandleTimeout(state) { - if (state.done) { - return; - } - if (!state.processClosed && state.processExited) { - const message = `The STDIO streams did not close within ${state.delay / 1e3} seconds of the exit event from process '${state.toolPath}'. This may indicate a child process inherited the STDIO streams and has not yet exited.`; - state._debug(message); - } - state._setResult(); - } - }; - } -}); - -// ../node_modules/@actions/exec/lib/exec.js -var require_exec = __commonJS({ - "../node_modules/@actions/exec/lib/exec.js"(exports2) { - "use strict"; - var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: /* @__PURE__ */ __name(function() { - return m[k]; - }, "get") }); - } : function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - o[k2] = m[k]; - }); - var __setModuleDefault3 = exports2 && exports2.__setModuleDefault || (Object.create ? function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - } : function(o, v) { - o["default"] = v; - }); - var __importStar3 = exports2 && exports2.__importStar || function(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding3(result, mod, k); - } - __setModuleDefault3(result, mod); - return result; - }; - var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? value : new P(function(resolve) { - resolve(value); - }); - } - __name(adopt, "adopt"); - return new (P || (P = Promise))(function(resolve, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); - } - } - __name(fulfilled, "fulfilled"); - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); - } - } - __name(rejected, "rejected"); - function step(result) { - result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); - } - __name(step, "step"); - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); - }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.getExecOutput = exports2.exec = void 0; - var string_decoder_1 = require("string_decoder"); - var tr = __importStar3(require_toolrunner()); - function exec(commandLine, args, options) { - return __awaiter3(this, void 0, void 0, function* () { - const commandArgs = tr.argStringToArray(commandLine); - if (commandArgs.length === 0) { - throw new Error(`Parameter 'commandLine' cannot be null or empty.`); - } - const toolPath = commandArgs[0]; - args = commandArgs.slice(1).concat(args || []); - const runner = new tr.ToolRunner(toolPath, args, options); - return runner.exec(); - }); - } - __name(exec, "exec"); - exports2.exec = exec; - function getExecOutput(commandLine, args, options) { - var _a, _b; - return __awaiter3(this, void 0, void 0, function* () { - let stdout = ""; - let stderr = ""; - const stdoutDecoder = new string_decoder_1.StringDecoder("utf8"); - const stderrDecoder = new string_decoder_1.StringDecoder("utf8"); - const originalStdoutListener = (_a = options === null || options === void 0 ? void 0 : options.listeners) === null || _a === void 0 ? void 0 : _a.stdout; - const originalStdErrListener = (_b = options === null || options === void 0 ? void 0 : options.listeners) === null || _b === void 0 ? void 0 : _b.stderr; - const stdErrListener = /* @__PURE__ */ __name((data) => { - stderr += stderrDecoder.write(data); - if (originalStdErrListener) { - originalStdErrListener(data); - } - }, "stdErrListener"); - const stdOutListener = /* @__PURE__ */ __name((data) => { - stdout += stdoutDecoder.write(data); - if (originalStdoutListener) { - originalStdoutListener(data); - } - }, "stdOutListener"); - const listeners = Object.assign(Object.assign({}, options === null || options === void 0 ? 
void 0 : options.listeners), { stdout: stdOutListener, stderr: stdErrListener }); - const exitCode = yield exec(commandLine, args, Object.assign(Object.assign({}, options), { listeners })); - stdout += stdoutDecoder.end(); - stderr += stderrDecoder.end(); - return { - exitCode, - stdout, - stderr - }; - }); - } - __name(getExecOutput, "getExecOutput"); - exports2.getExecOutput = getExecOutput; - } -}); - // ../node_modules/@actions/glob/lib/internal-glob-options-helper.js var require_internal_glob_options_helper = __commonJS({ "../node_modules/@actions/glob/lib/internal-glob-options-helper.js"(exports2) { @@ -35792,9 +35592,9 @@ var require_minimatch = __commonJS({ throw new TypeError("pattern is too long"); } }, "assertValidPattern"); - Minimatch.prototype.parse = parse3; + Minimatch.prototype.parse = parse2; var SUBPARSE = {}; - function parse3(pattern, isSub) { + function parse2(pattern, isSub) { assertValidPattern(pattern); var options = this.options; if (pattern === "**") { @@ -36028,7 +35828,7 @@ var require_minimatch = __commonJS({ regExp._src = re2; return regExp; } - __name(parse3, "parse"); + __name(parse2, "parse"); minimatch.makeRe = function(pattern, options) { return new Minimatch(pattern, options || {}).makeRe(); }; @@ -36967,77 +36767,77 @@ var require_semver = __commonJS({ } } var i; - exports2.parse = parse3; - function parse3(version4, options) { + exports2.parse = parse2; + function parse2(version3, options) { if (!options || typeof options !== "object") { options = { loose: !!options, includePrerelease: false }; } - if (version4 instanceof SemVer) { - return version4; + if (version3 instanceof SemVer) { + return version3; } - if (typeof version4 !== "string") { + if (typeof version3 !== "string") { return null; } - if (version4.length > MAX_LENGTH) { + if (version3.length > MAX_LENGTH) { return null; } var r = options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL]; - if (!r.test(version4)) { + if (!r.test(version3)) { return null; } try { - return new SemVer(version4, options); + return new SemVer(version3, options); } catch (er) { return null; } } - __name(parse3, "parse"); + __name(parse2, "parse"); exports2.valid = valid; - function valid(version4, options) { - var v = parse3(version4, options); + function valid(version3, options) { + var v = parse2(version3, options); return v ? v.version : null; } __name(valid, "valid"); exports2.clean = clean; - function clean(version4, options) { - var s = parse3(version4.trim().replace(/^[=v]+/, ""), options); + function clean(version3, options) { + var s = parse2(version3.trim().replace(/^[=v]+/, ""), options); return s ? 
s.version : null; } __name(clean, "clean"); exports2.SemVer = SemVer; - function SemVer(version4, options) { + function SemVer(version3, options) { if (!options || typeof options !== "object") { options = { loose: !!options, includePrerelease: false }; } - if (version4 instanceof SemVer) { - if (version4.loose === options.loose) { - return version4; + if (version3 instanceof SemVer) { + if (version3.loose === options.loose) { + return version3; } else { - version4 = version4.version; + version3 = version3.version; } - } else if (typeof version4 !== "string") { - throw new TypeError("Invalid Version: " + version4); + } else if (typeof version3 !== "string") { + throw new TypeError("Invalid Version: " + version3); } - if (version4.length > MAX_LENGTH) { + if (version3.length > MAX_LENGTH) { throw new TypeError("version is longer than " + MAX_LENGTH + " characters"); } if (!(this instanceof SemVer)) { - return new SemVer(version4, options); + return new SemVer(version3, options); } - debug("SemVer", version4, options); + debug("SemVer", version3, options); this.options = options; this.loose = !!options.loose; - var m = version4.trim().match(options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL]); + var m = version3.trim().match(options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL]); if (!m) { - throw new TypeError("Invalid Version: " + version4); + throw new TypeError("Invalid Version: " + version3); } - this.raw = version4; + this.raw = version3; this.major = +m[1]; this.minor = +m[2]; this.patch = +m[3]; @@ -37225,13 +37025,13 @@ var require_semver = __commonJS({ return this; }; exports2.inc = inc; - function inc(version4, release, loose, identifier) { + function inc(version3, release, loose, identifier) { if (typeof loose === "string") { identifier = loose; loose = void 0; } try { - return new SemVer(version4, loose).inc(release, identifier).version; + return new SemVer(version3, loose).inc(release, identifier).version; } catch (er) { return null; } @@ -37242,16 +37042,16 @@ var require_semver = __commonJS({ if (eq(version1, version22)) { return null; } else { - var v13 = parse3(version1); - var v2 = parse3(version22); + var v12 = parse2(version1); + var v2 = parse2(version22); var prefix = ""; - if (v13.prerelease.length || v2.prerelease.length) { + if (v12.prerelease.length || v2.prerelease.length) { prefix = "pre"; var defaultResult = "prerelease"; } - for (var key in v13) { + for (var key in v12) { if (key === "major" || key === "minor" || key === "patch") { - if (v13[key] !== v2[key]) { + if (v12[key] !== v2[key]) { return prefix + key; } } @@ -37443,19 +37243,19 @@ var require_semver = __commonJS({ Comparator.prototype.toString = function() { return this.value; }; - Comparator.prototype.test = function(version4) { - debug("Comparator.test", version4, this.options.loose); - if (this.semver === ANY || version4 === ANY) { + Comparator.prototype.test = function(version3) { + debug("Comparator.test", version3, this.options.loose); + if (this.semver === ANY || version3 === ANY) { return true; } - if (typeof version4 === "string") { + if (typeof version3 === "string") { try { - version4 = new SemVer(version4, this.options); + version3 = new SemVer(version3, this.options); } catch (er) { return false; } } - return cmp(version4, this.operator, this.semver, this.options); + return cmp(version3, this.operator, this.semver, this.options); }; Comparator.prototype.intersects = function(comp, options) { if (!(comp instanceof Comparator)) { @@ -37778,31 +37578,31 @@ var require_semver = __commonJS({ return 
(from + " " + to).trim(); } __name(hyphenReplace, "hyphenReplace"); - Range.prototype.test = function(version4) { - if (!version4) { + Range.prototype.test = function(version3) { + if (!version3) { return false; } - if (typeof version4 === "string") { + if (typeof version3 === "string") { try { - version4 = new SemVer(version4, this.options); + version3 = new SemVer(version3, this.options); } catch (er) { return false; } } for (var i2 = 0; i2 < this.set.length; i2++) { - if (testSet(this.set[i2], version4, this.options)) { + if (testSet(this.set[i2], version3, this.options)) { return true; } } return false; }; - function testSet(set, version4, options) { + function testSet(set, version3, options) { for (var i2 = 0; i2 < set.length; i2++) { - if (!set[i2].test(version4)) { + if (!set[i2].test(version3)) { return false; } } - if (version4.prerelease.length && !options.includePrerelease) { + if (version3.prerelease.length && !options.includePrerelease) { for (i2 = 0; i2 < set.length; i2++) { debug(set[i2].semver); if (set[i2].semver === ANY) { @@ -37810,7 +37610,7 @@ var require_semver = __commonJS({ } if (set[i2].semver.prerelease.length > 0) { var allowed = set[i2].semver; - if (allowed.major === version4.major && allowed.minor === version4.minor && allowed.patch === version4.patch) { + if (allowed.major === version3.major && allowed.minor === version3.minor && allowed.patch === version3.patch) { return true; } } @@ -37821,13 +37621,13 @@ var require_semver = __commonJS({ } __name(testSet, "testSet"); exports2.satisfies = satisfies; - function satisfies(version4, range, options) { + function satisfies(version3, range, options) { try { range = new Range(range, options); } catch (er) { return false; } - return range.test(version4); + return range.test(version3); } __name(satisfies, "satisfies"); exports2.maxSatisfying = maxSatisfying; @@ -37926,18 +37726,18 @@ var require_semver = __commonJS({ } __name(validRange, "validRange"); exports2.ltr = ltr; - function ltr(version4, range, options) { - return outside(version4, range, "<", options); + function ltr(version3, range, options) { + return outside(version3, range, "<", options); } __name(ltr, "ltr"); exports2.gtr = gtr; - function gtr(version4, range, options) { - return outside(version4, range, ">", options); + function gtr(version3, range, options) { + return outside(version3, range, ">", options); } __name(gtr, "gtr"); exports2.outside = outside; - function outside(version4, range, hilo, options) { - version4 = new SemVer(version4, options); + function outside(version3, range, hilo, options) { + version3 = new SemVer(version3, options); range = new Range(range, options); var gtfn, ltefn, ltfn, comp, ecomp; switch (hilo) { @@ -37958,7 +37758,7 @@ var require_semver = __commonJS({ default: throw new TypeError('Must provide a hilo val of "<" or ">"'); } - if (satisfies(version4, range, options)) { + if (satisfies(version3, range, options)) { return false; } for (var i2 = 0; i2 < range.set.length; ++i2) { @@ -37980,9 +37780,9 @@ var require_semver = __commonJS({ if (high.operator === comp || high.operator === ecomp) { return false; } - if ((!low.operator || low.operator === comp) && ltefn(version4, low.semver)) { + if ((!low.operator || low.operator === comp) && ltefn(version3, low.semver)) { return false; - } else if (low.operator === ecomp && ltfn(version4, low.semver)) { + } else if (low.operator === ecomp && ltfn(version3, low.semver)) { return false; } } @@ -37990,8 +37790,8 @@ var require_semver = __commonJS({ } __name(outside, 
"outside"); exports2.prerelease = prerelease; - function prerelease(version4, options) { - var parsed = parse3(version4, options); + function prerelease(version3, options) { + var parsed = parse2(version3, options); return parsed && parsed.prerelease.length ? parsed.prerelease : null; } __name(prerelease, "prerelease"); @@ -38003,23 +37803,23 @@ var require_semver = __commonJS({ } __name(intersects, "intersects"); exports2.coerce = coerce; - function coerce(version4, options) { - if (version4 instanceof SemVer) { - return version4; + function coerce(version3, options) { + if (version3 instanceof SemVer) { + return version3; } - if (typeof version4 === "number") { - version4 = String(version4); + if (typeof version3 === "number") { + version3 = String(version3); } - if (typeof version4 !== "string") { + if (typeof version3 !== "string") { return null; } options = options || {}; var match = null; if (!options.rtl) { - match = version4.match(safeRe[t.COERCE]); + match = version3.match(safeRe[t.COERCE]); } else { var next; - while ((next = safeRe[t.COERCERTL].exec(version4)) && (!match || match.index + match[0].length !== version4.length)) { + while ((next = safeRe[t.COERCERTL].exec(version3)) && (!match || match.index + match[0].length !== version3.length)) { if (!match || next.index + next[0].length !== match.index + match[0].length) { match = next; } @@ -38030,175 +37830,18 @@ var require_semver = __commonJS({ if (match === null) { return null; } - return parse3(match[2] + "." + (match[3] || "0") + "." + (match[4] || "0"), options); + return parse2(match[2] + "." + (match[3] || "0") + "." + (match[4] || "0"), options); } __name(coerce, "coerce"); } }); -// ../node_modules/uuid/lib/rng.js -var require_rng = __commonJS({ - "../node_modules/uuid/lib/rng.js"(exports2, module2) { - var crypto7 = require("crypto"); - module2.exports = /* @__PURE__ */ __name(function nodeRNG() { - return crypto7.randomBytes(16); - }, "nodeRNG"); - } -}); - -// ../node_modules/uuid/lib/bytesToUuid.js -var require_bytesToUuid = __commonJS({ - "../node_modules/uuid/lib/bytesToUuid.js"(exports2, module2) { - var byteToHex3 = []; - for (i = 0; i < 256; ++i) { - byteToHex3[i] = (i + 256).toString(16).substr(1); - } - var i; - function bytesToUuid(buf, offset) { - var i2 = offset || 0; - var bth = byteToHex3; - return [ - bth[buf[i2++]], - bth[buf[i2++]], - bth[buf[i2++]], - bth[buf[i2++]], - "-", - bth[buf[i2++]], - bth[buf[i2++]], - "-", - bth[buf[i2++]], - bth[buf[i2++]], - "-", - bth[buf[i2++]], - bth[buf[i2++]], - "-", - bth[buf[i2++]], - bth[buf[i2++]], - bth[buf[i2++]], - bth[buf[i2++]], - bth[buf[i2++]], - bth[buf[i2++]] - ].join(""); - } - __name(bytesToUuid, "bytesToUuid"); - module2.exports = bytesToUuid; - } -}); - -// ../node_modules/uuid/v1.js -var require_v1 = __commonJS({ - "../node_modules/uuid/v1.js"(exports2, module2) { - var rng3 = require_rng(); - var bytesToUuid = require_bytesToUuid(); - var _nodeId3; - var _clockseq3; - var _lastMSecs3 = 0; - var _lastNSecs3 = 0; - function v13(options, buf, offset) { - var i = buf && offset || 0; - var b = buf || []; - options = options || {}; - var node = options.node || _nodeId3; - var clockseq = options.clockseq !== void 0 ? 
options.clockseq : _clockseq3; - if (node == null || clockseq == null) { - var seedBytes = rng3(); - if (node == null) { - node = _nodeId3 = [ - seedBytes[0] | 1, - seedBytes[1], - seedBytes[2], - seedBytes[3], - seedBytes[4], - seedBytes[5] - ]; - } - if (clockseq == null) { - clockseq = _clockseq3 = (seedBytes[6] << 8 | seedBytes[7]) & 16383; - } - } - var msecs = options.msecs !== void 0 ? options.msecs : (/* @__PURE__ */ new Date()).getTime(); - var nsecs = options.nsecs !== void 0 ? options.nsecs : _lastNSecs3 + 1; - var dt = msecs - _lastMSecs3 + (nsecs - _lastNSecs3) / 1e4; - if (dt < 0 && options.clockseq === void 0) { - clockseq = clockseq + 1 & 16383; - } - if ((dt < 0 || msecs > _lastMSecs3) && options.nsecs === void 0) { - nsecs = 0; - } - if (nsecs >= 1e4) { - throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); - } - _lastMSecs3 = msecs; - _lastNSecs3 = nsecs; - _clockseq3 = clockseq; - msecs += 122192928e5; - var tl = ((msecs & 268435455) * 1e4 + nsecs) % 4294967296; - b[i++] = tl >>> 24 & 255; - b[i++] = tl >>> 16 & 255; - b[i++] = tl >>> 8 & 255; - b[i++] = tl & 255; - var tmh = msecs / 4294967296 * 1e4 & 268435455; - b[i++] = tmh >>> 8 & 255; - b[i++] = tmh & 255; - b[i++] = tmh >>> 24 & 15 | 16; - b[i++] = tmh >>> 16 & 255; - b[i++] = clockseq >>> 8 | 128; - b[i++] = clockseq & 255; - for (var n = 0; n < 6; ++n) { - b[i + n] = node[n]; - } - return buf ? buf : bytesToUuid(b); - } - __name(v13, "v1"); - module2.exports = v13; - } -}); - -// ../node_modules/uuid/v4.js -var require_v4 = __commonJS({ - "../node_modules/uuid/v4.js"(exports2, module2) { - var rng3 = require_rng(); - var bytesToUuid = require_bytesToUuid(); - function v43(options, buf, offset) { - var i = buf && offset || 0; - if (typeof options == "string") { - buf = options === "binary" ? 
new Array(16) : null; - options = null; - } - options = options || {}; - var rnds = options.random || (options.rng || rng3)(); - rnds[6] = rnds[6] & 15 | 64; - rnds[8] = rnds[8] & 63 | 128; - if (buf) { - for (var ii = 0; ii < 16; ++ii) { - buf[i + ii] = rnds[ii]; - } - } - return buf || bytesToUuid(rnds); - } - __name(v43, "v4"); - module2.exports = v43; - } -}); - -// ../node_modules/uuid/index.js -var require_uuid = __commonJS({ - "../node_modules/uuid/index.js"(exports2, module2) { - var v13 = require_v1(); - var v43 = require_v4(); - var uuid = v43; - uuid.v1 = v13; - uuid.v4 = v43; - module2.exports = uuid; - } -}); - // ../node_modules/@actions/cache/lib/internal/constants.js var require_constants7 = __commonJS({ "../node_modules/@actions/cache/lib/internal/constants.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.ManifestFilename = exports2.TarFilename = exports2.SystemTarPathOnWindows = exports2.GnuTarPathOnWindows = exports2.SocketTimeout = exports2.DefaultRetryDelay = exports2.DefaultRetryAttempts = exports2.ArchiveToolType = exports2.CompressionMethod = exports2.CacheFilename = void 0; + exports2.CacheFileSizeLimit = exports2.ManifestFilename = exports2.TarFilename = exports2.SystemTarPathOnWindows = exports2.GnuTarPathOnWindows = exports2.SocketTimeout = exports2.DefaultRetryDelay = exports2.DefaultRetryAttempts = exports2.ArchiveToolType = exports2.CompressionMethod = exports2.CacheFilename = void 0; var CacheFilename; (function(CacheFilename2) { CacheFilename2["Gzip"] = "cache.tgz"; @@ -38222,6 +37865,7 @@ var require_constants7 = __commonJS({ exports2.SystemTarPathOnWindows = `${process.env["SYSTEMDRIVE"]}\\Windows\\System32\\tar.exe`; exports2.TarFilename = "cache.tar"; exports2.ManifestFilename = "manifest.txt"; + exports2.CacheFileSizeLimit = 10 * Math.pow(1024, 3); } }); @@ -38309,17 +37953,18 @@ var require_cacheUtils = __commonJS({ __name(settle, "settle"); }; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.isGhes = exports2.assertDefined = exports2.getGnuTarPathOnWindows = exports2.getCacheFileName = exports2.getCompressionMethod = exports2.unlinkFile = exports2.resolvePaths = exports2.getArchiveFileSizeInBytes = exports2.createTempDirectory = void 0; + exports2.getRuntimeToken = exports2.getCacheVersion = exports2.assertDefined = exports2.getGnuTarPathOnWindows = exports2.getCacheFileName = exports2.getCompressionMethod = exports2.unlinkFile = exports2.resolvePaths = exports2.getArchiveFileSizeInBytes = exports2.createTempDirectory = void 0; var core2 = __importStar3(require_core()); var exec = __importStar3(require_exec()); var glob = __importStar3(require_glob()); var io2 = __importStar3(require_io()); + var crypto4 = __importStar3(require("crypto")); var fs2 = __importStar3(require("fs")); var path2 = __importStar3(require("path")); var semver = __importStar3(require_semver()); var util = __importStar3(require("util")); - var uuid_1 = require_uuid(); var constants_1 = require_constants7(); + var versionSalt = "1.0"; function createTempDirectory() { return __awaiter3(this, void 0, void 0, function* () { const IS_WINDOWS = process.platform === "win32"; @@ -38337,7 +37982,7 @@ var require_cacheUtils = __commonJS({ } tempDirectory = path2.join(baseLocation, "actions", "temp"); } - const dest = path2.join(tempDirectory, (0, uuid_1.v4)()); + const dest = path2.join(tempDirectory, crypto4.randomUUID()); yield io2.mkdirP(dest); return dest; }); @@ -38418,8 +38063,8 @@ var 
require_cacheUtils = __commonJS({ function getCompressionMethod() { return __awaiter3(this, void 0, void 0, function* () { const versionOutput = yield getVersion("zstd", ["--quiet"]); - const version4 = semver.clean(versionOutput); - core2.debug(`zstd version: ${version4}`); + const version3 = semver.clean(versionOutput); + core2.debug(`zstd version: ${version3}`); if (versionOutput === "") { return constants_1.CompressionMethod.Gzip; } else { @@ -38453,109 +38098,122 @@ var require_cacheUtils = __commonJS({ } __name(assertDefined, "assertDefined"); exports2.assertDefined = assertDefined; - function isGhes() { - const ghUrl = new URL(process.env["GITHUB_SERVER_URL"] || "https://github.com"); - const hostname = ghUrl.hostname.trimEnd().toUpperCase(); - const isGitHubHost = hostname === "GITHUB.COM"; - const isGheHost = hostname.endsWith(".GHE.COM") || hostname.endsWith(".GHE.LOCALHOST"); - return !isGitHubHost && !isGheHost; + function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) { + const components = paths.slice(); + if (compressionMethod) { + components.push(compressionMethod); + } + if (process.platform === "win32" && !enableCrossOsArchive) { + components.push("windows-only"); + } + components.push(versionSalt); + return crypto4.createHash("sha256").update(components.join("|")).digest("hex"); } - __name(isGhes, "isGhes"); - exports2.isGhes = isGhes; + __name(getCacheVersion, "getCacheVersion"); + exports2.getCacheVersion = getCacheVersion; + function getRuntimeToken() { + const token = process.env["ACTIONS_RUNTIME_TOKEN"]; + if (!token) { + throw new Error("Unable to get the ACTIONS_RUNTIME_TOKEN env variable"); + } + return token; + } + __name(getRuntimeToken, "getRuntimeToken"); + exports2.getRuntimeToken = getRuntimeToken; } }); // ../node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/rng.js -function rng2() { - if (poolPtr2 > rnds8Pool2.length - 16) { - import_crypto5.default.randomFillSync(rnds8Pool2); - poolPtr2 = 0; +function rng() { + if (poolPtr > rnds8Pool.length - 16) { + import_crypto2.default.randomFillSync(rnds8Pool); + poolPtr = 0; } - return rnds8Pool2.slice(poolPtr2, poolPtr2 += 16); + return rnds8Pool.slice(poolPtr, poolPtr += 16); } -var import_crypto5, rnds8Pool2, poolPtr2; -var init_rng2 = __esm({ +var import_crypto2, rnds8Pool, poolPtr; +var init_rng = __esm({ "../node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/rng.js"() { - import_crypto5 = __toESM(require("crypto")); - rnds8Pool2 = new Uint8Array(256); - poolPtr2 = rnds8Pool2.length; - __name(rng2, "rng"); + import_crypto2 = __toESM(require("crypto")); + rnds8Pool = new Uint8Array(256); + poolPtr = rnds8Pool.length; + __name(rng, "rng"); } }); // ../node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/regex.js -var regex_default2; -var init_regex2 = __esm({ +var regex_default; +var init_regex = __esm({ "../node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/regex.js"() { - regex_default2 = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; + regex_default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; } }); // ../node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/validate.js -function validate2(uuid) { - return typeof uuid === "string" && regex_default2.test(uuid); +function validate(uuid) { + return typeof uuid === "string" && regex_default.test(uuid); } -var validate_default2; -var 
init_validate2 = __esm({ +var validate_default; +var init_validate = __esm({ "../node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/validate.js"() { - init_regex2(); - __name(validate2, "validate"); - validate_default2 = validate2; + init_regex(); + __name(validate, "validate"); + validate_default = validate; } }); // ../node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/stringify.js -function stringify2(arr, offset = 0) { - const uuid = (byteToHex2[arr[offset + 0]] + byteToHex2[arr[offset + 1]] + byteToHex2[arr[offset + 2]] + byteToHex2[arr[offset + 3]] + "-" + byteToHex2[arr[offset + 4]] + byteToHex2[arr[offset + 5]] + "-" + byteToHex2[arr[offset + 6]] + byteToHex2[arr[offset + 7]] + "-" + byteToHex2[arr[offset + 8]] + byteToHex2[arr[offset + 9]] + "-" + byteToHex2[arr[offset + 10]] + byteToHex2[arr[offset + 11]] + byteToHex2[arr[offset + 12]] + byteToHex2[arr[offset + 13]] + byteToHex2[arr[offset + 14]] + byteToHex2[arr[offset + 15]]).toLowerCase(); - if (!validate_default2(uuid)) { +function stringify(arr, offset = 0) { + const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + "-" + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + "-" + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + "-" + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + "-" + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); + if (!validate_default(uuid)) { throw TypeError("Stringified UUID is invalid"); } return uuid; } -var byteToHex2, stringify_default2; -var init_stringify2 = __esm({ +var byteToHex, stringify_default; +var init_stringify = __esm({ "../node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/stringify.js"() { - init_validate2(); - byteToHex2 = []; + init_validate(); + byteToHex = []; for (let i = 0; i < 256; ++i) { - byteToHex2.push((i + 256).toString(16).substr(1)); + byteToHex.push((i + 256).toString(16).substr(1)); } - __name(stringify2, "stringify"); - stringify_default2 = stringify2; + __name(stringify, "stringify"); + stringify_default = stringify; } }); // ../node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/v1.js -function v12(options, buf, offset) { +function v1(options, buf, offset) { let i = buf && offset || 0; const b = buf || new Array(16); options = options || {}; - let node = options.node || _nodeId2; - let clockseq = options.clockseq !== void 0 ? options.clockseq : _clockseq2; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== void 0 ? options.clockseq : _clockseq; if (node == null || clockseq == null) { - const seedBytes = options.random || (options.rng || rng2)(); + const seedBytes = options.random || (options.rng || rng)(); if (node == null) { - node = _nodeId2 = [seedBytes[0] | 1, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + node = _nodeId = [seedBytes[0] | 1, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; } if (clockseq == null) { - clockseq = _clockseq2 = (seedBytes[6] << 8 | seedBytes[7]) & 16383; + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 16383; } } let msecs = options.msecs !== void 0 ? options.msecs : Date.now(); - let nsecs = options.nsecs !== void 0 ? options.nsecs : _lastNSecs2 + 1; - const dt = msecs - _lastMSecs2 + (nsecs - _lastNSecs2) / 1e4; + let nsecs = options.nsecs !== void 0 ? 
options.nsecs : _lastNSecs + 1; + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 1e4; if (dt < 0 && options.clockseq === void 0) { clockseq = clockseq + 1 & 16383; } - if ((dt < 0 || msecs > _lastMSecs2) && options.nsecs === void 0) { + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === void 0) { nsecs = 0; } if (nsecs >= 1e4) { throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); } - _lastMSecs2 = msecs; - _lastNSecs2 = nsecs; - _clockseq2 = clockseq; + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; msecs += 122192928e5; const tl = ((msecs & 268435455) * 1e4 + nsecs) % 4294967296; b[i++] = tl >>> 24 & 255; @@ -38572,23 +38230,23 @@ function v12(options, buf, offset) { for (let n = 0; n < 6; ++n) { b[i + n] = node[n]; } - return buf || stringify_default2(b); + return buf || stringify_default(b); } -var _nodeId2, _clockseq2, _lastMSecs2, _lastNSecs2, v1_default2; -var init_v12 = __esm({ +var _nodeId, _clockseq, _lastMSecs, _lastNSecs, v1_default; +var init_v1 = __esm({ "../node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/v1.js"() { - init_rng2(); - init_stringify2(); - _lastMSecs2 = 0; - _lastNSecs2 = 0; - __name(v12, "v1"); - v1_default2 = v12; + init_rng(); + init_stringify(); + _lastMSecs = 0; + _lastNSecs = 0; + __name(v1, "v1"); + v1_default = v1; } }); // ../node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/parse.js -function parse2(uuid) { - if (!validate_default2(uuid)) { +function parse(uuid) { + if (!validate_default(uuid)) { throw TypeError("Invalid UUID"); } let v; @@ -38611,17 +38269,17 @@ function parse2(uuid) { arr[15] = v & 255; return arr; } -var parse_default2; -var init_parse2 = __esm({ +var parse_default; +var init_parse = __esm({ "../node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/parse.js"() { - init_validate2(); - __name(parse2, "parse"); - parse_default2 = parse2; + init_validate(); + __name(parse, "parse"); + parse_default = parse; } }); // ../node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/v35.js -function stringToBytes2(str) { +function stringToBytes(str) { str = unescape(encodeURIComponent(str)); const bytes = []; for (let i = 0; i < str.length; ++i) { @@ -38629,13 +38287,13 @@ function stringToBytes2(str) { } return bytes; } -function v35_default2(name, version4, hashfunc) { +function v35_default(name, version3, hashfunc) { function generateUUID(value, namespace, buf, offset) { if (typeof value === "string") { - value = stringToBytes2(value); + value = stringToBytes(value); } if (typeof namespace === "string") { - namespace = parse_default2(namespace); + namespace = parse_default(namespace); } if (namespace.length !== 16) { throw TypeError("Namespace must be array-like (16 iterable integer values, 0-255)"); @@ -38644,7 +38302,7 @@ function v35_default2(name, version4, hashfunc) { bytes.set(namespace); bytes.set(value, namespace.length); bytes = hashfunc(bytes); - bytes[6] = bytes[6] & 15 | version4; + bytes[6] = bytes[6] & 15 | version3; bytes[8] = bytes[8] & 63 | 128; if (buf) { offset = offset || 0; @@ -38653,62 +38311,62 @@ function v35_default2(name, version4, hashfunc) { } return buf; } - return stringify_default2(bytes); + return stringify_default(bytes); } __name(generateUUID, "generateUUID"); try { generateUUID.name = name; } catch (err) { } - generateUUID.DNS = DNS2; - generateUUID.URL = URL3; + generateUUID.DNS = DNS; + generateUUID.URL = URL2; return generateUUID; } -var DNS2, URL3; -var init_v352 = __esm({ +var DNS, URL2; +var init_v35 = __esm({ 
"../node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/v35.js"() { - init_stringify2(); - init_parse2(); - __name(stringToBytes2, "stringToBytes"); - DNS2 = "6ba7b810-9dad-11d1-80b4-00c04fd430c8"; - URL3 = "6ba7b811-9dad-11d1-80b4-00c04fd430c8"; - __name(v35_default2, "default"); + init_stringify(); + init_parse(); + __name(stringToBytes, "stringToBytes"); + DNS = "6ba7b810-9dad-11d1-80b4-00c04fd430c8"; + URL2 = "6ba7b811-9dad-11d1-80b4-00c04fd430c8"; + __name(v35_default, "default"); } }); // ../node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/md5.js -function md52(bytes) { +function md5(bytes) { if (Array.isArray(bytes)) { bytes = Buffer.from(bytes); } else if (typeof bytes === "string") { bytes = Buffer.from(bytes, "utf8"); } - return import_crypto6.default.createHash("md5").update(bytes).digest(); + return import_crypto3.default.createHash("md5").update(bytes).digest(); } -var import_crypto6, md5_default2; -var init_md52 = __esm({ +var import_crypto3, md5_default; +var init_md5 = __esm({ "../node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/md5.js"() { - import_crypto6 = __toESM(require("crypto")); - __name(md52, "md5"); - md5_default2 = md52; + import_crypto3 = __toESM(require("crypto")); + __name(md5, "md5"); + md5_default = md5; } }); // ../node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/v3.js -var v32, v3_default2; -var init_v32 = __esm({ +var v3, v3_default; +var init_v3 = __esm({ "../node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/v3.js"() { - init_v352(); - init_md52(); - v32 = v35_default2("v3", 48, md5_default2); - v3_default2 = v32; + init_v35(); + init_md5(); + v3 = v35_default("v3", 48, md5_default); + v3_default = v3; } }); // ../node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/v4.js -function v42(options, buf, offset) { +function v4(options, buf, offset) { options = options || {}; - const rnds = options.random || (options.rng || rng2)(); + const rnds = options.random || (options.rng || rng)(); rnds[6] = rnds[6] & 15 | 64; rnds[8] = rnds[8] & 63 | 128; if (buf) { @@ -38718,95 +38376,95 @@ function v42(options, buf, offset) { } return buf; } - return stringify_default2(rnds); + return stringify_default(rnds); } -var v4_default2; -var init_v42 = __esm({ +var v4_default; +var init_v4 = __esm({ "../node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/v4.js"() { - init_rng2(); - init_stringify2(); - __name(v42, "v4"); - v4_default2 = v42; + init_rng(); + init_stringify(); + __name(v4, "v4"); + v4_default = v4; } }); // ../node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/sha1.js -function sha12(bytes) { +function sha1(bytes) { if (Array.isArray(bytes)) { bytes = Buffer.from(bytes); } else if (typeof bytes === "string") { bytes = Buffer.from(bytes, "utf8"); } - return import_crypto7.default.createHash("sha1").update(bytes).digest(); + return import_crypto4.default.createHash("sha1").update(bytes).digest(); } -var import_crypto7, sha1_default2; -var init_sha12 = __esm({ +var import_crypto4, sha1_default; +var init_sha1 = __esm({ "../node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/sha1.js"() { - import_crypto7 = __toESM(require("crypto")); - __name(sha12, "sha1"); - sha1_default2 = sha12; + import_crypto4 = __toESM(require("crypto")); + __name(sha1, "sha1"); + sha1_default = sha1; } }); // ../node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/v5.js -var v52, v5_default2; -var init_v52 = __esm({ +var v5, v5_default; +var init_v5 = __esm({ 
"../node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/v5.js"() { - init_v352(); - init_sha12(); - v52 = v35_default2("v5", 80, sha1_default2); - v5_default2 = v52; + init_v35(); + init_sha1(); + v5 = v35_default("v5", 80, sha1_default); + v5_default = v5; } }); // ../node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/nil.js -var nil_default2; -var init_nil2 = __esm({ +var nil_default; +var init_nil = __esm({ "../node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/nil.js"() { - nil_default2 = "00000000-0000-0000-0000-000000000000"; + nil_default = "00000000-0000-0000-0000-000000000000"; } }); // ../node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/version.js -function version3(uuid) { - if (!validate_default2(uuid)) { +function version2(uuid) { + if (!validate_default(uuid)) { throw TypeError("Invalid UUID"); } return parseInt(uuid.substr(14, 1), 16); } -var version_default2; -var init_version2 = __esm({ +var version_default; +var init_version = __esm({ "../node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/version.js"() { - init_validate2(); - __name(version3, "version"); - version_default2 = version3; + init_validate(); + __name(version2, "version"); + version_default = version2; } }); // ../node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/index.js -var esm_node_exports2 = {}; -__export(esm_node_exports2, { - NIL: () => nil_default2, - parse: () => parse_default2, - stringify: () => stringify_default2, - v1: () => v1_default2, - v3: () => v3_default2, - v4: () => v4_default2, - v5: () => v5_default2, - validate: () => validate_default2, - version: () => version_default2 -}); -var init_esm_node2 = __esm({ +var esm_node_exports = {}; +__export(esm_node_exports, { + NIL: () => nil_default, + parse: () => parse_default, + stringify: () => stringify_default, + v1: () => v1_default, + v3: () => v3_default, + v4: () => v4_default, + v5: () => v5_default, + validate: () => validate_default, + version: () => version_default +}); +var init_esm_node = __esm({ "../node_modules/@azure/core-http/node_modules/uuid/dist/esm-node/index.js"() { - init_v12(); - init_v32(); - init_v42(); - init_v52(); - init_nil2(); - init_version2(); - init_validate2(); - init_stringify2(); - init_parse2(); + init_v1(); + init_v3(); + init_v4(); + init_v5(); + init_nil(); + init_version(); + init_validate(); + init_stringify(); + init_parse(); } }); @@ -39497,7 +39155,7 @@ var require_XMLDOMImplementation = __commonJS({ function XMLDOMImplementation2() { } __name(XMLDOMImplementation2, "XMLDOMImplementation"); - XMLDOMImplementation2.prototype.hasFeature = function(feature, version4) { + XMLDOMImplementation2.prototype.hasFeature = function(feature, version3) { return true; }; XMLDOMImplementation2.prototype.createDocumentType = function(qualifiedName, publicId, systemId) { @@ -39509,7 +39167,7 @@ var require_XMLDOMImplementation = __commonJS({ XMLDOMImplementation2.prototype.createHTMLDocument = function(title) { throw new Error("This DOM method is not implemented."); }; - XMLDOMImplementation2.prototype.getFeature = function(feature, version4) { + XMLDOMImplementation2.prototype.getFeature = function(feature, version3) { throw new Error("This DOM method is not implemented."); }; return XMLDOMImplementation2; @@ -40254,17 +39912,17 @@ var require_XMLDeclaration = __commonJS({ NodeType = require_NodeType(); module2.exports = XMLDeclaration = function(superClass) { extend(XMLDeclaration2, superClass); - function XMLDeclaration2(parent, version4, encoding, 
standalone) { + function XMLDeclaration2(parent, version3, encoding, standalone) { var ref; XMLDeclaration2.__super__.constructor.call(this, parent); - if (isObject(version4)) { - ref = version4, version4 = ref.version, encoding = ref.encoding, standalone = ref.standalone; + if (isObject(version3)) { + ref = version3, version3 = ref.version, encoding = ref.encoding, standalone = ref.standalone; } - if (!version4) { - version4 = "1.0"; + if (!version3) { + version3 = "1.0"; } this.type = NodeType.Declaration; - this.version = this.stringify.xmlVersion(version4); + this.version = this.stringify.xmlVersion(version3); if (encoding != null) { this.encoding = this.stringify.xmlEncoding(encoding); } @@ -41340,10 +40998,10 @@ var require_XMLNode = __commonJS({ Array.prototype.push.apply(this.parent.children, removed); return this; }; - XMLNode2.prototype.declaration = function(version4, encoding, standalone) { + XMLNode2.prototype.declaration = function(version3, encoding, standalone) { var doc, xmldec; doc = this.document(); - xmldec = new XMLDeclaration(doc, version4, encoding, standalone); + xmldec = new XMLDeclaration(doc, version3, encoding, standalone); if (doc.children.length === 0) { doc.children.unshift(xmldec); } else if (doc.children[0].type === NodeType.Declaration) { @@ -41467,8 +41125,8 @@ var require_XMLNode = __commonJS({ XMLNode2.prototype.doc = function() { return this.document(); }; - XMLNode2.prototype.dec = function(version4, encoding, standalone) { - return this.declaration(version4, encoding, standalone); + XMLNode2.prototype.dec = function(version3, encoding, standalone) { + return this.declaration(version3, encoding, standalone); }; XMLNode2.prototype.e = function(name, attributes, text) { return this.element(name, attributes, text); @@ -41515,7 +41173,7 @@ var require_XMLNode = __commonJS({ XMLNode2.prototype.normalize = function() { throw new Error("This DOM method is not implemented." + this.debugInfo()); }; - XMLNode2.prototype.isSupported = function(feature, version4) { + XMLNode2.prototype.isSupported = function(feature, version3) { return true; }; XMLNode2.prototype.hasAttributes = function() { @@ -41571,7 +41229,7 @@ var require_XMLNode = __commonJS({ } return true; }; - XMLNode2.prototype.getFeature = function(feature, version4) { + XMLNode2.prototype.getFeature = function(feature, version3) { throw new Error("This DOM method is not implemented." 
+ this.debugInfo()); }; XMLNode2.prototype.setUserData = function(key, data, handler) { @@ -42775,13 +42433,13 @@ var require_XMLDocumentCB = __commonJS({ } return this; }; - XMLDocumentCB2.prototype.declaration = function(version4, encoding, standalone) { + XMLDocumentCB2.prototype.declaration = function(version3, encoding, standalone) { var node; this.openCurrent(); if (this.documentStarted) { throw new Error("declaration() must be the first node."); } - node = new XMLDeclaration(this, version4, encoding, standalone); + node = new XMLDeclaration(this, version3, encoding, standalone); this.onData(this.writer.declaration(node, this.writerOptions, this.currentLevel + 1), this.currentLevel + 1); return this; }; @@ -42952,8 +42610,8 @@ var require_XMLDocumentCB = __commonJS({ XMLDocumentCB2.prototype.ins = function(target, value) { return this.instruction(target, value); }; - XMLDocumentCB2.prototype.dec = function(version4, encoding, standalone) { - return this.declaration(version4, encoding, standalone); + XMLDocumentCB2.prototype.dec = function(version3, encoding, standalone) { + return this.declaration(version3, encoding, standalone); }; XMLDocumentCB2.prototype.dtd = function(root, pubID, sysID) { return this.doctype(root, pubID, sysID); @@ -57354,8 +57012,8 @@ var require_URL = __commonJS({ var utils = require_utils6(); var Impl = require_URL_impl(); var impl = utils.implSymbol; - function URL4(url) { - if (!this || this[impl] || !(this instanceof URL4)) { + function URL3(url) { + if (!this || this[impl] || !(this instanceof URL3)) { throw new TypeError("Failed to construct 'URL': Please use the 'new' operator, this DOM object constructor cannot be called as a function."); } if (arguments.length < 1) { @@ -57371,8 +57029,8 @@ var require_URL = __commonJS({ } module2.exports.setup(this, args); } - __name(URL4, "URL"); - URL4.prototype.toJSON = /* @__PURE__ */ __name(function toJSON() { + __name(URL3, "URL"); + URL3.prototype.toJSON = /* @__PURE__ */ __name(function toJSON() { if (!this || !module2.exports.is(this)) { throw new TypeError("Illegal invocation"); } @@ -57382,7 +57040,7 @@ var require_URL = __commonJS({ } return this[impl].toJSON.apply(this[impl], args); }, "toJSON"); - Object.defineProperty(URL4.prototype, "href", { + Object.defineProperty(URL3.prototype, "href", { get() { return this[impl].href; }, @@ -57393,20 +57051,20 @@ var require_URL = __commonJS({ enumerable: true, configurable: true }); - URL4.prototype.toString = function() { + URL3.prototype.toString = function() { if (!this || !module2.exports.is(this)) { throw new TypeError("Illegal invocation"); } return this.href; }; - Object.defineProperty(URL4.prototype, "origin", { + Object.defineProperty(URL3.prototype, "origin", { get() { return this[impl].origin; }, enumerable: true, configurable: true }); - Object.defineProperty(URL4.prototype, "protocol", { + Object.defineProperty(URL3.prototype, "protocol", { get() { return this[impl].protocol; }, @@ -57417,7 +57075,7 @@ var require_URL = __commonJS({ enumerable: true, configurable: true }); - Object.defineProperty(URL4.prototype, "username", { + Object.defineProperty(URL3.prototype, "username", { get() { return this[impl].username; }, @@ -57428,7 +57086,7 @@ var require_URL = __commonJS({ enumerable: true, configurable: true }); - Object.defineProperty(URL4.prototype, "password", { + Object.defineProperty(URL3.prototype, "password", { get() { return this[impl].password; }, @@ -57439,7 +57097,7 @@ var require_URL = __commonJS({ enumerable: true, configurable: true }); 
- Object.defineProperty(URL4.prototype, "host", { + Object.defineProperty(URL3.prototype, "host", { get() { return this[impl].host; }, @@ -57450,7 +57108,7 @@ var require_URL = __commonJS({ enumerable: true, configurable: true }); - Object.defineProperty(URL4.prototype, "hostname", { + Object.defineProperty(URL3.prototype, "hostname", { get() { return this[impl].hostname; }, @@ -57461,7 +57119,7 @@ var require_URL = __commonJS({ enumerable: true, configurable: true }); - Object.defineProperty(URL4.prototype, "port", { + Object.defineProperty(URL3.prototype, "port", { get() { return this[impl].port; }, @@ -57472,7 +57130,7 @@ var require_URL = __commonJS({ enumerable: true, configurable: true }); - Object.defineProperty(URL4.prototype, "pathname", { + Object.defineProperty(URL3.prototype, "pathname", { get() { return this[impl].pathname; }, @@ -57483,7 +57141,7 @@ var require_URL = __commonJS({ enumerable: true, configurable: true }); - Object.defineProperty(URL4.prototype, "search", { + Object.defineProperty(URL3.prototype, "search", { get() { return this[impl].search; }, @@ -57494,7 +57152,7 @@ var require_URL = __commonJS({ enumerable: true, configurable: true }); - Object.defineProperty(URL4.prototype, "hash", { + Object.defineProperty(URL3.prototype, "hash", { get() { return this[impl].hash; }, @@ -57510,7 +57168,7 @@ var require_URL = __commonJS({ return !!obj && obj[impl] instanceof Impl.implementation; }, create(constructorArgs, privateData) { - let obj = Object.create(URL4.prototype); + let obj = Object.create(URL3.prototype); this.setup(obj, constructorArgs, privateData); return obj; }, @@ -57520,10 +57178,10 @@ var require_URL = __commonJS({ obj[impl] = new Impl.implementation(constructorArgs, privateData); obj[impl][utils.wrapperSymbol] = obj; }, - interface: URL4, + interface: URL3, expose: { - Window: { URL: URL4 }, - Worker: { URL: URL4 } + Window: { URL: URL3 }, + Worker: { URL: URL3 } } }; } @@ -58397,12 +58055,12 @@ var require_lib7 = __commonJS({ configurable: true }); var INTERNALS$2 = Symbol("Request internals"); - var URL4 = Url.URL || whatwgUrl.URL; + var URL3 = Url.URL || whatwgUrl.URL; var parse_url = Url.parse; var format_url = Url.format; function parseURL(urlStr) { if (/^[a-zA-Z][a-zA-Z\d+\-.]*:/.exec(urlStr)) { - urlStr = new URL4(urlStr).toString(); + urlStr = new URL3(urlStr).toString(); } return parse_url(urlStr); } @@ -58847,7 +58505,7 @@ var init_platform = __esm({ // ../node_modules/@opentelemetry/api/build/esm/version.js var VERSION2; -var init_version3 = __esm({ +var init_version2 = __esm({ "../node_modules/@opentelemetry/api/build/esm/version.js"() { VERSION2 = "1.8.0"; } @@ -58922,7 +58580,7 @@ function _makeCompatibilityCheck(ownVersion) { var re, isCompatible; var init_semver = __esm({ "../node_modules/@opentelemetry/api/build/esm/internal/semver.js"() { - init_version3(); + init_version2(); re = /^(\d+)\.(\d+)\.(\d+)(-(.+))?$/; __name(_makeCompatibilityCheck, "_makeCompatibilityCheck"); isCompatible = _makeCompatibilityCheck(VERSION2); @@ -58971,7 +58629,7 @@ var major, GLOBAL_OPENTELEMETRY_API_KEY, _global; var init_global_utils = __esm({ "../node_modules/@opentelemetry/api/build/esm/internal/global-utils.js"() { init_platform(); - init_version3(); + init_version2(); init_semver(); major = VERSION2.split(".")[0]; GLOBAL_OPENTELEMETRY_API_KEY = Symbol.for("opentelemetry.js.api." 
+ major); @@ -59944,10 +59602,10 @@ var init_ProxyTracer = __esm({ NOOP_TRACER = new NoopTracer(); ProxyTracer = /** @class */ function() { - function ProxyTracer2(_provider, name, version4, options) { + function ProxyTracer2(_provider, name, version3, options) { this._provider = _provider; this.name = name; - this.version = version4; + this.version = version3; this.options = options; } __name(ProxyTracer2, "ProxyTracer"); @@ -60004,9 +59662,9 @@ var init_ProxyTracerProvider = __esm({ function ProxyTracerProvider2() { } __name(ProxyTracerProvider2, "ProxyTracerProvider"); - ProxyTracerProvider2.prototype.getTracer = function(name, version4, options) { + ProxyTracerProvider2.prototype.getTracer = function(name, version3, options) { var _a; - return (_a = this.getDelegateTracer(name, version4, options)) !== null && _a !== void 0 ? _a : new ProxyTracer(this, name, version4, options); + return (_a = this.getDelegateTracer(name, version3, options)) !== null && _a !== void 0 ? _a : new ProxyTracer(this, name, version3, options); }; ProxyTracerProvider2.prototype.getDelegate = function() { var _a; @@ -60015,9 +59673,9 @@ var init_ProxyTracerProvider = __esm({ ProxyTracerProvider2.prototype.setDelegate = function(delegate) { this._delegate = delegate; }; - ProxyTracerProvider2.prototype.getDelegateTracer = function(name, version4, options) { + ProxyTracerProvider2.prototype.getDelegateTracer = function(name, version3, options) { var _a; - return (_a = this._delegate) === null || _a === void 0 ? void 0 : _a.getTracer(name, version4, options); + return (_a = this._delegate) === null || _a === void 0 ? void 0 : _a.getTracer(name, version3, options); }; return ProxyTracerProvider2; }(); @@ -60229,8 +59887,8 @@ var init_metrics = __esm({ MetricsAPI2.prototype.getMeterProvider = function() { return getGlobal(API_NAME3) || NOOP_METER_PROVIDER; }; - MetricsAPI2.prototype.getMeter = function(name, version4, options) { - return this.getMeterProvider().getMeter(name, version4, options); + MetricsAPI2.prototype.getMeter = function(name, version3, options) { + return this.getMeterProvider().getMeter(name, version3, options); }; MetricsAPI2.prototype.disable = function() { unregisterGlobal(API_NAME3, DiagAPI.instance()); @@ -60403,8 +60061,8 @@ var init_trace = __esm({ TraceAPI2.prototype.getTracerProvider = function() { return getGlobal(API_NAME5) || this._proxyTracerProvider; }; - TraceAPI2.prototype.getTracer = function(name, version4) { - return this.getTracerProvider().getTracer(name, version4); + TraceAPI2.prototype.getTracer = function(name, version3) { + return this.getTracerProvider().getTracer(name, version3); }; TraceAPI2.prototype.disable = function() { unregisterGlobal(API_NAME5, DiagAPI.instance()); @@ -60523,8 +60181,8 @@ var require_dist2 = __commonJS({ return api.trace.isSpanContextValid(context3); } __name(isSpanContextValid2, "isSpanContextValid"); - function getTracer(name, version4) { - return api.trace.getTracer(name || "azure/core-tracing", version4); + function getTracer(name, version3) { + return api.trace.getTracer(name || "azure/core-tracing", version3); } __name(getTracer, "getTracer"); var context2 = api.context; @@ -60579,8 +60237,8 @@ var require_dist2 = __commonJS({ if (parts.length !== 4) { return; } - const [version4, traceId, spanId, traceOptions] = parts; - if (version4 !== VERSION3) { + const [version3, traceId, spanId, traceOptions] = parts; + if (version3 !== VERSION3) { return; } const traceFlags = parseInt(traceOptions, 16); @@ -60627,7 +60285,7 @@ var require_dist3 = 
__commonJS({ "../node_modules/@azure/core-http/dist/index.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - var uuid = (init_esm_node2(), __toCommonJS(esm_node_exports2)); + var uuid = (init_esm_node(), __toCommonJS(esm_node_exports)); var util = require("util"); var tslib = (init_tslib_es6(), __toCommonJS(tslib_es6_exports)); var xml2js = require_xml2js(); @@ -63188,7 +62846,7 @@ var require_dist3 = __commonJS({ includeRoot: (_b = options.includeRoot) !== null && _b !== void 0 ? _b : false, xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY }; - return parse3(jsonContentTypes, xmlContentTypes, response, updatedOptions).then((parsedResponse) => { + return parse2(jsonContentTypes, xmlContentTypes, response, updatedOptions).then((parsedResponse) => { if (!shouldDeserializeResponse(parsedResponse)) { return parsedResponse; } @@ -63282,7 +62940,7 @@ var require_dist3 = __commonJS({ return { error, shouldReturnResponse: false }; } __name(handleErrorResponse, "handleErrorResponse"); - function parse3(jsonContentTypes, xmlContentTypes, operationResponse, opts) { + function parse2(jsonContentTypes, xmlContentTypes, operationResponse, opts) { var _a; const errorHandler = /* @__PURE__ */ __name((err) => { const msg = `Error "${err}" occurred while parsing the response body - ${operationResponse.bodyAsText}.`; @@ -63309,7 +62967,7 @@ var require_dist3 = __commonJS({ } return Promise.resolve(operationResponse); } - __name(parse3, "parse"); + __name(parse2, "parse"); var DefaultKeepAliveOptions = { enable: true }; @@ -66456,7 +66114,7 @@ var require_dist4 = __commonJS({ var logger$1 = require_commonjs3(); var abortController = require_dist(); var os = require("os"); - var crypto7 = require("crypto"); + var crypto4 = require("crypto"); var stream = require("stream"); require_commonjs5(); var coreLro = require_commonjs6(); @@ -74936,7 +74594,7 @@ var require_dist4 = __commonJS({ } } }; - var version4 = { + var version3 = { parameterPath: "version", mapper: { defaultValue: "2023-11-03", @@ -76600,7 +76258,7 @@ var require_dist4 = __commonJS({ headerParameters: [ contentType, accept, - version4, + version3, requestId ], isXML: true, @@ -76628,7 +76286,7 @@ var require_dist4 = __commonJS({ ], urlParameters: [url], headerParameters: [ - version4, + version3, requestId, accept1 ], @@ -76655,7 +76313,7 @@ var require_dist4 = __commonJS({ ], urlParameters: [url], headerParameters: [ - version4, + version3, requestId, accept1 ], @@ -76685,7 +76343,7 @@ var require_dist4 = __commonJS({ ], urlParameters: [url], headerParameters: [ - version4, + version3, requestId, accept1 ], @@ -76715,7 +76373,7 @@ var require_dist4 = __commonJS({ headerParameters: [ contentType, accept, - version4, + version3, requestId ], isXML: true, @@ -76737,7 +76395,7 @@ var require_dist4 = __commonJS({ }, queryParameters: [comp, restype1], urlParameters: [url], - headerParameters: [version4, accept1], + headerParameters: [version3, accept1], isXML: true, serializer: xmlSerializer$5 }; @@ -76763,7 +76421,7 @@ var require_dist4 = __commonJS({ headerParameters: [ contentType, accept, - version4, + version3, requestId, contentLength, multipartContentType @@ -76795,7 +76453,7 @@ var require_dist4 = __commonJS({ ], urlParameters: [url], headerParameters: [ - version4, + version3, requestId, accept1 ], @@ -77049,7 +76707,7 @@ var require_dist4 = __commonJS({ queryParameters: [timeoutInSeconds, restype2], urlParameters: [url], headerParameters: [ - version4, + version3, 
requestId, accept1, metadata, @@ -77075,7 +76733,7 @@ var require_dist4 = __commonJS({ queryParameters: [timeoutInSeconds, restype2], urlParameters: [url], headerParameters: [ - version4, + version3, requestId, accept1, leaseId @@ -77098,7 +76756,7 @@ var require_dist4 = __commonJS({ queryParameters: [timeoutInSeconds, restype2], urlParameters: [url], headerParameters: [ - version4, + version3, requestId, accept1, leaseId, @@ -77127,7 +76785,7 @@ var require_dist4 = __commonJS({ ], urlParameters: [url], headerParameters: [ - version4, + version3, requestId, accept1, metadata, @@ -77168,7 +76826,7 @@ var require_dist4 = __commonJS({ ], urlParameters: [url], headerParameters: [ - version4, + version3, requestId, accept1, leaseId @@ -77198,7 +76856,7 @@ var require_dist4 = __commonJS({ headerParameters: [ contentType, accept, - version4, + version3, requestId, access, leaseId, @@ -77229,7 +76887,7 @@ var require_dist4 = __commonJS({ ], urlParameters: [url], headerParameters: [ - version4, + version3, requestId, accept1, deletedContainerName, @@ -77257,7 +76915,7 @@ var require_dist4 = __commonJS({ ], urlParameters: [url], headerParameters: [ - version4, + version3, requestId, accept1, sourceContainerName, @@ -77292,7 +76950,7 @@ var require_dist4 = __commonJS({ headerParameters: [ contentType, accept, - version4, + version3, requestId, contentLength, multipartContentType @@ -77325,7 +76983,7 @@ var require_dist4 = __commonJS({ ], urlParameters: [url], headerParameters: [ - version4, + version3, requestId, accept1 ], @@ -77351,7 +77009,7 @@ var require_dist4 = __commonJS({ ], urlParameters: [url], headerParameters: [ - version4, + version3, requestId, accept1, ifModifiedSince, @@ -77382,7 +77040,7 @@ var require_dist4 = __commonJS({ ], urlParameters: [url], headerParameters: [ - version4, + version3, requestId, accept1, ifModifiedSince, @@ -77412,7 +77070,7 @@ var require_dist4 = __commonJS({ ], urlParameters: [url], headerParameters: [ - version4, + version3, requestId, accept1, ifModifiedSince, @@ -77442,7 +77100,7 @@ var require_dist4 = __commonJS({ ], urlParameters: [url], headerParameters: [ - version4, + version3, requestId, accept1, ifModifiedSince, @@ -77472,7 +77130,7 @@ var require_dist4 = __commonJS({ ], urlParameters: [url], headerParameters: [ - version4, + version3, requestId, accept1, ifModifiedSince, @@ -77508,7 +77166,7 @@ var require_dist4 = __commonJS({ ], urlParameters: [url], headerParameters: [ - version4, + version3, requestId, accept1 ], @@ -77540,7 +77198,7 @@ var require_dist4 = __commonJS({ ], urlParameters: [url], headerParameters: [ - version4, + version3, requestId, accept1 ], @@ -77561,7 +77219,7 @@ var require_dist4 = __commonJS({ }, queryParameters: [comp, restype1], urlParameters: [url], - headerParameters: [version4, accept1], + headerParameters: [version3, accept1], isXML: true, serializer: xmlSerializer$4 }; @@ -77907,7 +77565,7 @@ var require_dist4 = __commonJS({ ], urlParameters: [url], headerParameters: [ - version4, + version3, requestId, accept1, leaseId, @@ -77945,7 +77603,7 @@ var require_dist4 = __commonJS({ ], urlParameters: [url], headerParameters: [ - version4, + version3, requestId, accept1, leaseId, @@ -77981,7 +77639,7 @@ var require_dist4 = __commonJS({ ], urlParameters: [url], headerParameters: [ - version4, + version3, requestId, accept1, leaseId, @@ -78010,7 +77668,7 @@ var require_dist4 = __commonJS({ queryParameters: [timeoutInSeconds, comp8], urlParameters: [url], headerParameters: [ - version4, + version3, requestId, accept1 ], @@ 
-78032,7 +77690,7 @@ var require_dist4 = __commonJS({ queryParameters: [timeoutInSeconds, comp11], urlParameters: [url], headerParameters: [ - version4, + version3, requestId, accept1, expiryOptions, @@ -78056,7 +77714,7 @@ var require_dist4 = __commonJS({ queryParameters: [comp, timeoutInSeconds], urlParameters: [url], headerParameters: [ - version4, + version3, requestId, accept1, leaseId, @@ -78090,7 +77748,7 @@ var require_dist4 = __commonJS({ queryParameters: [timeoutInSeconds, comp12], urlParameters: [url], headerParameters: [ - version4, + version3, requestId, accept1, ifUnmodifiedSince, @@ -78115,7 +77773,7 @@ var require_dist4 = __commonJS({ queryParameters: [timeoutInSeconds, comp12], urlParameters: [url], headerParameters: [ - version4, + version3, requestId, accept1 ], @@ -78137,7 +77795,7 @@ var require_dist4 = __commonJS({ queryParameters: [timeoutInSeconds, comp13], urlParameters: [url], headerParameters: [ - version4, + version3, requestId, accept1, legalHold @@ -78160,7 +77818,7 @@ var require_dist4 = __commonJS({ queryParameters: [timeoutInSeconds, comp6], urlParameters: [url], headerParameters: [ - version4, + version3, requestId, accept1, metadata, @@ -78193,7 +77851,7 @@ var require_dist4 = __commonJS({ queryParameters: [timeoutInSeconds, comp10], urlParameters: [url], headerParameters: [ - version4, + version3, requestId, accept1, ifModifiedSince, @@ -78223,7 +77881,7 @@ var require_dist4 = __commonJS({ queryParameters: [timeoutInSeconds, comp10], urlParameters: [url], headerParameters: [ - version4, + version3, requestId, accept1, ifModifiedSince, @@ -78252,7 +77910,7 @@ var require_dist4 = __commonJS({ queryParameters: [timeoutInSeconds, comp10], urlParameters: [url], headerParameters: [ - version4, + version3, requestId, accept1, ifModifiedSince, @@ -78281,7 +77939,7 @@ var require_dist4 = __commonJS({ queryParameters: [timeoutInSeconds, comp10], urlParameters: [url], headerParameters: [ - version4, + version3, requestId, accept1, ifModifiedSince, @@ -78311,7 +77969,7 @@ var require_dist4 = __commonJS({ queryParameters: [timeoutInSeconds, comp10], urlParameters: [url], headerParameters: [ - version4, + version3, requestId, accept1, ifModifiedSince, @@ -78340,7 +77998,7 @@ var require_dist4 = __commonJS({ queryParameters: [timeoutInSeconds, comp14], urlParameters: [url], headerParameters: [ - version4, + version3, requestId, accept1, metadata, @@ -78373,7 +78031,7 @@ var require_dist4 = __commonJS({ queryParameters: [timeoutInSeconds], urlParameters: [url], headerParameters: [ - version4, + version3, requestId, accept1, metadata, @@ -78415,7 +78073,7 @@ var require_dist4 = __commonJS({ queryParameters: [timeoutInSeconds], urlParameters: [url], headerParameters: [ - version4, + version3, requestId, accept1, metadata, @@ -78463,7 +78121,7 @@ var require_dist4 = __commonJS({ ], urlParameters: [url], headerParameters: [ - version4, + version3, requestId, accept1, leaseId, @@ -78495,7 +78153,7 @@ var require_dist4 = __commonJS({ ], urlParameters: [url], headerParameters: [ - version4, + version3, requestId, accept1, leaseId, @@ -78520,7 +78178,7 @@ var require_dist4 = __commonJS({ }, queryParameters: [comp, restype1], urlParameters: [url], - headerParameters: [version4, accept1], + headerParameters: [version3, accept1], isXML: true, serializer: xmlSerializer$3 }; @@ -78557,7 +78215,7 @@ var require_dist4 = __commonJS({ headerParameters: [ contentType, accept, - version4, + version3, requestId, leaseId, ifModifiedSince, @@ -78595,7 +78253,7 @@ var require_dist4 = 
__commonJS({ ], urlParameters: [url], headerParameters: [ - version4, + version3, requestId, accept1, leaseId, @@ -78626,7 +78284,7 @@ var require_dist4 = __commonJS({ headerParameters: [ contentType, accept, - version4, + version3, requestId, leaseId, ifTags, @@ -78805,7 +78463,7 @@ var require_dist4 = __commonJS({ queryParameters: [timeoutInSeconds], urlParameters: [url], headerParameters: [ - version4, + version3, requestId, accept1, contentLength, @@ -78854,7 +78512,7 @@ var require_dist4 = __commonJS({ queryParameters: [timeoutInSeconds, comp19], urlParameters: [url], headerParameters: [ - version4, + version3, requestId, contentLength, leaseId, @@ -78895,7 +78553,7 @@ var require_dist4 = __commonJS({ queryParameters: [timeoutInSeconds, comp19], urlParameters: [url], headerParameters: [ - version4, + version3, requestId, accept1, contentLength, @@ -78933,7 +78591,7 @@ var require_dist4 = __commonJS({ queryParameters: [timeoutInSeconds, comp19], urlParameters: [url], headerParameters: [ - version4, + version3, requestId, accept1, contentLength, @@ -78987,7 +78645,7 @@ var require_dist4 = __commonJS({ ], urlParameters: [url], headerParameters: [ - version4, + version3, requestId, accept1, leaseId, @@ -79024,7 +78682,7 @@ var require_dist4 = __commonJS({ ], urlParameters: [url], headerParameters: [ - version4, + version3, requestId, accept1, leaseId, @@ -79054,7 +78712,7 @@ var require_dist4 = __commonJS({ queryParameters: [comp, timeoutInSeconds], urlParameters: [url], headerParameters: [ - version4, + version3, requestId, accept1, leaseId, @@ -79087,7 +78745,7 @@ var require_dist4 = __commonJS({ queryParameters: [comp, timeoutInSeconds], urlParameters: [url], headerParameters: [ - version4, + version3, requestId, accept1, leaseId, @@ -79117,7 +78775,7 @@ var require_dist4 = __commonJS({ queryParameters: [timeoutInSeconds, comp21], urlParameters: [url], headerParameters: [ - version4, + version3, requestId, accept1, ifModifiedSince, @@ -79223,7 +78881,7 @@ var require_dist4 = __commonJS({ queryParameters: [timeoutInSeconds], urlParameters: [url], headerParameters: [ - version4, + version3, requestId, accept1, contentLength, @@ -79269,7 +78927,7 @@ var require_dist4 = __commonJS({ queryParameters: [timeoutInSeconds, comp22], urlParameters: [url], headerParameters: [ - version4, + version3, requestId, contentLength, leaseId, @@ -79307,7 +78965,7 @@ var require_dist4 = __commonJS({ queryParameters: [timeoutInSeconds, comp22], urlParameters: [url], headerParameters: [ - version4, + version3, requestId, accept1, contentLength, @@ -79352,7 +79010,7 @@ var require_dist4 = __commonJS({ queryParameters: [timeoutInSeconds, comp23], urlParameters: [url], headerParameters: [ - version4, + version3, requestId, accept1, leaseId, @@ -79510,7 +79168,7 @@ var require_dist4 = __commonJS({ queryParameters: [timeoutInSeconds], urlParameters: [url], headerParameters: [ - version4, + version3, requestId, contentLength, metadata, @@ -79559,7 +79217,7 @@ var require_dist4 = __commonJS({ queryParameters: [timeoutInSeconds], urlParameters: [url], headerParameters: [ - version4, + version3, requestId, accept1, contentLength, @@ -79618,7 +79276,7 @@ var require_dist4 = __commonJS({ ], urlParameters: [url], headerParameters: [ - version4, + version3, requestId, contentLength, leaseId, @@ -79653,7 +79311,7 @@ var require_dist4 = __commonJS({ ], urlParameters: [url], headerParameters: [ - version4, + version3, requestId, accept1, contentLength, @@ -79693,7 +79351,7 @@ var require_dist4 = __commonJS({ 
headerParameters: [ contentType, accept, - version4, + version3, requestId, metadata, leaseId, @@ -79746,7 +79404,7 @@ var require_dist4 = __commonJS({ ], urlParameters: [url], headerParameters: [ - version4, + version3, requestId, accept1, leaseId, @@ -81195,7 +80853,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param stringToSign - */ computeHMACSHA256(stringToSign) { - return crypto7.createHmac("sha256", this.accountKey).update(stringToSign, "utf8").digest("base64"); + return crypto4.createHmac("sha256", this.accountKey).update(stringToSign, "utf8").digest("base64"); } }; var packageName = "azure-storage-blob"; @@ -81616,7 +81274,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * @param stringToSign - */ computeHMACSHA256(stringToSign) { - return crypto7.createHmac("sha256", this.key).update(stringToSign, "utf8").digest("base64"); + return crypto4.createHmac("sha256", this.key).update(stringToSign, "utf8").digest("base64"); } }; function ipRangeToString(ipRange) { @@ -81632,8 +81290,8 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; static { __name(this, "SASQueryParameters"); } - constructor(version5, signature, permissionsOrOptions, services, resourceTypes, protocol, startsOn, expiresOn2, ipRange, identifier, resource, cacheControl, contentDisposition, contentEncoding, contentLanguage, contentType2, userDelegationKey, preauthorizedAgentObjectId, correlationId, encryptionScope2) { - this.version = version5; + constructor(version4, signature, permissionsOrOptions, services, resourceTypes, protocol, startsOn, expiresOn2, ipRange, identifier, resource, cacheControl, contentDisposition, contentEncoding, contentLanguage, contentType2, userDelegationKey, preauthorizedAgentObjectId, correlationId, encryptionScope2) { + this.version = version4; this.signature = signature; if (permissionsOrOptions !== void 0 && typeof permissionsOrOptions !== "string") { this.permissions = permissionsOrOptions.permissions; @@ -81836,7 +81494,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } }; function generateBlobSASQueryParameters(blobSASSignatureValues, sharedKeyCredentialOrUserDelegationKey, accountName) { - const version5 = blobSASSignatureValues.version ? blobSASSignatureValues.version : SERVICE_VERSION; + const version4 = blobSASSignatureValues.version ? blobSASSignatureValues.version : SERVICE_VERSION; const sharedKeyCredential = sharedKeyCredentialOrUserDelegationKey instanceof StorageSharedKeyCredential ? 
sharedKeyCredentialOrUserDelegationKey : void 0; let userDelegationKeyCredential; if (sharedKeyCredential === void 0 && accountName !== void 0) { @@ -81845,25 +81503,25 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; if (sharedKeyCredential === void 0 && userDelegationKeyCredential === void 0) { throw TypeError("Invalid sharedKeyCredential, userDelegationKey or accountName."); } - if (version5 >= "2020-12-06") { + if (version4 >= "2020-12-06") { if (sharedKeyCredential !== void 0) { return generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential); } else { return generateBlobSASQueryParametersUDK20201206(blobSASSignatureValues, userDelegationKeyCredential); } } - if (version5 >= "2018-11-09") { + if (version4 >= "2018-11-09") { if (sharedKeyCredential !== void 0) { return generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential); } else { - if (version5 >= "2020-02-10") { + if (version4 >= "2020-02-10") { return generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userDelegationKeyCredential); } else { return generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userDelegationKeyCredential); } } } - if (version5 >= "2015-04-05") { + if (version4 >= "2015-04-05") { if (sharedKeyCredential !== void 0) { return generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential); } else { @@ -82166,44 +81824,44 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } __name(getCanonicalName, "getCanonicalName"); function SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues) { - const version5 = blobSASSignatureValues.version ? blobSASSignatureValues.version : SERVICE_VERSION; - if (blobSASSignatureValues.snapshotTime && version5 < "2018-11-09") { + const version4 = blobSASSignatureValues.version ? 
blobSASSignatureValues.version : SERVICE_VERSION; + if (blobSASSignatureValues.snapshotTime && version4 < "2018-11-09") { throw RangeError("'version' must be >= '2018-11-09' when providing 'snapshotTime'."); } if (blobSASSignatureValues.blobName === void 0 && blobSASSignatureValues.snapshotTime) { throw RangeError("Must provide 'blobName' when providing 'snapshotTime'."); } - if (blobSASSignatureValues.versionId && version5 < "2019-10-10") { + if (blobSASSignatureValues.versionId && version4 < "2019-10-10") { throw RangeError("'version' must be >= '2019-10-10' when providing 'versionId'."); } if (blobSASSignatureValues.blobName === void 0 && blobSASSignatureValues.versionId) { throw RangeError("Must provide 'blobName' when providing 'versionId'."); } - if (blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.setImmutabilityPolicy && version5 < "2020-08-04") { + if (blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.setImmutabilityPolicy && version4 < "2020-08-04") { throw RangeError("'version' must be >= '2020-08-04' when provided 'i' permission."); } - if (blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.deleteVersion && version5 < "2019-10-10") { + if (blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.deleteVersion && version4 < "2019-10-10") { throw RangeError("'version' must be >= '2019-10-10' when providing 'x' permission."); } - if (blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.permanentDelete && version5 < "2019-10-10") { + if (blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.permanentDelete && version4 < "2019-10-10") { throw RangeError("'version' must be >= '2019-10-10' when providing 'y' permission."); } - if (blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.tag && version5 < "2019-12-12") { + if (blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.tag && version4 < "2019-12-12") { throw RangeError("'version' must be >= '2019-12-12' when providing 't' permission."); } - if (version5 < "2020-02-10" && blobSASSignatureValues.permissions && (blobSASSignatureValues.permissions.move || blobSASSignatureValues.permissions.execute)) { + if (version4 < "2020-02-10" && blobSASSignatureValues.permissions && (blobSASSignatureValues.permissions.move || blobSASSignatureValues.permissions.execute)) { throw RangeError("'version' must be >= '2020-02-10' when providing the 'm' or 'e' permission."); } - if (version5 < "2021-04-10" && blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.filterByTags) { + if (version4 < "2021-04-10" && blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.filterByTags) { throw RangeError("'version' must be >= '2021-04-10' when providing the 'f' permission."); } - if (version5 < "2020-02-10" && (blobSASSignatureValues.preauthorizedAgentObjectId || blobSASSignatureValues.correlationId)) { + if (version4 < "2020-02-10" && (blobSASSignatureValues.preauthorizedAgentObjectId || blobSASSignatureValues.correlationId)) { throw RangeError("'version' must be >= '2020-02-10' when providing 'preauthorizedAgentObjectId' or 'correlationId'."); } - if (blobSASSignatureValues.encryptionScope && version5 < "2020-12-06") { + if (blobSASSignatureValues.encryptionScope && version4 < "2020-12-06") { throw RangeError("'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS."); } - blobSASSignatureValues.version = version5; + 
blobSASSignatureValues.version = version4; return blobSASSignatureValues; } __name(SASSignatureValuesSanityCheckAndAutofill, "SASSignatureValuesSanityCheckAndAutofill"); @@ -83822,757 +83480,1912 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; return this.originalResponse.lastModified; } /** - * A name-value pair - * to associate with a file storage object. + * A name-value pair + * to associate with a file storage object. + * + * @readonly + */ + get metadata() { + return this.originalResponse.metadata; + } + /** + * This header uniquely identifies the request + * that was made and can be used for troubleshooting the request. + * + * @readonly + */ + get requestId() { + return this.originalResponse.requestId; + } + /** + * If a client request id header is sent in the request, this header will be present in the + * response with the same value. + * + * @readonly + */ + get clientRequestId() { + return this.originalResponse.clientRequestId; + } + /** + * Indicates the version of the File service used + * to execute the request. + * + * @readonly + */ + get version() { + return this.originalResponse.version; + } + /** + * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned + * when the blob was encrypted with a customer-provided key. + * + * @readonly + */ + get encryptionKeySha256() { + return this.originalResponse.encryptionKeySha256; + } + /** + * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to + * true, then the request returns a crc64 for the range, as long as the range size is less than + * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 are + * specified in the same request, it will fail with 400 (Bad Request) + */ + get contentCrc64() { + return this.originalResponse.contentCrc64; + } + /** + * The response body as a browser Blob. + * Always undefined in node.js. + * + * @readonly + */ + get blobBody() { + return void 0; + } + /** + * The response body as a node.js Readable stream. + * Always undefined in the browser. + * + * It will parse avro data returned by blob query. + * + * @readonly + */ + get readableStreamBody() { + return coreHttp.isNode ? this.blobDownloadStream : void 0; + } + /** + * The HTTP response.
+ */ + get _response() { + return this.originalResponse._response; + } + }; + exports2.BlockBlobTier = void 0; + (function(BlockBlobTier) { + BlockBlobTier["Hot"] = "Hot"; + BlockBlobTier["Cool"] = "Cool"; + BlockBlobTier["Cold"] = "Cold"; + BlockBlobTier["Archive"] = "Archive"; + })(exports2.BlockBlobTier || (exports2.BlockBlobTier = {})); + exports2.PremiumPageBlobTier = void 0; + (function(PremiumPageBlobTier) { + PremiumPageBlobTier["P4"] = "P4"; + PremiumPageBlobTier["P6"] = "P6"; + PremiumPageBlobTier["P10"] = "P10"; + PremiumPageBlobTier["P15"] = "P15"; + PremiumPageBlobTier["P20"] = "P20"; + PremiumPageBlobTier["P30"] = "P30"; + PremiumPageBlobTier["P40"] = "P40"; + PremiumPageBlobTier["P50"] = "P50"; + PremiumPageBlobTier["P60"] = "P60"; + PremiumPageBlobTier["P70"] = "P70"; + PremiumPageBlobTier["P80"] = "P80"; + })(exports2.PremiumPageBlobTier || (exports2.PremiumPageBlobTier = {})); + function toAccessTier(tier2) { + if (tier2 === void 0) { + return void 0; + } + return tier2; + } + __name(toAccessTier, "toAccessTier"); + function ensureCpkIfSpecified(cpk, isHttps) { + if (cpk && !isHttps) { + throw new RangeError("Customer-provided encryption key must be used over HTTPS."); + } + if (cpk && !cpk.encryptionAlgorithm) { + cpk.encryptionAlgorithm = EncryptionAlgorithmAES25; + } + } + __name(ensureCpkIfSpecified, "ensureCpkIfSpecified"); + exports2.StorageBlobAudience = void 0; + (function(StorageBlobAudience) { + StorageBlobAudience["StorageOAuthScopes"] = "https://storage.azure.com/.default"; + StorageBlobAudience["DiskComputeOAuthScopes"] = "https://disk.compute.azure.com/.default"; + })(exports2.StorageBlobAudience || (exports2.StorageBlobAudience = {})); + function getBlobServiceAccountAudience(storageAccountName) { + return `https://${storageAccountName}.blob.core.windows.net/.default`; + } + __name(getBlobServiceAccountAudience, "getBlobServiceAccountAudience"); + function rangeResponseFromModel(response) { + const pageRange = (response._response.parsedBody.pageRange || []).map((x) => ({ + offset: x.start, + count: x.end - x.start + })); + const clearRange = (response._response.parsedBody.clearRange || []).map((x) => ({ + offset: x.start, + count: x.end - x.start + })); + return Object.assign(Object.assign({}, response), { + pageRange, + clearRange, + _response: Object.assign(Object.assign({}, response._response), { parsedBody: { + pageRange, + clearRange + } }) + }); + } + __name(rangeResponseFromModel, "rangeResponseFromModel"); + var BlobBeginCopyFromUrlPoller = class extends coreLro.Poller { + static { + __name(this, "BlobBeginCopyFromUrlPoller"); + } + constructor(options) { + const { blobClient, copySource: copySource2, intervalInMs = 15e3, onProgress, resumeFrom, startCopyFromURLOptions } = options; + let state; + if (resumeFrom) { + state = JSON.parse(resumeFrom).state; + } + const operation = makeBlobBeginCopyFromURLPollOperation(Object.assign(Object.assign({}, state), { + blobClient, + copySource: copySource2, + startCopyFromURLOptions + })); + super(operation); + if (typeof onProgress === "function") { + this.onProgress(onProgress); + } + this.intervalInMs = intervalInMs; + } + delay() { + return coreHttp.delay(this.intervalInMs); + } + }; + var cancel = /* @__PURE__ */ __name(async function cancel2(options = {}) { + const state = this.state; + const { copyId: copyId2 } = state; + if (state.isCompleted) { + return makeBlobBeginCopyFromURLPollOperation(state); + } + if (!copyId2) { + state.isCancelled = true; + return makeBlobBeginCopyFromURLPollOperation(state); 
+ } + await state.blobClient.abortCopyFromURL(copyId2, { + abortSignal: options.abortSignal + }); + state.isCancelled = true; + return makeBlobBeginCopyFromURLPollOperation(state); + }, "cancel"); + var update = /* @__PURE__ */ __name(async function update2(options = {}) { + const state = this.state; + const { blobClient, copySource: copySource2, startCopyFromURLOptions } = state; + if (!state.isStarted) { + state.isStarted = true; + const result = await blobClient.startCopyFromURL(copySource2, startCopyFromURLOptions); + state.copyId = result.copyId; + if (result.copyStatus === "success") { + state.result = result; + state.isCompleted = true; + } + } else if (!state.isCompleted) { + try { + const result = await state.blobClient.getProperties({ abortSignal: options.abortSignal }); + const { copyStatus, copyProgress } = result; + const prevCopyProgress = state.copyProgress; + if (copyProgress) { + state.copyProgress = copyProgress; + } + if (copyStatus === "pending" && copyProgress !== prevCopyProgress && typeof options.fireProgress === "function") { + options.fireProgress(state); + } else if (copyStatus === "success") { + state.result = result; + state.isCompleted = true; + } else if (copyStatus === "failed") { + state.error = new Error(`Blob copy failed with reason: "${result.copyStatusDescription || "unknown"}"`); + state.isCompleted = true; + } + } catch (err) { + state.error = err; + state.isCompleted = true; + } + } + return makeBlobBeginCopyFromURLPollOperation(state); + }, "update"); + var toString = /* @__PURE__ */ __name(function toString2() { + return JSON.stringify({ state: this.state }, (key, value) => { + if (key === "blobClient") { + return void 0; + } + return value; + }); + }, "toString"); + function makeBlobBeginCopyFromURLPollOperation(state) { + return { + state: Object.assign({}, state), + cancel, + toString, + update + }; + } + __name(makeBlobBeginCopyFromURLPollOperation, "makeBlobBeginCopyFromURLPollOperation"); + function rangeToString(iRange) { + if (iRange.offset < 0) { + throw new RangeError(`Range.offset cannot be smaller than 0.`); + } + if (iRange.count && iRange.count <= 0) { + throw new RangeError(`Range.count must be larger than 0. Leave it undefined if you want a range from offset to the end.`); + } + return iRange.count ? `bytes=${iRange.offset}-${iRange.offset + iRange.count - 1}` : `bytes=${iRange.offset}-`; + } + __name(rangeToString, "rangeToString"); + var BatchStates; + (function(BatchStates2) { + BatchStates2[BatchStates2["Good"] = 0] = "Good"; + BatchStates2[BatchStates2["Error"] = 1] = "Error"; + })(BatchStates || (BatchStates = {})); + var Batch = class { + static { + __name(this, "Batch"); + } + /** + * Creates an instance of Batch. + * @param concurrency - + */ + constructor(concurrency = 5) { + this.actives = 0; + this.completed = 0; + this.offset = 0; + this.operations = []; + this.state = BatchStates.Good; + if (concurrency < 1) { + throw new RangeError("concurrency must be larger than 0"); + } + this.concurrency = concurrency; + this.emitter = new events.EventEmitter(); + } + /** + * Add an operation into the queue. + * + * @param operation - + */ + addOperation(operation) { + this.operations.push(async () => { + try { + this.actives++; + await operation(); + this.actives--; + this.completed++; + this.parallelExecute(); + } catch (error) { + this.emitter.emit("error", error); + } + }); + } + /** + * Start executing operations in the queue.
+ * + */ + async do() { + if (this.operations.length === 0) { + return Promise.resolve(); + } + this.parallelExecute(); + return new Promise((resolve, reject) => { + this.emitter.on("finish", resolve); + this.emitter.on("error", (error) => { + this.state = BatchStates.Error; + reject(error); + }); + }); + } + /** + * Get the next operation to be executed. Return null when reaching the end. + * + */ + nextOperation() { + if (this.offset < this.operations.length) { + return this.operations[this.offset++]; + } + return null; + } + /** + * Start executing operations. One of the most important differences between + * this method and do() is that do() is wrapped as an async method. + * + */ + parallelExecute() { + if (this.state === BatchStates.Error) { + return; + } + if (this.completed >= this.operations.length) { + this.emitter.emit("finish"); + return; + } + while (this.actives < this.concurrency) { + const operation = this.nextOperation(); + if (operation) { + operation(); + } else { + return; + } + } + } + }; + var BuffersStream = class extends stream.Readable { + static { + __name(this, "BuffersStream"); + } + /** + * Creates an instance of BuffersStream that will emit the data + * contained in the array of buffers. + * + * @param buffers - Array of buffers containing the data + * @param byteLength - The total length of data contained in the buffers + */ + constructor(buffers, byteLength, options) { + super(options); + this.buffers = buffers; + this.byteLength = byteLength; + this.byteOffsetInCurrentBuffer = 0; + this.bufferIndex = 0; + this.pushedBytesLength = 0; + let buffersLength = 0; + for (const buf of this.buffers) { + buffersLength += buf.byteLength; + } + if (buffersLength < this.byteLength) { + throw new Error("Data size shouldn't be larger than the total length of buffers."); + } + } + /** + * Internal _read() that will be called when the stream wants to pull more data in. + * + * @param size - Optional.
The size of data to be read + */ + _read(size) { + if (this.pushedBytesLength >= this.byteLength) { + this.push(null); + } + if (!size) { + size = this.readableHighWaterMark; + } + const outBuffers = []; + let i = 0; + while (i < size && this.pushedBytesLength < this.byteLength) { + const remainingDataInAllBuffers = this.byteLength - this.pushedBytesLength; + const remainingCapacityInThisBuffer = this.buffers[this.bufferIndex].byteLength - this.byteOffsetInCurrentBuffer; + const remaining = Math.min(remainingCapacityInThisBuffer, remainingDataInAllBuffers); + if (remaining > size - i) { + const end = this.byteOffsetInCurrentBuffer + size - i; + outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end)); + this.pushedBytesLength += size - i; + this.byteOffsetInCurrentBuffer = end; + i = size; + break; + } else { + const end = this.byteOffsetInCurrentBuffer + remaining; + outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end)); + if (remaining === remainingCapacityInThisBuffer) { + this.byteOffsetInCurrentBuffer = 0; + this.bufferIndex++; + } else { + this.byteOffsetInCurrentBuffer = end; + } + this.pushedBytesLength += remaining; + i += remaining; + } + } + if (outBuffers.length > 1) { + this.push(Buffer.concat(outBuffers)); + } else if (outBuffers.length === 1) { + this.push(outBuffers[0]); + } + } + }; + var maxBufferLength = require("buffer").constants.MAX_LENGTH; + var PooledBuffer = class { + static { + __name(this, "PooledBuffer"); + } + constructor(capacity, buffers, totalLength) { + this.buffers = []; + this.capacity = capacity; + this._size = 0; + const bufferNum = Math.ceil(capacity / maxBufferLength); + for (let i = 0; i < bufferNum; i++) { + let len = i === bufferNum - 1 ? capacity % maxBufferLength : maxBufferLength; + if (len === 0) { + len = maxBufferLength; + } + this.buffers.push(Buffer.allocUnsafe(len)); + } + if (buffers) { + this.fill(buffers, totalLength); + } + } + /** + * The size of the data contained in the pooled buffers. + */ + get size() { + return this._size; + } + /** + * Fill the internal buffers with data in the input buffers serially + * with respect to the total length and the total capacity of the internal buffers. + * Data copied will be shift out of the input buffers. + * + * @param buffers - Input buffers containing the data to be filled in the pooled buffer + * @param totalLength - Total length of the data to be filled in. + * + */ + fill(buffers, totalLength) { + this._size = Math.min(this.capacity, totalLength); + let i = 0, j = 0, targetOffset = 0, sourceOffset = 0, totalCopiedNum = 0; + while (totalCopiedNum < this._size) { + const source = buffers[i]; + const target = this.buffers[j]; + const copiedNum = source.copy(target, targetOffset, sourceOffset); + totalCopiedNum += copiedNum; + sourceOffset += copiedNum; + targetOffset += copiedNum; + if (sourceOffset === source.length) { + i++; + sourceOffset = 0; + } + if (targetOffset === target.length) { + j++; + targetOffset = 0; + } + } + buffers.splice(0, i); + if (buffers.length > 0) { + buffers[0] = buffers[0].slice(sourceOffset); + } + } + /** + * Get the readable stream assembled from all the data in the internal buffers. + * + */ + getReadableStream() { + return new BuffersStream(this.buffers, this.size); + } + }; + var BufferScheduler = class { + static { + __name(this, "BufferScheduler"); + } + /** + * Creates an instance of BufferScheduler. 
+ * + * @param readable - A Node.js Readable stream + * @param bufferSize - Buffer size of every maintained buffer + * @param maxBuffers - How many buffers can be allocated + * @param outgoingHandler - An async function scheduled to be + * triggered when a buffer is fully filled + * with stream data + * @param concurrency - Concurrency of executing outgoingHandlers (>0) + * @param encoding - [Optional] Encoding of Readable stream when it's a string stream + */ + constructor(readable, bufferSize, maxBuffers, outgoingHandler, concurrency, encoding) { + this.emitter = new events.EventEmitter(); + this.offset = 0; + this.isStreamEnd = false; + this.isError = false; + this.executingOutgoingHandlers = 0; + this.numBuffers = 0; + this.unresolvedDataArray = []; + this.unresolvedLength = 0; + this.incoming = []; + this.outgoing = []; + if (bufferSize <= 0) { + throw new RangeError(`bufferSize must be larger than 0, current is ${bufferSize}`); + } + if (maxBuffers <= 0) { + throw new RangeError(`maxBuffers must be larger than 0, current is ${maxBuffers}`); + } + if (concurrency <= 0) { + throw new RangeError(`concurrency must be larger than 0, current is ${concurrency}`); + } + this.bufferSize = bufferSize; + this.maxBuffers = maxBuffers; + this.readable = readable; + this.outgoingHandler = outgoingHandler; + this.concurrency = concurrency; + this.encoding = encoding; + } + /** + * Start the scheduler; it will return an error when the stream or any of the outgoingHandlers + * returns an error. + * + */ + async do() { + return new Promise((resolve, reject) => { + this.readable.on("data", (data) => { + data = typeof data === "string" ? Buffer.from(data, this.encoding) : data; + this.appendUnresolvedData(data); + if (!this.resolveData()) { + this.readable.pause(); + } + }); + this.readable.on("error", (err) => { + this.emitter.emit("error", err); + }); + this.readable.on("end", () => { + this.isStreamEnd = true; + this.emitter.emit("checkEnd"); + }); + this.emitter.on("error", (err) => { + this.isError = true; + this.readable.pause(); + reject(err); + }); + this.emitter.on("checkEnd", () => { + if (this.outgoing.length > 0) { + this.triggerOutgoingHandlers(); + return; + } + if (this.isStreamEnd && this.executingOutgoingHandlers === 0) { + if (this.unresolvedLength > 0 && this.unresolvedLength < this.bufferSize) { + const buffer = this.shiftBufferFromUnresolvedDataArray(); + this.outgoingHandler(() => buffer.getReadableStream(), buffer.size, this.offset).then(resolve).catch(reject); + } else if (this.unresolvedLength >= this.bufferSize) { + return; + } else { + resolve(); + } + } + }); + }); + } + /** + * Insert new data into the unresolved array. + * + * @param data - + */ + appendUnresolvedData(data) { + this.unresolvedDataArray.push(data); + this.unresolvedLength += data.length; + } + /** + * Try to shift a buffer with size in blockSize. The buffer returned may be less + * than blockSize when data in unresolvedDataArray is less than bufferSize. + * + */ + shiftBufferFromUnresolvedDataArray(buffer) { + if (!buffer) { + buffer = new PooledBuffer(this.bufferSize, this.unresolvedDataArray, this.unresolvedLength); + } else { + buffer.fill(this.unresolvedDataArray, this.unresolvedLength); + } + this.unresolvedLength -= buffer.size; + return buffer; + } + /** + * Resolve data in unresolvedDataArray. For every buffer with size in blockSize + * shifted, it will try to get (or allocate a buffer) from incoming, and fill it, + * then push it into outgoing to be handled by outgoing handler.
+ * + * Return false when available buffers in incoming are not enough, else true. + * + * @returns Return false when buffers in incoming are not enough, else true. + */ + resolveData() { + while (this.unresolvedLength >= this.bufferSize) { + let buffer; + if (this.incoming.length > 0) { + buffer = this.incoming.shift(); + this.shiftBufferFromUnresolvedDataArray(buffer); + } else { + if (this.numBuffers < this.maxBuffers) { + buffer = this.shiftBufferFromUnresolvedDataArray(); + this.numBuffers++; + } else { + return false; + } + } + this.outgoing.push(buffer); + this.triggerOutgoingHandlers(); + } + return true; + } + /** + * Try to trigger an outgoing handler for every buffer in outgoing. Stop when + * the concurrency limit is reached. + */ + async triggerOutgoingHandlers() { + let buffer; + do { + if (this.executingOutgoingHandlers >= this.concurrency) { + return; + } + buffer = this.outgoing.shift(); + if (buffer) { + this.triggerOutgoingHandler(buffer); + } + } while (buffer); + } + /** + * Trigger an outgoing handler for a buffer shifted from outgoing. + * + * @param buffer - + */ + async triggerOutgoingHandler(buffer) { + const bufferLength = buffer.size; + this.executingOutgoingHandlers++; + this.offset += bufferLength; + try { + await this.outgoingHandler(() => buffer.getReadableStream(), bufferLength, this.offset - bufferLength); + } catch (err) { + this.emitter.emit("error", err); + return; + } + this.executingOutgoingHandlers--; + this.reuseBuffer(buffer); + this.emitter.emit("checkEnd"); + } + /** + * Return a buffer used by an outgoing handler into incoming. + * + * @param buffer - + */ + reuseBuffer(buffer) { + this.incoming.push(buffer); + if (!this.isError && this.resolveData() && !this.isStreamEnd) { + this.readable.resume(); + } + } + }; + async function streamToBuffer(stream2, buffer, offset, end, encoding) { + let pos = 0; + const count = end - offset; + return new Promise((resolve, reject) => { + const timeout = setTimeout(() => reject(new Error(`The operation cannot be completed in timeout.`)), REQUEST_TIMEOUT); + stream2.on("readable", () => { + if (pos >= count) { + clearTimeout(timeout); + resolve(); + return; + } + let chunk = stream2.read(); + if (!chunk) { + return; + } + if (typeof chunk === "string") { + chunk = Buffer.from(chunk, encoding); + } + const chunkLength = pos + chunk.length > count ? count - pos : chunk.length; + buffer.fill(chunk.slice(0, chunkLength), offset + pos, offset + pos + chunkLength); + pos += chunkLength; + }); + stream2.on("end", () => { + clearTimeout(timeout); + if (pos < count) { + reject(new Error(`Stream drains before getting enough data needed. Data read: ${pos}, data need: ${count}`)); + } + resolve(); + }); + stream2.on("error", (msg) => { + clearTimeout(timeout); + reject(msg); + }); + }); + } + __name(streamToBuffer, "streamToBuffer"); + async function streamToBuffer2(stream2, buffer, encoding) { + let pos = 0; + const bufferSize = buffer.length; + return new Promise((resolve, reject) => { + stream2.on("readable", () => { + let chunk = stream2.read(); + if (!chunk) { + return; + } + if (typeof chunk === "string") { + chunk = Buffer.from(chunk, encoding); + } + if (pos + chunk.length > bufferSize) { + reject(new Error(`Stream exceeds buffer size.
Buffer size: ${bufferSize}`)); + return; + } + buffer.fill(chunk, pos, pos + chunk.length); + pos += chunk.length; + }); + stream2.on("end", () => { + resolve(pos); + }); + stream2.on("error", reject); + }); + } + __name(streamToBuffer2, "streamToBuffer2"); + async function readStreamToLocalFile(rs, file) { + return new Promise((resolve, reject) => { + const ws = fs__namespace.createWriteStream(file); + rs.on("error", (err) => { + reject(err); + }); + ws.on("error", (err) => { + reject(err); + }); + ws.on("close", resolve); + rs.pipe(ws); + }); + } + __name(readStreamToLocalFile, "readStreamToLocalFile"); + var fsStat = util__namespace.promisify(fs__namespace.stat); + var fsCreateReadStream = fs__namespace.createReadStream; + var BlobClient = class _BlobClient extends StorageClient { + static { + __name(this, "BlobClient"); + } + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) { + options = options || {}; + let pipeline; + let url2; + if (isPipelineLike(credentialOrPipelineOrContainerName)) { + url2 = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } else if (coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential || credentialOrPipelineOrContainerName instanceof AnonymousCredential || coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) { + url2 = urlOrConnectionString; + options = blobNameOrOptions; + pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + } else if (!credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName !== "string") { + url2 = urlOrConnectionString; + if (blobNameOrOptions && typeof blobNameOrOptions !== "string") { + options = blobNameOrOptions; + } + pipeline = newPipeline(new AnonymousCredential(), options); + } else if (credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName === "string" && blobNameOrOptions && typeof blobNameOrOptions === "string") { + const containerName = credentialOrPipelineOrContainerName; + const blobName = blobNameOrOptions; + const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (coreHttp.isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url2 = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (!options.proxyOptions) { + options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + } + pipeline = newPipeline(sharedKeyCredential, options); + } else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } else if (extractedCreds.kind === "SASConnString") { + url2 = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + "?" 
+ extractedCreds.accountSas; + pipeline = newPipeline(new AnonymousCredential(), options); + } else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } else { + throw new Error("Expecting non-empty strings for containerName and blobName parameters"); + } + super(url2, pipeline); + ({ blobName: this._name, containerName: this._containerName } = this.getBlobAndContainerNamesFromUrl()); + this.blobContext = new Blob$1(this.storageClientContext); + this._snapshot = getURLParameter(this.url, URLConstants.Parameters.SNAPSHOT); + this._versionId = getURLParameter(this.url, URLConstants.Parameters.VERSIONID); + } + /** + * The name of the blob. + */ + get name() { + return this._name; + } + /** + * The name of the storage container the blob is associated with. + */ + get containerName() { + return this._containerName; + } + /** + * Creates a new BlobClient object identical to the source but with the specified snapshot timestamp. + * Provide "" will remove the snapshot and return a Client to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new BlobClient object identical to the source but with the specified snapshot timestamp + */ + withSnapshot(snapshot2) { + return new _BlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot2.length === 0 ? void 0 : snapshot2), this.pipeline); + } + /** + * Creates a new BlobClient object pointing to a version of this blob. + * Provide "" will remove the versionId and return a Client to the base blob. + * + * @param versionId - The versionId. + * @returns A new BlobClient object pointing to the version of this blob. + */ + withVersion(versionId2) { + return new _BlobClient(setURLParameter(this.url, URLConstants.Parameters.VERSIONID, versionId2.length === 0 ? void 0 : versionId2), this.pipeline); + } + /** + * Creates a AppendBlobClient object. + * + */ + getAppendBlobClient() { + return new AppendBlobClient(this.url, this.pipeline); + } + /** + * Creates a BlockBlobClient object. + * + */ + getBlockBlobClient() { + return new BlockBlobClient(this.url, this.pipeline); + } + /** + * Creates a PageBlobClient object. + * + */ + getPageBlobClient() { + return new PageBlobClient(this.url, this.pipeline); + } + /** + * Reads or downloads a blob from the system, including its metadata and properties. + * You can also call Get Blob to read a snapshot. + * + * * In Node.js, data returns in a Readable stream readableStreamBody + * * In browsers, data returns in a promise blobBody + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob + * + * @param offset - From which position of the blob to download, greater than or equal to 0 + * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined + * @param options - Optional options to Blob Download operation. + * + * + * Example usage (Node.js): + * + * ```js + * // Download and convert a blob to a string + * const downloadBlockBlobResponse = await blobClient.download(); + * const downloaded = await streamToBuffer(downloadBlockBlobResponse.readableStreamBody); + * console.log("Downloaded blob content:", downloaded.toString()); + * + * async function streamToBuffer(readableStream) { + * return new Promise((resolve, reject) => { + * const chunks = []; + * readableStream.on("data", (data) => { + * chunks.push(data instanceof Buffer ? 
data : Buffer.from(data)); + * }); + * readableStream.on("end", () => { + * resolve(Buffer.concat(chunks)); + * }); + * readableStream.on("error", reject); + * }); + * } + * ``` + * + * Example usage (browser): + * + * ```js + * // Download and convert a blob to a string + * const downloadBlockBlobResponse = await blobClient.download(); + * const downloaded = await blobToString(await downloadBlockBlobResponse.blobBody); + * console.log( + * "Downloaded blob content", + * downloaded + * ); + * + * async function blobToString(blob: Blob): Promise { + * const fileReader = new FileReader(); + * return new Promise((resolve, reject) => { + * fileReader.onloadend = (ev: any) => { + * resolve(ev.target!.result); + * }; + * fileReader.onerror = reject; + * fileReader.readAsText(blob); + * }); + * } + * ``` + */ + async download(offset = 0, count, options = {}) { + var _a; + options.conditions = options.conditions || {}; + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + const { span, updatedOptions } = createSpan("BlobClient-download", options); + try { + const res = await this.blobContext.download(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { + onDownloadProgress: coreHttp.isNode ? void 0 : options.onProgress + // for Node.js, progress is reported by RetriableReadableStream + }, range: offset === 0 && !count ? void 0 : rangeToString({ offset, count }), rangeGetContentMD5: options.rangeGetContentMD5, rangeGetContentCRC64: options.rangeGetContentCrc64, snapshot: options.snapshot, cpkInfo: options.customerProvidedKey }, convertTracingToRequestOptionsBase(updatedOptions))); + const wrappedRes = Object.assign(Object.assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) }); + if (!coreHttp.isNode) { + return wrappedRes; + } + if (options.maxRetryRequests === void 0 || options.maxRetryRequests < 0) { + options.maxRetryRequests = DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS; + } + if (res.contentLength === void 0) { + throw new RangeError(`File download response doesn't contain valid content length header`); + } + if (!res.etag) { + throw new RangeError(`File download response doesn't contain valid etag header`); + } + return new BlobDownloadResponse(wrappedRes, async (start) => { + var _a2; + const updatedDownloadOptions = { + leaseAccessConditions: options.conditions, + modifiedAccessConditions: { + ifMatch: options.conditions.ifMatch || res.etag, + ifModifiedSince: options.conditions.ifModifiedSince, + ifNoneMatch: options.conditions.ifNoneMatch, + ifUnmodifiedSince: options.conditions.ifUnmodifiedSince, + ifTags: (_a2 = options.conditions) === null || _a2 === void 0 ? 
void 0 : _a2.tagConditions + }, + range: rangeToString({ + count: offset + res.contentLength - start, + offset: start + }), + rangeGetContentMD5: options.rangeGetContentMD5, + rangeGetContentCRC64: options.rangeGetContentCrc64, + snapshot: options.snapshot, + cpkInfo: options.customerProvidedKey + }; + return (await this.blobContext.download(Object.assign({ abortSignal: options.abortSignal }, updatedDownloadOptions))).readableStreamBody; + }, offset, res.contentLength, { + maxRetryRequests: options.maxRetryRequests, + onProgress: options.onProgress + }); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); + } + } + /** + * Returns true if the Azure blob resource represented by this client exists; false otherwise. * - * @readonly - */ - get metadata() { - return this.originalResponse.metadata; - } - /** - * This header uniquely identifies the request - * that was made and can be used for troubleshooting the request. + * NOTE: use this function with care since an existing blob might be deleted by other clients or + * applications. Vice versa new blobs might be added by other clients or applications after this + * function completes. * - * @readonly + * @param options - options to Exists operation. */ - get requestId() { - return this.originalResponse.requestId; + async exists(options = {}) { + const { span, updatedOptions } = createSpan("BlobClient-exists", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + await this.getProperties({ + abortSignal: options.abortSignal, + customerProvidedKey: options.customerProvidedKey, + conditions: options.conditions, + tracingOptions: updatedOptions.tracingOptions + }); + return true; + } catch (e) { + if (e.statusCode === 404) { + return false; + } else if (e.statusCode === 409 && (e.details.errorCode === BlobUsesCustomerSpecifiedEncryptionMsg || e.details.errorCode === BlobDoesNotUseCustomerSpecifiedEncryption)) { + return true; + } + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); + } } /** - * If a client request id header is sent in the request, this header will be present in the - * response with the same value. + * Returns all user-defined metadata, standard HTTP properties, and system properties + * for the blob. It does not return the content of the blob. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-properties * - * @readonly + * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if + * they originally contained uppercase characters. This differs from the metadata keys returned by + * the methods of {@link ContainerClient} that list blobs using the `includeMetadata` option, which + * will retain their original casing. + * + * @param options - Optional options to Get Properties operation. 
*/ - get clientRequestId() { - return this.originalResponse.clientRequestId; + async getProperties(options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-getProperties", options); + try { + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + const res = await this.blobContext.getProperties(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey }, convertTracingToRequestOptionsBase(updatedOptions))); + return Object.assign(Object.assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) }); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); + } } /** - * Indicates the version of the File service used - * to execute the request. + * Marks the specified blob or snapshot for deletion. The blob is later deleted + * during garbage collection. Note that in order to delete a blob, you must delete + * all of its snapshots. You can delete both at the same time with the Delete + * Blob operation. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob * - * @readonly + * @param options - Optional options to Blob Delete operation. */ - get version() { - return this.originalResponse.version; + async delete(options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-delete", options); + options.conditions = options.conditions || {}; + try { + return await this.blobContext.delete(Object.assign({ abortSignal: options.abortSignal, deleteSnapshots: options.deleteSnapshots, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); + } } /** - * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned - * when the blob was encrypted with a customer-provided key. + * Marks the specified blob or snapshot for deletion if it exists. The blob is later deleted + * during garbage collection. Note that in order to delete a blob, you must delete + * all of its snapshots. You can delete both at the same time with the Delete + * Blob operation. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob * - * @readonly + * @param options - Optional options to Blob Delete operation. */ - get encryptionKeySha256() { - return this.originalResponse.encryptionKeySha256; + async deleteIfExists(options = {}) { + var _a, _b; + const { span, updatedOptions } = createSpan("BlobClient-deleteIfExists", options); + try { + const res = await this.delete(updatedOptions); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + } catch (e) { + if (((_a = e.details) === null || _a === void 0 ? 
void 0 : _a.errorCode) === "BlobNotFound") { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: "Expected exception when deleting a blob or snapshot only if it exists." + }); + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); + } } /** - * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to - * true, then the request returns a crc64 for the range, as long as the range size is less than - * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is - * specified in the same request, it will fail with 400(Bad Request) + * Restores the contents and metadata of soft deleted blob and any associated + * soft deleted snapshots. Undelete Blob is supported only on version 2017-07-29 + * or later. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/undelete-blob + * + * @param options - Optional options to Blob Undelete operation. */ - get contentCrc64() { - return this.originalResponse.contentCrc64; + async undelete(options = {}) { + const { span, updatedOptions } = createSpan("BlobClient-undelete", options); + try { + return await this.blobContext.undelete(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); + } } /** - * The response body as a browser Blob. - * Always undefined in node.js. + * Sets system properties on the blob. * - * @readonly + * If no value provided, or no value provided for the specified blob HTTP headers, + * these blob HTTP headers without a value will be cleared. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties + * + * @param blobHTTPHeaders - If no value provided, or no value provided for + * the specified blob HTTP headers, these blob HTTP + * headers without a value will be cleared. + * A common header to set is `blobContentType` + * enabling the browser to provide functionality + * based on file type. + * @param options - Optional options to Blob Set HTTP Headers operation. */ - get blobBody() { - return void 0; + async setHTTPHeaders(blobHTTPHeaders, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-setHTTPHeaders", options); + options.conditions = options.conditions || {}; + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blobContext.setHttpHeaders(Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: blobHTTPHeaders, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); + } } /** - * The response body as a node.js Readable stream. - * Always undefined in the browser. + * Sets user-defined metadata for the specified blob as one or more name-value pairs. * - * It will parse avor data returned by blob query. 
+ * If no option is provided, or no metadata is defined in the parameter, the blob + * metadata will be removed. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-metadata * - * @readonly + * @param metadata - Replace existing metadata with this value. + * If no value is provided, the existing metadata will be removed. + * @param options - Optional options to Set Metadata operation. */ - get readableStreamBody() { - return coreHttp.isNode ? this.blobDownloadStream : void 0; + async setMetadata(metadata2, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-setMetadata", options); + options.conditions = options.conditions || {}; + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blobContext.setMetadata(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: metadata2, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); + } } /** - * The HTTP response. + * Sets tags on the underlying blob. + * A blob can have up to 10 tags. Tag keys must be between 1 and 128 characters. Tag values must be between 0 and 256 characters. + * Valid tag key and value characters include lower and upper case letters, digits (0-9), + * space (' '), plus ('+'), minus ('-'), period ('.'), forward slash ('/'), colon (':'), equals ('='), and underscore ('_').
+ * + * @param tags - + * @param options - */ - get _response() { - return this.originalResponse._response; - } - }; - exports2.BlockBlobTier = void 0; - (function(BlockBlobTier) { - BlockBlobTier["Hot"] = "Hot"; - BlockBlobTier["Cool"] = "Cool"; - BlockBlobTier["Cold"] = "Cold"; - BlockBlobTier["Archive"] = "Archive"; - })(exports2.BlockBlobTier || (exports2.BlockBlobTier = {})); - exports2.PremiumPageBlobTier = void 0; - (function(PremiumPageBlobTier) { - PremiumPageBlobTier["P4"] = "P4"; - PremiumPageBlobTier["P6"] = "P6"; - PremiumPageBlobTier["P10"] = "P10"; - PremiumPageBlobTier["P15"] = "P15"; - PremiumPageBlobTier["P20"] = "P20"; - PremiumPageBlobTier["P30"] = "P30"; - PremiumPageBlobTier["P40"] = "P40"; - PremiumPageBlobTier["P50"] = "P50"; - PremiumPageBlobTier["P60"] = "P60"; - PremiumPageBlobTier["P70"] = "P70"; - PremiumPageBlobTier["P80"] = "P80"; - })(exports2.PremiumPageBlobTier || (exports2.PremiumPageBlobTier = {})); - function toAccessTier(tier2) { - if (tier2 === void 0) { - return void 0; - } - return tier2; - } - __name(toAccessTier, "toAccessTier"); - function ensureCpkIfSpecified(cpk, isHttps) { - if (cpk && !isHttps) { - throw new RangeError("Customer-provided encryption key must be used over HTTPS."); - } - if (cpk && !cpk.encryptionAlgorithm) { - cpk.encryptionAlgorithm = EncryptionAlgorithmAES25; - } - } - __name(ensureCpkIfSpecified, "ensureCpkIfSpecified"); - exports2.StorageBlobAudience = void 0; - (function(StorageBlobAudience) { - StorageBlobAudience["StorageOAuthScopes"] = "https://storage.azure.com/.default"; - StorageBlobAudience["DiskComputeOAuthScopes"] = "https://disk.compute.azure.com/.default"; - })(exports2.StorageBlobAudience || (exports2.StorageBlobAudience = {})); - function getBlobServiceAccountAudience(storageAccountName) { - return `https://${storageAccountName}.blob.core.windows.net/.default`; - } - __name(getBlobServiceAccountAudience, "getBlobServiceAccountAudience"); - function rangeResponseFromModel(response) { - const pageRange = (response._response.parsedBody.pageRange || []).map((x) => ({ - offset: x.start, - count: x.end - x.start - })); - const clearRange = (response._response.parsedBody.clearRange || []).map((x) => ({ - offset: x.start, - count: x.end - x.start - })); - return Object.assign(Object.assign({}, response), { - pageRange, - clearRange, - _response: Object.assign(Object.assign({}, response._response), { parsedBody: { - pageRange, - clearRange - } }) - }); - } - __name(rangeResponseFromModel, "rangeResponseFromModel"); - var BlobBeginCopyFromUrlPoller = class extends coreLro.Poller { - static { - __name(this, "BlobBeginCopyFromUrlPoller"); - } - constructor(options) { - const { blobClient, copySource: copySource2, intervalInMs = 15e3, onProgress, resumeFrom, startCopyFromURLOptions } = options; - let state; - if (resumeFrom) { - state = JSON.parse(resumeFrom).state; - } - const operation = makeBlobBeginCopyFromURLPollOperation(Object.assign(Object.assign({}, state), { - blobClient, - copySource: copySource2, - startCopyFromURLOptions - })); - super(operation); - if (typeof onProgress === "function") { - this.onProgress(onProgress); - } - this.intervalInMs = intervalInMs; - } - delay() { - return coreHttp.delay(this.intervalInMs); - } - }; - var cancel = /* @__PURE__ */ __name(async function cancel2(options = {}) { - const state = this.state; - const { copyId: copyId2 } = state; - if (state.isCompleted) { - return makeBlobBeginCopyFromURLPollOperation(state); - } - if (!copyId2) { - state.isCancelled = true; - return 
makeBlobBeginCopyFromURLPollOperation(state); - } - await state.blobClient.abortCopyFromURL(copyId2, { - abortSignal: options.abortSignal - }); - state.isCancelled = true; - return makeBlobBeginCopyFromURLPollOperation(state); - }, "cancel"); - var update = /* @__PURE__ */ __name(async function update2(options = {}) { - const state = this.state; - const { blobClient, copySource: copySource2, startCopyFromURLOptions } = state; - if (!state.isStarted) { - state.isStarted = true; - const result = await blobClient.startCopyFromURL(copySource2, startCopyFromURLOptions); - state.copyId = result.copyId; - if (result.copyStatus === "success") { - state.result = result; - state.isCompleted = true; - } - } else if (!state.isCompleted) { + async setTags(tags2, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-setTags", options); try { - const result = await state.blobClient.getProperties({ abortSignal: options.abortSignal }); - const { copyStatus, copyProgress } = result; - const prevCopyProgress = state.copyProgress; - if (copyProgress) { - state.copyProgress = copyProgress; - } - if (copyStatus === "pending" && copyProgress !== prevCopyProgress && typeof options.fireProgress === "function") { - options.fireProgress(state); - } else if (copyStatus === "success") { - state.result = result; - state.isCompleted = true; - } else if (copyStatus === "failed") { - state.error = new Error(`Blob copy failed with reason: "${result.copyStatusDescription || "unknown"}"`); - state.isCompleted = true; - } - } catch (err) { - state.error = err; - state.isCompleted = true; - } - } - return makeBlobBeginCopyFromURLPollOperation(state); - }, "update"); - var toString = /* @__PURE__ */ __name(function toString2() { - return JSON.stringify({ state: this.state }, (key, value) => { - if (key === "blobClient") { - return void 0; + return await this.blobContext.setTags(Object.assign(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)), { tags: toBlobTags(tags2) })); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); } - return value; - }); - }, "toString"); - function makeBlobBeginCopyFromURLPollOperation(state) { - return { - state: Object.assign({}, state), - cancel, - toString, - update - }; - } - __name(makeBlobBeginCopyFromURLPollOperation, "makeBlobBeginCopyFromURLPollOperation"); - function rangeToString(iRange) { - if (iRange.offset < 0) { - throw new RangeError(`Range.offset cannot be smaller than 0.`); - } - if (iRange.count && iRange.count <= 0) { - throw new RangeError(`Range.count must be larger than 0. Leave it undefined if you want a range from offset to the end.`); - } - return iRange.count ? `bytes=${iRange.offset}-${iRange.offset + iRange.count - 1}` : `bytes=${iRange.offset}-`; - } - __name(rangeToString, "rangeToString"); - var BatchStates; - (function(BatchStates2) { - BatchStates2[BatchStates2["Good"] = 0] = "Good"; - BatchStates2[BatchStates2["Error"] = 1] = "Error"; - })(BatchStates || (BatchStates = {})); - var Batch = class { - static { - __name(this, "Batch"); } /** - * Creates an instance of Batch. - * @param concurrency - + * Gets the tags associated with the underlying blob. 
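+ *
+ * Example usage (editor's sketch, not from the original docs; assumes an existing `blobClient`):
+ *
+ * ```js
+ * const result = await blobClient.getTags();
+ * // `result.tags` is a plain map of tag names to string values.
+ * console.log(result.tags);
+ * ```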
+ * + * @param options - */ - constructor(concurrency = 5) { - this.actives = 0; - this.completed = 0; - this.offset = 0; - this.operations = []; - this.state = BatchStates.Good; - if (concurrency < 1) { - throw new RangeError("concurrency must be larger than 0"); + async getTags(options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-getTags", options); + try { + const response = await this.blobContext.getTags(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, tags: toTags({ blobTagSet: response.blobTagSet }) || {} }); + return wrappedResponse; + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); } - this.concurrency = concurrency; - this.emitter = new events.EventEmitter(); } /** - * Add a operation into queue. + * Get a {@link BlobLeaseClient} that manages leases on the blob. * - * @param operation - + * @param proposeLeaseId - Initial proposed lease Id. + * @returns A new BlobLeaseClient object for managing leases on the blob. */ - addOperation(operation) { - this.operations.push(async () => { - try { - this.actives++; - await operation(); - this.actives--; - this.completed++; - this.parallelExecute(); - } catch (error) { - this.emitter.emit("error", error); - } - }); + getBlobLeaseClient(proposeLeaseId) { + return new BlobLeaseClient(this, proposeLeaseId); } /** - * Start execute operations in the queue. + * Creates a read-only snapshot of a blob. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/snapshot-blob * + * @param options - Optional options to the Blob Create Snapshot operation. */ - async do() { - if (this.operations.length === 0) { - return Promise.resolve(); - } - this.parallelExecute(); - return new Promise((resolve, reject) => { - this.emitter.on("finish", resolve); - this.emitter.on("error", (error) => { - this.state = BatchStates.Error; - reject(error); + async createSnapshot(options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-createSnapshot", options); + options.conditions = options.conditions || {}; + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blobContext.createSnapshot(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message }); - }); + throw e; + } finally { + span.end(); + } } /** - * Get next operation to be executed. Return null when reaching ends. + * Asynchronously copies a blob to a destination within the storage account. + * This method returns a long running operation poller that allows you to wait + * indefinitely until the copy is completed. 
+ * You can also cancel a copy before it is completed by calling `cancelOperation` on the poller. + * Note that the onProgress callback will not be invoked if the operation completes in the first + * request, and attempting to cancel a completed copy will result in an error being thrown. + * + * In version 2012-02-12 and later, the source for a Copy Blob operation can be + * a committed blob in any Azure storage account. + * Beginning with version 2015-02-21, the source for a Copy Blob operation can be + * an Azure file in any Azure storage account. + * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob + * operation to copy from another storage account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob + * + * Example using automatic polling: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url'); + * const result = await copyPoller.pollUntilDone(); + * ``` + * + * Example using manual polling: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url'); + * while (!poller.isDone()) { + * await poller.poll(); + * } + * const result = copyPoller.getResult(); + * ``` + * + * Example using progress updates: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url', { + * onProgress(state) { + * console.log(`Progress: ${state.copyProgress}`); + * } + * }); + * const result = await copyPoller.pollUntilDone(); + * ``` + * + * Example using a changing polling interval (default 15 seconds): + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url', { + * intervalInMs: 1000 // poll blob every 1 second for copy progress + * }); + * const result = await copyPoller.pollUntilDone(); + * ``` + * + * Example using copy cancellation: + * + * ```js + * const copyPoller = await blobClient.beginCopyFromURL('url'); + * // cancel operation after starting it. + * try { + * await copyPoller.cancelOperation(); + * // calls to get the result now throw PollerCancelledError + * await copyPoller.getResult(); + * } catch (err) { + * if (err.name === 'PollerCancelledError') { + * console.log('The copy was cancelled.'); + * } + * } + * ``` * + * @param copySource - url to the source Azure Blob/File. + * @param options - Optional options to the Blob Start Copy From URL operation. */ - nextOperation() { - if (this.offset < this.operations.length) { - return this.operations[this.offset++]; - } - return null; + async beginCopyFromURL(copySource2, options = {}) { + const client = { + abortCopyFromURL: /* @__PURE__ */ __name((...args) => this.abortCopyFromURL(...args), "abortCopyFromURL"), + getProperties: /* @__PURE__ */ __name((...args) => this.getProperties(...args), "getProperties"), + startCopyFromURL: /* @__PURE__ */ __name((...args) => this.startCopyFromURL(...args), "startCopyFromURL") + }; + const poller = new BlobBeginCopyFromUrlPoller({ + blobClient: client, + copySource: copySource2, + intervalInMs: options.intervalInMs, + onProgress: options.onProgress, + resumeFrom: options.resumeFrom, + startCopyFromURLOptions: options + }); + await poller.poll(); + return poller; } /** - * Start execute operations. One one the most important difference between - * this method with do() is that do() wraps as an sync method. + * Aborts a pending asynchronous Copy Blob operation, and leaves a destination blob with zero + * length and full metadata. Version 2012-02-12 and newer. 
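+ *
+ * Example usage (editor's sketch, not from the original docs; assumes a copy started with `startCopyFromURL`):
+ *
+ * ```js
+ * const copyResult = await blobClient.startCopyFromURL('url');
+ * if (copyResult.copyId && copyResult.copyStatus === 'pending') {
+ *   // Aborting leaves a zero-length destination blob with full metadata.
+ *   await blobClient.abortCopyFromURL(copyResult.copyId);
+ * }
+ * ```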
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/abort-copy-blob * + * @param copyId - Id of the Copy From URL operation. + * @param options - Optional options to the Blob Abort Copy From URL operation. */ - parallelExecute() { - if (this.state === BatchStates.Error) { - return; - } - if (this.completed >= this.operations.length) { - this.emitter.emit("finish"); - return; - } - while (this.actives < this.concurrency) { - const operation = this.nextOperation(); - if (operation) { - operation(); - } else { - return; - } + async abortCopyFromURL(copyId2, options = {}) { + const { span, updatedOptions } = createSpan("BlobClient-abortCopyFromURL", options); + try { + return await this.blobContext.abortCopyFromURL(copyId2, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); } } - }; - var BuffersStream = class extends stream.Readable { - static { - __name(this, "BuffersStream"); - } /** - * Creates an instance of BuffersStream that will emit the data - * contained in the array of buffers. + * The synchronous Copy From URL operation copies a blob or an internet resource to a new blob. It will not + * return a response until the copy is complete. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob-from-url * - * @param buffers - Array of buffers containing the data - * @param byteLength - The total length of data contained in the buffers + * @param copySource - The source URL to copy from; a Shared Access Signature (SAS) may be needed for authentication + * @param options - */ - constructor(buffers, byteLength, options) { - super(options); - this.buffers = buffers; - this.byteLength = byteLength; - this.byteOffsetInCurrentBuffer = 0; - this.bufferIndex = 0; - this.pushedBytesLength = 0; - let buffersLength = 0; - for (const buf of this.buffers) { - buffersLength += buf.byteLength; - } - if (buffersLength < this.byteLength) { - throw new Error("Data size shouldn't be larger than the total length of buffers."); + async syncCopyFromURL(copySource2, options = {}) { + var _a, _b, _c; + const { span, updatedOptions } = createSpan("BlobClient-syncCopyFromURL", options); + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + try { + return await this.blobContext.copyFromURL(copySource2, Object.assign({ abortSignal: options.abortSignal, metadata: options.metadata, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: options.sourceConditions.ifMatch, + sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince + }, sourceContentMD5: options.sourceContentMD5, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ?
void 0 : _c.policyMode, legalHold: options.legalHold, encryptionScope: options.encryptionScope, copySourceTags: options.copySourceTags }, convertTracingToRequestOptionsBase(updatedOptions))); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); } } /** - * Internal _read() that will be called when the stream wants to pull more data in. + * Sets the tier on a blob. The operation is allowed on a page blob in a premium + * storage account and on a block blob in a blob storage account (locally redundant + * storage only). A premium page blob's tier determines the allowed size, IOPS, + * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive + * storage type. This operation does not update the blob's ETag. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tier * - * @param size - Optional. The size of data to be read + * @param tier - The tier to be set on the blob. Valid values are Hot, Cool, or Archive. + * @param options - Optional options to the Blob Set Tier operation. */ - _read(size) { - if (this.pushedBytesLength >= this.byteLength) { - this.push(null); + async setAccessTier(tier2, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobClient-setAccessTier", options); + try { + return await this.blobContext.setTier(toAccessTier(tier2), Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), rehydratePriority: options.rehydratePriority }, convertTracingToRequestOptionsBase(updatedOptions))); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); } - if (!size) { - size = this.readableHighWaterMark; + } + async downloadToBuffer(param1, param2, param3, param4 = {}) { + let buffer; + let offset = 0; + let count = 0; + let options = param4; + if (param1 instanceof Buffer) { + buffer = param1; + offset = param2 || 0; + count = typeof param3 === "number" ? param3 : 0; + } else { + offset = typeof param1 === "number" ? param1 : 0; + count = typeof param2 === "number" ? 
param2 : 0; + options = param3 || {}; } - const outBuffers = []; - let i = 0; - while (i < size && this.pushedBytesLength < this.byteLength) { - const remainingDataInAllBuffers = this.byteLength - this.pushedBytesLength; - const remainingCapacityInThisBuffer = this.buffers[this.bufferIndex].byteLength - this.byteOffsetInCurrentBuffer; - const remaining = Math.min(remainingCapacityInThisBuffer, remainingDataInAllBuffers); - if (remaining > size - i) { - const end = this.byteOffsetInCurrentBuffer + size - i; - outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end)); - this.pushedBytesLength += size - i; - this.byteOffsetInCurrentBuffer = end; - i = size; - break; - } else { - const end = this.byteOffsetInCurrentBuffer + remaining; - outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end)); - if (remaining === remainingCapacityInThisBuffer) { - this.byteOffsetInCurrentBuffer = 0; - this.bufferIndex++; - } else { - this.byteOffsetInCurrentBuffer = end; + const { span, updatedOptions } = createSpan("BlobClient-downloadToBuffer", options); + try { + if (!options.blockSize) { + options.blockSize = 0; + } + if (options.blockSize < 0) { + throw new RangeError("blockSize option must be >= 0"); + } + if (options.blockSize === 0) { + options.blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; + } + if (offset < 0) { + throw new RangeError("offset option must be >= 0"); + } + if (count && count <= 0) { + throw new RangeError("count option must be greater than 0"); + } + if (!options.conditions) { + options.conditions = {}; + } + if (!count) { + const response = await this.getProperties(Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); + count = response.contentLength - offset; + if (count < 0) { + throw new RangeError(`offset ${offset} shouldn't be larger than blob size ${response.contentLength}`); } - this.pushedBytesLength += remaining; - i += remaining; } - } - if (outBuffers.length > 1) { - this.push(Buffer.concat(outBuffers)); - } else if (outBuffers.length === 1) { - this.push(outBuffers[0]); - } - } - }; - var maxBufferLength = require("buffer").constants.MAX_LENGTH; - var PooledBuffer = class { - static { - __name(this, "PooledBuffer"); - } - constructor(capacity, buffers, totalLength) { - this.buffers = []; - this.capacity = capacity; - this._size = 0; - const bufferNum = Math.ceil(capacity / maxBufferLength); - for (let i = 0; i < bufferNum; i++) { - let len = i === bufferNum - 1 ? capacity % maxBufferLength : maxBufferLength; - if (len === 0) { - len = maxBufferLength; + if (!buffer) { + try { + buffer = Buffer.alloc(count); + } catch (error) { + throw new Error(`Unable to allocate the buffer of size: ${count}(in bytes). Please try passing your own buffer to the "downloadToBuffer" method or try using other methods like "download" or "downloadToFile". 
${error.message}`); + } } - this.buffers.push(Buffer.allocUnsafe(len)); - } - if (buffers) { - this.fill(buffers, totalLength); + if (buffer.length < count) { + throw new RangeError(`The buffer's size should be equal to or larger than the request count of bytes: ${count}`); + } + let transferProgress = 0; + const batch = new Batch(options.concurrency); + for (let off = offset; off < offset + count; off = off + options.blockSize) { + batch.addOperation(async () => { + let chunkEnd = offset + count; + if (off + options.blockSize < chunkEnd) { + chunkEnd = off + options.blockSize; + } + const response = await this.download(off, chunkEnd - off, { + abortSignal: options.abortSignal, + conditions: options.conditions, + maxRetryRequests: options.maxRetryRequestsPerBlock, + customerProvidedKey: options.customerProvidedKey, + tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) + }); + const stream2 = response.readableStreamBody; + await streamToBuffer(stream2, buffer, off - offset, chunkEnd - offset); + transferProgress += chunkEnd - off; + if (options.onProgress) { + options.onProgress({ loadedBytes: transferProgress }); + } + }); + } + await batch.do(); + return buffer; + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); } } /** - * The size of the data contained in the pooled buffers. - */ - get size() { - return this._size; - } - /** - * Fill the internal buffers with data in the input buffers serially - * with respect to the total length and the total capacity of the internal buffers. - * Data copied will be shift out of the input buffers. + * ONLY AVAILABLE IN NODE.JS RUNTIME. * - * @param buffers - Input buffers containing the data to be filled in the pooled buffer - * @param totalLength - Total length of the data to be filled in. + * Downloads an Azure Blob to a local file. + * Fails if the given file path already exists. + * Offset and count are optional, pass 0 and undefined respectively to download the entire blob. * + * @param filePath - + * @param offset - From which position of the block blob to download. + * @param count - How much data to be downloaded. Will download to the end when passing undefined. + * @param options - Options to the Blob download operation. + * @returns The response data for blob download operation, + * but with readableStreamBody set to undefined since its + * content is already read and written into a local file + * at the specified path.
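+ *
+ * Example usage (editor's sketch, not from the original docs; Node.js only, assumes an existing `blobClient`):
+ *
+ * ```js
+ * // Downloads the entire blob; per the note above, the target path must not already exist.
+ * await blobClient.downloadToFile("downloaded-blob.bin");
+ * ```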
*/ - fill(buffers, totalLength) { - this._size = Math.min(this.capacity, totalLength); - let i = 0, j = 0, targetOffset = 0, sourceOffset = 0, totalCopiedNum = 0; - while (totalCopiedNum < this._size) { - const source = buffers[i]; - const target = this.buffers[j]; - const copiedNum = source.copy(target, targetOffset, sourceOffset); - totalCopiedNum += copiedNum; - sourceOffset += copiedNum; - targetOffset += copiedNum; - if (sourceOffset === source.length) { - i++; - sourceOffset = 0; - } - if (targetOffset === target.length) { - j++; - targetOffset = 0; + async downloadToFile(filePath, offset = 0, count, options = {}) { + const { span, updatedOptions } = createSpan("BlobClient-downloadToFile", options); + try { + const response = await this.download(offset, count, Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); + if (response.readableStreamBody) { + await readStreamToLocalFile(response.readableStreamBody, filePath); } + response.blobDownloadStream = void 0; + return response; + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); } - buffers.splice(0, i); - if (buffers.length > 0) { - buffers[0] = buffers[0].slice(sourceOffset); + } + getBlobAndContainerNamesFromUrl() { + let containerName; + let blobName; + try { + const parsedUrl = coreHttp.URLBuilder.parse(this.url); + if (parsedUrl.getHost().split(".")[1] === "blob") { + const pathComponents = parsedUrl.getPath().match("/([^/]*)(/(.*))?"); + containerName = pathComponents[1]; + blobName = pathComponents[3]; + } else if (isIpEndpointStyle(parsedUrl)) { + const pathComponents = parsedUrl.getPath().match("/([^/]*)/([^/]*)(/(.*))?"); + containerName = pathComponents[2]; + blobName = pathComponents[4]; + } else { + const pathComponents = parsedUrl.getPath().match("/([^/]*)(/(.*))?"); + containerName = pathComponents[1]; + blobName = pathComponents[3]; + } + containerName = decodeURIComponent(containerName); + blobName = decodeURIComponent(blobName); + blobName = blobName.replace(/\\/g, "/"); + if (!containerName) { + throw new Error("Provided containerName is invalid."); + } + return { blobName, containerName }; + } catch (error) { + throw new Error("Unable to extract blobName and containerName with provided information."); } } /** - * Get the readable stream assembled from all the data in the internal buffers. + * Asynchronously copies a blob to a destination within the storage account. + * In version 2012-02-12 and later, the source for a Copy Blob operation can be + * a committed blob in any Azure storage account. + * Beginning with version 2015-02-21, the source for a Copy Blob operation can be + * an Azure file in any Azure storage account. + * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob + * operation to copy from another storage account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob * + * @param copySource - url to the source Azure Blob/File. + * @param options - Optional options to the Blob Start Copy From URL operation. 
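+ *
+ * Example usage (editor's sketch, not from the original docs; prefer `beginCopyFromURL` when you need polling):
+ *
+ * ```js
+ * const result = await blobClient.startCopyFromURL('url');
+ * // The copy continues server-side; poll getProperties() until copyStatus is "success".
+ * console.log(result.copyId, result.copyStatus);
+ * ```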
*/ - getReadableStream() { - return new BuffersStream(this.buffers, this.size); + async startCopyFromURL(copySource2, options = {}) { + var _a, _b, _c; + const { span, updatedOptions } = createSpan("BlobClient-startCopyFromURL", options); + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + try { + return await this.blobContext.startCopyFromURL(copySource2, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: options.sourceConditions.ifMatch, + sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, + sourceIfTags: options.sourceConditions.tagConditions + }, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, rehydratePriority: options.rehydratePriority, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), sealBlob: options.sealBlob }, convertTracingToRequestOptionsBase(updatedOptions))); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); + } } - }; - var BufferScheduler = class { - static { - __name(this, "BufferScheduler"); + /** + * Only available for BlobClient constructed with a shared key credential. + * + * Generates a Blob Service Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + * @param options - Optional parameters. + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + */ + generateSasUrl(options) { + return new Promise((resolve) => { + if (!(this.credential instanceof StorageSharedKeyCredential)) { + throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); + } + const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName, blobName: this._name, snapshotTime: this._snapshot, versionId: this._versionId }, options), this.credential).toString(); + resolve(appendToURLQuery(this.url, sas)); + }); } /** - * Creates an instance of BufferScheduler. + * Delete the immutability policy on the blob. * - * @param readable - A Node.js Readable stream - * @param bufferSize - Buffer size of every maintained buffer - * @param maxBuffers - How many buffers can be allocated - * @param outgoingHandler - An async function scheduled to be - * triggered when a buffer fully filled - * with stream data - * @param concurrency - Concurrency of executing outgoingHandlers (>0) - * @param encoding - [Optional] Encoding of Readable stream when it's a string stream + * @param options - Optional options to delete immutability policy on the blob.
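+ *
+ * Example usage (editor's sketch, not from the original docs; assumes an immutability policy is currently set on the blob):
+ *
+ * ```js
+ * await blobClient.deleteImmutabilityPolicy();
+ * ```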
*/ - constructor(readable, bufferSize, maxBuffers, outgoingHandler, concurrency, encoding) { - this.emitter = new events.EventEmitter(); - this.offset = 0; - this.isStreamEnd = false; - this.isError = false; - this.executingOutgoingHandlers = 0; - this.numBuffers = 0; - this.unresolvedDataArray = []; - this.unresolvedLength = 0; - this.incoming = []; - this.outgoing = []; - if (bufferSize <= 0) { - throw new RangeError(`bufferSize must be larger than 0, current is ${bufferSize}`); - } - if (maxBuffers <= 0) { - throw new RangeError(`maxBuffers must be larger than 0, current is ${maxBuffers}`); - } - if (concurrency <= 0) { - throw new RangeError(`concurrency must be larger than 0, current is ${concurrency}`); - } - this.bufferSize = bufferSize; - this.maxBuffers = maxBuffers; - this.readable = readable; - this.outgoingHandler = outgoingHandler; - this.concurrency = concurrency; - this.encoding = encoding; + async deleteImmutabilityPolicy(options) { + const { span, updatedOptions } = createSpan("BlobClient-deleteImmutabilityPolicy", options); + try { + return await this.blobContext.deleteImmutabilityPolicy(Object.assign({ abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); + } } /** - * Start the scheduler, will return error when stream of any of the outgoingHandlers - * returns error. + * Set immutability policy on the blob. * + * @param options - Optional options to set immutability policy on the blob. */ - async do() { - return new Promise((resolve, reject) => { - this.readable.on("data", (data) => { - data = typeof data === "string" ? Buffer.from(data, this.encoding) : data; - this.appendUnresolvedData(data); - if (!this.resolveData()) { - this.readable.pause(); - } - }); - this.readable.on("error", (err) => { - this.emitter.emit("error", err); - }); - this.readable.on("end", () => { - this.isStreamEnd = true; - this.emitter.emit("checkEnd"); + async setImmutabilityPolicy(immutabilityPolicy, options) { + const { span, updatedOptions } = createSpan("BlobClient-setImmutabilityPolicy", options); + try { + return await this.blobContext.setImmutabilityPolicy(Object.assign({ abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal, immutabilityPolicyExpiry: immutabilityPolicy.expiriesOn, immutabilityPolicyMode: immutabilityPolicy.policyMode, modifiedAccessConditions: options === null || options === void 0 ? void 0 : options.modifiedAccessCondition }, convertTracingToRequestOptionsBase(updatedOptions))); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); + } + } + /** + * Set legal hold on the blob. + * + * @param options - Optional options to set legal hold on the blob. + */ + async setLegalHold(legalHoldEnabled, options) { + const { span, updatedOptions } = createSpan("BlobClient-setLegalHold", options); + try { + return await this.blobContext.setLegalHold(legalHoldEnabled, Object.assign({ abortSignal: options === null || options === void 0 ?
void 0 : options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message }); - this.emitter.on("checkEnd", () => { - if (this.outgoing.length > 0) { - this.triggerOutgoingHandlers(); - return; - } - if (this.isStreamEnd && this.executingOutgoingHandlers === 0) { - if (this.unresolvedLength > 0 && this.unresolvedLength < this.bufferSize) { - const buffer = this.shiftBufferFromUnresolvedDataArray(); - this.outgoingHandler(() => buffer.getReadableStream(), buffer.size, this.offset).then(resolve).catch(reject); - } else if (this.unresolvedLength >= this.bufferSize) { - return; - } else { - resolve(); + throw e; + } finally { + span.end(); + } + } + }; + var AppendBlobClient = class _AppendBlobClient extends BlobClient { + static { + __name(this, "AppendBlobClient"); + } + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) { + let pipeline; + let url2; + options = options || {}; + if (isPipelineLike(credentialOrPipelineOrContainerName)) { + url2 = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } else if (coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential || credentialOrPipelineOrContainerName instanceof AnonymousCredential || coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) { + url2 = urlOrConnectionString; + options = blobNameOrOptions; + pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + } else if (!credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName !== "string") { + url2 = urlOrConnectionString; + pipeline = newPipeline(new AnonymousCredential(), options); + } else if (credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName === "string" && blobNameOrOptions && typeof blobNameOrOptions === "string") { + const containerName = credentialOrPipelineOrContainerName; + const blobName = blobNameOrOptions; + const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (coreHttp.isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url2 = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (!options.proxyOptions) { + options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); } + pipeline = newPipeline(sharedKeyCredential, options); + } else { + throw new Error("Account connection string is only supported in Node.js environment"); } - }); - }); + } else if (extractedCreds.kind === "SASConnString") { + url2 = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + "?" + extractedCreds.accountSas; + pipeline = newPipeline(new AnonymousCredential(), options); + } else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } + } else { + throw new Error("Expecting non-empty strings for containerName and blobName parameters"); + } + super(url2, pipeline); + this.appendBlobContext = new AppendBlob(this.storageClientContext); } /** - * Insert a new data into unresolved array. + * Creates a new AppendBlobClient object identical to the source but with the + * specified snapshot timestamp. 
+ * Provide "" will remove the snapshot and return a Client to the base blob. * - * @param data - + * @param snapshot - The snapshot timestamp. + * @returns A new AppendBlobClient object identical to the source but with the specified snapshot timestamp. */ - appendUnresolvedData(data) { - this.unresolvedDataArray.push(data); - this.unresolvedLength += data.length; + withSnapshot(snapshot2) { + return new _AppendBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot2.length === 0 ? void 0 : snapshot2), this.pipeline); } /** - * Try to shift a buffer with size in blockSize. The buffer returned may be less - * than blockSize when data in unresolvedDataArray is less than bufferSize. + * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param options - Options to the Append Block Create operation. + * * + * Example usage: + * + * ```js + * const appendBlobClient = containerClient.getAppendBlobClient(""); + * await appendBlobClient.create(); + * ``` */ - shiftBufferFromUnresolvedDataArray(buffer) { - if (!buffer) { - buffer = new PooledBuffer(this.bufferSize, this.unresolvedDataArray, this.unresolvedLength); - } else { - buffer.fill(this.unresolvedDataArray, this.unresolvedLength); + async create(options = {}) { + var _a, _b, _c; + const { span, updatedOptions } = createSpan("AppendBlobClient-create", options); + options.conditions = options.conditions || {}; + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.appendBlobContext.create(0, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); } - this.unresolvedLength -= buffer.size; - return buffer; } /** - * Resolve data in unresolvedDataArray. For every buffer with size in blockSize - * shifted, it will try to get (or allocate a buffer) from incoming, and fill it, - * then push it into outgoing to be handled by outgoing handler. - * - * Return false when available buffers in incoming are not enough, else true. + * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. + * If the blob with the same name already exists, the content of the existing blob will remain unchanged. + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob * - * @returns Return false when buffers in incoming are not enough, else true. 
+ * @param options - */ - resolveData() { - while (this.unresolvedLength >= this.bufferSize) { - let buffer; - if (this.incoming.length > 0) { - buffer = this.incoming.shift(); - this.shiftBufferFromUnresolvedDataArray(buffer); - } else { - if (this.numBuffers < this.maxBuffers) { - buffer = this.shiftBufferFromUnresolvedDataArray(); - this.numBuffers++; - } else { - return false; - } + async createIfNotExists(options = {}) { + var _a, _b; + const { span, updatedOptions } = createSpan("AppendBlobClient-createIfNotExists", options); + const conditions = { ifNoneMatch: ETagAny }; + try { + const res = await this.create(Object.assign(Object.assign({}, updatedOptions), { conditions })); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + } catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: "Expected exception when creating a blob only if it does not already exist." + }); + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); } - this.outgoing.push(buffer); - this.triggerOutgoingHandlers(); + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); } - return true; } /** - * Try to trigger a outgoing handler for every buffer in outgoing. Stop when - * concurrency reaches. + * Seals the append blob, making it read only. + * + * @param options - */ - async triggerOutgoingHandlers() { - let buffer; - do { - if (this.executingOutgoingHandlers >= this.concurrency) { - return; - } - buffer = this.outgoing.shift(); - if (buffer) { - this.triggerOutgoingHandler(buffer); - } - } while (buffer); + async seal(options = {}) { + var _a; + const { span, updatedOptions } = createSpan("AppendBlobClient-seal", options); + options.conditions = options.conditions || {}; + try { + return await this.appendBlobContext.seal(Object.assign({ abortSignal: options.abortSignal, appendPositionAccessConditions: options.conditions, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); + } } /** - * Trigger a outgoing handler for a buffer shifted from outgoing. + * Commits a new block of data to the end of the existing append blob. + * @see https://docs.microsoft.com/rest/api/storageservices/append-block * - * @param buffer - + * @param body - Data to be appended. + * @param contentLength - Length of the body in bytes. + * @param options - Options to the Append Block operation. + * + * + * Example usage: + * + * ```js + * const content = "Hello World!"; + * + * // Create a new append blob and append data to the blob. + * const newAppendBlobClient = containerClient.getAppendBlobClient("<blob name>"); + * await newAppendBlobClient.create(); + * await newAppendBlobClient.appendBlock(content, content.length); + * + * // Append data to an existing append blob.
+ * const existingAppendBlobClient = containerClient.getAppendBlobClient("<blob name>"); + * await existingAppendBlobClient.appendBlock(content, content.length); + * ``` */ - async triggerOutgoingHandler(buffer) { - const bufferLength = buffer.size; - this.executingOutgoingHandlers++; - this.offset += bufferLength; + async appendBlock(body2, contentLength2, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("AppendBlobClient-appendBlock", options); + options.conditions = options.conditions || {}; try { - await this.outgoingHandler(() => buffer.getReadableStream(), bufferLength, this.offset - bufferLength); - } catch (err) { - this.emitter.emit("error", err); - return; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.appendBlobContext.appendBlock(contentLength2, body2, Object.assign({ abortSignal: options.abortSignal, appendPositionAccessConditions: options.conditions, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { + onUploadProgress: options.onProgress + }, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); } - this.executingOutgoingHandlers--; - this.reuseBuffer(buffer); - this.emitter.emit("checkEnd"); } /** - * Return buffer used by outgoing handler into incoming. + * The Append Block operation commits a new block of data to the end of an existing append blob + * where the contents are read from a source url. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/append-block-from-url * - * @param buffer - + * @param sourceURL - + * The url to the blob that will be the source of the copy. A source blob in the same storage account can + * be authenticated via Shared Key. However, if the source is a blob in another account, the source blob + * must either be public or must be authenticated via a shared access signature. If the source blob is + * public, no authentication is required to perform the operation.
+ * @param sourceOffset - Offset in source to be appended + * @param count - Number of bytes to be appended as a block + * @param options - */ - reuseBuffer(buffer) { - this.incoming.push(buffer); - if (!this.isError && this.resolveData() && !this.isStreamEnd) { - this.readable.resume(); + async appendBlockFromURL(sourceURL, sourceOffset, count, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("AppendBlobClient-appendBlockFromURL", options); + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.appendBlobContext.appendBlockFromUrl(sourceURL, 0, Object.assign({ abortSignal: options.abortSignal, sourceRange: rangeToString({ offset: sourceOffset, count }), sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, leaseAccessConditions: options.conditions, appendPositionAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: options.sourceConditions.ifMatch, + sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince + }, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); } } }; - async function streamToBuffer(stream2, buffer, offset, end, encoding) { - let pos = 0; - const count = end - offset; - return new Promise((resolve, reject) => { - const timeout = setTimeout(() => reject(new Error(`The operation cannot be completed in timeout.`)), REQUEST_TIMEOUT); - stream2.on("readable", () => { - if (pos >= count) { - clearTimeout(timeout); - resolve(); - return; - } - let chunk = stream2.read(); - if (!chunk) { - return; - } - if (typeof chunk === "string") { - chunk = Buffer.from(chunk, encoding); - } - const chunkLength = pos + chunk.length > count ? count - pos : chunk.length; - buffer.fill(chunk.slice(0, chunkLength), offset + pos, offset + pos + chunkLength); - pos += chunkLength; - }); - stream2.on("end", () => { - clearTimeout(timeout); - if (pos < count) { - reject(new Error(`Stream drains before getting enough data needed. Data read: ${pos}, data need: ${count}`)); - } - resolve(); - }); - stream2.on("error", (msg) => { - clearTimeout(timeout); - reject(msg); - }); - }); - } - __name(streamToBuffer, "streamToBuffer"); - async function streamToBuffer2(stream2, buffer, encoding) { - let pos = 0; - const bufferSize = buffer.length; - return new Promise((resolve, reject) => { - stream2.on("readable", () => { - let chunk = stream2.read(); - if (!chunk) { - return; - } - if (typeof chunk === "string") { - chunk = Buffer.from(chunk, encoding); - } - if (pos + chunk.length > bufferSize) { - reject(new Error(`Stream exceeds buffer size. 
Buffer size: ${bufferSize}`)); - return; - } - buffer.fill(chunk, pos, pos + chunk.length); - pos += chunk.length; - }); - stream2.on("end", () => { - resolve(pos); - }); - stream2.on("error", reject); - }); - } - __name(streamToBuffer2, "streamToBuffer2"); - async function readStreamToLocalFile(rs, file) { - return new Promise((resolve, reject) => { - const ws = fs__namespace.createWriteStream(file); - rs.on("error", (err) => { - reject(err); - }); - ws.on("error", (err) => { - reject(err); - }); - ws.on("close", resolve); - rs.pipe(ws); - }); - } - __name(readStreamToLocalFile, "readStreamToLocalFile"); - var fsStat = util__namespace.promisify(fs__namespace.stat); - var fsCreateReadStream = fs__namespace.createReadStream; - var BlobClient = class _BlobClient extends StorageClient { + var BlockBlobClient = class _BlockBlobClient extends BlobClient { static { - __name(this, "BlobClient"); + __name(this, "BlockBlobClient"); } constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) { - options = options || {}; let pipeline; let url2; + options = options || {}; if (isPipelineLike(credentialOrPipelineOrContainerName)) { url2 = urlOrConnectionString; pipeline = credentialOrPipelineOrContainerName; @@ -84611,172 +85424,550 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; throw new Error("Expecting non-empty strings for containerName and blobName parameters"); } super(url2, pipeline); - ({ blobName: this._name, containerName: this._containerName } = this.getBlobAndContainerNamesFromUrl()); - this.blobContext = new Blob$1(this.storageClientContext); - this._snapshot = getURLParameter(this.url, URLConstants.Parameters.SNAPSHOT); - this._versionId = getURLParameter(this.url, URLConstants.Parameters.VERSIONID); + this.blockBlobContext = new BlockBlob(this.storageClientContext); + this._blobContext = new Blob$1(this.storageClientContext); } /** - * The name of the blob. + * Creates a new BlockBlobClient object identical to the source but with the + * specified snapshot timestamp. + * Providing "" will remove the snapshot and return a URL to the base blob. + * + * @param snapshot - The snapshot timestamp. + * @returns A new BlockBlobClient object identical to the source but with the specified snapshot timestamp. */ - get name() { - return this._name; + withSnapshot(snapshot2) { + return new _BlockBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot2.length === 0 ? void 0 : snapshot2), this.pipeline); } /** - * The name of the storage container the blob is associated with. + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * Quick query for a JSON or CSV formatted blob. + * + * Example usage (Node.js): + * + * ```js + * // Query and convert a blob to a string + * const queryBlockBlobResponse = await blockBlobClient.query("select * from BlobStorage"); + * const downloaded = (await streamToBuffer(queryBlockBlobResponse.readableStreamBody)).toString(); + * console.log("Query blob content:", downloaded); + * + * async function streamToBuffer(readableStream) { + * return new Promise((resolve, reject) => { + * const chunks = []; + * readableStream.on("data", (data) => { + * chunks.push(data instanceof Buffer ?
data : Buffer.from(data)); + * }); + * readableStream.on("end", () => { + * resolve(Buffer.concat(chunks)); + * }); + * readableStream.on("error", reject); + * }); + * } + * ``` + * + * @param query - + * @param options - */ - get containerName() { - return this._containerName; + async query(query, options = {}) { + var _a; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + const { span, updatedOptions } = createSpan("BlockBlobClient-query", options); + try { + if (!coreHttp.isNode) { + throw new Error("This operation currently is only supported in Node.js."); + } + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + const response = await this._blobContext.query(Object.assign({ abortSignal: options.abortSignal, queryRequest: { + queryType: "SQL", + expression: query, + inputSerialization: toQuerySerialization(options.inputTextConfiguration), + outputSerialization: toQuerySerialization(options.outputTextConfiguration) + }, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey }, convertTracingToRequestOptionsBase(updatedOptions))); + return new BlobQueryResponse(response, { + abortSignal: options.abortSignal, + onProgress: options.onProgress, + onError: options.onError + }); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); + } } /** - * Creates a new BlobClient object identical to the source but with the specified snapshot timestamp. - * Provide "" will remove the snapshot and return a Client to the base blob. + * Creates a new block blob, or updates the content of an existing block blob. + * Updating an existing block blob overwrites any existing metadata on the blob. + * Partial updates are not supported; the content of the existing blob is + * overwritten with the new content. To perform a partial update of a block blob's contents, + * use {@link stageBlock} and {@link commitBlockList}. * - * @param snapshot - The snapshot timestamp. - * @returns A new BlobClient object identical to the source but with the specified snapshot timestamp + * This is a non-parallel uploading method; please use {@link uploadFile}, + * {@link uploadStream} or {@link uploadBrowserData} for better performance + * with concurrent uploading. + * + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function + * which returns a new Readable stream whose offset is from data source beginning. + * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a + * string including non-Base64/Hex-encoded characters. + * @param options - Options to the Block Blob Upload operation. + * @returns Response data for the Block Blob Upload operation. + * + * Example usage: + * + * ```js + * const content = "Hello world!"; + * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); + * ``` */ - withSnapshot(snapshot2) { - return new _BlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot2.length === 0 ?
void 0 : snapshot2), this.pipeline); + async upload(body2, contentLength2, options = {}) { + var _a, _b, _c; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("BlockBlobClient-upload", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blockBlobContext.upload(contentLength2, body2, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { + onUploadProgress: options.onProgress + }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); + } } /** - * Creates a new BlobClient object pointing to a version of this blob. - * Provide "" will remove the versionId and return a Client to the base blob. + * Creates a new Block Blob where the contents of the blob are read from a given URL. + * This API is supported beginning with the 2020-04-08 version. Partial updates + * are not supported with Put Blob from URL; the content of an existing blob is overwritten with + * the content of the new blob. To perform partial updates to a block blob’s contents using a + * source URL, use {@link stageBlockFromURL} and {@link commitBlockList}. * - * @param versionId - The versionId. - * @returns A new BlobClient object pointing to the version of this blob. + * @param sourceURL - Specifies the URL of the blob. The value + * may be a URL of up to 2 KB in length that specifies a blob. + * The value should be URL-encoded as it would appear + * in a request URI. The source blob must either be public + * or must be authenticated via a shared access signature. + * If the source blob is public, no authentication is required + * to perform the operation. Here are some examples of source object URLs: + * - https://myaccount.blob.core.windows.net/mycontainer/myblob + * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= + * @param options - Optional parameters. */ - withVersion(versionId2) { - return new _BlobClient(setURLParameter(this.url, URLConstants.Parameters.VERSIONID, versionId2.length === 0 ? 
void 0 : versionId2), this.pipeline); + async syncUploadFromURL(sourceURL, options = {}) { + var _a, _b, _c, _d, _e; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("BlockBlobClient-syncUploadFromURL", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blockBlobContext.putBlobFromUrl(0, sourceURL, Object.assign(Object.assign(Object.assign({}, options), { blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: options.conditions.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: (_a = options.sourceConditions) === null || _a === void 0 ? void 0 : _a.ifMatch, + sourceIfModifiedSince: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifModifiedSince, + sourceIfNoneMatch: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch, + sourceIfUnmodifiedSince: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifUnmodifiedSince, + sourceIfTags: (_e = options.sourceConditions) === null || _e === void 0 ? void 0 : _e.tagConditions + }, cpkInfo: options.customerProvidedKey, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), copySourceTags: options.copySourceTags }), convertTracingToRequestOptionsBase(updatedOptions))); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); + } } /** - * Creates a AppendBlobClient object. + * Uploads the specified block to the block blob's "staging area" to be later + * committed by a call to commitBlockList. + * @see https://docs.microsoft.com/rest/api/storageservices/put-block * + * @param blockId - A 64-byte value that is base64-encoded + * @param body - Data to upload to the staging area. + * @param contentLength - Number of bytes to upload. + * @param options - Options to the Block Blob Stage Block operation. + * @returns Response data for the Block Blob Stage Block operation. */ - getAppendBlobClient() { - return new AppendBlobClient(this.url, this.pipeline); + async stageBlock(blockId2, body2, contentLength2, options = {}) { + const { span, updatedOptions } = createSpan("BlockBlobClient-stageBlock", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blockBlobContext.stageBlock(blockId2, contentLength2, body2, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, requestOptions: { + onUploadProgress: options.onProgress + }, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); + } } /** - * Creates a BlockBlobClient object. + * The Stage Block From URL operation creates a new block to be committed as part + * of a blob where the contents are read from a URL. + * This API is available starting in version 2018-03-28. 
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-block-from-url
*
+ * @param blockId - A 64-byte value that is base64-encoded
+ * @param sourceURL - Specifies the URL of the blob. The value
+ * may be a URL of up to 2 KB in length that specifies a blob.
+ * The value should be URL-encoded as it would appear
+ * in a request URI. The source blob must either be public
+ * or must be authenticated via a shared access signature.
+ * If the source blob is public, no authentication is required
+ * to perform the operation. Here are some examples of source object URLs:
+ * - https://myaccount.blob.core.windows.net/mycontainer/myblob
+ * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot=
+ * @param offset - From which position of the blob to download, greater than or equal to 0
+ * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined
+ * @param options - Options to the Block Blob Stage Block From URL operation.
+ * @returns Response data for the Block Blob Stage Block From URL operation.
*/
- getBlockBlobClient() {
- return new BlockBlobClient(this.url, this.pipeline);
+ async stageBlockFromURL(blockId2, sourceURL, offset = 0, count, options = {}) {
+ const { span, updatedOptions } = createSpan("BlockBlobClient-stageBlockFromURL", options);
+ try {
+ ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
+ return await this.blockBlobContext.stageBlockFromURL(blockId2, 0, sourceURL, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, sourceRange: offset === 0 && !count ? void 0 : rangeToString({ offset, count }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization) }, convertTracingToRequestOptionsBase(updatedOptions)));
+ } catch (e) {
+ span.setStatus({
+ code: coreTracing.SpanStatusCode.ERROR,
+ message: e.message
+ });
+ throw e;
+ } finally {
+ span.end();
+ }
}
/**
- * Creates a PageBlobClient object.
+ * Writes a blob by specifying the list of block IDs that make up the blob.
+ * In order to be written as part of a blob, a block must have been successfully written
+ * to the server in a prior {@link stageBlock} operation. You can call {@link commitBlockList} to
+ * update a blob by uploading only those blocks that have changed, then committing the new and existing
+ * blocks together. Any blocks not specified in the block list are permanently deleted.
+ * @see https://docs.microsoft.com/rest/api/storageservices/put-block-list
*
+ * @param blocks - Array of 64-byte values that are base64-encoded
+ * @param options - Options to the Block Blob Commit Block List operation.
+ * @returns Response data for the Block Blob Commit Block List operation.
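+ *
+ * Example usage (a sketch, assuming an existing `blockBlobClient`; the block IDs and contents are illustrative):
+ *
+ * ```js
+ * // Block IDs must be base64-encoded strings of equal length.
+ * const blockIds = ["block-000000", "block-000001"].map((id) => Buffer.from(id).toString("base64"));
+ * // Stage two 6-byte blocks, then commit them as the blob's content.
+ * await blockBlobClient.stageBlock(blockIds[0], "Hello ", 6);
+ * await blockBlobClient.stageBlock(blockIds[1], "world!", 6);
+ * await blockBlobClient.commitBlockList(blockIds);
+ * ```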
*/ - getPageBlobClient() { - return new PageBlobClient(this.url, this.pipeline); + async commitBlockList(blocks2, options = {}) { + var _a, _b, _c; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("BlockBlobClient-commitBlockList", options); + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return await this.blockBlobContext.commitBlockList({ latest: blocks2 }, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); + } } /** - * Reads or downloads a blob from the system, including its metadata and properties. - * You can also call Get Blob to read a snapshot. - * - * * In Node.js, data returns in a Readable stream readableStreamBody - * * In browsers, data returns in a promise blobBody + * Returns the list of blocks that have been uploaded as part of a block blob + * using the specified block list filter. + * @see https://docs.microsoft.com/rest/api/storageservices/get-block-list * - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob + * @param listType - Specifies whether to return the list of committed blocks, + * the list of uncommitted blocks, or both lists together. + * @param options - Options to the Block Blob Get Block List operation. + * @returns Response data for the Block Blob Get Block List operation. + */ + async getBlockList(listType2, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlockBlobClient-getBlockList", options); + try { + const res = await this.blockBlobContext.getBlockList(listType2, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + if (!res.committedBlocks) { + res.committedBlocks = []; + } + if (!res.uncommittedBlocks) { + res.uncommittedBlocks = []; + } + return res; + } catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message + }); + throw e; + } finally { + span.end(); + } + } + // High level functions + /** + * Uploads a Buffer(Node.js)/Blob(browsers)/ArrayBuffer/ArrayBufferView object to a BlockBlob. * - * @param offset - From which position of the blob to download, greater than or equal to 0 - * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined - * @param options - Optional options to Blob Download operation. 
+ * When data length is no more than the specified {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is
+ * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload.
+ * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList}
+ * to commit the block list.
*
+ * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is
+ * `blobContentType`, enabling the browser to provide
+ * functionality based on file type.
*
- * Example usage (Node.js):
+ * @param data - Buffer(Node.js), Blob, ArrayBuffer or ArrayBufferView
+ * @param options -
+ */
+ async uploadData(data, options = {}) {
+ const { span, updatedOptions } = createSpan("BlockBlobClient-uploadData", options);
+ try {
+ if (coreHttp.isNode) {
+ let buffer;
+ if (data instanceof Buffer) {
+ buffer = data;
+ } else if (data instanceof ArrayBuffer) {
+ buffer = Buffer.from(data);
+ } else {
+ data = data;
+ buffer = Buffer.from(data.buffer, data.byteOffset, data.byteLength);
+ }
+ return this.uploadSeekableInternal((offset, size) => buffer.slice(offset, offset + size), buffer.byteLength, updatedOptions);
+ } else {
+ const browserBlob = new Blob([data]);
+ return this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions);
+ }
+ } catch (e) {
+ span.setStatus({
+ code: coreTracing.SpanStatusCode.ERROR,
+ message: e.message
+ });
+ throw e;
+ } finally {
+ span.end();
+ }
+ }
+ /**
+ * ONLY AVAILABLE IN BROWSERS.
*
- * ```js
- * // Download and convert a blob to a string
- * const downloadBlockBlobResponse = await blobClient.download();
- * const downloaded = await streamToBuffer(downloadBlockBlobResponse.readableStreamBody);
- * console.log("Downloaded blob content:", downloaded.toString());
+ * Uploads a browser Blob/File/ArrayBuffer/ArrayBufferView object to a block blob.
*
- * async function streamToBuffer(readableStream) {
- * return new Promise((resolve, reject) => {
- * const chunks = [];
- * readableStream.on("data", (data) => {
- * chunks.push(data instanceof Buffer ? data : Buffer.from(data));
- * });
- * readableStream.on("end", () => {
- * resolve(Buffer.concat(chunks));
- * });
- * readableStream.on("error", reject);
- * });
- * }
- * ```
+ * When the buffer length is less than or equal to 256MB, this method will use 1 upload call to finish the upload.
+ * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call
+ * {@link commitBlockList} to commit the block list.
*
- * Example usage (browser):
+ * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is
+ * `blobContentType`, enabling the browser to provide
+ * functionality based on file type.
*
- * ```js
- * // Download and convert a blob to a string
- * const downloadBlockBlobResponse = await blobClient.download();
- * const downloaded = await blobToString(await downloadBlockBlobResponse.blobBody);
- * console.log(
- * "Downloaded blob content",
- * downloaded
- * );
+ * @deprecated Use {@link uploadData} instead.
*
- * async function blobToString(blob: Blob): Promise {
- * const fileReader = new FileReader();
- * return new Promise((resolve, reject) => {
- * fileReader.onloadend = (ev: any) => {
- * resolve(ev.target!.result);
- * };
- * fileReader.onerror = reject;
- * fileReader.readAsText(blob);
- * });
- * }
- * ```
+ * @param browserData - Blob, File, ArrayBuffer or ArrayBufferView
+ * @param options - Options to upload browser data.
+ * @returns Response data for the Blob Upload operation.
*/
- async download(offset = 0, count, options = {}) {
- var _a;
- options.conditions = options.conditions || {};
- options.conditions = options.conditions || {};
- ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
- const { span, updatedOptions } = createSpan("BlobClient-download", options);
+ async uploadBrowserData(browserData, options = {}) {
+ const { span, updatedOptions } = createSpan("BlockBlobClient-uploadBrowserData", options);
try {
- const res = await this.blobContext.download(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: {
- onDownloadProgress: coreHttp.isNode ? void 0 : options.onProgress
- // for Node.js, progress is reported by RetriableReadableStream
- }, range: offset === 0 && !count ? void 0 : rangeToString({ offset, count }), rangeGetContentMD5: options.rangeGetContentMD5, rangeGetContentCRC64: options.rangeGetContentCrc64, snapshot: options.snapshot, cpkInfo: options.customerProvidedKey }, convertTracingToRequestOptionsBase(updatedOptions)));
- const wrappedRes = Object.assign(Object.assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) });
- if (!coreHttp.isNode) {
- return wrappedRes;
+ const browserBlob = new Blob([browserData]);
+ return await this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions);
+ } catch (e) {
+ span.setStatus({
+ code: coreTracing.SpanStatusCode.ERROR,
+ message: e.message
+ });
+ throw e;
+ } finally {
+ span.end();
+ }
+ }
+ /**
+ *
+ * Uploads data to a block blob. Requires a bodyFactory as the data source,
+ * which needs to return a {@link HttpRequestBody} object with the offset and size provided.
+ *
+ * When data length is no more than the specified {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is
+ * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload.
+ * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList}
+ * to commit the block list.
+ *
+ * @param bodyFactory -
+ * @param size - size of the data to upload.
+ * @param options - Options to Upload to Block Blob operation.
+ * @returns Response data for the Blob Upload operation.
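+ *
+ * Example usage (a sketch; this is an internal helper used by {@link uploadData} and {@link uploadFile}, and `data` is an assumed Node.js Buffer):
+ *
+ * ```js
+ * // The bodyFactory hands back a slice of the source for each requested range.
+ * const bodyFactory = (offset, size) => data.slice(offset, offset + size);
+ * await blockBlobClient.uploadSeekableInternal(bodyFactory, data.byteLength, { blockSize: 4 * 1024 * 1024 });
+ * ```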
+ */
+ async uploadSeekableInternal(bodyFactory, size, options = {}) {
+ if (!options.blockSize) {
+ options.blockSize = 0;
+ }
+ if (options.blockSize < 0 || options.blockSize > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES) {
+ throw new RangeError(`blockSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES}`);
+ }
+ if (options.maxSingleShotSize !== 0 && !options.maxSingleShotSize) {
+ options.maxSingleShotSize = BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES;
+ }
+ if (options.maxSingleShotSize < 0 || options.maxSingleShotSize > BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES) {
+ throw new RangeError(`maxSingleShotSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}`);
+ }
+ if (options.blockSize === 0) {
+ if (size > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES * BLOCK_BLOB_MAX_BLOCKS) {
+ throw new RangeError(`${size} is too large to upload to a block blob.`);
}
- if (options.maxRetryRequests === void 0 || options.maxRetryRequests < 0) {
- options.maxRetryRequests = DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS;
+ if (size > options.maxSingleShotSize) {
+ options.blockSize = Math.ceil(size / BLOCK_BLOB_MAX_BLOCKS);
+ if (options.blockSize < DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES) {
+ options.blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES;
+ }
}
- if (res.contentLength === void 0) {
- throw new RangeError(`File download response doesn't contain valid content length header`);
+ }
+ if (!options.blobHTTPHeaders) {
+ options.blobHTTPHeaders = {};
+ }
+ if (!options.conditions) {
+ options.conditions = {};
+ }
+ const { span, updatedOptions } = createSpan("BlockBlobClient-uploadSeekableInternal", options);
+ try {
+ if (size <= options.maxSingleShotSize) {
+ return await this.upload(bodyFactory(0, size), size, updatedOptions);
}
- if (!res.etag) {
- throw new RangeError(`File download response doesn't contain valid etag header`);
+ const numBlocks = Math.floor((size - 1) / options.blockSize) + 1;
+ if (numBlocks > BLOCK_BLOB_MAX_BLOCKS) {
+ throw new RangeError(`The buffer's size is too big or the BlockSize is too small; the number of blocks must be <= ${BLOCK_BLOB_MAX_BLOCKS}`);
}
- return new BlobDownloadResponse(wrappedRes, async (start) => {
- var _a2;
- const updatedDownloadOptions = {
- leaseAccessConditions: options.conditions,
- modifiedAccessConditions: {
- ifMatch: options.conditions.ifMatch || res.etag,
- ifModifiedSince: options.conditions.ifModifiedSince,
- ifNoneMatch: options.conditions.ifNoneMatch,
- ifUnmodifiedSince: options.conditions.ifUnmodifiedSince,
- ifTags: (_a2 = options.conditions) === null || _a2 === void 0 ? void 0 : _a2.tagConditions
- },
- range: rangeToString({
- count: offset + res.contentLength - start,
- offset: start
- }),
- rangeGetContentMD5: options.rangeGetContentMD5,
- rangeGetContentCRC64: options.rangeGetContentCrc64,
- snapshot: options.snapshot,
- cpkInfo: options.customerProvidedKey
- };
- return (await this.blobContext.download(Object.assign({ abortSignal: options.abortSignal }, updatedDownloadOptions))).readableStreamBody;
- }, offset, res.contentLength, {
- maxRetryRequests: options.maxRetryRequests,
- onProgress: options.onProgress
+ const blockList = [];
+ const blockIDPrefix = coreHttp.generateUuid();
+ let transferProgress = 0;
+ const batch = new Batch(options.concurrency);
+ for (let i = 0; i < numBlocks; i++) {
+ batch.addOperation(async () => {
+ const blockID = generateBlockID(blockIDPrefix, i);
+ const start = options.blockSize * i;
+ const end = i === numBlocks - 1 ?
size : start + options.blockSize;
+ const contentLength2 = end - start;
+ blockList.push(blockID);
+ await this.stageBlock(blockID, bodyFactory(start, contentLength2), contentLength2, {
+ abortSignal: options.abortSignal,
+ conditions: options.conditions,
+ encryptionScope: options.encryptionScope,
+ tracingOptions: updatedOptions.tracingOptions
+ });
+ transferProgress += contentLength2;
+ if (options.onProgress) {
+ options.onProgress({
+ loadedBytes: transferProgress
+ });
+ }
+ });
+ }
+ await batch.do();
+ return this.commitBlockList(blockList, updatedOptions);
+ } catch (e) {
+ span.setStatus({
+ code: coreTracing.SpanStatusCode.ERROR,
+ message: e.message
+ });
+ throw e;
+ } finally {
+ span.end();
+ }
+ }
+ /**
+ * ONLY AVAILABLE IN NODE.JS RUNTIME.
+ *
+ * Uploads a local file in blocks to a block blob.
+ *
+ * When the file size is less than or equal to 256MB, this method will use 1 upload call to finish the upload.
+ * Otherwise, this method will call stageBlock to upload blocks, and finally call commitBlockList
+ * to commit the block list.
+ *
+ * @param filePath - Full path of local file
+ * @param options - Options to Upload to Block Blob operation.
+ * @returns Response data for the Blob Upload operation.
+ */
+ async uploadFile(filePath, options = {}) {
+ const { span, updatedOptions } = createSpan("BlockBlobClient-uploadFile", options);
+ try {
+ const size = (await fsStat(filePath)).size;
+ return await this.uploadSeekableInternal((offset, count) => {
+ return () => fsCreateReadStream(filePath, {
+ autoClose: true,
+ end: count ? offset + count - 1 : Infinity,
+ start: offset
+ });
+ }, size, Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) }));
+ } catch (e) {
+ span.setStatus({
+ code: coreTracing.SpanStatusCode.ERROR,
+ message: e.message
+ });
+ throw e;
+ } finally {
+ span.end();
+ }
+ }
+ /**
+ * ONLY AVAILABLE IN NODE.JS RUNTIME.
+ *
+ * Uploads a Node.js Readable stream into a block blob.
+ *
+ * PERFORMANCE IMPROVEMENT TIPS:
+ * * Set the input stream's highWaterMark to the same value as the bufferSize
+ * parameter, which avoids Buffer.concat() operations.
+ *
+ * @param stream - Node.js Readable stream
+ * @param bufferSize - Size of every buffer allocated, also the block size in the uploaded block blob. Default value is 8MB
+ * @param maxConcurrency - Max concurrency indicates the max number of buffers that can be allocated,
+ * positive correlation with max uploading concurrency. Default value is 5
+ * @param options - Options to Upload Stream to Block Blob operation.
+ * @returns Response data for the Blob Upload operation.
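+ *
+ * Example usage (a sketch, assuming Node.js and an existing `blockBlobClient`; the file path is hypothetical):
+ *
+ * ```js
+ * const fs = require("fs");
+ * const stream = fs.createReadStream("./big-file.bin");
+ * // 8 MiB buffers, at most 5 allocated at a time; the buffer size is also the block size of the resulting blob.
+ * await blockBlobClient.uploadStream(stream, 8 * 1024 * 1024, 5, {
+ *   onProgress: (ev) => console.log(`uploaded ${ev.loadedBytes} bytes`)
+ * });
+ * ```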
+ */
+ async uploadStream(stream2, bufferSize = DEFAULT_BLOCK_BUFFER_SIZE_BYTES, maxConcurrency = 5, options = {}) {
+ if (!options.blobHTTPHeaders) {
+ options.blobHTTPHeaders = {};
+ }
+ if (!options.conditions) {
+ options.conditions = {};
+ }
+ const { span, updatedOptions } = createSpan("BlockBlobClient-uploadStream", options);
+ try {
+ let blockNum = 0;
+ const blockIDPrefix = coreHttp.generateUuid();
+ let transferProgress = 0;
+ const blockList = [];
+ const scheduler = new BufferScheduler(
+ stream2,
+ bufferSize,
+ maxConcurrency,
+ async (body2, length) => {
+ const blockID = generateBlockID(blockIDPrefix, blockNum);
+ blockList.push(blockID);
+ blockNum++;
+ await this.stageBlock(blockID, body2, length, {
+ conditions: options.conditions,
+ encryptionScope: options.encryptionScope,
+ tracingOptions: updatedOptions.tracingOptions
+ });
+ transferProgress += length;
+ if (options.onProgress) {
+ options.onProgress({ loadedBytes: transferProgress });
+ }
+ },
+ // Use a concurrency smaller than maxConcurrency so that when an outgoing
+ // handler waits for stream data, the remaining handlers are less likely to
+ // be blocked; the outgoing queue shouldn't be empty.
+ Math.ceil(maxConcurrency / 4 * 3)
+ );
+ await scheduler.do();
+ return await this.commitBlockList(blockList, Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) }));
} catch (e) {
span.setStatus({
code: coreTracing.SpanStatusCode.ERROR,
@@ -84787,86 +85978,79 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
span.end();
}
}
- /**
- * Returns true if the Azure blob resource represented by this client exists; false otherwise.
- *
- * NOTE: use this function with care since an existing blob might be deleted by other clients or
- * applications. Vice versa new blobs might be added by other clients or applications after this
- * function completes.
- *
- * @param options - options to Exists operation.
- */ - async exists(options = {}) { - const { span, updatedOptions } = createSpan("BlobClient-exists", options); - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - await this.getProperties({ - abortSignal: options.abortSignal, - customerProvidedKey: options.customerProvidedKey, - conditions: options.conditions, - tracingOptions: updatedOptions.tracingOptions - }); - return true; - } catch (e) { - if (e.statusCode === 404) { - return false; - } else if (e.statusCode === 409 && (e.details.errorCode === BlobUsesCustomerSpecifiedEncryptionMsg || e.details.errorCode === BlobDoesNotUseCustomerSpecifiedEncryption)) { - return true; + }; + var PageBlobClient = class _PageBlobClient extends BlobClient { + static { + __name(this, "PageBlobClient"); + } + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) { + let pipeline; + let url2; + options = options || {}; + if (isPipelineLike(credentialOrPipelineOrContainerName)) { + url2 = urlOrConnectionString; + pipeline = credentialOrPipelineOrContainerName; + } else if (coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential || credentialOrPipelineOrContainerName instanceof AnonymousCredential || coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) { + url2 = urlOrConnectionString; + options = blobNameOrOptions; + pipeline = newPipeline(credentialOrPipelineOrContainerName, options); + } else if (!credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName !== "string") { + url2 = urlOrConnectionString; + pipeline = newPipeline(new AnonymousCredential(), options); + } else if (credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName === "string" && blobNameOrOptions && typeof blobNameOrOptions === "string") { + const containerName = credentialOrPipelineOrContainerName; + const blobName = blobNameOrOptions; + const extractedCreds = extractConnectionStringParts(urlOrConnectionString); + if (extractedCreds.kind === "AccountConnString") { + if (coreHttp.isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + url2 = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + if (!options.proxyOptions) { + options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + } + pipeline = newPipeline(sharedKeyCredential, options); + } else { + throw new Error("Account connection string is only supported in Node.js environment"); + } + } else if (extractedCreds.kind === "SASConnString") { + url2 = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + "?" + extractedCreds.accountSas; + pipeline = newPipeline(new AnonymousCredential(), options); + } else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); } - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); + } else { + throw new Error("Expecting non-empty strings for containerName and blobName parameters"); } + super(url2, pipeline); + this.pageBlobContext = new PageBlob(this.storageClientContext); } /** - * Returns all user-defined metadata, standard HTTP properties, and system properties - * for the blob. It does not return the content of the blob. 
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-properties
- *
- * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if
- * they originally contained uppercase characters. This differs from the metadata keys returned by
- * the methods of {@link ContainerClient} that list blobs using the `includeMetadata` option, which
- * will retain their original casing.
+ * Creates a new PageBlobClient object identical to the source but with the
+ * specified snapshot timestamp.
+ * Providing "" will remove the snapshot and return a Client to the base blob.
*
- * @param options - Optional options to Get Properties operation.
+ * @param snapshot - The snapshot timestamp.
+ * @returns A new PageBlobClient object identical to the source but with the specified snapshot timestamp.
*/
- async getProperties(options = {}) {
- var _a;
- const { span, updatedOptions } = createSpan("BlobClient-getProperties", options);
- try {
- options.conditions = options.conditions || {};
- ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
- const res = await this.blobContext.getProperties(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey }, convertTracingToRequestOptionsBase(updatedOptions)));
- return Object.assign(Object.assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) });
- } catch (e) {
- span.setStatus({
- code: coreTracing.SpanStatusCode.ERROR,
- message: e.message
- });
- throw e;
- } finally {
- span.end();
- }
+ withSnapshot(snapshot2) {
+ return new _PageBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot2.length === 0 ? void 0 : snapshot2), this.pipeline);
}
/**
- * Marks the specified blob or snapshot for deletion. The blob is later deleted
- * during garbage collection. Note that in order to delete a blob, you must delete
- * all of its snapshots. You can delete both at the same time with the Delete
- * Blob operation.
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob
+ * Creates a page blob of the specified length. Call uploadPages to upload
+ * data to a page blob.
+ * @see https://docs.microsoft.com/rest/api/storageservices/put-blob
*
- * @param options - Optional options to Blob Delete operation.
+ * @param size - size of the page blob.
+ * @param options - Options to the Page Blob Create operation.
+ * @returns Response data for the Page Blob Create operation.
*/
- async delete(options = {}) {
- var _a;
- const { span, updatedOptions } = createSpan("BlobClient-delete", options);
+ async create(size, options = {}) {
+ var _a, _b, _c;
options.conditions = options.conditions || {};
+ const { span, updatedOptions } = createSpan("PageBlobClient-create", options);
try {
- return await this.blobContext.delete(Object.assign({ abortSignal: options.abortSignal, deleteSnapshots: options.deleteSnapshots, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ?
void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)));
+ ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
+ return await this.pageBlobContext.create(0, size, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, blobSequenceNumber: options.blobSequenceNumber, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions)));
} catch (e) {
span.setStatus({
code: coreTracing.SpanStatusCode.ERROR,
@@ -84878,25 +86062,26 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
}
}
/**
- * Marks the specified blob or snapshot for deletion if it exists. The blob is later deleted
- * during garbage collection. Note that in order to delete a blob, you must delete
- * all of its snapshots. You can delete both at the same time with the Delete
- * Blob operation.
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob
+ * Creates a page blob of the specified length. Call uploadPages to upload
+ * data to a page blob. If the blob with the same name already exists, the content
+ * of the existing blob will remain unchanged.
+ * @see https://docs.microsoft.com/rest/api/storageservices/put-blob
*
- * @param options - Optional options to Blob Delete operation.
+ * @param size - size of the page blob.
+ * @param options -
*/
- async deleteIfExists(options = {}) {
+ async createIfNotExists(size, options = {}) {
var _a, _b;
- const { span, updatedOptions } = createSpan("BlobClient-deleteIfExists", options);
+ const { span, updatedOptions } = createSpan("PageBlobClient-createIfNotExists", options);
try {
- const res = await this.delete(updatedOptions);
+ const conditions = { ifNoneMatch: ETagAny };
+ const res = await this.create(size, Object.assign(Object.assign({}, options), { conditions, tracingOptions: updatedOptions.tracingOptions }));
return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response });
} catch (e) {
- if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobNotFound") {
+ if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") {
span.setStatus({
code: coreTracing.SpanStatusCode.ERROR,
- message: "Expected exception when deleting a blob or snapshot only if it exists."
+ message: "Expected exception when creating a blob only if it does not already exist."
});
return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response });
}
@@ -84910,17 +86095,24 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
}
}
/**
- * Restores the contents and metadata of soft deleted blob and any associated
- * soft deleted snapshots. Undelete Blob is supported only on version 2017-07-29
- * or later.
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/undelete-blob
+ * Writes 1 or more pages to the page blob. The start and end offsets must be a multiple of 512.
+ * @see https://docs.microsoft.com/rest/api/storageservices/put-page
*
- * @param options - Optional options to Blob Undelete operation.
+ * @param body - Data to upload
+ * @param offset - Offset of destination page blob
+ * @param count - Content length of the body, also number of bytes to be uploaded
+ * @param options - Options to the Page Blob Upload Pages operation.
+ * @returns Response data for the Page Blob Upload Pages operation.
*/
- async undelete(options = {}) {
- const { span, updatedOptions } = createSpan("BlobClient-undelete", options);
+ async uploadPages(body2, offset, count, options = {}) {
+ var _a;
+ options.conditions = options.conditions || {};
+ const { span, updatedOptions } = createSpan("PageBlobClient-uploadPages", options);
try {
- return await this.blobContext.undelete(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions)));
+ ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps);
+ return await this.pageBlobContext.uploadPages(count, body2, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: {
+ onUploadProgress: options.onProgress
+ }, range: rangeToString({ offset, count }), sequenceNumberAccessConditions: options.conditions, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions)));
} catch (e) {
span.setStatus({
code: coreTracing.SpanStatusCode.ERROR,
@@ -84932,27 +86124,29 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
}
}
/**
- * Sets system properties on the blob.
- *
- * If no value provided, or no value provided for the specified blob HTTP headers,
- * these blob HTTP headers without a value will be cleared.
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties
+ * The Upload Pages operation writes a range of pages to a page blob where the
+ * contents are read from a URL.
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-page-from-url
*
- * @param blobHTTPHeaders - If no value provided, or no value provided for
- * the specified blob HTTP
- * headers, these blob HTTP
- * headers without a value will be cleared.
- * A common header to set is `blobContentType`
- * enabling the browser to provide functionality
- * based on file type.
+ * @param sourceURL - Specify a URL to the copy source, a Shared Access Signature (SAS) may be needed for authentication
+ * @param sourceOffset - The source offset to copy from.
Pass 0 to copy from the beginning of source page blob + * @param destOffset - Offset of destination page blob + * @param count - Number of bytes to be uploaded from source page blob + * @param options - */ - async setHTTPHeaders(blobHTTPHeaders, options = {}) { + async uploadPagesFromURL(sourceURL, sourceOffset, destOffset, count, options = {}) { var _a; - const { span, updatedOptions } = createSpan("BlobClient-setHTTPHeaders", options); options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-uploadPagesFromURL", options); try { ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blobContext.setHttpHeaders(Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: blobHTTPHeaders, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + return await this.pageBlobContext.uploadPagesFromURL(sourceURL, rangeToString({ offset: sourceOffset, count }), 0, rangeToString({ offset: destOffset, count }), Object.assign({ abortSignal: options.abortSignal, sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, leaseAccessConditions: options.conditions, sequenceNumberAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: options.sourceConditions.ifMatch, + sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, + sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince + }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization) }, convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, @@ -84964,23 +86158,20 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } /** - * Sets user-defined metadata for the specified blob as one or more name-value pairs. - * - * If no option provided, or no metadata defined in the parameter, the blob - * metadata will be removed. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-metadata + * Frees the specified pages from the page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/put-page * - * @param metadata - Replace existing metadata with this value. - * If no value provided the existing metadata will be removed. - * @param options - Optional options to Set Metadata operation. + * @param offset - Starting byte position of the pages to clear. + * @param count - Number of bytes to clear. + * @param options - Options to the Page Blob Clear Pages operation. + * @returns Response data for the Page Blob Clear Pages operation. 
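+ *
+ * Example usage (a sketch, assuming an existing `pageBlobClient`; offsets and counts must be multiples of 512):
+ *
+ * ```js
+ * await pageBlobClient.clearPages(0, 512);   // free the first page
+ * await pageBlobClient.clearPages(512, 512); // free the second page
+ * ```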
*/ - async setMetadata(metadata2, options = {}) { + async clearPages(offset = 0, count, options = {}) { var _a; - const { span, updatedOptions } = createSpan("BlobClient-setMetadata", options); options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-clearPages", options); try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blobContext.setMetadata(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: metadata2, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + return await this.pageBlobContext.clearPages(0, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), range: rangeToString({ offset, count }), sequenceNumberAccessConditions: options.conditions, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, @@ -84992,19 +86183,20 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } /** - * Sets tags on the underlying blob. - * A blob can have up to 10 tags. Tag keys must be between 1 and 128 characters. Tag values must be between 0 and 256 characters. - * Valid tag key and value characters include lower and upper case letters, digits (0-9), - * space (' '), plus ('+'), minus ('-'), period ('.'), foward slash ('/'), colon (':'), equals ('='), and underscore ('_'). + * Returns the list of valid page ranges for a page blob or snapshot of a page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges * - * @param tags - - * @param options - + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param options - Options to the Page Blob Get Ranges operation. + * @returns Response data for the Page Blob Get Ranges operation. */ - async setTags(tags2, options = {}) { + async getPageRanges(offset = 0, count, options = {}) { var _a; - const { span, updatedOptions } = createSpan("BlobClient-setTags", options); + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-getPageRanges", options); try { - return await this.blobContext.setTags(Object.assign(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)), { tags: toBlobTags(tags2) })); + return await this.pageBlobContext.getPageRanges(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), range: rangeToString({ offset, count }) }, convertTracingToRequestOptionsBase(updatedOptions))).then(rangeResponseFromModel);
} catch (e) {
span.setStatus({
code: coreTracing.SpanStatusCode.ERROR,
@@ -85016,17 +86208,22 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
}
}
/**
- * Gets the tags associated with the underlying blob.
+ * getPageRangesSegment returns a single segment of page ranges starting from the
+ * specified Marker. Use an empty Marker to start enumeration from the beginning.
+ * After getting a segment, process it, and then call getPageRangesSegment again
+ * (passing the previously-returned Marker) to get the next segment.
+ * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges
*
- * @param options -
+ * @param offset - Starting byte position of the page ranges.
+ * @param count - Number of bytes to get.
+ * @param marker - A string value that identifies the portion of the list to be returned with the next list operation.
+ * @param options - Options to PageBlob Get Page Ranges Segment operation.
*/
- async getTags(options = {}) {
+ async listPageRangesSegment(offset = 0, count, marker2, options = {}) {
var _a;
- const { span, updatedOptions } = createSpan("BlobClient-getTags", options);
+ const { span, updatedOptions } = createSpan("PageBlobClient-getPageRangesSegment", options);
try {
- const response = await this.blobContext.getTags(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)));
- const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, tags: toTags({ blobTagSet: response.blobTagSet }) || {} });
- return wrappedResponse;
+ return await this.pageBlobContext.getPageRanges(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), range: rangeToString({ offset, count }), marker: marker2, maxPageSize: options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions)));
} catch (e) {
span.setStatus({
code: coreTracing.SpanStatusCode.ERROR,
@@ -85038,138 +86235,169 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`;
}
}
/**
- * Get a {@link BlobLeaseClient} that manages leases on the blob.
+ * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesResponseModel}
*
- * @param proposeLeaseId - Initial proposed lease Id.
- * @returns A new BlobLeaseClient object for managing leases on the blob.
+ * @param offset - Starting byte position of the page ranges.
+ * @param count - Number of bytes to get.
+ * @param marker - A string value that identifies the portion of
+ * the get of page ranges to be returned with the next getting operation. The
+ * operation returns the ContinuationToken value within the response body if the
+ * getting operation did not return all page ranges remaining within the current page.
+ * The ContinuationToken value can be used as the value for
+ * the marker parameter in a subsequent call to request the next page of get
+ * items. The marker value is opaque to the client.
+ * @param options - Options to List Page Ranges operation.
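+ *
+ * Example usage (a sketch of the marker protocol these internal helpers implement; for normal use prefer the public {@link listPageRanges}):
+ *
+ * ```js
+ * let marker; // undefined starts enumeration from the beginning
+ * do {
+ *   const segment = await pageBlobClient.listPageRangesSegment(0, 1024, marker);
+ *   for (const range of segment.pageRange || []) {
+ *     console.log(`page range: ${range.start} - ${range.end}`);
+ *   }
+ *   marker = segment.continuationToken;
+ * } while (marker);
+ * ```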
*/ - getBlobLeaseClient(proposeLeaseId) { - return new BlobLeaseClient(this, proposeLeaseId); + listPageRangeItemSegments(offset = 0, count, marker2, options = {}) { + return tslib.__asyncGenerator(this, arguments, /* @__PURE__ */ __name(function* listPageRangeItemSegments_1() { + let getPageRangeItemSegmentsResponse; + if (!!marker2 || marker2 === void 0) { + do { + getPageRangeItemSegmentsResponse = yield tslib.__await(this.listPageRangesSegment(offset, count, marker2, options)); + marker2 = getPageRangeItemSegmentsResponse.continuationToken; + yield yield tslib.__await(yield tslib.__await(getPageRangeItemSegmentsResponse)); + } while (marker2); + } + }, "listPageRangeItemSegments_1")); } /** - * Creates a read-only snapshot of a blob. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/snapshot-blob + * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects * - * @param options - Optional options to the Blob Create Snapshot operation. + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param options - Options to List Page Ranges operation. */ - async createSnapshot(options = {}) { - var _a; - const { span, updatedOptions } = createSpan("BlobClient-createSnapshot", options); - options.conditions = options.conditions || {}; - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blobContext.createSnapshot(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); - } + listPageRangeItems(offset = 0, count, options = {}) { + return tslib.__asyncGenerator(this, arguments, /* @__PURE__ */ __name(function* listPageRangeItems_1() { + var e_1, _a; + let marker2; + try { + for (var _b = tslib.__asyncValues(this.listPageRangeItemSegments(offset, count, marker2, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done; ) { + const getPageRangesSegment = _c.value; + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(ExtractPageRangeInfoItems(getPageRangesSegment)))); + } + } catch (e_1_1) { + e_1 = { error: e_1_1 }; + } finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + } finally { + if (e_1) throw e_1.error; + } + } + }, "listPageRangeItems_1")); } /** - * Asynchronously copies a blob to a destination within the storage account. - * This method returns a long running operation poller that allows you to wait - * indefinitely until the copy is completed. - * You can also cancel a copy before it is completed by calling `cancelOperation` on the poller. - * Note that the onProgress callback will not be invoked if the operation completes in the first - * request, and attempting to cancel a completed copy will result in an error being thrown. + * Returns an async iterable iterator to list of page ranges for a page blob. 
+ * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges * - * In version 2012-02-12 and later, the source for a Copy Blob operation can be - * a committed blob in any Azure storage account. - * Beginning with version 2015-02-21, the source for a Copy Blob operation can be - * an Azure file in any Azure storage account. - * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob - * operation to copy from another storage account. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob + * .byPage() returns an async iterable iterator to list of page ranges for a page blob. * - * Example using automatic polling: + * Example using `for await` syntax: * * ```js - * const copyPoller = await blobClient.beginCopyFromURL('url'); - * const result = await copyPoller.pollUntilDone(); + * // Get the pageBlobClient before you run these snippets, + * // Can be obtained from `blobServiceClient.getContainerClient("").getPageBlobClient("");` + * let i = 1; + * for await (const pageRange of pageBlobClient.listPageRanges()) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } * ``` * - * Example using manual polling: + * Example using `iter.next()`: * * ```js - * const copyPoller = await blobClient.beginCopyFromURL('url'); - * while (!poller.isDone()) { - * await poller.poll(); + * let i = 1; + * let iter = pageBlobClient.listPageRanges(); + * let pageRangeItem = await iter.next(); + * while (!pageRangeItem.done) { + * console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`); + * pageRangeItem = await iter.next(); * } - * const result = copyPoller.getResult(); * ``` * - * Example using progress updates: + * Example using `byPage()`: * * ```js - * const copyPoller = await blobClient.beginCopyFromURL('url', { - * onProgress(state) { - * console.log(`Progress: ${state.copyProgress}`); + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of pageBlobClient.listPageRanges().byPage({ maxPageSize: 20 })) { + * for (const pageRange of response) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); * } - * }); - * const result = await copyPoller.pollUntilDone(); + * } * ``` * - * Example using a changing polling interval (default 15 seconds): + * Example using paging with a marker: * * ```js - * const copyPoller = await blobClient.beginCopyFromURL('url', { - * intervalInMs: 1000 // poll blob every 1 second for copy progress - * }); - * const result = await copyPoller.pollUntilDone(); - * ``` + * let i = 1; + * let iterator = pageBlobClient.listPageRanges().byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; * - * Example using copy cancellation: + * // Prints 2 page ranges + * for (const pageRange of response) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } * - * ```js - * const copyPoller = await blobClient.beginCopyFromURL('url'); - * // cancel operation after starting it. 
- * try {
- * await copyPoller.cancelOperation();
- * // calls to get the result now throw PollerCancelledError
- * await copyPoller.getResult();
- * } catch (err) {
- * if (err.name === 'PollerCancelledError') {
- * console.log('The copy was cancelled.');
- * }
+ * // Gets next marker
+ * let marker = response.continuationToken;
+ *
+ * // Passing next marker as continuationToken
+ *
+ * iterator = pageBlobClient.listPageRanges().byPage({ continuationToken: marker, maxPageSize: 10 });
+ * response = (await iterator.next()).value;
+ *
+ * // Prints 10 page ranges
+ * for (const pageRange of response) {
+ * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`);
+ * }
* ```
- *
- * @param copySource - url to the source Azure Blob/File.
- * @param options - Optional options to the Blob Start Copy From URL operation.
+ * @param offset - Starting byte position of the page ranges.
+ * @param count - Number of bytes to get.
+ * @param options - Options to the Page Blob Get Ranges operation.
+ * @returns An asyncIterableIterator that supports paging.
*/
- async beginCopyFromURL(copySource2, options = {}) {
- const client = {
- abortCopyFromURL: /* @__PURE__ */ __name((...args) => this.abortCopyFromURL(...args), "abortCopyFromURL"),
- getProperties: /* @__PURE__ */ __name((...args) => this.getProperties(...args), "getProperties"),
- startCopyFromURL: /* @__PURE__ */ __name((...args) => this.startCopyFromURL(...args), "startCopyFromURL")
+ listPageRanges(offset = 0, count, options = {}) {
+ options.conditions = options.conditions || {};
+ const iter = this.listPageRangeItems(offset, count, options);
+ return {
+ /**
+ * The next method, part of the iteration protocol
+ */
+ next() {
+ return iter.next();
+ },
+ /**
+ * The connection to the async iterator, part of the iteration protocol
+ */
+ [Symbol.asyncIterator]() {
+ return this;
+ },
+ /**
+ * Return an AsyncIterableIterator that works a page at a time
+ */
+ byPage: /* @__PURE__ */ __name((settings = {}) => {
+ return this.listPageRangeItemSegments(offset, count, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, options));
+ }, "byPage")
};
- const poller = new BlobBeginCopyFromUrlPoller({
- blobClient: client,
- copySource: copySource2,
- intervalInMs: options.intervalInMs,
- onProgress: options.onProgress,
- resumeFrom: options.resumeFrom,
- startCopyFromURLOptions: options
- });
- await poller.poll();
- return poller;
}
/**
- * Aborts a pending asynchronous Copy Blob operation, and leaves a destination blob with zero
- * length and full metadata. Version 2012-02-12 and newer.
- * @see https://docs.microsoft.com/en-us/rest/api/storageservices/abort-copy-blob
+ * Gets the collection of page ranges that differ between a specified snapshot and this page blob.
+ * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges
*
- * @param copyId - Id of the Copy From URL operation.
- * @param options - Optional options to the Blob Abort Copy From URL operation.
+ * @param offset - Starting byte position of the page blob
+ * @param count - Number of bytes to get ranges diff.
+ * @param prevSnapshot - Timestamp of snapshot to retrieve the difference.
+ * @param options - Options to the Page Blob Get Page Ranges Diff operation.
+ * @returns Response data for the Page Blob Get Page Range Diff operation.
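+ *
+ * Example usage (a sketch, assuming an existing 1024-byte `pageBlobClient`):
+ *
+ * ```js
+ * // Snapshot the blob, write a page, then diff against the snapshot.
+ * const { snapshot } = await pageBlobClient.createSnapshot();
+ * await pageBlobClient.uploadPages("a".repeat(512), 0, 512);
+ * const diff = await pageBlobClient.getPageRangesDiff(0, 1024, snapshot);
+ * for (const range of diff.pageRange || []) {
+ *   console.log(`changed: ${range.start} - ${range.end}`);
+ * }
+ * ```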
*/ - async abortCopyFromURL(copyId2, options = {}) { - const { span, updatedOptions } = createSpan("BlobClient-abortCopyFromURL", options); + async getPageRangesDiff(offset, count, prevSnapshot, options = {}) { + var _a; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-getPageRangesDiff", options); try { - return await this.blobContext.abortCopyFromURL(copyId2, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); + return await this.pageBlobContext.getPageRangesDiff(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), prevsnapshot: prevSnapshot, range: rangeToString({ offset, count }) }, convertTracingToRequestOptionsBase(updatedOptions))).then(rangeResponseFromModel); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, @@ -85181,25 +86409,27 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } /** - * The synchronous Copy From URL operation copies a blob or an internet resource to a new blob. It will not - * return a response until the copy is complete. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob-from-url + * listPageRangesDiffSegment returns a single segment of page ranges starting from the + * specified Marker for difference between previous snapshot and the target page blob. + * Use an empty Marker to start enumeration from the beginning. + * After getting a segment, process it, and then call listPageRangesDiffSegment again + * (passing the previously-returned Marker) to get the next segment. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges * - * @param copySource - The source URL to copy from, Shared Access Signature(SAS) maybe needed for authentication - * @param options - + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. + * @param marker - A string value that identifies the portion of the get to be returned with the next get operation. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. */ - async syncCopyFromURL(copySource2, options = {}) { - var _a, _b, _c; - const { span, updatedOptions } = createSpan("BlobClient-syncCopyFromURL", options); - options.conditions = options.conditions || {}; - options.sourceConditions = options.sourceConditions || {}; + async listPageRangesDiffSegment(offset, count, prevSnapshotOrUrl, marker2, options) { + var _a; + const { span, updatedOptions } = createSpan("PageBlobClient-getPageRangesDiffSegment", options); try { - return await this.blobContext.copyFromURL(copySource2, Object.assign({ abortSignal: options.abortSignal, metadata: options.metadata, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ?
void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { - sourceIfMatch: options.sourceConditions.ifMatch, - sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, - sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, - sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince - }, sourceContentMD5: options.sourceContentMD5, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, encryptionScope: options.encryptionScope, copySourceTags: options.copySourceTags }, convertTracingToRequestOptionsBase(updatedOptions))); + return await this.pageBlobContext.getPageRangesDiff(Object.assign({ abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal, leaseAccessConditions: options === null || options === void 0 ? void 0 : options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options === null || options === void 0 ? void 0 : options.conditions), { ifTags: (_a = options === null || options === void 0 ? void 0 : options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), prevsnapshot: prevSnapshotOrUrl, range: rangeToString({ + offset, + count + }), marker: marker2, maxPageSize: options === null || options === void 0 ? void 0 : options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, @@ -85211,21 +86441,173 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } /** - * Sets the tier on a blob. The operation is allowed on a page blob in a premium - * storage account and on a block blob in a blob storage account (locally redundant - * storage only). A premium page blob's tier determines the allowed size, IOPS, - * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive - * storage type. This operation does not update the blob's ETag. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tier + * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesDiffResponseModel} * - * @param tier - The tier to be set on the blob. Valid values are Hot, Cool, or Archive. - * @param options - Optional options to the Blob Set Tier operation. + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. + * @param marker - A string value that identifies the portion of + * the get of page ranges to be returned with the next getting operation. The + * operation returns the ContinuationToken value within the response body if the + * getting operation did not return all page ranges remaining within the current page. + * The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of get + * items. The marker value is opaque to the client. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. 
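The marker/ContinuationToken contract described above also surfaces through the public `byPage()` API, which makes paging resumable across process restarts. A sketch, where `prevSnapshot`, `loadToken`, and `saveToken` are assumed application helpers (not SDK members):

```js
// Resume page-range-diff enumeration from a persisted continuation token.
let token = await loadToken(); // undefined on a fresh run
const pages = pageBlobClient
  .listPageRangesDiff(0, undefined, prevSnapshot)
  .byPage({ continuationToken: token, maxPageSize: 100 });
for await (const page of pages) {
  for (const pageRange of page) {
    // process one page range
  }
  // the token is opaque to the client; persist it verbatim for the next run
  await saveToken(page.continuationToken);
}
```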
*/ - async setAccessTier(tier2, options = {}) { + listPageRangeDiffItemSegments(offset, count, prevSnapshotOrUrl, marker2, options) { + return tslib.__asyncGenerator(this, arguments, /* @__PURE__ */ __name(function* listPageRangeDiffItemSegments_1() { + let getPageRangeItemSegmentsResponse; + if (!!marker2 || marker2 === void 0) { + do { + getPageRangeItemSegmentsResponse = yield tslib.__await(this.listPageRangesDiffSegment(offset, count, prevSnapshotOrUrl, marker2, options)); + marker2 = getPageRangeItemSegmentsResponse.continuationToken; + yield yield tslib.__await(yield tslib.__await(getPageRangeItemSegmentsResponse)); + } while (marker2); + } + }, "listPageRangeDiffItemSegments_1")); + } + /** + * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects + * + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. + * @param options - Options to the Page Blob Get Page Ranges Diff operation. + */ + listPageRangeDiffItems(offset, count, prevSnapshotOrUrl, options) { + return tslib.__asyncGenerator(this, arguments, /* @__PURE__ */ __name(function* listPageRangeDiffItems_1() { + var e_2, _a; + let marker2; + try { + for (var _b = tslib.__asyncValues(this.listPageRangeDiffItemSegments(offset, count, prevSnapshotOrUrl, marker2, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done; ) { + const getPageRangesSegment = _c.value; + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(ExtractPageRangeInfoItems(getPageRangesSegment)))); + } + } catch (e_2_1) { + e_2 = { error: e_2_1 }; + } finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + } finally { + if (e_2) throw e_2.error; + } + } + }, "listPageRangeDiffItems_1")); + } + /** + * Returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * .byPage() returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob. 
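The two generators above are tslib-compiled output and hard to review directly. Roughly, they correspond to the hand-written shape below (an illustrative sketch, not the shipped source; the real item generator additionally folds `clearRange` entries in via `ExtractPageRangeInfoItems`):

```js
// Segment generator: follow continuation tokens until the service stops returning one.
async function* listSegments(fetchSegment, marker) {
  do {
    const segment = await fetchSegment(marker);
    marker = segment.continuationToken;
    yield segment;
  } while (marker);
}

// Item generator: flatten each segment into individual page-range items.
async function* listItems(fetchSegment) {
  for await (const segment of listSegments(fetchSegment)) {
    yield* segment.pageRange || [];
  }
}
```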
+ * + * Example using `for await` syntax: + * + * ```js + * // Get the pageBlobClient before you run these snippets, + * // Can be obtained from `blobServiceClient.getContainerClient("").getPageBlobClient("");` + * let i = 1; + * for await (const pageRange of pageBlobClient.listPageRangesDiff()) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * let iter = pageBlobClient.listPageRangesDiff(); + * let pageRangeItem = await iter.next(); + * while (!pageRangeItem.done) { + * console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`); + * pageRangeItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 20 })) { + * for (const pageRange of response) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 page ranges + * for (const pageRange of response) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * + * // Passing next marker as continuationToken + * + * iterator = pageBlobClient.listPageRangesDiff().byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints 10 page ranges + * for (const pageRange of response) { + * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * } + * ``` + * @param offset - Starting byte position of the page ranges. + * @param count - Number of bytes to get. + * @param prevSnapshot - Timestamp of snapshot to retrieve the difference. + * @param options - Options to the Page Blob Get Ranges operation. + * @returns An asyncIterableIterator that supports paging. + */ + listPageRangesDiff(offset, count, prevSnapshot, options = {}) { + options.conditions = options.conditions || {}; + const iter = this.listPageRangeDiffItems(offset, count, prevSnapshot, Object.assign({}, options)); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: /* @__PURE__ */ __name((settings = {}) => { + return this.listPageRangeDiffItemSegments(offset, count, prevSnapshot, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, options)); + }, "byPage") + }; + } + /** + * Gets the collection of page ranges that differ between a specified snapshot and this page blob for managed disks. + * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * + * @param offset - Starting byte position of the page blob + * @param count - Number of bytes to get ranges diff. + * @param prevSnapshotUrl - URL of snapshot to retrieve the difference. + * @param options - Options to the Page Blob Get Page Ranges Diff operation.
+ * @returns Response data for the Page Blob Get Page Range Diff operation. + */ + async getPageRangesDiffForManagedDisks(offset, count, prevSnapshotUrl2, options = {}) { var _a; - const { span, updatedOptions } = createSpan("BlobClient-setAccessTier", options); + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-GetPageRangesDiffForManagedDisks", options); try { - return await this.blobContext.setTier(toAccessTier(tier2), Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), rehydratePriority: options.rehydratePriority }, convertTracingToRequestOptionsBase(updatedOptions))); + return await this.pageBlobContext.getPageRangesDiff(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), prevSnapshotUrl: prevSnapshotUrl2, range: rangeToString({ offset, count }) }, convertTracingToRequestOptionsBase(updatedOptions))).then(rangeResponseFromModel); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, @@ -85236,82 +86618,20 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; span.end(); } } - async downloadToBuffer(param1, param2, param3, param4 = {}) { - let buffer; - let offset = 0; - let count = 0; - let options = param4; - if (param1 instanceof Buffer) { - buffer = param1; - offset = param2 || 0; - count = typeof param3 === "number" ? param3 : 0; - } else { - offset = typeof param1 === "number" ? param1 : 0; - count = typeof param2 === "number" ? param2 : 0; - options = param3 || {}; - } - const { span, updatedOptions } = createSpan("BlobClient-downloadToBuffer", options); + /** + * Resizes the page blob to the specified size (which must be a multiple of 512). + * @see https://docs.microsoft.com/rest/api/storageservices/set-blob-properties + * + * @param size - Target size + * @param options - Options to the Page Blob Resize operation. + * @returns Response data for the Page Blob Resize operation. 
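As the `resize` doc above notes, the target size must be a multiple of 512 bytes, otherwise the service rejects the request. A small round-up helper keeps callers honest (names here are illustrative):

```js
// Page blobs are addressed in 512-byte pages; round the requested size up.
const PAGE_BLOB_PAGE_SIZE = 512;
const alignToPage = (n) => Math.ceil(n / PAGE_BLOB_PAGE_SIZE) * PAGE_BLOB_PAGE_SIZE;

await pageBlobClient.resize(alignToPage(1000)); // resizes to 1024 bytes
```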
+ */ + async resize(size, options = {}) { + var _a; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-resize", options); try { - if (!options.blockSize) { - options.blockSize = 0; - } - if (options.blockSize < 0) { - throw new RangeError("blockSize option must be >= 0"); - } - if (options.blockSize === 0) { - options.blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; - } - if (offset < 0) { - throw new RangeError("offset option must be >= 0"); - } - if (count && count <= 0) { - throw new RangeError("count option must be greater than 0"); - } - if (!options.conditions) { - options.conditions = {}; - } - if (!count) { - const response = await this.getProperties(Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); - count = response.contentLength - offset; - if (count < 0) { - throw new RangeError(`offset ${offset} shouldn't be larger than blob size ${response.contentLength}`); - } - } - if (!buffer) { - try { - buffer = Buffer.alloc(count); - } catch (error) { - throw new Error(`Unable to allocate the buffer of size: ${count}(in bytes). Please try passing your own buffer to the "downloadToBuffer" method or try using other methods like "download" or "downloadToFile". ${error.message}`); - } - } - if (buffer.length < count) { - throw new RangeError(`The buffer's size should be equal to or larger than the request count of bytes: ${count}`); - } - let transferProgress = 0; - const batch = new Batch(options.concurrency); - for (let off = offset; off < offset + count; off = off + options.blockSize) { - batch.addOperation(async () => { - let chunkEnd = offset + count; - if (off + options.blockSize < chunkEnd) { - chunkEnd = off + options.blockSize; - } - const response = await this.download(off, chunkEnd - off, { - abortSignal: options.abortSignal, - conditions: options.conditions, - maxRetryRequests: options.maxRetryRequestsPerBlock, - customerProvidedKey: options.customerProvidedKey, - tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) - }); - const stream2 = response.readableStreamBody; - await streamToBuffer(stream2, buffer, off - offset, chunkEnd - offset); - transferProgress += chunkEnd - off; - if (options.onProgress) { - options.onProgress({ loadedBytes: transferProgress }); - } - }); - } - await batch.do(); - return buffer; + return await this.pageBlobContext.resize(size, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, @@ -85323,30 +86643,20 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } /** - * ONLY AVAILABLE IN NODE.JS RUNTIME. - * - * Downloads an Azure Blob to a local file. - * Fails if the the given file path already exits. - * Offset and count are optional, pass 0 and undefined respectively to download the entire blob. + * Sets a page blob's sequence number. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties * - * @param filePath - - * @param offset - From which position of the block blob to download. 
- * @param count - How much data to be downloaded. Will download to the end when passing undefined. - * @param options - Options to Blob download options. - * @returns The response data for blob download operation, - * but with readableStreamBody set to undefined since its - * content is already read and written into a local file - * at the specified path. + * @param sequenceNumberAction - Indicates how the service should modify the blob's sequence number. + * @param sequenceNumber - Required if sequenceNumberAction is max or update + * @param options - Options to the Page Blob Update Sequence Number operation. + * @returns Response data for the Page Blob Update Sequence Number operation. */ - async downloadToFile(filePath, offset = 0, count, options = {}) { - const { span, updatedOptions } = createSpan("BlobClient-downloadToFile", options); + async updateSequenceNumber(sequenceNumberAction2, sequenceNumber, options = {}) { + var _a; + options.conditions = options.conditions || {}; + const { span, updatedOptions } = createSpan("PageBlobClient-updateSequenceNumber", options); try { - const response = await this.download(offset, count, Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); - if (response.readableStreamBody) { - await readStreamToLocalFile(response.readableStreamBody, filePath); - } - response.blobDownloadStream = void 0; - return response; + return await this.pageBlobContext.updateSequenceNumber(sequenceNumberAction2, Object.assign({ abortSignal: options.abortSignal, blobSequenceNumber: sequenceNumber, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, @@ -85357,61 +86667,24 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; span.end(); } } - getBlobAndContainerNamesFromUrl() { - let containerName; - let blobName; - try { - const parsedUrl = coreHttp.URLBuilder.parse(this.url); - if (parsedUrl.getHost().split(".")[1] === "blob") { - const pathComponents = parsedUrl.getPath().match("/([^/]*)(/(.*))?"); - containerName = pathComponents[1]; - blobName = pathComponents[3]; - } else if (isIpEndpointStyle(parsedUrl)) { - const pathComponents = parsedUrl.getPath().match("/([^/]*)/([^/]*)(/(.*))?"); - containerName = pathComponents[2]; - blobName = pathComponents[4]; - } else { - const pathComponents = parsedUrl.getPath().match("/([^/]*)(/(.*))?"); - containerName = pathComponents[1]; - blobName = pathComponents[3]; - } - containerName = decodeURIComponent(containerName); - blobName = decodeURIComponent(blobName); - blobName = blobName.replace(/\\/g, "/"); - if (!containerName) { - throw new Error("Provided containerName is invalid."); - } - return { blobName, containerName }; - } catch (error) { - throw new Error("Unable to extract blobName and containerName with provided information."); - } - } /** - * Asynchronously copies a blob to a destination within the storage account. - * In version 2012-02-12 and later, the source for a Copy Blob operation can be - * a committed blob in any Azure storage account. - * Beginning with version 2015-02-21, the source for a Copy Blob operation can be - * an Azure file in any Azure storage account. 
- * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob - * operation to copy from another storage account. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob + * Begins an operation to start an incremental copy from one page blob's snapshot to this page blob. + * The snapshot is copied such that only the differential changes between the previously + * copied snapshot are transferred to the destination. + * The copied snapshots are complete copies of the original snapshot and can be read or copied from as usual. + * @see https://docs.microsoft.com/rest/api/storageservices/incremental-copy-blob + * @see https://docs.microsoft.com/en-us/azure/virtual-machines/windows/incremental-snapshots * - * @param copySource - url to the source Azure Blob/File. - * @param options - Optional options to the Blob Start Copy From URL operation. + * @param copySource - Specifies the name of the source page blob snapshot. For example, + * https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= + * @param options - Options to the Page Blob Copy Incremental operation. + * @returns Response data for the Page Blob Copy Incremental operation. */ - async startCopyFromURL(copySource2, options = {}) { - var _a, _b, _c; - const { span, updatedOptions } = createSpan("BlobClient-startCopyFromURL", options); - options.conditions = options.conditions || {}; - options.sourceConditions = options.sourceConditions || {}; + async startCopyIncremental(copySource2, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("PageBlobClient-startCopyIncremental", options); try { - return await this.blobContext.startCopyFromURL(copySource2, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { - sourceIfMatch: options.sourceConditions.ifMatch, - sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, - sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, - sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, - sourceIfTags: options.sourceConditions.tagConditions - }, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, rehydratePriority: options.rehydratePriority, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), sealBlob: options.sealBlob }, convertTracingToRequestOptionsBase(updatedOptions))); + return await this.pageBlobContext.copyIncremental(copySource2, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, @@ -85422,54 +86695,247 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; span.end(); } } + }; + async function getBodyAsText(batchResponse) { + let buffer = Buffer.alloc(BATCH_MAX_PAYLOAD_IN_BYTES); + const responseLength = await streamToBuffer2(batchResponse.readableStreamBody, buffer); + buffer = buffer.slice(0, responseLength); + return buffer.toString(); + } + __name(getBodyAsText, "getBodyAsText"); + function utf8ByteLength(str) { + return Buffer.byteLength(str); + } + __name(utf8ByteLength, "utf8ByteLength"); + var HTTP_HEADER_DELIMITER = ": "; + var SPACE_DELIMITER = " "; + var NOT_FOUND = -1; + var BatchResponseParser = class { + static { + __name(this, "BatchResponseParser"); + } + constructor(batchResponse, subRequests) { + if (!batchResponse || !batchResponse.contentType) { + throw new RangeError("batchResponse is malformed or doesn't contain valid content-type."); + } + if (!subRequests || subRequests.size === 0) { + throw new RangeError("Invalid state: subRequests is not provided or size is 0."); + } + this.batchResponse = batchResponse; + this.subRequests = subRequests; + this.responseBatchBoundary = this.batchResponse.contentType.split("=")[1]; + this.perResponsePrefix = `--${this.responseBatchBoundary}${HTTP_LINE_ENDING}`; + this.batchResponseEnding = `--${this.responseBatchBoundary}--`; + } + // For example of response, please refer to https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#response + async parseBatchResponse() { + if (this.batchResponse._response.status !== HTTPURLConnection.HTTP_ACCEPTED) { + throw new Error(`Invalid state: batch request failed with status: '${this.batchResponse._response.status}'.`); + } + const responseBodyAsText = await getBodyAsText(this.batchResponse); + const subResponses = responseBodyAsText.split(this.batchResponseEnding)[0].split(this.perResponsePrefix).slice(1); + const subResponseCount = subResponses.length; + if (subResponseCount !== this.subRequests.size && subResponseCount !== 1) { + throw new Error("Invalid state: sub responses' count is not equal to sub requests' count."); + } + const deserializedSubResponses = new Array(subResponseCount); + let subResponsesSucceededCount = 0; + let subResponsesFailedCount = 0; + for (let index = 0; index < subResponseCount; index++) { + const subResponse = subResponses[index]; + const deserializedSubResponse = {}; + deserializedSubResponse.headers = new coreHttp.HttpHeaders(); + const responseLines = subResponse.split(`${HTTP_LINE_ENDING}`); + let subRespHeaderStartFound = false; + let subRespHeaderEndFound = false; + let subRespFailed = false; + let contentId = NOT_FOUND; + for (const responseLine of responseLines) { + if (!subRespHeaderStartFound) { + if (responseLine.startsWith(HeaderConstants.CONTENT_ID)) { + contentId = parseInt(responseLine.split(HTTP_HEADER_DELIMITER)[1]); + } + if (responseLine.startsWith(HTTP_VERSION_1_1)) { + subRespHeaderStartFound = true; + const tokens = responseLine.split(SPACE_DELIMITER); + deserializedSubResponse.status = parseInt(tokens[1]); + deserializedSubResponse.statusMessage = tokens.slice(2).join(SPACE_DELIMITER); + } + continue; + } + if (responseLine.trim() === "") { + if (!subRespHeaderEndFound) { + subRespHeaderEndFound = true; + } + continue; + } + if (!subRespHeaderEndFound) { + if (responseLine.indexOf(HTTP_HEADER_DELIMITER) === -1) { + throw new Error(`Invalid 
state: find non-empty line '${responseLine}' without HTTP header delimiter '${HTTP_HEADER_DELIMITER}'.`); + } + const tokens = responseLine.split(HTTP_HEADER_DELIMITER); + deserializedSubResponse.headers.set(tokens[0], tokens[1]); + if (tokens[0] === HeaderConstants.X_MS_ERROR_CODE) { + deserializedSubResponse.errorCode = tokens[1]; + subRespFailed = true; + } + } else { + if (!deserializedSubResponse.bodyAsText) { + deserializedSubResponse.bodyAsText = ""; + } + deserializedSubResponse.bodyAsText += responseLine; + } + } + if (contentId !== NOT_FOUND && Number.isInteger(contentId) && contentId >= 0 && contentId < this.subRequests.size && deserializedSubResponses[contentId] === void 0) { + deserializedSubResponse._request = this.subRequests.get(contentId); + deserializedSubResponses[contentId] = deserializedSubResponse; + } else { + logger.error(`subResponses[${index}] is dropped as the Content-ID is not found or invalid, Content-ID: ${contentId}`); + } + if (subRespFailed) { + subResponsesFailedCount++; + } else { + subResponsesSucceededCount++; + } + } + return { + subResponses: deserializedSubResponses, + subResponsesSucceededCount, + subResponsesFailedCount + }; + } + }; + var MutexLockStatus; + (function(MutexLockStatus2) { + MutexLockStatus2[MutexLockStatus2["LOCKED"] = 0] = "LOCKED"; + MutexLockStatus2[MutexLockStatus2["UNLOCKED"] = 1] = "UNLOCKED"; + })(MutexLockStatus || (MutexLockStatus = {})); + var Mutex = class { + static { + __name(this, "Mutex"); + } + /** + * Lock for a specific key. If the lock has been acquired by another customer, then + * will wait until getting the lock. + * + * @param key - lock key + */ + static async lock(key) { + return new Promise((resolve) => { + if (this.keys[key] === void 0 || this.keys[key] === MutexLockStatus.UNLOCKED) { + this.keys[key] = MutexLockStatus.LOCKED; + resolve(); + } else { + this.onUnlockEvent(key, () => { + this.keys[key] = MutexLockStatus.LOCKED; + resolve(); + }); + } + }); + } /** - * Only available for BlobClient constructed with a shared key credential. - * - * Generates a Blob Service Shared Access Signature (SAS) URI based on the client properties - * and parameters passed in. The SAS is signed by the shared key credential of the client. - * - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * Unlock a key. * - * @param options - Optional parameters. - * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. 
+ * @param key - */ - generateSasUrl(options) { + static async unlock(key) { return new Promise((resolve) => { - if (!(this.credential instanceof StorageSharedKeyCredential)) { - throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); + if (this.keys[key] === MutexLockStatus.LOCKED) { + this.emitUnlockEvent(key); } - const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName, blobName: this._name, snapshotTime: this._snapshot, versionId: this._versionId }, options), this.credential).toString(); - resolve(appendToURLQuery(this.url, sas)); + delete this.keys[key]; + resolve(); }); } + static onUnlockEvent(key, handler) { + if (this.listeners[key] === void 0) { + this.listeners[key] = [handler]; + } else { + this.listeners[key].push(handler); + } + } + static emitUnlockEvent(key) { + if (this.listeners[key] !== void 0 && this.listeners[key].length > 0) { + const handler = this.listeners[key].shift(); + setImmediate(() => { + handler.call(this); + }); + } + } + }; + Mutex.keys = {}; + Mutex.listeners = {}; + var BlobBatch = class { + static { + __name(this, "BlobBatch"); + } + constructor() { + this.batch = "batch"; + this.batchRequest = new InnerBatchRequest(); + } /** - * Delete the immutablility policy on the blob. - * - * @param options - Optional options to delete immutability policy on the blob. + * Get the value of Content-Type for a batch request. + * The value must be multipart/mixed with a batch boundary. + * Example: multipart/mixed; boundary=batch_a81786c8-e301-4e42-a729-a32ca24ae252 */ - async deleteImmutabilityPolicy(options) { - const { span, updatedOptions } = createSpan("BlobClient-deleteImmutabilityPolicy", options); + getMultiPartContentType() { + return this.batchRequest.getMultipartContentType(); + } + /** + * Get assembled HTTP request body for sub requests. + */ + getHttpRequestBody() { + return this.batchRequest.getHttpRequestBody(); + } + /** + * Get sub requests that are added into the batch request. + */ + getSubRequests() { + return this.batchRequest.getSubRequests(); + } + async addSubRequestInternal(subRequest, assembleSubRequestFunc) { + await Mutex.lock(this.batch); try { - return await this.blobContext.deleteImmutabilityPolicy(Object.assign({ abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; + this.batchRequest.preAddSubRequest(subRequest); + await assembleSubRequestFunc(); + this.batchRequest.postAddSubRequest(subRequest); } finally { - span.end(); + await Mutex.unlock(this.batch); } } - /** - * Set immutablility policy on the blob. - * - * @param options - Optional options to set immutability policy on the blob. 
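The key-based `Mutex` above is what `addSubRequestInternal` uses to serialize sub-request assembly around its "batch" key. The lock/unlock discipline it expects is the usual try/finally shape; `withLock` below is an illustrative helper, not part of the bundle:

```js
// Serialize async work per string key; always release the lock, even on throw.
async function withLock(key, criticalSection) {
  await Mutex.lock(key);
  try {
    return await criticalSection();
  } finally {
    await Mutex.unlock(key);
  }
}
```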
- */ - async setImmutabilityPolicy(immutabilityPolicy, options) { - const { span, updatedOptions } = createSpan("BlobClient-setImmutabilityPolicy", options); + setBatchType(batchType) { + if (!this.batchType) { + this.batchType = batchType; + } + if (this.batchType !== batchType) { + throw new RangeError(`BlobBatch only supports one operation type per batch and it already is being used for ${this.batchType} operations.`); + } + } + async deleteBlob(urlOrBlobClient, credentialOrOptions, options) { + let url2; + let credential; + if (typeof urlOrBlobClient === "string" && (coreHttp.isNode && credentialOrOptions instanceof StorageSharedKeyCredential || credentialOrOptions instanceof AnonymousCredential || coreHttp.isTokenCredential(credentialOrOptions))) { + url2 = urlOrBlobClient; + credential = credentialOrOptions; + } else if (urlOrBlobClient instanceof BlobClient) { + url2 = urlOrBlobClient.url; + credential = urlOrBlobClient.credential; + options = credentialOrOptions; + } else { + throw new RangeError("Invalid arguments. Either url and credential, or BlobClient need be provided."); + } + if (!options) { + options = {}; + } + const { span, updatedOptions } = createSpan("BatchDeleteRequest-addSubRequest", options); try { - return await this.blobContext.setImmutabilityPolicy(Object.assign({ abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal, immutabilityPolicyExpiry: immutabilityPolicy.expiriesOn, immutabilityPolicyMode: immutabilityPolicy.policyMode, modifiedAccessConditions: options === null || options === void 0 ? void 0 : options.modifiedAccessCondition }, convertTracingToRequestOptionsBase(updatedOptions))); + this.setBatchType("delete"); + await this.addSubRequestInternal({ + url: url2, + credential + }, async () => { + await new BlobClient(url2, this.batchRequest.createPipeline(credential)).delete(updatedOptions); + }); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, @@ -85480,15 +86946,34 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; span.end(); } } - /** - * Set legal hold on the blob. - * - * @param options - Optional options to set legal hold on the blob. - */ - async setLegalHold(legalHoldEnabled, options) { - const { span, updatedOptions } = createSpan("BlobClient-setLegalHold", options); + async setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options) { + let url2; + let credential; + let tier2; + if (typeof urlOrBlobClient === "string" && (coreHttp.isNode && credentialOrTier instanceof StorageSharedKeyCredential || credentialOrTier instanceof AnonymousCredential || coreHttp.isTokenCredential(credentialOrTier))) { + url2 = urlOrBlobClient; + credential = credentialOrTier; + tier2 = tierOrOptions; + } else if (urlOrBlobClient instanceof BlobClient) { + url2 = urlOrBlobClient.url; + credential = urlOrBlobClient.credential; + tier2 = credentialOrTier; + options = tierOrOptions; + } else { + throw new RangeError("Invalid arguments. Either url and credential, or BlobClient need be provided."); + } + if (!options) { + options = {}; + } + const { span, updatedOptions } = createSpan("BatchSetTierRequest-addSubRequest", options); try { - return await this.blobContext.setLegalHold(legalHoldEnabled, Object.assign({ abortSignal: options === null || options === void 0 ? 
void 0 : options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + this.setBatchType("setAccessTier"); + await this.addSubRequestInternal({ + url: url2, + credential + }, async () => { + await new BlobClient(url2, this.batchRequest.createPipeline(credential)).setAccessTier(tier2, updatedOptions); + }); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, @@ -85500,215 +86985,242 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var AppendBlobClient = class _AppendBlobClient extends BlobClient { + var InnerBatchRequest = class { static { - __name(this, "AppendBlobClient"); + __name(this, "InnerBatchRequest"); } - constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) { - let pipeline; - let url2; - options = options || {}; - if (isPipelineLike(credentialOrPipelineOrContainerName)) { - url2 = urlOrConnectionString; - pipeline = credentialOrPipelineOrContainerName; - } else if (coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential || credentialOrPipelineOrContainerName instanceof AnonymousCredential || coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) { - url2 = urlOrConnectionString; - options = blobNameOrOptions; - pipeline = newPipeline(credentialOrPipelineOrContainerName, options); - } else if (!credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName !== "string") { - url2 = urlOrConnectionString; - pipeline = newPipeline(new AnonymousCredential(), options); - } else if (credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName === "string" && blobNameOrOptions && typeof blobNameOrOptions === "string") { - const containerName = credentialOrPipelineOrContainerName; - const blobName = blobNameOrOptions; - const extractedCreds = extractConnectionStringParts(urlOrConnectionString); - if (extractedCreds.kind === "AccountConnString") { - if (coreHttp.isNode) { - const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); - url2 = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); - if (!options.proxyOptions) { - options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); - } - pipeline = newPipeline(sharedKeyCredential, options); - } else { - throw new Error("Account connection string is only supported in Node.js environment"); - } - } else if (extractedCreds.kind === "SASConnString") { - url2 = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + "?" 
+ extractedCreds.accountSas; - pipeline = newPipeline(new AnonymousCredential(), options); - } else { - throw new Error("Connection string must be either an Account connection string or a SAS connection string"); - } - } else { - throw new Error("Expecting non-empty strings for containerName and blobName parameters"); - } - super(url2, pipeline); - this.appendBlobContext = new AppendBlob(this.storageClientContext); + constructor() { + this.operationCount = 0; + this.body = ""; + const tempGuid = coreHttp.generateUuid(); + this.boundary = `batch_${tempGuid}`; + this.subRequestPrefix = `--${this.boundary}${HTTP_LINE_ENDING}${HeaderConstants.CONTENT_TYPE}: application/http${HTTP_LINE_ENDING}${HeaderConstants.CONTENT_TRANSFER_ENCODING}: binary`; + this.multipartContentType = `multipart/mixed; boundary=${this.boundary}`; + this.batchRequestEnding = `--${this.boundary}--`; + this.subRequests = /* @__PURE__ */ new Map(); } /** - * Creates a new AppendBlobClient object identical to the source but with the - * specified snapshot timestamp. - * Provide "" will remove the snapshot and return a Client to the base blob. - * - * @param snapshot - The snapshot timestamp. - * @returns A new AppendBlobClient object identical to the source but with the specified snapshot timestamp. + * Create pipeline to assemble sub requests. The idea here is to use existing + * credential and serialization/deserialization components, with additional policies to + * filter unnecessary headers, assemble sub requests into request's body + * and intercept request from going to wire. + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. */ - withSnapshot(snapshot2) { - return new _AppendBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot2.length === 0 ? void 0 : snapshot2), this.pipeline); + createPipeline(credential) { + const isAnonymousCreds = credential instanceof AnonymousCredential; + const policyFactoryLength = 3 + (isAnonymousCreds ? 0 : 1); + const factories = new Array(policyFactoryLength); + factories[0] = coreHttp.deserializationPolicy(); + factories[1] = new BatchHeaderFilterPolicyFactory(); + if (!isAnonymousCreds) { + factories[2] = coreHttp.isTokenCredential(credential) ? attachCredential(coreHttp.bearerTokenAuthenticationPolicy(credential, StorageOAuthScopes), credential) : credential; + } + factories[policyFactoryLength - 1] = new BatchRequestAssemblePolicyFactory(this); + return new Pipeline(factories, {}); } - /** - * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. - * @see https://docs.microsoft.com/rest/api/storageservices/put-blob - * - * @param options - Options to the Append Block Create operation. 
- * - * - * Example usage: - * - * ```js - * const appendBlobClient = containerClient.getAppendBlobClient(""); - * await appendBlobClient.create(); - * ``` - */ - async create(options = {}) { - var _a, _b, _c; - const { span, updatedOptions } = createSpan("AppendBlobClient-create", options); - options.conditions = options.conditions || {}; - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.appendBlobContext.create(0, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); + appendSubRequestToBody(request) { + this.body += [ + this.subRequestPrefix, + `${HeaderConstants.CONTENT_ID}: ${this.operationCount}`, + "", + `${request.method.toString()} ${getURLPathAndQuery(request.url)} ${HTTP_VERSION_1_1}${HTTP_LINE_ENDING}` + // sub request start line with method + ].join(HTTP_LINE_ENDING); + for (const header of request.headers.headersArray()) { + this.body += `${header.name}: ${header.value}${HTTP_LINE_ENDING}`; } + this.body += HTTP_LINE_ENDING; } - /** - * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. - * If the blob with the same name already exists, the content of the existing blob will remain unchanged. - * @see https://docs.microsoft.com/rest/api/storageservices/put-blob - * - * @param options - - */ - async createIfNotExists(options = {}) { - var _a, _b; - const { span, updatedOptions } = createSpan("AppendBlobClient-createIfNotExists", options); - const conditions = { ifNoneMatch: ETagAny }; - try { - const res = await this.create(Object.assign(Object.assign({}, updatedOptions), { conditions })); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); - } catch (e) { - if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when creating a blob only if it does not already exist." - }); - return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + preAddSubRequest(subRequest) { + if (this.operationCount >= BATCH_MAX_REQUEST) { + throw new RangeError(`Cannot exceed ${BATCH_MAX_REQUEST} sub requests in a single batch`); + } + const path2 = getURLPath(subRequest.url); + if (!path2 || path2 === "") { + throw new RangeError(`Invalid url for sub request: '${subRequest.url}'`); + } + } + postAddSubRequest(subRequest) { + this.subRequests.set(this.operationCount, subRequest); + this.operationCount++; + } + // Return the http request body with assembling the ending line to the sub request body. 
+ getHttpRequestBody() { + return `${this.body}${this.batchRequestEnding}${HTTP_LINE_ENDING}`; + } + getMultipartContentType() { + return this.multipartContentType; + } + getSubRequests() { + return this.subRequests; + } + }; + var BatchRequestAssemblePolicy = class extends coreHttp.BaseRequestPolicy { + static { + __name(this, "BatchRequestAssemblePolicy"); + } + constructor(batchRequest, nextPolicy, options) { + super(nextPolicy, options); + this.dummyResponse = { + request: new coreHttp.WebResource(), + status: 200, + headers: new coreHttp.HttpHeaders() + }; + this.batchRequest = batchRequest; + } + async sendRequest(request) { + await this.batchRequest.appendSubRequestToBody(request); + return this.dummyResponse; + } + }; + var BatchRequestAssemblePolicyFactory = class { + static { + __name(this, "BatchRequestAssemblePolicyFactory"); + } + constructor(batchRequest) { + this.batchRequest = batchRequest; + } + create(nextPolicy, options) { + return new BatchRequestAssemblePolicy(this.batchRequest, nextPolicy, options); + } + }; + var BatchHeaderFilterPolicy = class extends coreHttp.BaseRequestPolicy { + static { + __name(this, "BatchHeaderFilterPolicy"); + } + // The base class has a protected constructor. Adding a public one to enable constructing of this class. + /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ + constructor(nextPolicy, options) { + super(nextPolicy, options); + } + async sendRequest(request) { + let xMsHeaderName = ""; + for (const header of request.headers.headersArray()) { + if (iEqual(header.name, HeaderConstants.X_MS_VERSION)) { + xMsHeaderName = header.name; } - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); + } + if (xMsHeaderName !== "") { + request.headers.remove(xMsHeaderName); + } + return this._nextPolicy.sendRequest(request); + } + }; + var BatchHeaderFilterPolicyFactory = class { + static { + __name(this, "BatchHeaderFilterPolicyFactory"); + } + create(nextPolicy, options) { + return new BatchHeaderFilterPolicy(nextPolicy, options); + } + }; + var BlobBatchClient = class { + static { + __name(this, "BlobBatchClient"); + } + constructor(url2, credentialOrPipeline, options) { + let pipeline; + if (isPipelineLike(credentialOrPipeline)) { + pipeline = credentialOrPipeline; + } else if (!credentialOrPipeline) { + pipeline = newPipeline(new AnonymousCredential(), options); + } else { + pipeline = newPipeline(credentialOrPipeline, options); + } + const storageClientContext = new StorageClientContext(url2, pipeline.toServiceClientOptions()); + const path2 = getURLPath(url2); + if (path2 && path2 !== "/") { + this.serviceOrContainerContext = new Container(storageClientContext); + } else { + this.serviceOrContainerContext = new Service(storageClientContext); } } /** - * Seals the append blob, making it read only. - * - * @param options - + * Creates a {@link BlobBatch}. + * A BlobBatch represents an aggregated set of operations on blobs. */ - async seal(options = {}) { - var _a; - const { span, updatedOptions } = createSpan("AppendBlobClient-seal", options); - options.conditions = options.conditions || {}; - try { - return await this.appendBlobContext.seal(Object.assign({ abortSignal: options.abortSignal, appendPositionAccessConditions: options.conditions, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); + createBatch() { + return new BlobBatch(); + } + async deleteBlobs(urlsOrBlobClients, credentialOrOptions, options) { + const batch = new BlobBatch(); + for (const urlOrBlobClient of urlsOrBlobClients) { + if (typeof urlOrBlobClient === "string") { + await batch.deleteBlob(urlOrBlobClient, credentialOrOptions, options); + } else { + await batch.deleteBlob(urlOrBlobClient, credentialOrOptions); + } + } + return this.submitBatch(batch); + } + async setBlobsAccessTier(urlsOrBlobClients, credentialOrTier, tierOrOptions, options) { + const batch = new BlobBatch(); + for (const urlOrBlobClient of urlsOrBlobClients) { + if (typeof urlOrBlobClient === "string") { + await batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options); + } else { + await batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions); + } } + return this.submitBatch(batch); } /** - * Commits a new block of data to the end of the existing append blob. - * @see https://docs.microsoft.com/rest/api/storageservices/append-block - * - * @param body - Data to be appended. - * @param contentLength - Length of the body in bytes. - * @param options - Options to the Append Block operation. + * Submit batch request which consists of multiple subrequests. * + * Get `blobBatchClient` and other details before running the snippets. + * `blobServiceClient.getBlobBatchClient()` gives the `blobBatchClient` * * Example usage: * * ```js - * const content = "Hello World!"; + * let batchRequest = new BlobBatch(); + * await batchRequest.deleteBlob(urlInString0, credential0); + * await batchRequest.deleteBlob(urlInString1, credential1, { + * deleteSnapshots: "include" + * }); + * const batchResp = await blobBatchClient.submitBatch(batchRequest); + * console.log(batchResp.subResponsesSucceededCount); + * ``` * - * // Create a new append blob and append data to the blob. - * const newAppendBlobClient = containerClient.getAppendBlobClient(""); - * await newAppendBlobClient.create(); - * await newAppendBlobClient.appendBlock(content, content.length); + * Example using a lease: * - * // Append data to an existing append blob. - * const existingAppendBlobClient = containerClient.getAppendBlobClient(""); - * await existingAppendBlobClient.appendBlock(content, content.length); + * ```js + * let batchRequest = new BlobBatch(); + * await batchRequest.setBlobAccessTier(blockBlobClient0, "Cool"); + * await batchRequest.setBlobAccessTier(blockBlobClient1, "Cool", { + * conditions: { leaseId: leaseId } + * }); + * const batchResp = await blobBatchClient.submitBatch(batchRequest); + * console.log(batchResp.subResponsesSucceededCount); * ``` - */ - async appendBlock(body2, contentLength2, options = {}) { - var _a; - const { span, updatedOptions } = createSpan("AppendBlobClient-appendBlock", options); - options.conditions = options.conditions || {}; - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.appendBlobContext.appendBlock(contentLength2, body2, Object.assign({ abortSignal: options.abortSignal, appendPositionAccessConditions: options.conditions, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), requestOptions: { - onUploadProgress: options.onProgress - }, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); - } - } - /** - * The Append Block operation commits a new block of data to the end of an existing append blob - * where the contents are read from a source url. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/append-block-from-url * - * @param sourceURL - - * The url to the blob that will be the source of the copy. A source blob in the same storage account can - * be authenticated via Shared Key. However, if the source is a blob in another account, the source blob - * must either be public or must be authenticated via a shared access signature. If the source blob is - * public, no authentication is required to perform the operation. - * @param sourceOffset - Offset in source to be appended - * @param count - Number of bytes to be appended as a block + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + * + * @param batchRequest - A set of Delete or SetTier operations. * @param options - */ - async appendBlockFromURL(sourceURL, sourceOffset, count, options = {}) { - var _a; - const { span, updatedOptions } = createSpan("AppendBlobClient-appendBlockFromURL", options); - options.conditions = options.conditions || {}; - options.sourceConditions = options.sourceConditions || {}; + async submitBatch(batchRequest, options = {}) { + if (!batchRequest || batchRequest.getSubRequests().size === 0) { + throw new RangeError("Batch request should contain one or more sub requests."); + } + const { span, updatedOptions } = createSpan("BlobBatchClient-submitBatch", options); try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.appendBlobContext.appendBlockFromUrl(sourceURL, 0, Object.assign({ abortSignal: options.abortSignal, sourceRange: rangeToString({ offset: sourceOffset, count }), sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, leaseAccessConditions: options.conditions, appendPositionAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { - sourceIfMatch: options.sourceConditions.ifMatch, - sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, - sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, - sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince - }, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + const batchRequestBody = batchRequest.getHttpRequestBody(); + const rawBatchResponse = await this.serviceOrContainerContext.submitBatch(utf8ByteLength(batchRequestBody), batchRequest.getMultiPartContentType(), batchRequestBody, Object.assign(Object.assign({}, options), convertTracingToRequestOptionsBase(updatedOptions))); + const batchResponseParser = new BatchResponseParser(rawBatchResponse, batchRequest.getSubRequests()); + const responseSummary = await batchResponseParser.parseBatchResponse(); + const res = { + _response: rawBatchResponse._response, + contentType: rawBatchResponse.contentType, + errorCode: rawBatchResponse.errorCode, + requestId: rawBatchResponse.requestId, + clientRequestId: rawBatchResponse.clientRequestId, + version: rawBatchResponse.version, + subResponses: responseSummary.subResponses, + subResponsesSucceededCount: responseSummary.subResponsesSucceededCount, + subResponsesFailedCount: responseSummary.subResponsesFailedCount + }; + return res; } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, @@ -85720,11 +87232,11 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } }; - var BlockBlobClient = class _BlockBlobClient extends BlobClient { + var ContainerClient = class extends StorageClient { static { - __name(this, "BlockBlobClient"); + __name(this, "ContainerClient"); } - constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) { + constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, options) { let pipeline; let url2; options = options || {}; @@ -85733,22 +87245,17 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; pipeline = credentialOrPipelineOrContainerName; } else if (coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential || credentialOrPipelineOrContainerName instanceof AnonymousCredential || coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) { url2 = urlOrConnectionString; - options = blobNameOrOptions; pipeline = newPipeline(credentialOrPipelineOrContainerName, options); } else if (!credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName !== "string") { url2 = urlOrConnectionString; - if (blobNameOrOptions && typeof blobNameOrOptions !== "string") { - options = blobNameOrOptions; - } pipeline = newPipeline(new AnonymousCredential(), options); - } else if (credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName === "string" && blobNameOrOptions && typeof blobNameOrOptions === "string") { + } else if (credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName === "string") { const containerName = credentialOrPipelineOrContainerName; - const blobName = blobNameOrOptions; const extractedCreds = extractConnectionStringParts(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") { if (coreHttp.isNode) { const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, 
extractedCreds.accountKey); - url2 = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); + url2 = appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)); if (!options.proxyOptions) { options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); } @@ -85757,125 +87264,45 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; throw new Error("Account connection string is only supported in Node.js environment"); } } else if (extractedCreds.kind === "SASConnString") { - url2 = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + "?" + extractedCreds.accountSas; + url2 = appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)) + "?" + extractedCreds.accountSas; pipeline = newPipeline(new AnonymousCredential(), options); } else { throw new Error("Connection string must be either an Account connection string or a SAS connection string"); } } else { - throw new Error("Expecting non-empty strings for containerName and blobName parameters"); + throw new Error("Expecting non-empty strings for containerName parameter"); } super(url2, pipeline); - this.blockBlobContext = new BlockBlob(this.storageClientContext); - this._blobContext = new Blob$1(this.storageClientContext); - } - /** - * Creates a new BlockBlobClient object identical to the source but with the - * specified snapshot timestamp. - * Provide "" will remove the snapshot and return a URL to the base blob. - * - * @param snapshot - The snapshot timestamp. - * @returns A new BlockBlobClient object identical to the source but with the specified snapshot timestamp. - */ - withSnapshot(snapshot2) { - return new _BlockBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot2.length === 0 ? void 0 : snapshot2), this.pipeline); + this._containerName = this.getContainerNameFromUrl(); + this.containerContext = new Container(this.storageClientContext); } /** - * ONLY AVAILABLE IN NODE.JS RUNTIME. - * - * Quick query for a JSON or CSV formatted blob. - * - * Example usage (Node.js): - * - * ```js - * // Query and convert a blob to a string - * const queryBlockBlobResponse = await blockBlobClient.query("select * from BlobStorage"); - * const downloaded = (await streamToBuffer(queryBlockBlobResponse.readableStreamBody)).toString(); - * console.log("Query blob content:", downloaded); - * - * async function streamToBuffer(readableStream) { - * return new Promise((resolve, reject) => { - * const chunks = []; - * readableStream.on("data", (data) => { - * chunks.push(data instanceof Buffer ? data : Buffer.from(data)); - * }); - * readableStream.on("end", () => { - * resolve(Buffer.concat(chunks)); - * }); - * readableStream.on("error", reject); - * }); - * } - * ``` - * - * @param query - - * @param options - + * The name of the container. 
*/ - async query(query, options = {}) { - var _a; - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - const { span, updatedOptions } = createSpan("BlockBlobClient-query", options); - try { - if (!coreHttp.isNode) { - throw new Error("This operation currently is only supported in Node.js."); - } - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - const response = await this._blobContext.query(Object.assign({ abortSignal: options.abortSignal, queryRequest: { - queryType: "SQL", - expression: query, - inputSerialization: toQuerySerialization(options.inputTextConfiguration), - outputSerialization: toQuerySerialization(options.outputTextConfiguration) - }, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey }, convertTracingToRequestOptionsBase(updatedOptions))); - return new BlobQueryResponse(response, { - abortSignal: options.abortSignal, - onProgress: options.onProgress, - onError: options.onError - }); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); - } + get containerName() { + return this._containerName; } /** - * Creates a new block blob, or updates the content of an existing block blob. - * Updating an existing block blob overwrites any existing metadata on the blob. - * Partial updates are not supported; the content of the existing blob is - * overwritten with the new content. To perform a partial update of a block blob's, - * use {@link stageBlock} and {@link commitBlockList}. - * - * This is a non-parallel uploading method, please use {@link uploadFile}, - * {@link uploadStream} or {@link uploadBrowserData} for better performance - * with concurrency uploading. + * Creates a new container under the specified account. If the container with + * the same name already exists, the operation fails. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container + * Naming rules: @see https://learn.microsoft.com/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata * - * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * @param options - Options to Container Create operation. * - * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function - * which returns a new Readable stream whose offset is from data source beginning. - * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a - * string including non non-Base64/Hex-encoded characters. - * @param options - Options to the Block Blob Upload operation. - * @returns Response data for the Block Blob Upload operation. 
* * Example usage: * * ```js - * const content = "Hello world!"; - * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); + * const containerClient = blobServiceClient.getContainerClient(""); + * const createContainerResponse = await containerClient.create(); + * console.log("Container was created successfully", createContainerResponse.requestId); * ``` */ - async upload(body2, contentLength2, options = {}) { - var _a, _b, _c; - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("BlockBlobClient-upload", options); + async create(options = {}) { + const { span, updatedOptions } = createSpan("ContainerClient-create", options); try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blockBlobContext.upload(contentLength2, body2, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { - onUploadProgress: options.onProgress - }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); + return await this.containerContext.create(Object.assign(Object.assign({}, options), convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, @@ -85887,37 +87314,27 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } /** - * Creates a new Block Blob where the contents of the blob are read from a given URL. - * This API is supported beginning with the 2020-04-08 version. Partial updates - * are not supported with Put Blob from URL; the content of an existing blob is overwritten with - * the content of the new blob. To perform partial updates to a block blob’s contents using a - * source URL, use {@link stageBlockFromURL} and {@link commitBlockList}. + * Creates a new container under the specified account. If the container with + * the same name already exists, it is not changed. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container + * Naming rules: @see https://learn.microsoft.com/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata * - * @param sourceURL - Specifies the URL of the blob. The value - * may be a URL of up to 2 KB in length that specifies a blob. - * The value should be URL-encoded as it would appear - * in a request URI. The source blob must either be public - * or must be authenticated via a shared access signature. - * If the source blob is public, no authentication is required - * to perform the operation. Here are some examples of source object URLs: - * - https://myaccount.blob.core.windows.net/mycontainer/myblob - * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= - * @param options - Optional parameters. 
+ * @param options - */ - async syncUploadFromURL(sourceURL, options = {}) { - var _a, _b, _c, _d, _e; - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("BlockBlobClient-syncUploadFromURL", options); + async createIfNotExists(options = {}) { + var _a, _b; + const { span, updatedOptions } = createSpan("ContainerClient-createIfNotExists", options); try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blockBlobContext.putBlobFromUrl(0, sourceURL, Object.assign(Object.assign(Object.assign({}, options), { blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: options.conditions.tagConditions }), sourceModifiedAccessConditions: { - sourceIfMatch: (_a = options.sourceConditions) === null || _a === void 0 ? void 0 : _a.ifMatch, - sourceIfModifiedSince: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifModifiedSince, - sourceIfNoneMatch: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch, - sourceIfUnmodifiedSince: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifUnmodifiedSince, - sourceIfTags: (_e = options.sourceConditions) === null || _e === void 0 ? void 0 : _e.tagConditions - }, cpkInfo: options.customerProvidedKey, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), copySourceTags: options.copySourceTags }), convertTracingToRequestOptionsBase(updatedOptions))); + const res = await this.create(updatedOptions); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); } catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerAlreadyExists") { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: "Expected exception when creating a container only if it does not already exist." + }); + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, message: e.message @@ -85928,89 +87345,30 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } /** - * Uploads the specified block to the block blob's "staging area" to be later - * committed by a call to commitBlockList. - * @see https://docs.microsoft.com/rest/api/storageservices/put-block + * Returns true if the Azure container resource represented by this client exists; false otherwise. * - * @param blockId - A 64-byte value that is base64-encoded - * @param body - Data to upload to the staging area. - * @param contentLength - Number of bytes to upload. - * @param options - Options to the Block Blob Stage Block operation. - * @returns Response data for the Block Blob Stage Block operation. 
- */ - async stageBlock(blockId2, body2, contentLength2, options = {}) { - const { span, updatedOptions } = createSpan("BlockBlobClient-stageBlock", options); - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blockBlobContext.stageBlock(blockId2, contentLength2, body2, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, requestOptions: { - onUploadProgress: options.onProgress - }, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); - } - } - /** - * The Stage Block From URL operation creates a new block to be committed as part - * of a blob where the contents are read from a URL. - * This API is available starting in version 2018-03-28. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-block-from-url + * NOTE: use this function with care since an existing container might be deleted by other clients or + * applications. Vice versa, new containers with the same name might be added by other clients or + * applications after this function completes. * - * @param blockId - A 64-byte value that is base64-encoded - * @param sourceURL - Specifies the URL of the blob. The value - * may be a URL of up to 2 KB in length that specifies a blob. - * The value should be URL-encoded as it would appear - * in a request URI. The source blob must either be public - * or must be authenticated via a shared access signature. - * If the source blob is public, no authentication is required - * to perform the operation. Here are some examples of source object URLs: - * - https://myaccount.blob.core.windows.net/mycontainer/myblob - * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= - * @param offset - From which position of the blob to download, greater than or equal to 0 - * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined - * @param options - Options to the Block Blob Stage Block From URL operation. - * @returns Response data for the Block Blob Stage Block From URL operation. + * @param options - */ - async stageBlockFromURL(blockId2, sourceURL, offset = 0, count, options = {}) { - const { span, updatedOptions } = createSpan("BlockBlobClient-stageBlockFromURL", options); + async exists(options = {}) { + const { span, updatedOptions } = createSpan("ContainerClient-exists", options); try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blockBlobContext.stageBlockFromURL(blockId2, 0, sourceURL, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, sourceRange: offset === 0 && !count ? 
void 0 : rangeToString({ offset, count }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization) }, convertTracingToRequestOptionsBase(updatedOptions))); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + await this.getProperties({ + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions }); - throw e; - } finally { - span.end(); - } - } - /** - * Writes a blob by specifying the list of block IDs that make up the blob. - * In order to be written as part of a blob, a block must have been successfully written - * to the server in a prior {@link stageBlock} operation. You can call {@link commitBlockList} to - * update a blob by uploading only those blocks that have changed, then committing the new and existing - * blocks together. Any blocks not specified in the block list and permanently deleted. - * @see https://docs.microsoft.com/rest/api/storageservices/put-block-list - * - * @param blocks - Array of 64-byte value that is base64-encoded - * @param options - Options to the Block Blob Commit Block List operation. - * @returns Response data for the Block Blob Commit Block List operation. - */ - async commitBlockList(blocks2, options = {}) { - var _a, _b, _c; - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("BlockBlobClient-commitBlockList", options); - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blockBlobContext.commitBlockList({ latest: blocks2 }, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); + return true; } catch (e) { + if (e.statusCode === 404) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: "Expected exception when checking container existence" + }); + return false; + } span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, message: e.message @@ -86021,196 +87379,67 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } /** - * Returns the list of blocks that have been uploaded as part of a block blob - * using the specified block list filter. - * @see https://docs.microsoft.com/rest/api/storageservices/get-block-list + * Creates a {@link BlobClient} * - * @param listType - Specifies whether to return the list of committed blocks, - * the list of uncommitted blocks, or both lists together. - * @param options - Options to the Block Blob Get Block List operation. - * @returns Response data for the Block Blob Get Block List operation. + * @param blobName - A blob name + * @returns A new BlobClient object for the given blob name. 
*/ - async getBlockList(listType2, options = {}) { - var _a; - const { span, updatedOptions } = createSpan("BlockBlobClient-getBlockList", options); - try { - const res = await this.blockBlobContext.getBlockList(listType2, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - if (!res.committedBlocks) { - res.committedBlocks = []; - } - if (!res.uncommittedBlocks) { - res.uncommittedBlocks = []; - } - return res; - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); - } + getBlobClient(blobName) { + return new BlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline); } - // High level functions /** - * Uploads a Buffer(Node.js)/Blob(browsers)/ArrayBuffer/ArrayBufferView object to a BlockBlob. - * - * When data length is no more than the specifiled {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is - * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload. - * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList} - * to commit the block list. - * - * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is - * `blobContentType`, enabling the browser to provide - * functionality based on file type. + * Creates an {@link AppendBlobClient} * - * @param data - Buffer(Node.js), Blob, ArrayBuffer or ArrayBufferView - * @param options - + * @param blobName - An append blob name */ - async uploadData(data, options = {}) { - const { span, updatedOptions } = createSpan("BlockBlobClient-uploadData", options); - try { - if (coreHttp.isNode) { - let buffer; - if (data instanceof Buffer) { - buffer = data; - } else if (data instanceof ArrayBuffer) { - buffer = Buffer.from(data); - } else { - data = data; - buffer = Buffer.from(data.buffer, data.byteOffset, data.byteLength); - } - return this.uploadSeekableInternal((offset, size) => buffer.slice(offset, offset + size), buffer.byteLength, updatedOptions); - } else { - const browserBlob = new Blob([data]); - return this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions); - } - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); - } + getAppendBlobClient(blobName) { + return new AppendBlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline); } /** - * ONLY AVAILABLE IN BROWSERS. + * Creates a {@link BlockBlobClient} * - * Uploads a browser Blob/File/ArrayBuffer/ArrayBufferView object to block blob. + * @param blobName - A block blob name * - * When buffer length lesser than or equal to 256MB, this method will use 1 upload call to finish the upload. - * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call - * {@link commitBlockList} to commit the block list. * - * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is - * `blobContentType`, enabling the browser to provide - * functionality based on file type. + * Example usage: * - * @deprecated Use {@link uploadData} instead. 
+ * ```js + * const content = "Hello world!"; * - * @param browserData - Blob, File, ArrayBuffer or ArrayBufferView - * @param options - Options to upload browser data. - * @returns Response data for the Blob Upload operation. + * const blockBlobClient = containerClient.getBlockBlobClient(""); + * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); + * ``` */ - async uploadBrowserData(browserData, options = {}) { - const { span, updatedOptions } = createSpan("BlockBlobClient-uploadBrowserData", options); - try { - const browserBlob = new Blob([browserData]); - return await this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); - } + getBlockBlobClient(blobName) { + return new BlockBlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline); } /** + * Creates a {@link PageBlobClient} * - * Uploads data to block blob. Requires a bodyFactory as the data source, - * which need to return a {@link HttpRequestBody} object with the offset and size provided. + * @param blobName - A page blob name + */ + getPageBlobClient(blobName) { + return new PageBlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline); + } + /** + * Returns all user-defined metadata and system properties for the specified + * container. The data returned does not include the container's list of blobs. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-properties * - * When data length is no more than the specified {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is - * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload. - * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList} - * to commit the block list. + * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if + * they originally contained uppercase characters. This differs from the metadata keys returned by + * the `listContainers` method of {@link BlobServiceClient} using the `includeMetadata` option, which + * will retain their original casing. * - * @param bodyFactory - - * @param size - size of the data to upload. - * @param options - Options to Upload to Block Blob operation. - * @returns Response data for the Blob Upload operation. 
- */ - async uploadSeekableInternal(bodyFactory, size, options = {}) { - if (!options.blockSize) { - options.blockSize = 0; - } - if (options.blockSize < 0 || options.blockSize > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES) { - throw new RangeError(`blockSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES}`); - } - if (options.maxSingleShotSize !== 0 && !options.maxSingleShotSize) { - options.maxSingleShotSize = BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES; - } - if (options.maxSingleShotSize < 0 || options.maxSingleShotSize > BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES) { - throw new RangeError(`maxSingleShotSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}`); - } - if (options.blockSize === 0) { - if (size > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES * BLOCK_BLOB_MAX_BLOCKS) { - throw new RangeError(`${size} is too larger to upload to a block blob.`); - } - if (size > options.maxSingleShotSize) { - options.blockSize = Math.ceil(size / BLOCK_BLOB_MAX_BLOCKS); - if (options.blockSize < DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES) { - options.blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; - } - } - } - if (!options.blobHTTPHeaders) { - options.blobHTTPHeaders = {}; - } - if (!options.conditions) { - options.conditions = {}; - } - const { span, updatedOptions } = createSpan("BlockBlobClient-uploadSeekableInternal", options); - try { - if (size <= options.maxSingleShotSize) { - return await this.upload(bodyFactory(0, size), size, updatedOptions); - } - const numBlocks = Math.floor((size - 1) / options.blockSize) + 1; - if (numBlocks > BLOCK_BLOB_MAX_BLOCKS) { - throw new RangeError(`The buffer's size is too big or the BlockSize is too small;the number of blocks must be <= ${BLOCK_BLOB_MAX_BLOCKS}`); - } - const blockList = []; - const blockIDPrefix = coreHttp.generateUuid(); - let transferProgress = 0; - const batch = new Batch(options.concurrency); - for (let i = 0; i < numBlocks; i++) { - batch.addOperation(async () => { - const blockID = generateBlockID(blockIDPrefix, i); - const start = options.blockSize * i; - const end = i === numBlocks - 1 ? size : start + options.blockSize; - const contentLength2 = end - start; - blockList.push(blockID); - await this.stageBlock(blockID, bodyFactory(start, contentLength2), contentLength2, { - abortSignal: options.abortSignal, - conditions: options.conditions, - encryptionScope: options.encryptionScope, - tracingOptions: updatedOptions.tracingOptions - }); - transferProgress += contentLength2; - if (options.onProgress) { - options.onProgress({ - loadedBytes: transferProgress - }); - } - }); - } - await batch.do(); - return this.commitBlockList(blockList, updatedOptions); + * @param options - Options to Container Get Properties operation. + */ + async getProperties(options = {}) { + if (!options.conditions) { + options.conditions = {}; + } + const { span, updatedOptions } = createSpan("ContainerClient-getProperties", options); + try { + return await this.containerContext.getProperties(Object.assign(Object.assign({ abortSignal: options.abortSignal }, options.conditions), convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, @@ -86222,29 +87451,19 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } /** - * ONLY AVAILABLE IN NODE.JS RUNTIME. - * - * Uploads a local file in blocks to a block blob. - * - * When file size lesser than or equal to 256MB, this method will use 1 upload call to finish the upload. 
- * Otherwise, this method will call stageBlock to upload blocks, and finally call commitBlockList - * to commit the block list. + * Marks the specified container for deletion. The container and any blobs + * contained within it are later deleted during garbage collection. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container * - * @param filePath - Full path of local file - * @param options - Options to Upload to Block Blob operation. - * @returns Response data for the Blob Upload operation. + * @param options - Options to Container Delete operation. */ - async uploadFile(filePath, options = {}) { - const { span, updatedOptions } = createSpan("BlockBlobClient-uploadFile", options); + async delete(options = {}) { + if (!options.conditions) { + options.conditions = {}; + } + const { span, updatedOptions } = createSpan("ContainerClient-delete", options); try { - const size = (await fsStat(filePath)).size; - return await this.uploadSeekableInternal((offset, count) => { - return () => fsCreateReadStream(filePath, { - autoClose: true, - end: count ? offset + count - 1 : Infinity, - start: offset - }); - }, size, Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); + return await this.containerContext.delete(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, @@ -86256,61 +87475,26 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } /** - * ONLY AVAILABLE IN NODE.JS RUNTIME. - * - * Uploads a Node.js Readable stream into block blob. - * - * PERFORMANCE IMPROVEMENT TIPS: - * * Input stream highWaterMark is better to set a same value with bufferSize - * parameter, which will avoid Buffer.concat() operations. + * Marks the specified container for deletion if it exists. The container and any blobs + * contained within it are later deleted during garbage collection. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container * - * @param stream - Node.js Readable stream - * @param bufferSize - Size of every buffer allocated, also the block size in the uploaded block blob. Default value is 8MB - * @param maxConcurrency - Max concurrency indicates the max number of buffers that can be allocated, - * positive correlation with max uploading concurrency. Default value is 5 - * @param options - Options to Upload Stream to Block Blob operation. - * @returns Response data for the Blob Upload operation. + * @param options - Options to Container Delete operation. 
*/ - async uploadStream(stream2, bufferSize = DEFAULT_BLOCK_BUFFER_SIZE_BYTES, maxConcurrency = 5, options = {}) { - if (!options.blobHTTPHeaders) { - options.blobHTTPHeaders = {}; - } - if (!options.conditions) { - options.conditions = {}; - } - const { span, updatedOptions } = createSpan("BlockBlobClient-uploadStream", options); + async deleteIfExists(options = {}) { + var _a, _b; + const { span, updatedOptions } = createSpan("ContainerClient-deleteIfExists", options); try { - let blockNum = 0; - const blockIDPrefix = coreHttp.generateUuid(); - let transferProgress = 0; - const blockList = []; - const scheduler = new BufferScheduler( - stream2, - bufferSize, - maxConcurrency, - async (body2, length) => { - const blockID = generateBlockID(blockIDPrefix, blockNum); - blockList.push(blockID); - blockNum++; - await this.stageBlock(blockID, body2, length, { - conditions: options.conditions, - encryptionScope: options.encryptionScope, - tracingOptions: updatedOptions.tracingOptions - }); - transferProgress += length; - if (options.onProgress) { - options.onProgress({ loadedBytes: transferProgress }); - } - }, - // concurrency should set a smaller value than maxConcurrency, which is helpful to - // reduce the possibility when a outgoing handler waits for stream data, in - // this situation, outgoing handlers are blocked. - // Outgoing queue shouldn't be empty. - Math.ceil(maxConcurrency / 4 * 3) - ); - await scheduler.do(); - return await this.commitBlockList(blockList, Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); + const res = await this.delete(updatedOptions); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); } catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerNotFound") { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: "Expected exception when deleting a container only if it exists." + }); + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? 
void 0 : _b.parsedHeaders), { _response: e.response }); + } span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, message: e.message @@ -86320,79 +87504,28 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; span.end(); } } - }; - var PageBlobClient = class _PageBlobClient extends BlobClient { - static { - __name(this, "PageBlobClient"); - } - constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, options) { - let pipeline; - let url2; - options = options || {}; - if (isPipelineLike(credentialOrPipelineOrContainerName)) { - url2 = urlOrConnectionString; - pipeline = credentialOrPipelineOrContainerName; - } else if (coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential || credentialOrPipelineOrContainerName instanceof AnonymousCredential || coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) { - url2 = urlOrConnectionString; - options = blobNameOrOptions; - pipeline = newPipeline(credentialOrPipelineOrContainerName, options); - } else if (!credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName !== "string") { - url2 = urlOrConnectionString; - pipeline = newPipeline(new AnonymousCredential(), options); - } else if (credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName === "string" && blobNameOrOptions && typeof blobNameOrOptions === "string") { - const containerName = credentialOrPipelineOrContainerName; - const blobName = blobNameOrOptions; - const extractedCreds = extractConnectionStringParts(urlOrConnectionString); - if (extractedCreds.kind === "AccountConnString") { - if (coreHttp.isNode) { - const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); - url2 = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); - if (!options.proxyOptions) { - options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); - } - pipeline = newPipeline(sharedKeyCredential, options); - } else { - throw new Error("Account connection string is only supported in Node.js environment"); - } - } else if (extractedCreds.kind === "SASConnString") { - url2 = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + "?" + extractedCreds.accountSas; - pipeline = newPipeline(new AnonymousCredential(), options); - } else { - throw new Error("Connection string must be either an Account connection string or a SAS connection string"); - } - } else { - throw new Error("Expecting non-empty strings for containerName and blobName parameters"); - } - super(url2, pipeline); - this.pageBlobContext = new PageBlob(this.storageClientContext); - } /** - * Creates a new PageBlobClient object identical to the source but with the - * specified snapshot timestamp. - * Provide "" will remove the snapshot and return a Client to the base blob. + * Sets one or more user-defined name-value pairs for the specified container. * - * @param snapshot - The snapshot timestamp. - * @returns A new PageBlobClient object identical to the source but with the specified snapshot timestamp. - */ - withSnapshot(snapshot2) { - return new _PageBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot2.length === 0 ? void 0 : snapshot2), this.pipeline); - } - /** - * Creates a page blob of the specified length. Call uploadPages to upload data - * data to a page blob. 
- * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * If no option is provided, or if no metadata is defined in the parameter, the container + * metadata will be removed. * - * @param size - size of the page blob. - * @param options - Options to the Page Blob Create operation. - * @returns Response data for the Page Blob Create operation. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-metadata + * + * @param metadata - Replace existing metadata with this value. + * If no value is provided, the existing metadata will be removed. + * @param options - Options to Container Set Metadata operation. */ - async create(size, options = {}) { - var _a, _b, _c; - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-create", options); + async setMetadata(metadata2, options = {}) { + if (!options.conditions) { + options.conditions = {}; + } + if (options.conditions.ifUnmodifiedSince) { + throw new RangeError("the IfUnmodifiedSince must have their default values because they are ignored by the blob service"); + } + const { span, updatedOptions } = createSpan("ContainerClient-setMetadata", options); try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.pageBlobContext.create(0, size, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, blobSequenceNumber: options.blobSequenceNumber, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); + return await this.containerContext.setMetadata(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: metadata2, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, @@ -86404,29 +87537,55 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } /** - * Creates a page blob of the specified length. Call uploadPages to upload data - * data to a page blob. If the blob with the same name already exists, the content - * of the existing blob will remain unchanged. - * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * Gets the permissions for the specified container. The permissions indicate + * whether container data may be accessed publicly. * - * @param size - size of the page blob. - * @param options - + * WARNING: JavaScript Date will potentially lose precision when parsing startsOn and expiresOn strings. + * For example, new Date("2018-12-31T03:44:23.8827891Z").toISOString() will get "2018-12-31T03:44:23.882Z". + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-acl + * + * @param options - Options to Container Get Access Policy operation. 
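+ *
+ * Example usage (a minimal sketch, assuming an existing `containerClient` instance):
+ *
+ * ```js
+ * const { blobPublicAccess, signedIdentifiers } = await containerClient.getAccessPolicy();
+ * console.log("Public access level:", blobPublicAccess);
+ * for (const identifier of signedIdentifiers) {
+ *   console.log(`Policy ${identifier.id}:`, identifier.accessPolicy);
+ * }
+ * ```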
*/ - async createIfNotExists(size, options = {}) { - var _a, _b; - const { span, updatedOptions } = createSpan("PageBlobClient-createIfNotExists", options); + async getAccessPolicy(options = {}) { + if (!options.conditions) { + options.conditions = {}; + } + const { span, updatedOptions } = createSpan("ContainerClient-getAccessPolicy", options); try { - const conditions = { ifNoneMatch: ETagAny }; - const res = await this.create(size, Object.assign(Object.assign({}, options), { conditions, tracingOptions: updatedOptions.tracingOptions })); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); - } catch (e) { - if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when creating a blob only if it does not already exist." + const response = await this.containerContext.getAccessPolicy(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); + const res = { + _response: response._response, + blobPublicAccess: response.blobPublicAccess, + date: response.date, + etag: response.etag, + errorCode: response.errorCode, + lastModified: response.lastModified, + requestId: response.requestId, + clientRequestId: response.clientRequestId, + signedIdentifiers: [], + version: response.version + }; + for (const identifier of response) { + let accessPolicy = void 0; + if (identifier.accessPolicy) { + accessPolicy = { + permissions: identifier.accessPolicy.permissions + }; + if (identifier.accessPolicy.expiresOn) { + accessPolicy.expiresOn = new Date(identifier.accessPolicy.expiresOn); + } + if (identifier.accessPolicy.startsOn) { + accessPolicy.startsOn = new Date(identifier.accessPolicy.startsOn); + } + } + res.signedIdentifiers.push({ + accessPolicy, + id: identifier.id }); - return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); } + return res; + } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, message: e.message @@ -86437,24 +87596,38 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } /** - * Writes 1 or more pages to the page blob. The start and end offsets must be a multiple of 512. - * @see https://docs.microsoft.com/rest/api/storageservices/put-page + * Sets the permissions for the specified container. The permissions indicate + * whether blobs in a container may be accessed publicly. * - * @param body - Data to upload - * @param offset - Offset of destination page blob - * @param count - Content length of the body, also number of bytes to be uploaded - * @param options - Options to the Page Blob Upload Pages operation. - * @returns Response data for the Page Blob Upload Pages operation. + * When you set permissions for a container, the existing permissions are replaced. + * If no access or containerAcl provided, the existing container ACL will be + * removed. + * + * When you establish a stored access policy on a container, it may take up to 30 seconds to take effect. + * During this interval, a shared access signature that is associated with the stored access policy will + * fail with status code 403 (Forbidden), until the access policy becomes active. 
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-acl + * + * @param access - The level of public access to data in the container. + * @param containerAcl - Array of elements each having a unique Id and details of the access policy. + * @param options - Options to Container Set Access Policy operation. */ - async uploadPages(body2, offset, count, options = {}) { - var _a; + async setAccessPolicy(access2, containerAcl2, options = {}) { options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-uploadPages", options); + const { span, updatedOptions } = createSpan("ContainerClient-setAccessPolicy", options); try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.pageBlobContext.uploadPages(count, body2, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { - onUploadProgress: options.onProgress - }, range: rangeToString({ offset, count }), sequenceNumberAccessConditions: options.conditions, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + const acl = []; + for (const identifier of containerAcl2 || []) { + acl.push({ + accessPolicy: { + expiresOn: identifier.accessPolicy.expiresOn ? truncatedISO8061Date(identifier.accessPolicy.expiresOn) : "", + permissions: identifier.accessPolicy.permissions, + startsOn: identifier.accessPolicy.startsOn ? truncatedISO8061Date(identifier.accessPolicy.startsOn) : "" + }, + id: identifier.id + }); + } + return await this.containerContext.setAccessPolicy(Object.assign({ abortSignal: options.abortSignal, access: access2, containerAcl: acl, leaseAccessConditions: options.conditions, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, @@ -86466,29 +87639,45 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } /** - * The Upload Pages operation writes a range of pages to a page blob where the - * contents are read from a URL. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-page-from-url + * Get a {@link BlobLeaseClient} that manages leases on the container. * - * @param sourceURL - Specify a URL to the copy source, Shared Access Signature(SAS) maybe needed for authentication - * @param sourceOffset - The source offset to copy from. Pass 0 to copy from the beginning of source page blob - * @param destOffset - Offset of destination page blob - * @param count - Number of bytes to be uploaded from source page blob - * @param options - + * @param proposeLeaseId - Initial proposed lease Id. + * @returns A new BlobLeaseClient object for managing leases on the container. 
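+ *
+ * Example usage (a minimal sketch, assuming an existing `containerClient`; 30 is an arbitrary lease duration in seconds):
+ *
+ * ```js
+ * const leaseClient = containerClient.getBlobLeaseClient();
+ * await leaseClient.acquireLease(30);
+ * // ... operate on the leased container ...
+ * await leaseClient.releaseLease();
+ * ```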
*/ - async uploadPagesFromURL(sourceURL, sourceOffset, destOffset, count, options = {}) { - var _a; - options.conditions = options.conditions || {}; - options.sourceConditions = options.sourceConditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-uploadPagesFromURL", options); + getBlobLeaseClient(proposeLeaseId) { + return new BlobLeaseClient(this, proposeLeaseId); + } + /** + * Creates a new block blob, or updates the content of an existing block blob. + * + * Updating an existing block blob overwrites any existing metadata on the blob. + * Partial updates are not supported; the content of the existing blob is + * overwritten with the new content. To perform a partial update of a block blob's contents, + * use {@link BlockBlobClient.stageBlock} and {@link BlockBlobClient.commitBlockList}. + * + * This is a non-parallel uploading method; please use {@link BlockBlobClient.uploadFile}, + * {@link BlockBlobClient.uploadStream} or {@link BlockBlobClient.uploadBrowserData} for better + * performance with concurrency uploading. + * + * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * + * @param blobName - Name of the block blob to create or update. + * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function + * which returns a new Readable stream whose offset is from data source beginning. + * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a + * string including non-Base64/Hex-encoded characters. + * @param options - Options to configure the Block Blob Upload operation. + * @returns Block Blob upload response data and the corresponding BlockBlobClient instance. + */ + async uploadBlockBlob(blobName, body2, contentLength2, options = {}) { + const { span, updatedOptions } = createSpan("ContainerClient-uploadBlockBlob", options); try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.pageBlobContext.uploadPagesFromURL(sourceURL, rangeToString({ offset: sourceOffset, count }), 0, rangeToString({ offset: destOffset, count }), Object.assign({ abortSignal: options.abortSignal, sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, leaseAccessConditions: options.conditions, sequenceNumberAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { - sourceIfMatch: options.sourceConditions.ifMatch, - sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, - sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, - sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince - }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization) }, convertTracingToRequestOptionsBase(updatedOptions))); + const blockBlobClient = this.getBlockBlobClient(blobName); + const response = await blockBlobClient.upload(body2, contentLength2, updatedOptions); + return { + blockBlobClient, + response + }; } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, @@ -86500,20 +87689,24 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } /** - * Frees the specified pages from the page blob. - * @see https://docs.microsoft.com/rest/api/storageservices/put-page + * Marks the specified blob or snapshot for deletion. 
The blob is later deleted + * during garbage collection. Note that in order to delete a blob, you must delete + * all of its snapshots. You can delete both at the same time with the Delete + * Blob operation. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob * - * @param offset - Starting byte position of the pages to clear. - * @param count - Number of bytes to clear. - * @param options - Options to the Page Blob Clear Pages operation. - * @returns Response data for the Page Blob Clear Pages operation. + * @param blobName - + * @param options - Options to Blob Delete operation. + * @returns Block blob deletion response data. */ - async clearPages(offset = 0, count, options = {}) { - var _a; - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-clearPages", options); + async deleteBlob(blobName, options = {}) { + const { span, updatedOptions } = createSpan("ContainerClient-deleteBlob", options); try { - return await this.pageBlobContext.clearPages(0, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), range: rangeToString({ offset, count }), sequenceNumberAccessConditions: options.conditions, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); + let blobClient = this.getBlobClient(blobName); + if (options.versionId) { + blobClient = blobClient.withVersion(options.versionId); + } + return await blobClient.delete(updatedOptions); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, @@ -86525,20 +87718,24 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } /** - * Returns the list of valid page ranges for a page blob or snapshot of a page blob. - * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * listBlobFlatSegment returns a single segment of blobs starting from the + * specified Marker. Use an empty Marker to start enumeration from the beginning. + * After getting a segment, process it, and then call listBlobsFlatSegment again + * (passing the previously-returned Marker) to get the next segment. + * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs * - * @param offset - Starting byte position of the page ranges. - * @param count - Number of bytes to get. - * @param options - Options to the Page Blob Get Ranges operation. - * @returns Response data for the Page Blob Get Ranges operation. + * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. + * @param options - Options to Container List Blob Flat Segment operation. */ - async getPageRanges(offset = 0, count, options = {}) { - var _a; - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-getPageRanges", options); + async listBlobFlatSegment(marker2, options = {}) { + const { span, updatedOptions } = createSpan("ContainerClient-listBlobFlatSegment", options); try { - return await this.pageBlobContext.getPageRanges(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), range: rangeToString({ offset, count }) }, convertTracingToRequestOptionsBase(updatedOptions))).then(rangeResponseFromModel); + const response = await this.containerContext.listBlobFlatSegment(Object.assign(Object.assign({ marker: marker2 }, options), convertTracingToRequestOptionsBase(updatedOptions))); + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobFlat(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name), tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) }); + return blobItem; + }) }) }); + return wrappedResponse; } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, @@ -86550,22 +87747,29 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } /** - * getPageRangesSegment returns a single segment of page ranges starting from the - * specified Marker. Use an empty Marker to start enumeration from the beginning. - * After getting a segment, process it, and then call getPageRangesSegment again - * (passing the the previously-returned Marker) to get the next segment. - * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * listBlobHierarchySegment returns a single segment of blobs starting from + * the specified Marker. Use an empty Marker to start enumeration from the + * beginning. After getting a segment, process it, and then call listBlobsHierarchicalSegment + * again (passing the previously-returned Marker) to get the next segment. + * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs * - * @param offset - Starting byte position of the page ranges. - * @param count - Number of bytes to get. + * @param delimiter - The character or string used to define the virtual hierarchy * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. - * @param options - Options to PageBlob Get Page Ranges Segment operation. + * @param options - Options to Container List Blob Hierarchy Segment operation. */ - async listPageRangesSegment(offset = 0, count, marker2, options = {}) { + async listBlobHierarchySegment(delimiter2, marker2, options = {}) { var _a; - const { span, updatedOptions } = createSpan("PageBlobClient-getPageRangesSegment", options); + const { span, updatedOptions } = createSpan("ContainerClient-listBlobHierarchySegment", options); try { - return await this.pageBlobContext.getPageRanges(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), range: rangeToString({ offset, count }), marker: marker2, maxPageSize: options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); + const response = await this.containerContext.listBlobHierarchySegment(delimiter2, Object.assign(Object.assign({ marker: marker2 }, options), convertTracingToRequestOptionsBase(updatedOptions))); + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobHierarchy(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name), tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) }); + return blobItem; + }), blobPrefixes: (_a = response.segment.blobPrefixes) === null || _a === void 0 ? void 0 : _a.map((blobPrefixInternal) => { + const blobPrefix = Object.assign(Object.assign({}, blobPrefixInternal), { name: BlobNameToString(blobPrefixInternal.name) }); + return blobPrefix; + }) }) }); + return wrappedResponse; } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, @@ -86577,46 +87781,42 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } /** - * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesResponseModel} + * Returns an AsyncIterableIterator for ContainerListBlobFlatSegmentResponse * - * @param offset - Starting byte position of the page ranges. - * @param count - Number of bytes to get. * @param marker - A string value that identifies the portion of - * the get of page ranges to be returned with the next getting operation. The + * the list of blobs to be returned with the next listing operation. The * operation returns the ContinuationToken value within the response body if the - * getting operation did not return all page ranges remaining within the current page. - * The ContinuationToken value can be used as the value for - * the marker parameter in a subsequent call to request the next page of get + * listing operation did not return all blobs remaining to be listed + * with the current page. The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list * items. The marker value is opaque to the client. - * @param options - Options to List Page Ranges operation. + * @param options - Options to list blobs operation. 
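+ *
+ * For orientation, a minimal sketch of the marker loop this generator drives,
+ * written against the segment method above (assumes a `containerClient`
+ * instance; error handling omitted):
+ *
+ * ```js
+ * let marker;
+ * do {
+ *   const page = await containerClient.listBlobFlatSegment(marker, { maxPageSize: 50 });
+ *   for (const blob of page.segment.blobItems) {
+ *     console.log(blob.name);
+ *   }
+ *   marker = page.continuationToken;
+ * } while (marker);
+ * ```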
*/ - listPageRangeItemSegments(offset = 0, count, marker2, options = {}) { - return tslib.__asyncGenerator(this, arguments, /* @__PURE__ */ __name(function* listPageRangeItemSegments_1() { - let getPageRangeItemSegmentsResponse; + listSegments(marker2, options = {}) { + return tslib.__asyncGenerator(this, arguments, /* @__PURE__ */ __name(function* listSegments_1() { + let listBlobsFlatSegmentResponse; if (!!marker2 || marker2 === void 0) { do { - getPageRangeItemSegmentsResponse = yield tslib.__await(this.listPageRangesSegment(offset, count, marker2, options)); - marker2 = getPageRangeItemSegmentsResponse.continuationToken; - yield yield tslib.__await(yield tslib.__await(getPageRangeItemSegmentsResponse)); + listBlobsFlatSegmentResponse = yield tslib.__await(this.listBlobFlatSegment(marker2, options)); + marker2 = listBlobsFlatSegmentResponse.continuationToken; + yield yield tslib.__await(yield tslib.__await(listBlobsFlatSegmentResponse)); } while (marker2); } - }, "listPageRangeItemSegments_1")); + }, "listSegments_1")); } /** - * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects + * Returns an AsyncIterableIterator of {@link BlobItem} objects * - * @param offset - Starting byte position of the page ranges. - * @param count - Number of bytes to get. - * @param options - Options to List Page Ranges operation. + * @param options - Options to list blobs operation. */ - listPageRangeItems(offset = 0, count, options = {}) { - return tslib.__asyncGenerator(this, arguments, /* @__PURE__ */ __name(function* listPageRangeItems_1() { + listItems(options = {}) { + return tslib.__asyncGenerator(this, arguments, /* @__PURE__ */ __name(function* listItems_1() { var e_1, _a; let marker2; try { - for (var _b = tslib.__asyncValues(this.listPageRangeItemSegments(offset, count, marker2, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done; ) { - const getPageRangesSegment = _c.value; - yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(ExtractPageRangeInfoItems(getPageRangesSegment)))); + for (var _b = tslib.__asyncValues(this.listSegments(marker2, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done; ) { + const listBlobsFlatSegmentResponse = _c.value; + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(listBlobsFlatSegmentResponse.segment.blobItems))); } } catch (e_1_1) { e_1 = { error: e_1_1 }; @@ -86627,22 +87827,22 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; if (e_1) throw e_1.error; } } - }, "listPageRangeItems_1")); + }, "listItems_1")); } /** - * Returns an async iterable iterator to list of page ranges for a page blob. - * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * Returns an async iterable iterator to list all the blobs + * under the specified account. * - * .byPage() returns an async iterable iterator to list of page ranges for a page blob. + * .byPage() returns an async iterable iterator to list the blobs in pages. 
* * Example using `for await` syntax: * * ```js - * // Get the pageBlobClient before you run these snippets, - * // Can be obtained from `blobServiceClient.getContainerClient("").getPageBlobClient("");` + * // Get the containerClient before you run these snippets, + * // Can be obtained from `blobServiceClient.getContainerClient("");` * let i = 1; - * for await (const pageRange of pageBlobClient.listPageRanges()) { - * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * for await (const blob of containerClient.listBlobsFlat()) { + * console.log(`Blob ${i++}: ${blob.name}`); * } * ``` * @@ -86650,11 +87850,11 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * * ```js * let i = 1; - * let iter = pageBlobClient.listPageRanges(); - * let pageRangeItem = await iter.next(); - * while (!pageRangeItem.done) { - * console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`); - * pageRangeItem = await iter.next(); + * let iter = containerClient.listBlobsFlat(); + * let blobItem = await iter.next(); + * while (!blobItem.done) { + * console.log(`Blob ${i++}: ${blobItem.value.name}`); + * blobItem = await iter.next(); * } * ``` * @@ -86663,9 +87863,9 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * ```js * // passing optional maxPageSize in the page settings * let i = 1; - * for await (const response of pageBlobClient.listPageRanges().byPage({ maxPageSize: 20 })) { - * for (const pageRange of response) { - * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * for await (const response of containerClient.listBlobsFlat().byPage({ maxPageSize: 20 })) { + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); * } * } * ``` @@ -86674,12 +87874,12 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * * ```js * let i = 1; - * let iterator = pageBlobClient.listPageRanges().byPage({ maxPageSize: 2 }); + * let iterator = containerClient.listBlobsFlat().byPage({ maxPageSize: 2 }); * let response = (await iterator.next()).value; * - * // Prints 2 page ranges - * for (const pageRange of response) { - * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * // Prints 2 blob names + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); * } * * // Gets next marker @@ -86687,22 +87887,55 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * * // Passing next marker as continuationToken * - * iterator = pageBlobClient.listPageRanges().byPage({ continuationToken: marker, maxPageSize: 10 }); + * iterator = containerClient.listBlobsFlat().byPage({ continuationToken: marker, maxPageSize: 10 }); * response = (await iterator.next()).value; * - * // Prints 10 page ranges - * for (const blob of response) { - * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * // Prints 10 blob names + * for (const blob of response.segment.blobItems) { + * console.log(`Blob ${i++}: ${blob.name}`); * } * ``` - * @param offset - Starting byte position of the page ranges. - * @param count - Number of bytes to get. - * @param options - Options to the Page Blob Get Ranges operation. + * + * @param options - Options to list blobs. * @returns An asyncIterableIterator that supports paging. 
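+ *
+ * Example passing include flags (a sketch; these are among the options the
+ * method below maps onto the service's `include` query parameter):
+ *
+ * ```js
+ * for await (const blob of containerClient.listBlobsFlat({ includeMetadata: true, includeTags: true })) {
+ *   console.log(`Blob ${blob.name}, tags: ${JSON.stringify(blob.tags)}`);
+ * }
+ * ```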
*/ - listPageRanges(offset = 0, count, options = {}) { - options.conditions = options.conditions || {}; - const iter = this.listPageRangeItems(offset, count, options); + listBlobsFlat(options = {}) { + const include2 = []; + if (options.includeCopy) { + include2.push("copy"); + } + if (options.includeDeleted) { + include2.push("deleted"); + } + if (options.includeMetadata) { + include2.push("metadata"); + } + if (options.includeSnapshots) { + include2.push("snapshots"); + } + if (options.includeVersions) { + include2.push("versions"); + } + if (options.includeUncommitedBlobs) { + include2.push("uncommittedblobs"); + } + if (options.includeTags) { + include2.push("tags"); + } + if (options.includeDeletedWithVersions) { + include2.push("deletedwithversions"); + } + if (options.includeImmutabilityPolicy) { + include2.push("immutabilitypolicy"); + } + if (options.includeLegalHold) { + include2.push("legalhold"); + } + if (options.prefix === "") { + options.prefix = void 0; + } + const updatedOptions = Object.assign(Object.assign({}, options), include2.length > 0 ? { include: include2 } : {}); + const iter = this.listItems(updatedOptions); return { /** * The next method, part of the iteration protocol @@ -86720,112 +87953,57 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * Return an AsyncIterableIterator that works a page at a time */ byPage: /* @__PURE__ */ __name((settings = {}) => { - return this.listPageRangeItemSegments(offset, count, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, options)); + return this.listSegments(settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, updatedOptions)); }, "byPage") }; } /** - * Gets the collection of page ranges that differ between a specified snapshot and this page blob. - * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges - * - * @param offset - Starting byte position of the page blob - * @param count - Number of bytes to get ranges diff. - * @param prevSnapshot - Timestamp of snapshot to retrieve the difference. - * @param options - Options to the Page Blob Get Page Ranges Diff operation. - * @returns Response data for the Page Blob Get Page Range Diff operation. - */ - async getPageRangesDiff(offset, count, prevSnapshot, options = {}) { - var _a; - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-getPageRangesDiff", options); - try { - return await this.pageBlobContext.getPageRangesDiff(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), prevsnapshot: prevSnapshot, range: rangeToString({ offset, count }) }, convertTracingToRequestOptionsBase(updatedOptions))).then(rangeResponseFromModel); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); - } - } - /** - * getPageRangesDiffSegment returns a single segment of page ranges starting from the - * specified Marker for difference between previous snapshot and the target page blob. - * Use an empty Marker to start enumeration from the beginning. - * After getting a segment, process it, and then call getPageRangesDiffSegment again - * (passing the the previously-returned Marker) to get the next segment. 
- * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges - * - * @param offset - Starting byte position of the page ranges. - * @param count - Number of bytes to get. - * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. - * @param marker - A string value that identifies the portion of the get to be returned with the next get operation. - * @param options - Options to the Page Blob Get Page Ranges Diff operation. - */ - async listPageRangesDiffSegment(offset, count, prevSnapshotOrUrl, marker2, options) { - var _a; - const { span, updatedOptions } = createSpan("PageBlobClient-getPageRangesDiffSegment", options); - try { - return await this.pageBlobContext.getPageRangesDiff(Object.assign({ abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal, leaseAccessConditions: options === null || options === void 0 ? void 0 : options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options === null || options === void 0 ? void 0 : options.conditions), { ifTags: (_a = options === null || options === void 0 ? void 0 : options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), prevsnapshot: prevSnapshotOrUrl, range: rangeToString({ - offset, - count - }), marker: marker2, maxPageSize: options === null || options === void 0 ? void 0 : options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); - } - } - /** - * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesDiffResponseModel} - * + * Returns an AsyncIterableIterator for ContainerListBlobHierarchySegmentResponse * - * @param offset - Starting byte position of the page ranges. - * @param count - Number of bytes to get. - * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. + * @param delimiter - The character or string used to define the virtual hierarchy * @param marker - A string value that identifies the portion of - * the get of page ranges to be returned with the next getting operation. The + * the list of blobs to be returned with the next listing operation. The * operation returns the ContinuationToken value within the response body if the - * getting operation did not return all page ranges remaining within the current page. - * The ContinuationToken value can be used as the value for - * the marker parameter in a subsequent call to request the next page of get + * listing operation did not return all blobs remaining to be listed + * with the current page. The ContinuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list * items. The marker value is opaque to the client. - * @param options - Options to the Page Blob Get Page Ranges Diff operation. + * @param options - Options to list blobs operation. 
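+ *
+ * A minimal sketch of driving this hierarchy segment method directly with a
+ * "/" delimiter (assumes a `containerClient` instance):
+ *
+ * ```js
+ * let marker;
+ * do {
+ *   const page = await containerClient.listBlobHierarchySegment("/", marker, { maxPageSize: 50 });
+ *   for (const prefix of page.segment.blobPrefixes || []) {
+ *     console.log(`prefix: ${prefix.name}`);
+ *   }
+ *   for (const blob of page.segment.blobItems) {
+ *     console.log(`blob: ${blob.name}`);
+ *   }
+ *   marker = page.continuationToken;
+ * } while (marker);
+ * ```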
*/ - listPageRangeDiffItemSegments(offset, count, prevSnapshotOrUrl, marker2, options) { - return tslib.__asyncGenerator(this, arguments, /* @__PURE__ */ __name(function* listPageRangeDiffItemSegments_1() { - let getPageRangeItemSegmentsResponse; + listHierarchySegments(delimiter2, marker2, options = {}) { + return tslib.__asyncGenerator(this, arguments, /* @__PURE__ */ __name(function* listHierarchySegments_1() { + let listBlobsHierarchySegmentResponse; if (!!marker2 || marker2 === void 0) { do { - getPageRangeItemSegmentsResponse = yield tslib.__await(this.listPageRangesDiffSegment(offset, count, prevSnapshotOrUrl, marker2, options)); - marker2 = getPageRangeItemSegmentsResponse.continuationToken; - yield yield tslib.__await(yield tslib.__await(getPageRangeItemSegmentsResponse)); + listBlobsHierarchySegmentResponse = yield tslib.__await(this.listBlobHierarchySegment(delimiter2, marker2, options)); + marker2 = listBlobsHierarchySegmentResponse.continuationToken; + yield yield tslib.__await(yield tslib.__await(listBlobsHierarchySegmentResponse)); } while (marker2); } - }, "listPageRangeDiffItemSegments_1")); + }, "listHierarchySegments_1")); } /** - * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects + * Returns an AsyncIterableIterator for {@link BlobPrefix} and {@link BlobItem} objects. * - * @param offset - Starting byte position of the page ranges. - * @param count - Number of bytes to get. - * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. - * @param options - Options to the Page Blob Get Page Ranges Diff operation. + * @param delimiter - The character or string used to define the virtual hierarchy + * @param options - Options to list blobs operation. */ - listPageRangeDiffItems(offset, count, prevSnapshotOrUrl, options) { - return tslib.__asyncGenerator(this, arguments, /* @__PURE__ */ __name(function* listPageRangeDiffItems_1() { + listItemsByHierarchy(delimiter2, options = {}) { + return tslib.__asyncGenerator(this, arguments, /* @__PURE__ */ __name(function* listItemsByHierarchy_1() { var e_2, _a; let marker2; try { - for (var _b = tslib.__asyncValues(this.listPageRangeDiffItemSegments(offset, count, prevSnapshotOrUrl, marker2, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done; ) { - const getPageRangesSegment = _c.value; - yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(ExtractPageRangeInfoItems(getPageRangesSegment)))); + for (var _b = tslib.__asyncValues(this.listHierarchySegments(delimiter2, marker2, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done; ) { + const listBlobsHierarchySegmentResponse = _c.value; + const segment = listBlobsHierarchySegmentResponse.segment; + if (segment.blobPrefixes) { + for (const prefix2 of segment.blobPrefixes) { + yield yield tslib.__await(Object.assign({ kind: "prefix" }, prefix2)); + } + } + for (const blob of segment.blobItems) { + yield yield tslib.__await(Object.assign({ kind: "blob" }, blob)); + } } } catch (e_2_1) { e_2 = { error: e_2_1 }; @@ -86836,88 +88014,131 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; if (e_2) throw e_2.error; } } - }, "listPageRangeDiffItems_1")); + }, "listItemsByHierarchy_1")); } /** - * Returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob. - * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges + * Returns an async iterable iterator to list all the blobs by hierarchy. 
+ * under the specified account. * - * .byPage() returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob. + * .byPage() returns an async iterable iterator to list the blobs by hierarchy in pages. * * Example using `for await` syntax: * * ```js - * // Get the pageBlobClient before you run these snippets, - * // Can be obtained from `blobServiceClient.getContainerClient("").getPageBlobClient("");` - * let i = 1; - * for await (const pageRange of pageBlobClient.listPageRangesDiff()) { - * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * for await (const item of containerClient.listBlobsByHierarchy("/")) { + * if (item.kind === "prefix") { + * console.log(`\tBlobPrefix: ${item.name}`); + * } else { + * console.log(`\tBlobItem: name - ${item.name}`); + * } * } * ``` * * Example using `iter.next()`: * * ```js - * let i = 1; - * let iter = pageBlobClient.listPageRangesDiff(); - * let pageRangeItem = await iter.next(); - * while (!pageRangeItem.done) { - * console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`); - * pageRangeItem = await iter.next(); + * let iter = containerClient.listBlobsByHierarchy("/", { prefix: "prefix1/" }); + * let entity = await iter.next(); + * while (!entity.done) { + * let item = entity.value; + * if (item.kind === "prefix") { + * console.log(`\tBlobPrefix: ${item.name}`); + * } else { + * console.log(`\tBlobItem: name - ${item.name}`); + * } + * entity = await iter.next(); * } * ``` * * Example using `byPage()`: * * ```js - * // passing optional maxPageSize in the page settings - * let i = 1; - * for await (const response of pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 20 })) { - * for (const pageRange of response) { - * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * console.log("Listing blobs by hierarchy by page"); + * for await (const response of containerClient.listBlobsByHierarchy("/").byPage()) { + * const segment = response.segment; + * if (segment.blobPrefixes) { + * for (const prefix of segment.blobPrefixes) { + * console.log(`\tBlobPrefix: ${prefix.name}`); + * } + * } + * for (const blob of response.segment.blobItems) { + * console.log(`\tBlobItem: name - ${blob.name}`); * } * } * ``` * - * Example using paging with a marker: + * Example using paging with a max page size: * * ```js - * let i = 1; - * let iterator = pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 2 }); - * let response = (await iterator.next()).value; - * - * // Prints 2 page ranges - * for (const pageRange of response) { - * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); - * } - * - * // Gets next marker - * let marker = response.continuationToken; + * console.log("Listing blobs by hierarchy by page, specifying a prefix and a max page size"); * - * // Passing next marker as continuationToken + * let i = 1; + * for await (const response of containerClient + * .listBlobsByHierarchy("/", { prefix: "prefix2/sub1/" }) + * .byPage({ maxPageSize: 2 })) { + * console.log(`Page ${i++}`); + * const segment = response.segment; * - * iterator = pageBlobClient.listPageRangesDiff().byPage({ continuationToken: marker, maxPageSize: 10 }); - * response = (await iterator.next()).value; + * if (segment.blobPrefixes) { + * for (const prefix of segment.blobPrefixes) { + * console.log(`\tBlobPrefix: ${prefix.name}`); + * } + * } * - * // Prints 10 page ranges - * for 
(const blob of response) { - * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); + * for (const blob of response.segment.blobItems) { + * console.log(`\tBlobItem: name - ${blob.name}`); + * } * } * ``` - * @param offset - Starting byte position of the page ranges. - * @param count - Number of bytes to get. - * @param prevSnapshot - Timestamp of snapshot to retrieve the difference. - * @param options - Options to the Page Blob Get Ranges operation. - * @returns An asyncIterableIterator that supports paging. + * + * @param delimiter - The character or string used to define the virtual hierarchy + * @param options - Options to list blobs operation. */ - listPageRangesDiff(offset, count, prevSnapshot, options = {}) { - options.conditions = options.conditions || {}; - const iter = this.listPageRangeDiffItems(offset, count, prevSnapshot, Object.assign({}, options)); + listBlobsByHierarchy(delimiter2, options = {}) { + if (delimiter2 === "") { + throw new RangeError("delimiter should contain one or more characters"); + } + const include2 = []; + if (options.includeCopy) { + include2.push("copy"); + } + if (options.includeDeleted) { + include2.push("deleted"); + } + if (options.includeMetadata) { + include2.push("metadata"); + } + if (options.includeSnapshots) { + include2.push("snapshots"); + } + if (options.includeVersions) { + include2.push("versions"); + } + if (options.includeUncommitedBlobs) { + include2.push("uncommittedblobs"); + } + if (options.includeTags) { + include2.push("tags"); + } + if (options.includeDeletedWithVersions) { + include2.push("deletedwithversions"); + } + if (options.includeImmutabilityPolicy) { + include2.push("immutabilitypolicy"); + } + if (options.includeLegalHold) { + include2.push("legalhold"); + } + if (options.prefix === "") { + options.prefix = void 0; + } + const updatedOptions = Object.assign(Object.assign({}, options), include2.length > 0 ? { include: include2 } : {}); + const iter = this.listItemsByHierarchy(delimiter2, updatedOptions); return { /** * The next method, part of the iteration protocol */ - next() { + async next() { return iter.next(); }, /** @@ -86930,75 +88151,40 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * Return an AsyncIterableIterator that works a page at a time */ byPage: /* @__PURE__ */ __name((settings = {}) => { - return this.listPageRangeDiffItemSegments(offset, count, prevSnapshot, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, options)); + return this.listHierarchySegments(delimiter2, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, updatedOptions)); }, "byPage") }; } /** - * Gets the collection of page ranges that differ between a specified snapshot and this page blob for managed disks. - * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges - * - * @param offset - Starting byte position of the page blob - * @param count - Number of bytes to get ranges diff. - * @param prevSnapshotUrl - URL of snapshot to retrieve the difference. - * @param options - Options to the Page Blob Get Page Ranges Diff operation. - * @returns Response data for the Page Blob Get Page Range Diff operation. 
- */ - async getPageRangesDiffForManagedDisks(offset, count, prevSnapshotUrl2, options = {}) { - var _a; - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-GetPageRangesDiffForManagedDisks", options); - try { - return await this.pageBlobContext.getPageRangesDiff(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), prevSnapshotUrl: prevSnapshotUrl2, range: rangeToString({ offset, count }) }, convertTracingToRequestOptionsBase(updatedOptions))).then(rangeResponseFromModel); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); - } - } - /** - * Resizes the page blob to the specified size (which must be a multiple of 512). - * @see https://docs.microsoft.com/rest/api/storageservices/set-blob-properties - * - * @param size - Target size - * @param options - Options to the Page Blob Resize operation. - * @returns Response data for the Page Blob Resize operation. - */ - async resize(size, options = {}) { - var _a; - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-resize", options); - try { - return await this.pageBlobContext.resize(size, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); - } - } - /** - * Sets a page blob's sequence number. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties + * The Filter Blobs operation enables callers to list blobs in the container whose tags + * match a given search expression. * - * @param sequenceNumberAction - Indicates how the service should modify the blob's sequence number. - * @param sequenceNumber - Required if sequenceNumberAction is max or update - * @param options - Options to the Page Blob Update Sequence Number operation. - * @returns Response data for the Page Blob Update Sequence Number operation. + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. 
+ * @param options - Options to find blobs by tags. */ - async updateSequenceNumber(sequenceNumberAction2, sequenceNumber, options = {}) { - var _a; - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-updateSequenceNumber", options); + async findBlobsByTagsSegment(tagFilterSqlExpression, marker2, options = {}) { + const { span, updatedOptions } = createSpan("ContainerClient-findBlobsByTagsSegment", options); try { - return await this.pageBlobContext.updateSequenceNumber(sequenceNumberAction2, Object.assign({ abortSignal: options.abortSignal, blobSequenceNumber: sequenceNumber, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + const response = await this.containerContext.filterBlobs(Object.assign({ abortSignal: options.abortSignal, where: tagFilterSqlExpression, marker: marker2, maxPageSize: options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, blobs: response.blobs.map((blob) => { + var _a; + let tagValue = ""; + if (((_a = blob.tags) === null || _a === void 0 ? void 0 : _a.blobTagSet.length) === 1) { + tagValue = blob.tags.blobTagSet[0].value; + } + return Object.assign(Object.assign({}, blob), { tags: toTags(blob.tags), tagValue }); + }) }); + return wrappedResponse; } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, @@ -87010,778 +88196,642 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } /** - * Begins an operation to start an incremental copy from one page blob's snapshot to this page blob. - * The snapshot is copied such that only the differential changes between the previously - * copied snapshot are transferred to the destination. - * The copied snapshots are complete copies of the original snapshot and can be read or copied from as usual. - * @see https://docs.microsoft.com/rest/api/storageservices/incremental-copy-blob - * @see https://docs.microsoft.com/en-us/azure/virtual-machines/windows/incremental-snapshots + * Returns an AsyncIterableIterator for ContainerFindBlobsByTagsSegmentResponse. * - * @param copySource - Specifies the name of the source page blob snapshot. For example, - * https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= - * @param options - Options to the Page Blob Copy Incremental operation. - * @returns Response data for the Page Blob Copy Incremental operation. + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. 
The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. */ - async startCopyIncremental(copySource2, options = {}) { - var _a; - const { span, updatedOptions } = createSpan("PageBlobClient-startCopyIncremental", options); - try { - return await this.pageBlobContext.copyIncremental(copySource2, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); - } - } - }; - async function getBodyAsText(batchResponse) { - let buffer = Buffer.alloc(BATCH_MAX_PAYLOAD_IN_BYTES); - const responseLength = await streamToBuffer2(batchResponse.readableStreamBody, buffer); - buffer = buffer.slice(0, responseLength); - return buffer.toString(); - } - __name(getBodyAsText, "getBodyAsText"); - function utf8ByteLength(str) { - return Buffer.byteLength(str); - } - __name(utf8ByteLength, "utf8ByteLength"); - var HTTP_HEADER_DELIMITER = ": "; - var SPACE_DELIMITER = " "; - var NOT_FOUND = -1; - var BatchResponseParser = class { - static { - __name(this, "BatchResponseParser"); - } - constructor(batchResponse, subRequests) { - if (!batchResponse || !batchResponse.contentType) { - throw new RangeError("batchResponse is malformed or doesn't contain valid content-type."); - } - if (!subRequests || subRequests.size === 0) { - throw new RangeError("Invalid state: subRequests is not provided or size is 0."); - } - this.batchResponse = batchResponse; - this.subRequests = subRequests; - this.responseBatchBoundary = this.batchResponse.contentType.split("=")[1]; - this.perResponsePrefix = `--${this.responseBatchBoundary}${HTTP_LINE_ENDING}`; - this.batchResponseEnding = `--${this.responseBatchBoundary}--`; - } - // For example of response, please refer to https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#response - async parseBatchResponse() { - if (this.batchResponse._response.status !== HTTPURLConnection.HTTP_ACCEPTED) { - throw new Error(`Invalid state: batch request failed with status: '${this.batchResponse._response.status}'.`); - } - const responseBodyAsText = await getBodyAsText(this.batchResponse); - const subResponses = responseBodyAsText.split(this.batchResponseEnding)[0].split(this.perResponsePrefix).slice(1); - const subResponseCount = subResponses.length; - if (subResponseCount !== this.subRequests.size && subResponseCount !== 1) { - throw new Error("Invalid state: sub responses' count is not equal to sub requests' count."); - } - const deserializedSubResponses = new Array(subResponseCount); - let subResponsesSucceededCount = 0; - let subResponsesFailedCount = 0; - for (let index = 0; index < subResponseCount; index++) { - const subResponse = subResponses[index]; - const deserializedSubResponse = {}; - deserializedSubResponse.headers = new coreHttp.HttpHeaders(); - const responseLines = subResponse.split(`${HTTP_LINE_ENDING}`); - let subRespHeaderStartFound = false; - let subRespHeaderEndFound = false; - let subRespFailed = false; - let contentId = NOT_FOUND; - for (const responseLine of responseLines) { - if (!subRespHeaderStartFound) { - if 
(responseLine.startsWith(HeaderConstants.CONTENT_ID)) { - contentId = parseInt(responseLine.split(HTTP_HEADER_DELIMITER)[1]); - } - if (responseLine.startsWith(HTTP_VERSION_1_1)) { - subRespHeaderStartFound = true; - const tokens = responseLine.split(SPACE_DELIMITER); - deserializedSubResponse.status = parseInt(tokens[1]); - deserializedSubResponse.statusMessage = tokens.slice(2).join(SPACE_DELIMITER); - } - continue; - } - if (responseLine.trim() === "") { - if (!subRespHeaderEndFound) { - subRespHeaderEndFound = true; - } - continue; + findBlobsByTagsSegments(tagFilterSqlExpression, marker2, options = {}) { + return tslib.__asyncGenerator(this, arguments, /* @__PURE__ */ __name(function* findBlobsByTagsSegments_1() { + let response; + if (!!marker2 || marker2 === void 0) { + do { + response = yield tslib.__await(this.findBlobsByTagsSegment(tagFilterSqlExpression, marker2, options)); + response.blobs = response.blobs || []; + marker2 = response.continuationToken; + yield yield tslib.__await(response); + } while (marker2); + } + }, "findBlobsByTagsSegments_1")); + } + /** + * Returns an AsyncIterableIterator for blobs. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to findBlobsByTagsItems. + */ + findBlobsByTagsItems(tagFilterSqlExpression, options = {}) { + return tslib.__asyncGenerator(this, arguments, /* @__PURE__ */ __name(function* findBlobsByTagsItems_1() { + var e_3, _a; + let marker2; + try { + for (var _b = tslib.__asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker2, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done; ) { + const segment = _c.value; + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.blobs))); } - if (!subRespHeaderEndFound) { - if (responseLine.indexOf(HTTP_HEADER_DELIMITER) === -1) { - throw new Error(`Invalid state: find non-empty line '${responseLine}' without HTTP header delimiter '${HTTP_HEADER_DELIMITER}'.`); - } - const tokens = responseLine.split(HTTP_HEADER_DELIMITER); - deserializedSubResponse.headers.set(tokens[0], tokens[1]); - if (tokens[0] === HeaderConstants.X_MS_ERROR_CODE) { - deserializedSubResponse.errorCode = tokens[1]; - subRespFailed = true; - } - } else { - if (!deserializedSubResponse.bodyAsText) { - deserializedSubResponse.bodyAsText = ""; - } - deserializedSubResponse.bodyAsText += responseLine; + } catch (e_3_1) { + e_3 = { error: e_3_1 }; + } finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + } finally { + if (e_3) throw e_3.error; } } - if (contentId !== NOT_FOUND && Number.isInteger(contentId) && contentId >= 0 && contentId < this.subRequests.size && deserializedSubResponses[contentId] === void 0) { - deserializedSubResponse._request = this.subRequests.get(contentId); - deserializedSubResponses[contentId] = deserializedSubResponse; + }, "findBlobsByTagsItems_1")); + } + /** + * Returns an async iterable iterator to find all blobs with specified tag + * under the specified container. + * + * .byPage() returns an async iterable iterator to list the blobs in pages. 
+ * + * Example using `for await` syntax: + * + * ```js + * let i = 1; + * for await (const blob of containerClient.findBlobsByTags("tagkey='tagvalue'")) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * const iter = containerClient.findBlobsByTags("tagkey='tagvalue'"); + * let blobItem = await iter.next(); + * while (!blobItem.done) { + * console.log(`Blob ${i++}: ${blobItem.value.name}`); + * blobItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) { + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = containerClient + * .findBlobsByTags("tagkey='tagvalue'") + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * ``` + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to find blobs by tags. 
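+ *
+ * A sketch combining conditions in the `where` expression (tag names are
+ * illustrative; the service accepts a subset of the OData filter grammar,
+ * including `AND`):
+ *
+ * ```js
+ * for await (const blob of containerClient.findBlobsByTags("project='qodana' AND env='ci'")) {
+ *   console.log(blob.name);
+ * }
+ * ```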
+ */ + findBlobsByTags(tagFilterSqlExpression, options = {}) { + const listSegmentOptions = Object.assign({}, options); + const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: /* @__PURE__ */ __name((settings = {}) => { + return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); + }, "byPage") + }; + } + getContainerNameFromUrl() { + let containerName; + try { + const parsedUrl = coreHttp.URLBuilder.parse(this.url); + if (parsedUrl.getHost().split(".")[1] === "blob") { + containerName = parsedUrl.getPath().split("/")[1]; + } else if (isIpEndpointStyle(parsedUrl)) { + containerName = parsedUrl.getPath().split("/")[2]; } else { - logger.error(`subResponses[${index}] is dropped as the Content-ID is not found or invalid, Content-ID: ${contentId}`); + containerName = parsedUrl.getPath().split("/")[1]; } - if (subRespFailed) { - subResponsesFailedCount++; - } else { - subResponsesSucceededCount++; + containerName = decodeURIComponent(containerName); + if (!containerName) { + throw new Error("Provided containerName is invalid."); } + return containerName; + } catch (error) { + throw new Error("Unable to extract containerName with provided information."); } - return { - subResponses: deserializedSubResponses, - subResponsesSucceededCount, - subResponsesFailedCount - }; - } - }; - var MutexLockStatus; - (function(MutexLockStatus2) { - MutexLockStatus2[MutexLockStatus2["LOCKED"] = 0] = "LOCKED"; - MutexLockStatus2[MutexLockStatus2["UNLOCKED"] = 1] = "UNLOCKED"; - })(MutexLockStatus || (MutexLockStatus = {})); - var Mutex = class { - static { - __name(this, "Mutex"); } /** - * Lock for a specific key. If the lock has been acquired by another customer, then - * will wait until getting the lock. + * Only available for ContainerClient constructed with a shared key credential. * - * @param key - lock key + * Generates a Blob Container Service Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the shared key credential of the client. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * + * @param options - Optional parameters. + * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. */ - static async lock(key) { + generateSasUrl(options) { return new Promise((resolve) => { - if (this.keys[key] === void 0 || this.keys[key] === MutexLockStatus.UNLOCKED) { - this.keys[key] = MutexLockStatus.LOCKED; - resolve(); - } else { - this.onUnlockEvent(key, () => { - this.keys[key] = MutexLockStatus.LOCKED; - resolve(); - }); + if (!(this.credential instanceof StorageSharedKeyCredential)) { + throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); } + const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName }, options), this.credential).toString(); + resolve(appendToURLQuery(this.url, sas)); }); } /** - * Unlock a key. + * Creates a BlobBatchClient object to conduct batch operations. 
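+ *
+ * A minimal usage sketch (assumes `blobClient1` and `blobClient2` are
+ * `BlobClient` instances in this container; `deleteBlobs` submits a single
+ * batch request covering all of them):
+ *
+ * ```js
+ * const batchClient = containerClient.getBlobBatchClient();
+ * await batchClient.deleteBlobs([blobClient1, blobClient2]);
+ * ```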
* - * @param key - + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + * + * @returns A new BlobBatchClient object for this container. */ - static async unlock(key) { - return new Promise((resolve) => { - if (this.keys[key] === MutexLockStatus.LOCKED) { - this.emitUnlockEvent(key); - } - delete this.keys[key]; - resolve(); - }); - } - static onUnlockEvent(key, handler) { - if (this.listeners[key] === void 0) { - this.listeners[key] = [handler]; - } else { - this.listeners[key].push(handler); - } - } - static emitUnlockEvent(key) { - if (this.listeners[key] !== void 0 && this.listeners[key].length > 0) { - const handler = this.listeners[key].shift(); - setImmediate(() => { - handler.call(this); - }); - } + getBlobBatchClient() { + return new BlobBatchClient(this.url, this.pipeline); } }; - Mutex.keys = {}; - Mutex.listeners = {}; - var BlobBatch = class { + var AccountSASPermissions = class _AccountSASPermissions { static { - __name(this, "BlobBatch"); + __name(this, "AccountSASPermissions"); } constructor() { - this.batch = "batch"; - this.batchRequest = new InnerBatchRequest(); + this.read = false; + this.write = false; + this.delete = false; + this.deleteVersion = false; + this.list = false; + this.add = false; + this.create = false; + this.update = false; + this.process = false; + this.tag = false; + this.filter = false; + this.setImmutabilityPolicy = false; + this.permanentDelete = false; } /** - * Get the value of Content-Type for a batch request. - * The value must be multipart/mixed with a batch boundary. - * Example: multipart/mixed; boundary=batch_a81786c8-e301-4e42-a729-a32ca24ae252 + * Parse initializes the AccountSASPermissions fields from a string. + * + * @param permissions - */ - getMultiPartContentType() { - return this.batchRequest.getMultipartContentType(); + static parse(permissions) { + const accountSASPermissions = new _AccountSASPermissions(); + for (const c of permissions) { + switch (c) { + case "r": + accountSASPermissions.read = true; + break; + case "w": + accountSASPermissions.write = true; + break; + case "d": + accountSASPermissions.delete = true; + break; + case "x": + accountSASPermissions.deleteVersion = true; + break; + case "l": + accountSASPermissions.list = true; + break; + case "a": + accountSASPermissions.add = true; + break; + case "c": + accountSASPermissions.create = true; + break; + case "u": + accountSASPermissions.update = true; + break; + case "p": + accountSASPermissions.process = true; + break; + case "t": + accountSASPermissions.tag = true; + break; + case "f": + accountSASPermissions.filter = true; + break; + case "i": + accountSASPermissions.setImmutabilityPolicy = true; + break; + case "y": + accountSASPermissions.permanentDelete = true; + break; + default: + throw new RangeError(`Invalid permission character: ${c}`); + } + } + return accountSASPermissions; } /** - * Get assembled HTTP request body for sub requests. + * Creates a {@link AccountSASPermissions} from a raw object which contains same keys as it + * and boolean values for them. 
+ * + * @param permissionLike - */ - getHttpRequestBody() { - return this.batchRequest.getHttpRequestBody(); + static from(permissionLike) { + const accountSASPermissions = new _AccountSASPermissions(); + if (permissionLike.read) { + accountSASPermissions.read = true; + } + if (permissionLike.write) { + accountSASPermissions.write = true; + } + if (permissionLike.delete) { + accountSASPermissions.delete = true; + } + if (permissionLike.deleteVersion) { + accountSASPermissions.deleteVersion = true; + } + if (permissionLike.filter) { + accountSASPermissions.filter = true; + } + if (permissionLike.tag) { + accountSASPermissions.tag = true; + } + if (permissionLike.list) { + accountSASPermissions.list = true; + } + if (permissionLike.add) { + accountSASPermissions.add = true; + } + if (permissionLike.create) { + accountSASPermissions.create = true; + } + if (permissionLike.update) { + accountSASPermissions.update = true; + } + if (permissionLike.process) { + accountSASPermissions.process = true; + } + if (permissionLike.setImmutabilityPolicy) { + accountSASPermissions.setImmutabilityPolicy = true; + } + if (permissionLike.permanentDelete) { + accountSASPermissions.permanentDelete = true; + } + return accountSASPermissions; } /** - * Get sub requests that are added into the batch request. + * Produces the SAS permissions string for an Azure Storage account. + * Call this method to set AccountSASSignatureValues Permissions field. + * + * Using this method will guarantee the resource types are in + * an order accepted by the service. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * */ - getSubRequests() { - return this.batchRequest.getSubRequests(); - } - async addSubRequestInternal(subRequest, assembleSubRequestFunc) { - await Mutex.lock(this.batch); - try { - this.batchRequest.preAddSubRequest(subRequest); - await assembleSubRequestFunc(); - this.batchRequest.postAddSubRequest(subRequest); - } finally { - await Mutex.unlock(this.batch); + toString() { + const permissions = []; + if (this.read) { + permissions.push("r"); } - } - setBatchType(batchType) { - if (!this.batchType) { - this.batchType = batchType; + if (this.write) { + permissions.push("w"); } - if (this.batchType !== batchType) { - throw new RangeError(`BlobBatch only supports one operation type per batch and it already is being used for ${this.batchType} operations.`); + if (this.delete) { + permissions.push("d"); } - } - async deleteBlob(urlOrBlobClient, credentialOrOptions, options) { - let url2; - let credential; - if (typeof urlOrBlobClient === "string" && (coreHttp.isNode && credentialOrOptions instanceof StorageSharedKeyCredential || credentialOrOptions instanceof AnonymousCredential || coreHttp.isTokenCredential(credentialOrOptions))) { - url2 = urlOrBlobClient; - credential = credentialOrOptions; - } else if (urlOrBlobClient instanceof BlobClient) { - url2 = urlOrBlobClient.url; - credential = urlOrBlobClient.credential; - options = credentialOrOptions; - } else { - throw new RangeError("Invalid arguments. 
Either url and credential, or BlobClient need be provided."); + if (this.deleteVersion) { + permissions.push("x"); } - if (!options) { - options = {}; + if (this.filter) { + permissions.push("f"); } - const { span, updatedOptions } = createSpan("BatchDeleteRequest-addSubRequest", options); - try { - this.setBatchType("delete"); - await this.addSubRequestInternal({ - url: url2, - credential - }, async () => { - await new BlobClient(url2, this.batchRequest.createPipeline(credential)).delete(updatedOptions); - }); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); + if (this.tag) { + permissions.push("t"); } - } - async setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options) { - let url2; - let credential; - let tier2; - if (typeof urlOrBlobClient === "string" && (coreHttp.isNode && credentialOrTier instanceof StorageSharedKeyCredential || credentialOrTier instanceof AnonymousCredential || coreHttp.isTokenCredential(credentialOrTier))) { - url2 = urlOrBlobClient; - credential = credentialOrTier; - tier2 = tierOrOptions; - } else if (urlOrBlobClient instanceof BlobClient) { - url2 = urlOrBlobClient.url; - credential = urlOrBlobClient.credential; - tier2 = credentialOrTier; - options = tierOrOptions; - } else { - throw new RangeError("Invalid arguments. Either url and credential, or BlobClient need be provided."); + if (this.list) { + permissions.push("l"); } - if (!options) { - options = {}; + if (this.add) { + permissions.push("a"); } - const { span, updatedOptions } = createSpan("BatchSetTierRequest-addSubRequest", options); - try { - this.setBatchType("setAccessTier"); - await this.addSubRequestInternal({ - url: url2, - credential - }, async () => { - await new BlobClient(url2, this.batchRequest.createPipeline(credential)).setAccessTier(tier2, updatedOptions); - }); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); + if (this.create) { + permissions.push("c"); + } + if (this.update) { + permissions.push("u"); + } + if (this.process) { + permissions.push("p"); + } + if (this.setImmutabilityPolicy) { + permissions.push("i"); + } + if (this.permanentDelete) { + permissions.push("y"); } + return permissions.join(""); } }; - var InnerBatchRequest = class { + var AccountSASResourceTypes = class _AccountSASResourceTypes { static { - __name(this, "InnerBatchRequest"); + __name(this, "AccountSASResourceTypes"); } constructor() { - this.operationCount = 0; - this.body = ""; - const tempGuid = coreHttp.generateUuid(); - this.boundary = `batch_${tempGuid}`; - this.subRequestPrefix = `--${this.boundary}${HTTP_LINE_ENDING}${HeaderConstants.CONTENT_TYPE}: application/http${HTTP_LINE_ENDING}${HeaderConstants.CONTENT_TRANSFER_ENCODING}: binary`; - this.multipartContentType = `multipart/mixed; boundary=${this.boundary}`; - this.batchRequestEnding = `--${this.boundary}--`; - this.subRequests = /* @__PURE__ */ new Map(); + this.service = false; + this.container = false; + this.object = false; } /** - * Create pipeline to assemble sub requests. The idea here is to use existing - * credential and serialization/deserialization components, with additional policies to - * filter unnecessary headers, assemble sub requests into request's body - * and intercept request from going to wire. 
- * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * Creates an {@link AccountSASResourceTypes} from the specified resource types string. This method will throw an + * Error if it encounters a character that does not correspond to a valid resource type. + * + * @param resourceTypes - */ - createPipeline(credential) { - const isAnonymousCreds = credential instanceof AnonymousCredential; - const policyFactoryLength = 3 + (isAnonymousCreds ? 0 : 1); - const factories = new Array(policyFactoryLength); - factories[0] = coreHttp.deserializationPolicy(); - factories[1] = new BatchHeaderFilterPolicyFactory(); - if (!isAnonymousCreds) { - factories[2] = coreHttp.isTokenCredential(credential) ? attachCredential(coreHttp.bearerTokenAuthenticationPolicy(credential, StorageOAuthScopes), credential) : credential; + static parse(resourceTypes) { + const accountSASResourceTypes = new _AccountSASResourceTypes(); + for (const c of resourceTypes) { + switch (c) { + case "s": + accountSASResourceTypes.service = true; + break; + case "c": + accountSASResourceTypes.container = true; + break; + case "o": + accountSASResourceTypes.object = true; + break; + default: + throw new RangeError(`Invalid resource type: ${c}`); + } } - factories[policyFactoryLength - 1] = new BatchRequestAssemblePolicyFactory(this); - return new Pipeline(factories, {}); + return accountSASResourceTypes; } - appendSubRequestToBody(request) { - this.body += [ - this.subRequestPrefix, - `${HeaderConstants.CONTENT_ID}: ${this.operationCount}`, - "", - `${request.method.toString()} ${getURLPathAndQuery(request.url)} ${HTTP_VERSION_1_1}${HTTP_LINE_ENDING}` - // sub request start line with method - ].join(HTTP_LINE_ENDING); - for (const header of request.headers.headersArray()) { - this.body += `${header.name}: ${header.value}${HTTP_LINE_ENDING}`; + /** + * Converts the given resource types to a string. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * + */ + toString() { + const resourceTypes = []; + if (this.service) { + resourceTypes.push("s"); } - this.body += HTTP_LINE_ENDING; - } - preAddSubRequest(subRequest) { - if (this.operationCount >= BATCH_MAX_REQUEST) { - throw new RangeError(`Cannot exceed ${BATCH_MAX_REQUEST} sub requests in a single batch`); + if (this.container) { + resourceTypes.push("c"); } - const path2 = getURLPath(subRequest.url); - if (!path2 || path2 === "") { - throw new RangeError(`Invalid url for sub request: '${subRequest.url}'`); + if (this.object) { + resourceTypes.push("o"); } - } - postAddSubRequest(subRequest) { - this.subRequests.set(this.operationCount, subRequest); - this.operationCount++; - } - // Return the http request body with assembling the ending line to the sub request body. 
- getHttpRequestBody() { - return `${this.body}${this.batchRequestEnding}${HTTP_LINE_ENDING}`; - } - getMultipartContentType() { - return this.multipartContentType; - } - getSubRequests() { - return this.subRequests; + return resourceTypes.join(""); } }; - var BatchRequestAssemblePolicy = class extends coreHttp.BaseRequestPolicy { + var AccountSASServices = class _AccountSASServices { static { - __name(this, "BatchRequestAssemblePolicy"); + __name(this, "AccountSASServices"); } - constructor(batchRequest, nextPolicy, options) { - super(nextPolicy, options); - this.dummyResponse = { - request: new coreHttp.WebResource(), - status: 200, - headers: new coreHttp.HttpHeaders() - }; - this.batchRequest = batchRequest; + constructor() { + this.blob = false; + this.file = false; + this.queue = false; + this.table = false; } - async sendRequest(request) { - await this.batchRequest.appendSubRequestToBody(request); - return this.dummyResponse; + /** + * Creates an {@link AccountSASServices} from the specified services string. This method will throw an + * Error if it encounters a character that does not correspond to a valid service. + * + * @param services - + */ + static parse(services) { + const accountSASServices = new _AccountSASServices(); + for (const c of services) { + switch (c) { + case "b": + accountSASServices.blob = true; + break; + case "f": + accountSASServices.file = true; + break; + case "q": + accountSASServices.queue = true; + break; + case "t": + accountSASServices.table = true; + break; + default: + throw new RangeError(`Invalid service character: ${c}`); + } + } + return accountSASServices; } - }; - var BatchRequestAssemblePolicyFactory = class { - static { - __name(this, "BatchRequestAssemblePolicyFactory"); + /** + * Converts the given services to a string. + * + */ + toString() { + const services = []; + if (this.blob) { + services.push("b"); + } + if (this.table) { + services.push("t"); + } + if (this.queue) { + services.push("q"); + } + if (this.file) { + services.push("f"); + } + return services.join(""); } - constructor(batchRequest) { - this.batchRequest = batchRequest; + }; + function generateAccountSASQueryParameters(accountSASSignatureValues, sharedKeyCredential) { + const version4 = accountSASSignatureValues.version ? accountSASSignatureValues.version : SERVICE_VERSION; + if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.setImmutabilityPolicy && version4 < "2020-08-04") { + throw RangeError("'version' must be >= '2020-08-04' when provided 'i' permission."); } - create(nextPolicy, options) { - return new BatchRequestAssemblePolicy(this.batchRequest, nextPolicy, options); + if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.deleteVersion && version4 < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when provided 'x' permission."); } - }; - var BatchHeaderFilterPolicy = class extends coreHttp.BaseRequestPolicy { - static { - __name(this, "BatchHeaderFilterPolicy"); + if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.permanentDelete && version4 < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when provided 'y' permission."); } - // The base class has a protected constructor. Adding a public one to enable constructing of this class. 
- /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ - constructor(nextPolicy, options) { - super(nextPolicy, options); + if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.tag && version4 < "2019-12-12") { + throw RangeError("'version' must be >= '2019-12-12' when provided 't' permission."); } - async sendRequest(request) { - let xMsHeaderName = ""; - for (const header of request.headers.headersArray()) { - if (iEqual(header.name, HeaderConstants.X_MS_VERSION)) { - xMsHeaderName = header.name; - } - } - if (xMsHeaderName !== "") { - request.headers.remove(xMsHeaderName); - } - return this._nextPolicy.sendRequest(request); + if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.filter && version4 < "2019-12-12") { + throw RangeError("'version' must be >= '2019-12-12' when provided 'f' permission."); } - }; - var BatchHeaderFilterPolicyFactory = class { - static { - __name(this, "BatchHeaderFilterPolicyFactory"); + if (accountSASSignatureValues.encryptionScope && version4 < "2020-12-06") { + throw RangeError("'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS."); } - create(nextPolicy, options) { - return new BatchHeaderFilterPolicy(nextPolicy, options); + const parsedPermissions = AccountSASPermissions.parse(accountSASSignatureValues.permissions.toString()); + const parsedServices = AccountSASServices.parse(accountSASSignatureValues.services).toString(); + const parsedResourceTypes = AccountSASResourceTypes.parse(accountSASSignatureValues.resourceTypes).toString(); + let stringToSign; + if (version4 >= "2020-12-06") { + stringToSign = [ + sharedKeyCredential.accountName, + parsedPermissions, + parsedServices, + parsedResourceTypes, + accountSASSignatureValues.startsOn ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false) : "", + truncatedISO8061Date(accountSASSignatureValues.expiresOn, false), + accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : "", + accountSASSignatureValues.protocol ? accountSASSignatureValues.protocol : "", + version4, + accountSASSignatureValues.encryptionScope ? accountSASSignatureValues.encryptionScope : "", + "" + // Account SAS requires an additional newline character + ].join("\n"); + } else { + stringToSign = [ + sharedKeyCredential.accountName, + parsedPermissions, + parsedServices, + parsedResourceTypes, + accountSASSignatureValues.startsOn ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false) : "", + truncatedISO8061Date(accountSASSignatureValues.expiresOn, false), + accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : "", + accountSASSignatureValues.protocol ? 
accountSASSignatureValues.protocol : "", + version4, + "" + // Account SAS requires an additional newline character + ].join("\n"); } - }; - var BlobBatchClient = class { + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(version4, signature, parsedPermissions.toString(), parsedServices, parsedResourceTypes, accountSASSignatureValues.protocol, accountSASSignatureValues.startsOn, accountSASSignatureValues.expiresOn, accountSASSignatureValues.ipRange, void 0, void 0, void 0, void 0, void 0, void 0, void 0, void 0, void 0, void 0, accountSASSignatureValues.encryptionScope); + } + __name(generateAccountSASQueryParameters, "generateAccountSASQueryParameters"); + var BlobServiceClient = class _BlobServiceClient extends StorageClient { static { - __name(this, "BlobBatchClient"); + __name(this, "BlobServiceClient"); } constructor(url2, credentialOrPipeline, options) { let pipeline; if (isPipelineLike(credentialOrPipeline)) { pipeline = credentialOrPipeline; - } else if (!credentialOrPipeline) { - pipeline = newPipeline(new AnonymousCredential(), options); - } else { + } else if (coreHttp.isNode && credentialOrPipeline instanceof StorageSharedKeyCredential || credentialOrPipeline instanceof AnonymousCredential || coreHttp.isTokenCredential(credentialOrPipeline)) { pipeline = newPipeline(credentialOrPipeline, options); - } - const storageClientContext = new StorageClientContext(url2, pipeline.toServiceClientOptions()); - const path2 = getURLPath(url2); - if (path2 && path2 !== "/") { - this.serviceOrContainerContext = new Container(storageClientContext); } else { - this.serviceOrContainerContext = new Service(storageClientContext); - } - } - /** - * Creates a {@link BlobBatch}. - * A BlobBatch represents an aggregated set of operations on blobs. - */ - createBatch() { - return new BlobBatch(); - } - async deleteBlobs(urlsOrBlobClients, credentialOrOptions, options) { - const batch = new BlobBatch(); - for (const urlOrBlobClient of urlsOrBlobClients) { - if (typeof urlOrBlobClient === "string") { - await batch.deleteBlob(urlOrBlobClient, credentialOrOptions, options); - } else { - await batch.deleteBlob(urlOrBlobClient, credentialOrOptions); - } - } - return this.submitBatch(batch); - } - async setBlobsAccessTier(urlsOrBlobClients, credentialOrTier, tierOrOptions, options) { - const batch = new BlobBatch(); - for (const urlOrBlobClient of urlsOrBlobClients) { - if (typeof urlOrBlobClient === "string") { - await batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options); - } else { - await batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions); - } + pipeline = newPipeline(new AnonymousCredential(), options); } - return this.submitBatch(batch); + super(url2, pipeline); + this.serviceContext = new Service(this.storageClientContext); } /** - * Submit batch request which consists of multiple subrequests. - * - * Get `blobBatchClient` and other details before running the snippets. 
- * `blobServiceClient.getBlobBatchClient()` gives the `blobBatchClient` - * - * Example usage: - * - * ```js - * let batchRequest = new BlobBatch(); - * await batchRequest.deleteBlob(urlInString0, credential0); - * await batchRequest.deleteBlob(urlInString1, credential1, { - * deleteSnapshots: "include" - * }); - * const batchResp = await blobBatchClient.submitBatch(batchRequest); - * console.log(batchResp.subResponsesSucceededCount); - * ``` * - * Example using a lease: - * - * ```js - * let batchRequest = new BlobBatch(); - * await batchRequest.setBlobAccessTier(blockBlobClient0, "Cool"); - * await batchRequest.setBlobAccessTier(blockBlobClient1, "Cool", { - * conditions: { leaseId: leaseId } - * }); - * const batchResp = await blobBatchClient.submitBatch(batchRequest); - * console.log(batchResp.subResponsesSucceededCount); - * ``` - * - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch + * Creates an instance of BlobServiceClient from connection string. * - * @param batchRequest - A set of Delete or SetTier operations. - * @param options - + * @param connectionString - Account connection string or a SAS connection string of an Azure storage account. + * [ Note - Account connection string can only be used in NODE.JS runtime. ] + * Account connection string example - + * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net` + * SAS connection string example - + * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` + * @param options - Optional. Options to configure the HTTP pipeline. 
*/ - async submitBatch(batchRequest, options = {}) { - if (!batchRequest || batchRequest.getSubRequests().size === 0) { - throw new RangeError("Batch request should contain one or more sub requests."); - } - const { span, updatedOptions } = createSpan("BlobBatchClient-submitBatch", options); - try { - const batchRequestBody = batchRequest.getHttpRequestBody(); - const rawBatchResponse = await this.serviceOrContainerContext.submitBatch(utf8ByteLength(batchRequestBody), batchRequest.getMultiPartContentType(), batchRequestBody, Object.assign(Object.assign({}, options), convertTracingToRequestOptionsBase(updatedOptions))); - const batchResponseParser = new BatchResponseParser(rawBatchResponse, batchRequest.getSubRequests()); - const responseSummary = await batchResponseParser.parseBatchResponse(); - const res = { - _response: rawBatchResponse._response, - contentType: rawBatchResponse.contentType, - errorCode: rawBatchResponse.errorCode, - requestId: rawBatchResponse.requestId, - clientRequestId: rawBatchResponse.clientRequestId, - version: rawBatchResponse.version, - subResponses: responseSummary.subResponses, - subResponsesSucceededCount: responseSummary.subResponsesSucceededCount, - subResponsesFailedCount: responseSummary.subResponsesFailedCount - }; - return res; - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); - } - } - }; - var ContainerClient = class extends StorageClient { - static { - __name(this, "ContainerClient"); - } - constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, options) { - let pipeline; - let url2; + static fromConnectionString(connectionString, options) { options = options || {}; - if (isPipelineLike(credentialOrPipelineOrContainerName)) { - url2 = urlOrConnectionString; - pipeline = credentialOrPipelineOrContainerName; - } else if (coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential || credentialOrPipelineOrContainerName instanceof AnonymousCredential || coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) { - url2 = urlOrConnectionString; - pipeline = newPipeline(credentialOrPipelineOrContainerName, options); - } else if (!credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName !== "string") { - url2 = urlOrConnectionString; - pipeline = newPipeline(new AnonymousCredential(), options); - } else if (credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName === "string") { - const containerName = credentialOrPipelineOrContainerName; - const extractedCreds = extractConnectionStringParts(urlOrConnectionString); - if (extractedCreds.kind === "AccountConnString") { - if (coreHttp.isNode) { - const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); - url2 = appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)); - if (!options.proxyOptions) { - options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); - } - pipeline = newPipeline(sharedKeyCredential, options); - } else { - throw new Error("Account connection string is only supported in Node.js environment"); + const extractedCreds = extractConnectionStringParts(connectionString); + if (extractedCreds.kind === "AccountConnString") { + if (coreHttp.isNode) { + const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); + if (!options.proxyOptions) { + 
options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); } - } else if (extractedCreds.kind === "SASConnString") { - url2 = appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)) + "?" + extractedCreds.accountSas; - pipeline = newPipeline(new AnonymousCredential(), options); + const pipeline = newPipeline(sharedKeyCredential, options); + return new _BlobServiceClient(extractedCreds.url, pipeline); } else { - throw new Error("Connection string must be either an Account connection string or a SAS connection string"); - } - } else { - throw new Error("Expecting non-empty strings for containerName parameter"); - } - super(url2, pipeline); - this._containerName = this.getContainerNameFromUrl(); - this.containerContext = new Container(this.storageClientContext); - } - /** - * The name of the container. - */ - get containerName() { - return this._containerName; - } - /** - * Creates a new container under the specified account. If the container with - * the same name already exists, the operation fails. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container - * Naming rules: @see https://learn.microsoft.com/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata - * - * @param options - Options to Container Create operation. - * - * - * Example usage: - * - * ```js - * const containerClient = blobServiceClient.getContainerClient(""); - * const createContainerResponse = await containerClient.create(); - * console.log("Container was created successfully", createContainerResponse.requestId); - * ``` - */ - async create(options = {}) { - const { span, updatedOptions } = createSpan("ContainerClient-create", options); - try { - return await this.containerContext.create(Object.assign(Object.assign({}, options), convertTracingToRequestOptionsBase(updatedOptions))); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); - } - } - /** - * Creates a new container under the specified account. If the container with - * the same name already exists, it is not changed. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container - * Naming rules: @see https://learn.microsoft.com/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata - * - * @param options - - */ - async createIfNotExists(options = {}) { - var _a, _b; - const { span, updatedOptions } = createSpan("ContainerClient-createIfNotExists", options); - try { - const res = await this.create(updatedOptions); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); - } catch (e) { - if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerAlreadyExists") { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when creating a container only if it does not already exist." - }); - return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); - } - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); - } - } - /** - * Returns true if the Azure container resource represented by this client exists; false otherwise. - * - * NOTE: use this function with care since an existing container might be deleted by other clients or - * applications. 
Vice versa new containers with the same name might be added by other clients or - * applications after this function completes. - * - * @param options - - */ - async exists(options = {}) { - const { span, updatedOptions } = createSpan("ContainerClient-exists", options); - try { - await this.getProperties({ - abortSignal: options.abortSignal, - tracingOptions: updatedOptions.tracingOptions - }); - return true; - } catch (e) { - if (e.statusCode === 404) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when checking container existence" - }); - return false; + throw new Error("Account connection string is only supported in Node.js environment"); } - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); - } - } - /** - * Creates a {@link BlobClient} - * - * @param blobName - A blob name - * @returns A new BlobClient object for the given blob name. - */ - getBlobClient(blobName) { - return new BlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline); - } - /** - * Creates an {@link AppendBlobClient} - * - * @param blobName - An append blob name - */ - getAppendBlobClient(blobName) { - return new AppendBlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline); + } else if (extractedCreds.kind === "SASConnString") { + const pipeline = newPipeline(new AnonymousCredential(), options); + return new _BlobServiceClient(extractedCreds.url + "?" + extractedCreds.accountSas, pipeline); + } else { + throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + } } /** - * Creates a {@link BlockBlobClient} - * - * @param blobName - A block blob name + * Creates a {@link ContainerClient} object * + * @param containerName - A container name + * @returns A new ContainerClient object for the given container name. * * Example usage: * * ```js - * const content = "Hello world!"; - * - * const blockBlobClient = containerClient.getBlockBlobClient("<blob name>"); - * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); + * const containerClient = blobServiceClient.getContainerClient("<container name>"); * ``` */ - getBlockBlobClient(blobName) { - return new BlockBlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline); - } - /** - * Creates a {@link PageBlobClient} - * - * @param blobName - A page blob name - */ - getPageBlobClient(blobName) { - return new PageBlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline); + getContainerClient(containerName) { + return new ContainerClient(appendToURLPath(this.url, encodeURIComponent(containerName)), this.pipeline); } /** - * Returns all user-defined metadata and system properties for the specified - * container. The data returned does not include the container's list of blobs. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-properties - * - * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if - * they originally contained uppercase characters. This differs from the metadata keys returned by - * the `listContainers` method of {@link BlobServiceClient} using the `includeMetadata` option, which - * will retain their original casing. + * Create a Blob container. @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container * - * @param options - Options to Container Get Properties operation.
+ * @param containerName - Name of the container to create. + * @param options - Options to configure Container Create operation. + * @returns Container creation response and the corresponding container client. */ - async getProperties(options = {}) { - if (!options.conditions) { - options.conditions = {}; - } - const { span, updatedOptions } = createSpan("ContainerClient-getProperties", options); + async createContainer(containerName, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-createContainer", options); try { - return await this.containerContext.getProperties(Object.assign(Object.assign({ abortSignal: options.abortSignal }, options.conditions), convertTracingToRequestOptionsBase(updatedOptions))); + const containerClient = this.getContainerClient(containerName); + const containerCreateResponse = await containerClient.create(updatedOptions); + return { + containerClient, + containerCreateResponse + }; } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, @@ -87793,19 +88843,17 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } /** - * Marks the specified container for deletion. The container and any blobs - * contained within it are later deleted during garbage collection. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container + * Deletes a Blob container. * - * @param options - Options to Container Delete operation. + * @param containerName - Name of the container to delete. + * @param options - Options to configure Container Delete operation. + * @returns Container deletion response. */ - async delete(options = {}) { - if (!options.conditions) { - options.conditions = {}; - } - const { span, updatedOptions } = createSpan("ContainerClient-delete", options); + async deleteContainer(containerName, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-deleteContainer", options); try { - return await this.containerContext.delete(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); + const containerClient = this.getContainerClient(containerName); + return await containerClient.delete(updatedOptions); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, @@ -87817,26 +88865,25 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } /** - * Marks the specified container for deletion if it exists. The container and any blobs - * contained within it are later deleted during garbage collection. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container + * Restore a previously deleted Blob container. + * This API is only functional if Container Soft Delete is enabled for the storage account associated with the container. * - * @param options - Options to Container Delete operation. + * @param deletedContainerName - Name of the previously deleted container. + * @param deletedContainerVersion - Version of the previously deleted container, used to uniquely identify the deleted container. + * @param options - Options to configure Container Restore operation. + * @returns Container deletion response. 
*/ - async deleteIfExists(options = {}) { - var _a, _b; - const { span, updatedOptions } = createSpan("ContainerClient-deleteIfExists", options); + async undeleteContainer(deletedContainerName2, deletedContainerVersion2, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-undeleteContainer", options); try { - const res = await this.delete(updatedOptions); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + const containerClient = this.getContainerClient(options.destinationContainerName || deletedContainerName2); + const containerContext = new Container(containerClient["storageClientContext"]); + const containerUndeleteResponse = await containerContext.restore(Object.assign({ + deletedContainerName: deletedContainerName2, + deletedContainerVersion: deletedContainerVersion2 + }, updatedOptions)); + return { containerClient, containerUndeleteResponse }; } catch (e) { - if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerNotFound") { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when deleting a container only if it exists." - }); - return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); - } span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, message: e.message @@ -87847,27 +88894,22 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } /** - * Sets one or more user-defined name-value pairs for the specified container. - * - * If no option provided, or no metadata defined in the parameter, the container - * metadata will be removed. - * - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-metadata + * Rename an existing Blob Container. * - * @param metadata - Replace existing metadata with this value. - * If no value provided the existing metadata will be removed. - * @param options - Options to Container Set Metadata operation. + * @param sourceContainerName - The name of the source container. + * @param destinationContainerName - The new name of the container. + * @param options - Options to configure Container Rename operation. */ - async setMetadata(metadata2, options = {}) { - if (!options.conditions) { - options.conditions = {}; - } - if (options.conditions.ifUnmodifiedSince) { - throw new RangeError("the IfUnmodifiedSince must have their default values because they are ignored by the blob service"); - } - const { span, updatedOptions } = createSpan("ContainerClient-setMetadata", options); + /* eslint-disable-next-line @typescript-eslint/ban-ts-comment */ + // @ts-ignore Need to hide this interface for now. Make it public and turn on the live tests for it when the service is ready. 
+ async renameContainer(sourceContainerName2, destinationContainerName, options = {}) { + var _a; + const { span, updatedOptions } = createSpan("BlobServiceClient-renameContainer", options); try { - return await this.containerContext.setMetadata(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: metadata2, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); + const containerClient = this.getContainerClient(destinationContainerName); + const containerContext = new Container(containerClient["storageClientContext"]); + const containerRenameResponse = await containerContext.rename(sourceContainerName2, Object.assign(Object.assign({}, updatedOptions), { sourceLeaseId: (_a = options.sourceCondition) === null || _a === void 0 ? void 0 : _a.leaseId })); + return { containerClient, containerRenameResponse }; } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, @@ -87879,54 +88921,17 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } /** - * Gets the permissions for the specified container. The permissions indicate - * whether container data may be accessed publicly. - * - * WARNING: JavaScript Date will potentially lose precision when parsing startsOn and expiresOn strings. - * For example, new Date("2018-12-31T03:44:23.8827891Z").toISOString() will get "2018-12-31T03:44:23.882Z". - * - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-acl + * Gets the properties of a storage account’s Blob service, including properties + * for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties * - * @param options - Options to Container Get Access Policy operation. + * @param options - Options to the Service Get Properties operation. + * @returns Response data for the Service Get Properties operation. 
*/ - async getAccessPolicy(options = {}) { - if (!options.conditions) { - options.conditions = {}; - } - const { span, updatedOptions } = createSpan("ContainerClient-getAccessPolicy", options); + async getProperties(options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-getProperties", options); try { - const response = await this.containerContext.getAccessPolicy(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); - const res = { - _response: response._response, - blobPublicAccess: response.blobPublicAccess, - date: response.date, - etag: response.etag, - errorCode: response.errorCode, - lastModified: response.lastModified, - requestId: response.requestId, - clientRequestId: response.clientRequestId, - signedIdentifiers: [], - version: response.version - }; - for (const identifier of response) { - let accessPolicy = void 0; - if (identifier.accessPolicy) { - accessPolicy = { - permissions: identifier.accessPolicy.permissions - }; - if (identifier.accessPolicy.expiresOn) { - accessPolicy.expiresOn = new Date(identifier.accessPolicy.expiresOn); - } - if (identifier.accessPolicy.startsOn) { - accessPolicy.startsOn = new Date(identifier.accessPolicy.startsOn); - } - } - res.signedIdentifiers.push({ - accessPolicy, - id: identifier.id - }); - } - return res; + return await this.serviceContext.getProperties(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, @@ -87938,38 +88943,18 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } /** - * Sets the permissions for the specified container. The permissions indicate - * whether blobs in a container may be accessed publicly. - * - * When you set permissions for a container, the existing permissions are replaced. - * If no access or containerAcl provided, the existing container ACL will be - * removed. - * - * When you establish a stored access policy on a container, it may take up to 30 seconds to take effect. - * During this interval, a shared access signature that is associated with the stored access policy will - * fail with status code 403 (Forbidden), until the access policy becomes active. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-acl + * Sets properties for a storage account’s Blob service endpoint, including properties + * for Storage Analytics, CORS (Cross-Origin Resource Sharing) rules and soft delete settings. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-service-properties * - * @param access - The level of public access to data in the container. - * @param containerAcl - Array of elements each having a unique Id and details of the access policy. - * @param options - Options to Container Set Access Policy operation. + * @param properties - + * @param options - Options to the Service Set Properties operation. + * @returns Response data for the Service Set Properties operation. 
*/ - async setAccessPolicy(access2, containerAcl2, options = {}) { - options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("ContainerClient-setAccessPolicy", options); + async setProperties(properties, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-setProperties", options); try { - const acl = []; - for (const identifier of containerAcl2 || []) { - acl.push({ - accessPolicy: { - expiresOn: identifier.accessPolicy.expiresOn ? truncatedISO8061Date(identifier.accessPolicy.expiresOn) : "", - permissions: identifier.accessPolicy.permissions, - startsOn: identifier.accessPolicy.startsOn ? truncatedISO8061Date(identifier.accessPolicy.startsOn) : "" - }, - id: identifier.id - }); - } - return await this.containerContext.setAccessPolicy(Object.assign({ abortSignal: options.abortSignal, access: access2, containerAcl: acl, leaseAccessConditions: options.conditions, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); + return await this.serviceContext.setProperties(properties, Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, @@ -87981,45 +88966,18 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } /** - * Get a {@link BlobLeaseClient} that manages leases on the container. - * - * @param proposeLeaseId - Initial proposed lease Id. - * @returns A new BlobLeaseClient object for managing leases on the container. - */ - getBlobLeaseClient(proposeLeaseId) { - return new BlobLeaseClient(this, proposeLeaseId); - } - /** - * Creates a new block blob, or updates the content of an existing block blob. - * - * Updating an existing block blob overwrites any existing metadata on the blob. - * Partial updates are not supported; the content of the existing blob is - * overwritten with the new content. To perform a partial update of a block blob's, - * use {@link BlockBlobClient.stageBlock} and {@link BlockBlobClient.commitBlockList}. - * - * This is a non-parallel uploading method, please use {@link BlockBlobClient.uploadFile}, - * {@link BlockBlobClient.uploadStream} or {@link BlockBlobClient.uploadBrowserData} for better - * performance with concurrency uploading. - * - * @see https://docs.microsoft.com/rest/api/storageservices/put-blob + * Retrieves statistics related to replication for the Blob service. It is only + * available on the secondary location endpoint when read-access geo-redundant + * replication is enabled for the storage account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-stats * - * @param blobName - Name of the block blob to create or update. - * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function - * which returns a new Readable stream whose offset is from data source beginning. - * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a - * string including non non-Base64/Hex-encoded characters. - * @param options - Options to configure the Block Blob Upload operation. - * @returns Block Blob upload response data and the corresponding BlockBlobClient instance. + * @param options - Options to the Service Get Statistics operation. + * @returns Response data for the Service Get Statistics operation. 
*/ - async uploadBlockBlob(blobName, body2, contentLength2, options = {}) { - const { span, updatedOptions } = createSpan("ContainerClient-uploadBlockBlob", options); + async getStatistics(options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-getStatistics", options); try { - const blockBlobClient = this.getBlockBlobClient(blobName); - const response = await blockBlobClient.upload(body2, contentLength2, updatedOptions); - return { - blockBlobClient, - response - }; + return await this.serviceContext.getStatistics(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, @@ -88031,24 +88989,19 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } /** - * Marks the specified blob or snapshot for deletion. The blob is later deleted - * during garbage collection. Note that in order to delete a blob, you must delete - * all of its snapshots. You can delete both at the same time with the Delete - * Blob operation. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob + * The Get Account Information operation returns the sku name and account kind + * for the specified account. + * The Get Account Information operation is available on service versions beginning + * with version 2018-03-28. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-account-information * - * @param blobName - - * @param options - Options to Blob Delete operation. - * @returns Block blob deletion response data. + * @param options - Options to the Service Get Account Info operation. + * @returns Response data for the Service Get Account Info operation. */ - async deleteBlob(blobName, options = {}) { - const { span, updatedOptions } = createSpan("ContainerClient-deleteBlob", options); + async getAccountInfo(options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-getAccountInfo", options); try { - let blobClient = this.getBlobClient(blobName); - if (options.versionId) { - blobClient = blobClient.withVersion(options.versionId); - } - return await blobClient.delete(updatedOptions); + return await this.serviceContext.getAccountInfo(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, @@ -88060,24 +89013,23 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } /** - * listBlobFlatSegment returns a single segment of blobs starting from the - * specified Marker. Use an empty Marker to start enumeration from the beginning. - * After getting a segment, process it, and then call listBlobsFlatSegment again - * (passing the the previously-returned Marker) to get the next segment. - * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs + * Returns a list of the containers under the specified account. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/list-containers2 * - * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. - * @param options - Options to Container List Blob Flat Segment operation. + * @param marker - A string value that identifies the portion of + * the list of containers to be returned with the next listing operation. 
The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all containers remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to the Service List Container Segment operation. + * @returns Response data for the Service List Container Segment operation. */ - async listBlobFlatSegment(marker2, options = {}) { - const { span, updatedOptions } = createSpan("ContainerClient-listBlobFlatSegment", options); + async listContainersSegment(marker2, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-listContainersSegment", options); try { - const response = await this.containerContext.listBlobFlatSegment(Object.assign(Object.assign({ marker: marker2 }, options), convertTracingToRequestOptionsBase(updatedOptions))); - const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobFlat(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInteral) => { - const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name), tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) }); - return blobItem; - }) }) }); - return wrappedResponse; + return await this.serviceContext.listContainersSegment(Object.assign(Object.assign(Object.assign({ abortSignal: options.abortSignal, marker: marker2 }, options), { include: typeof options.include === "string" ? [options.include] : options.include }), convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, @@ -88089,28 +89041,35 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } /** - * listBlobHierarchySegment returns a single segment of blobs starting from - * the specified Marker. Use an empty Marker to start enumeration from the - * beginning. After getting a segment, process it, and then call listBlobsHierarchicalSegment - * again (passing the the previously-returned Marker) to get the next segment. - * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs + * The Filter Blobs operation enables callers to list blobs across all containers whose tags + * match a given search expression. Filter blobs searches across all containers within a + * storage account but can be scoped within the expression to a single container. * - * @param delimiter - The character or string used to define the virtual hierarchy - * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. - * @param options - Options to Container List Blob Hierarchy Segment operation. + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The [OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service.
+ * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. */ - async listBlobHierarchySegment(delimiter2, marker2, options = {}) { - var _a; - const { span, updatedOptions } = createSpan("ContainerClient-listBlobHierarchySegment", options); + async findBlobsByTagsSegment(tagFilterSqlExpression, marker2, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-findBlobsByTagsSegment", options); try { - const response = await this.containerContext.listBlobHierarchySegment(delimiter2, Object.assign(Object.assign({ marker: marker2 }, options), convertTracingToRequestOptionsBase(updatedOptions))); - const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobHierarchy(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInteral) => { - const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name), tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) }); - return blobItem; - }), blobPrefixes: (_a = response.segment.blobPrefixes) === null || _a === void 0 ? void 0 : _a.map((blobPrefixInternal) => { - const blobPrefix = Object.assign(Object.assign({}, blobPrefixInternal), { name: BlobNameToString(blobPrefixInternal.name) }); - return blobPrefix; - }) }) }); + const response = await this.serviceContext.filterBlobs(Object.assign({ abortSignal: options.abortSignal, where: tagFilterSqlExpression, marker: marker2, maxPageSize: options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, blobs: response.blobs.map((blob) => { + var _a; + let tagValue = ""; + if (((_a = blob.tags) === null || _a === void 0 ? void 0 : _a.blobTagSet.length) === 1) { + tagValue = blob.tags.blobTagSet[0].value; + } + return Object.assign(Object.assign({}, blob), { tags: toTags(blob.tags), tagValue }); + }) }); return wrappedResponse; } catch (e) { span.setStatus({ @@ -88123,42 +89082,51 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } /** - * Returns an AsyncIterableIterator for ContainerListBlobFlatSegmentResponse + * Returns an AsyncIterableIterator for ServiceFindBlobsByTagsSegmentResponse. * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The [OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service.
* @param marker - A string value that identifies the portion of * the list of blobs to be returned with the next listing operation. The - * operation returns the ContinuationToken value within the response body if the + * operation returns the continuationToken value within the response body if the * listing operation did not return all blobs remaining to be listed - * with the current page. The ContinuationToken value can be used as the value for + * with the current page. The continuationToken value can be used as the value for * the marker parameter in a subsequent call to request the next page of list * items. The marker value is opaque to the client. - * @param options - Options to list blobs operation. + * @param options - Options to find blobs by tags. */ - listSegments(marker2, options = {}) { - return tslib.__asyncGenerator(this, arguments, /* @__PURE__ */ __name(function* listSegments_1() { - let listBlobsFlatSegmentResponse; + findBlobsByTagsSegments(tagFilterSqlExpression, marker2, options = {}) { + return tslib.__asyncGenerator(this, arguments, /* @__PURE__ */ __name(function* findBlobsByTagsSegments_1() { + let response; if (!!marker2 || marker2 === void 0) { do { - listBlobsFlatSegmentResponse = yield tslib.__await(this.listBlobFlatSegment(marker2, options)); - marker2 = listBlobsFlatSegmentResponse.continuationToken; - yield yield tslib.__await(yield tslib.__await(listBlobsFlatSegmentResponse)); + response = yield tslib.__await(this.findBlobsByTagsSegment(tagFilterSqlExpression, marker2, options)); + response.blobs = response.blobs || []; + marker2 = response.continuationToken; + yield yield tslib.__await(response); } while (marker2); } - }, "listSegments_1")); + }, "findBlobsByTagsSegments_1")); } /** - * Returns an AsyncIterableIterator of {@link BlobItem} objects + * Returns an AsyncIterableIterator for blobs. * - * @param options - Options to list blobs operation. + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The [OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to findBlobsByTagsItems.
*/ - listItems(options = {}) { - return tslib.__asyncGenerator(this, arguments, /* @__PURE__ */ __name(function* listItems_1() { + findBlobsByTagsItems(tagFilterSqlExpression, options = {}) { + return tslib.__asyncGenerator(this, arguments, /* @__PURE__ */ __name(function* findBlobsByTagsItems_1() { var e_1, _a; let marker2; try { - for (var _b = tslib.__asyncValues(this.listSegments(marker2, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done; ) { - const listBlobsFlatSegmentResponse = _c.value; - yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(listBlobsFlatSegmentResponse.segment.blobItems))); + for (var _b = tslib.__asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker2, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done; ) { + const segment = _c.value; + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.blobs))); } } catch (e_1_1) { e_1 = { error: e_1_1 }; @@ -88169,22 +89137,22 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; if (e_1) throw e_1.error; } } - }, "listItems_1")); + }, "findBlobsByTagsItems_1")); } /** - * Returns an async iterable iterator to list all the blobs + * Returns an async iterable iterator to find all blobs with the specified tag * under the specified account. * * .byPage() returns an async iterable iterator to list the blobs in pages. * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/find-blobs-by-tags + * * Example using `for await` syntax: * * ```js - * // Get the containerClient before you run these snippets, - * // Can be obtained from `blobServiceClient.getContainerClient("<container name>");` * let i = 1; - * for await (const blob of containerClient.listBlobsFlat()) { - * console.log(`Blob ${i++}: ${blob.name}`); + * for await (const blob of blobServiceClient.findBlobsByTags("tagkey='tagvalue'")) { + * console.log(`Blob ${i++}: ${blob.name}`); * } * ``` * @@ -88192,7 +89160,7 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * * ```js * let i = 1; - * let iter = containerClient.listBlobsFlat(); + * const iter = blobServiceClient.findBlobsByTags("tagkey='tagvalue'"); * let blobItem = await iter.next(); * while (!blobItem.done) { * console.log(`Blob ${i++}: ${blobItem.value.name}`); @@ -88205,9 +89173,11 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * ```js * // passing optional maxPageSize in the page settings * let i = 1; - * for await (const response of containerClient.listBlobsFlat().byPage({ maxPageSize: 20 })) { - * for (const blob of response.segment.blobItems) { - * console.log(`Blob ${i++}: ${blob.name}`); + * for await (const response of blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) { + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } * } * } * ``` @@ -88216,68 +89186,41 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * * ```js * let i = 1; - * let iterator = containerClient.listBlobsFlat().byPage({ maxPageSize: 2 }); + * let iterator = blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); * let response = (await iterator.next()).value; * * // Prints 2 blob names - * for (const blob of response.segment.blobItems) { - * console.log(`Blob ${i++}: ${blob.name}`); + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } * } * * // Gets next marker * let marker = response.continuationToken; - * * // Passing next marker as
continuationToken - * - * iterator = containerClient.listBlobsFlat().byPage({ continuationToken: marker, maxPageSize: 10 }); + iterator = blobServiceClient + .findBlobsByTags("tagkey='tagvalue'") + .byPage({ continuationToken: marker, maxPageSize: 10 }); * response = (await iterator.next()).value; * - * // Prints 10 blob names - * for (const blob of response.segment.blobItems) { - * console.log(`Blob ${i++}: ${blob.name}`); + * // Prints blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } * } * ``` * - * @param options - Options to list blobs. - * @returns An asyncIterableIterator that supports paging. + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The [OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to find blobs by tags. */ - listBlobsFlat(options = {}) { - const include2 = []; - if (options.includeCopy) { - include2.push("copy"); - } - if (options.includeDeleted) { - include2.push("deleted"); - } - if (options.includeMetadata) { - include2.push("metadata"); - } - if (options.includeSnapshots) { - include2.push("snapshots"); - } - if (options.includeVersions) { - include2.push("versions"); - } - if (options.includeUncommitedBlobs) { - include2.push("uncommittedblobs"); - } - if (options.includeTags) { - include2.push("tags"); - } - if (options.includeDeletedWithVersions) { - include2.push("deletedwithversions"); - } - if (options.includeImmutabilityPolicy) { - include2.push("immutabilitypolicy"); - } - if (options.includeLegalHold) { - include2.push("legalhold"); - } - if (options.prefix === "") { - options.prefix = void 0; - } - const updatedOptions = Object.assign(Object.assign({}, options), include2.length > 0 ? { include: include2 } : {}); - const iter = this.listItems(updatedOptions); + findBlobsByTags(tagFilterSqlExpression, options = {}) { + const listSegmentOptions = Object.assign({}, options); + const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions); return { /** * The next method, part of the iteration protocol @@ -88295,57 +89238,48 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * Return an AsyncIterableIterator that works a page at a time */ byPage: /* @__PURE__ */ __name((settings = {}) => { - return this.listSegments(settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, updatedOptions)); + return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); }, "byPage") }; } /** - * Returns an AsyncIterableIterator for ContainerListBlobHierarchySegmentResponse + * Returns an AsyncIterableIterator for ServiceListContainersSegmentResponses * - * @param delimiter - The character or string used to define the virtual hierarchy * @param marker - A string value that identifies the portion of - * the list of blobs to be returned with the next listing operation. The - * operation returns the ContinuationToken value within the response body if the - * listing operation did not return all blobs remaining to be listed - * with the current page.
The ContinuationToken value can be used as the value for - * the marker parameter in a subsequent call to request the next page of list - * items. The marker value is opaque to the client. - * @param options - Options to list blobs operation. + * the list of containers to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all containers remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to list containers operation. */ - listHierarchySegments(delimiter2, marker2, options = {}) { - return tslib.__asyncGenerator(this, arguments, /* @__PURE__ */ __name(function* listHierarchySegments_1() { - let listBlobsHierarchySegmentResponse; + listSegments(marker2, options = {}) { + return tslib.__asyncGenerator(this, arguments, /* @__PURE__ */ __name(function* listSegments_1() { + let listContainersSegmentResponse; if (!!marker2 || marker2 === void 0) { do { - listBlobsHierarchySegmentResponse = yield tslib.__await(this.listBlobHierarchySegment(delimiter2, marker2, options)); - marker2 = listBlobsHierarchySegmentResponse.continuationToken; - yield yield tslib.__await(yield tslib.__await(listBlobsHierarchySegmentResponse)); + listContainersSegmentResponse = yield tslib.__await(this.listContainersSegment(marker2, options)); + listContainersSegmentResponse.containerItems = listContainersSegmentResponse.containerItems || []; + marker2 = listContainersSegmentResponse.continuationToken; + yield yield tslib.__await(yield tslib.__await(listContainersSegmentResponse)); } while (marker2); } - }, "listHierarchySegments_1")); + }, "listSegments_1")); } /** - * Returns an AsyncIterableIterator for {@link BlobPrefix} and {@link BlobItem} objects. + * Returns an AsyncIterableIterator for Container Items * - * @param delimiter - The character or string used to define the virtual hierarchy - * @param options - Options to list blobs operation. + * @param options - Options to list containers operation. 
*/ - listItemsByHierarchy(delimiter2, options = {}) { - return tslib.__asyncGenerator(this, arguments, /* @__PURE__ */ __name(function* listItemsByHierarchy_1() { + listItems(options = {}) { + return tslib.__asyncGenerator(this, arguments, /* @__PURE__ */ __name(function* listItems_1() { var e_2, _a; let marker2; try { - for (var _b = tslib.__asyncValues(this.listHierarchySegments(delimiter2, marker2, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done; ) { - const listBlobsHierarchySegmentResponse = _c.value; - const segment = listBlobsHierarchySegmentResponse.segment; - if (segment.blobPrefixes) { - for (const prefix2 of segment.blobPrefixes) { - yield yield tslib.__await(Object.assign({ kind: "prefix" }, prefix2)); - } - } - for (const blob of segment.blobItems) { - yield yield tslib.__await(Object.assign({ kind: "blob" }, blob)); - } + for (var _b = tslib.__asyncValues(this.listSegments(marker2, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done; ) { + const segment = _c.value; + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.containerItems))); } } catch (e_2_1) { e_2 = { error: e_2_1 }; @@ -88356,131 +89290,103 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; if (e_2) throw e_2.error; } } - }, "listItemsByHierarchy_1")); + }, "listItems_1")); } /** - * Returns an async iterable iterator to list all the blobs by hierarchy. + * Returns an async iterable iterator to list all the containers * under the specified account. * - * .byPage() returns an async iterable iterator to list the blobs by hierarchy in pages. + * .byPage() returns an async iterable iterator to list the containers in pages. * * Example using `for await` syntax: * * ```js - * for await (const item of containerClient.listBlobsByHierarchy("/")) { - * if (item.kind === "prefix") { - * console.log(`\tBlobPrefix: ${item.name}`); - * } else { - * console.log(`\tBlobItem: name - ${item.name}`); - * } + * let i = 1; + * for await (const container of blobServiceClient.listContainers()) { + * console.log(`Container ${i++}: ${container.name}`); * } * ``` * * Example using `iter.next()`: * * ```js - * let iter = containerClient.listBlobsByHierarchy("/", { prefix: "prefix1/" }); - * let entity = await iter.next(); - * while (!entity.done) { - * let item = entity.value; - * if (item.kind === "prefix") { - * console.log(`\tBlobPrefix: ${item.name}`); - * } else { - * console.log(`\tBlobItem: name - ${item.name}`); - * } - * entity = await iter.next(); + * let i = 1; + * const iter = blobServiceClient.listContainers(); + * let containerItem = await iter.next(); + * while (!containerItem.done) { + * console.log(`Container ${i++}: ${containerItem.value.name}`); + * containerItem = await iter.next(); * } * ``` * * Example using `byPage()`: * * ```js - * console.log("Listing blobs by hierarchy by page"); - * for await (const response of containerClient.listBlobsByHierarchy("/").byPage()) { - * const segment = response.segment; - * if (segment.blobPrefixes) { - * for (const prefix of segment.blobPrefixes) { - * console.log(`\tBlobPrefix: ${prefix.name}`); + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of blobServiceClient.listContainers().byPage({ maxPageSize: 20 })) { + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); * } * } - * for (const blob of response.segment.blobItems) { - * console.log(`\tBlobItem: name - ${blob.name}`); - * } * 
} * ``` * - * Example using paging with a max page size: + * Example using paging with a marker: * * ```js - * console.log("Listing blobs by hierarchy by page, specifying a prefix and a max page size"); - * * let i = 1; - * for await (const response of containerClient - * .listBlobsByHierarchy("/", { prefix: "prefix2/sub1/" }) - * .byPage({ maxPageSize: 2 })) { - * console.log(`Page ${i++}`); - * const segment = response.segment; + * let iterator = blobServiceClient.listContainers().byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; * - * if (segment.blobPrefixes) { - * for (const prefix of segment.blobPrefixes) { - * console.log(`\tBlobPrefix: ${prefix.name}`); - * } + * // Prints 2 container names + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); * } + * } * - * for (const blob of response.segment.blobItems) { - * console.log(`\tBlobItem: name - ${blob.name}`); + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = blobServiceClient + * .listContainers() + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints 10 container names + * if (response.containerItems) { + * for (const container of response.containerItems) { + * console.log(`Container ${i++}: ${container.name}`); * } * } * ``` * - * @param delimiter - The character or string used to define the virtual hierarchy - * @param options - Options to list blobs operation. + * @param options - Options to list containers. + * @returns An asyncIterableIterator that supports paging. */ - listBlobsByHierarchy(delimiter2, options = {}) { - if (delimiter2 === "") { - throw new RangeError("delimiter should contain one or more characters"); + listContainers(options = {}) { + if (options.prefix === "") { + options.prefix = void 0; } const include2 = []; - if (options.includeCopy) { - include2.push("copy"); - } if (options.includeDeleted) { include2.push("deleted"); } if (options.includeMetadata) { include2.push("metadata"); } - if (options.includeSnapshots) { - include2.push("snapshots"); - } - if (options.includeVersions) { - include2.push("versions"); - } - if (options.includeUncommitedBlobs) { - include2.push("uncommittedblobs"); - } - if (options.includeTags) { - include2.push("tags"); - } - if (options.includeDeletedWithVersions) { - include2.push("deletedwithversions"); - } - if (options.includeImmutabilityPolicy) { - include2.push("immutabilitypolicy"); - } - if (options.includeLegalHold) { - include2.push("legalhold"); - } - if (options.prefix === "") { - options.prefix = void 0; + if (options.includeSystem) { + include2.push("system"); } - const updatedOptions = Object.assign(Object.assign({}, options), include2.length > 0 ? { include: include2 } : {}); - const iter = this.listItemsByHierarchy(delimiter2, updatedOptions); + const listSegmentOptions = Object.assign(Object.assign({}, options), include2.length > 0 ? 
{ include: include2 } : {}); + const iter = this.listItems(listSegmentOptions); return { /** * The next method, part of the iteration protocol */ - async next() { + next() { return iter.next(); }, /** @@ -88493,40 +89399,39 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; * Return an AsyncIterableIterator that works a page at a time */ byPage: /* @__PURE__ */ __name((settings = {}) => { - return this.listHierarchySegments(delimiter2, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, updatedOptions)); + return this.listSegments(settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); }, "byPage") }; } /** - * The Filter Blobs operation enables callers to list blobs in the container whose tags - * match a given search expression. + * ONLY AVAILABLE WHEN USING BEARER TOKEN AUTHENTICATION (TokenCredential). * - * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. - * The given expression must evaluate to true for a blob to be returned in the results. - * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; - * however, only a subset of the OData filter syntax is supported in the Blob service. - * @param marker - A string value that identifies the portion of - * the list of blobs to be returned with the next listing operation. The - * operation returns the continuationToken value within the response body if the - * listing operation did not return all blobs remaining to be listed - * with the current page. The continuationToken value can be used as the value for - * the marker parameter in a subsequent call to request the next page of list - * items. The marker value is opaque to the client. - * @param options - Options to find blobs by tags. + * Retrieves a user delegation key for the Blob service. This is only a valid operation when using + * bearer token authentication. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-user-delegation-key + * + * @param startsOn - The start time for the user delegation SAS. Must be within 7 days of the current time + * @param expiresOn - The end time for the user delegation SAS. Must be within 7 days of the current time */ - async findBlobsByTagsSegment(tagFilterSqlExpression, marker2, options = {}) { - const { span, updatedOptions } = createSpan("ContainerClient-findBlobsByTagsSegment", options); + async getUserDelegationKey(startsOn, expiresOn2, options = {}) { + const { span, updatedOptions } = createSpan("BlobServiceClient-getUserDelegationKey", options); try { - const response = await this.containerContext.filterBlobs(Object.assign({ abortSignal: options.abortSignal, where: tagFilterSqlExpression, marker: marker2, maxPageSize: options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); - const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, blobs: response.blobs.map((blob) => { - var _a; - let tagValue = ""; - if (((_a = blob.tags) === null || _a === void 0 ? 
void 0 : _a.blobTagSet.length) === 1) { - tagValue = blob.tags.blobTagSet[0].value; - } - return Object.assign(Object.assign({}, blob), { tags: toTags(blob.tags), tagValue }); - }) }); - return wrappedResponse; + const response = await this.serviceContext.getUserDelegationKey({ + startsOn: truncatedISO8061Date(startsOn, false), + expiresOn: truncatedISO8061Date(expiresOn2, false) + }, Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + const userDelegationKey = { + signedObjectId: response.signedObjectId, + signedTenantId: response.signedTenantId, + signedStartsOn: new Date(response.signedStartsOn), + signedExpiresOn: new Date(response.signedExpiresOn), + signedService: response.signedService, + signedVersion: response.signedVersion, + value: response.value + }; + const res = Object.assign({ _response: response._response, requestId: response.requestId, clientRequestId: response.clientRequestId, version: response.version, date: response.date, errorCode: response.errorCode }, userDelegationKey); + return res; } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, @@ -88538,1400 +89443,1829 @@ ${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } /** - * Returns an AsyncIterableIterator for ContainerFindBlobsByTagsSegmentResponse. + * Creates a BlobBatchClient object to conduct batch operations. * - * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. - * The given expression must evaluate to true for a blob to be returned in the results. - * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; - * however, only a subset of the OData filter syntax is supported in the Blob service. - * @param marker - A string value that identifies the portion of - * the list of blobs to be returned with the next listing operation. The - * operation returns the continuationToken value within the response body if the - * listing operation did not return all blobs remaining to be listed - * with the current page. The continuationToken value can be used as the value for - * the marker parameter in a subsequent call to request the next page of list - * items. The marker value is opaque to the client. - * @param options - Options to find blobs by tags. - */ - findBlobsByTagsSegments(tagFilterSqlExpression, marker2, options = {}) { - return tslib.__asyncGenerator(this, arguments, /* @__PURE__ */ __name(function* findBlobsByTagsSegments_1() { - let response; - if (!!marker2 || marker2 === void 0) { - do { - response = yield tslib.__await(this.findBlobsByTagsSegment(tagFilterSqlExpression, marker2, options)); - response.blobs = response.blobs || []; - marker2 = response.continuationToken; - yield yield tslib.__await(response); - } while (marker2); - } - }, "findBlobsByTagsSegments_1")); - } - /** - * Returns an AsyncIterableIterator for blobs. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch * - * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. - * The given expression must evaluate to true for a blob to be returned in the results. - * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; - * however, only a subset of the OData filter syntax is supported in the Blob service. - * @param options - Options to findBlobsByTagsItems. 
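+ * A minimal usage sketch (illustrative only; `blockBlobClient` and `sharedKeyCredential` are assumed to already exist): + * ```js + * const batchClient = blobServiceClient.getBlobBatchClient(); + * await batchClient.deleteBlobs([blockBlobClient.url], sharedKeyCredential); + * ```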
+ * @returns A new BlobBatchClient object for this service. */ - findBlobsByTagsItems(tagFilterSqlExpression, options = {}) { - return tslib.__asyncGenerator(this, arguments, /* @__PURE__ */ __name(function* findBlobsByTagsItems_1() { - var e_3, _a; - let marker2; - try { - for (var _b = tslib.__asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker2, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done; ) { - const segment = _c.value; - yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.blobs))); - } - } catch (e_3_1) { - e_3 = { error: e_3_1 }; - } finally { - try { - if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); - } finally { - if (e_3) throw e_3.error; - } - } - }, "findBlobsByTagsItems_1")); + getBlobBatchClient() { + return new BlobBatchClient(this.url, this.pipeline); } /** - * Returns an async iterable iterator to find all blobs with specified tag - * under the specified container. - * - * .byPage() returns an async iterable iterator to list the blobs in pages. - * - * Example using `for await` syntax: - * - * ```js - * let i = 1; - * for await (const blob of containerClient.findBlobsByTags("tagkey='tagvalue'")) { - * console.log(`Blob ${i++}: ${blob.name}`); - * } - * ``` - * - * Example using `iter.next()`: - * - * ```js - * let i = 1; - * const iter = containerClient.findBlobsByTags("tagkey='tagvalue'"); - * let blobItem = await iter.next(); - * while (!blobItem.done) { - * console.log(`Blob ${i++}: ${blobItem.value.name}`); - * blobItem = await iter.next(); - * } - * ``` - * - * Example using `byPage()`: - * - * ```js - * // passing optional maxPageSize in the page settings - * let i = 1; - * for await (const response of containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) { - * if (response.blobs) { - * for (const blob of response.blobs) { - * console.log(`Blob ${i++}: ${blob.name}`); - * } - * } - * } - * ``` - * - * Example using paging with a marker: - * - * ```js - * let i = 1; - * let iterator = containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); - * let response = (await iterator.next()).value; - * - * // Prints 2 blob names - * if (response.blobs) { - * for (const blob of response.blobs) { - * console.log(`Blob ${i++}: ${blob.name}`); - * } - * } + * Only available for BlobServiceClient constructed with a shared key credential. * - * // Gets next marker - * let marker = response.continuationToken; - * // Passing next marker as continuationToken - * iterator = containerClient - * .findBlobsByTags("tagkey='tagvalue'") - * .byPage({ continuationToken: marker, maxPageSize: 10 }); - * response = (await iterator.next()).value; + * Generates a Blob account Shared Access Signature (SAS) URI based on the client properties + * and parameters passed in. The SAS is signed by the shared key credential of the client. * - * // Prints blob names - * if (response.blobs) { - * for (const blob of response.blobs) { - * console.log(`Blob ${i++}: ${blob.name}`); - * } - * } - * ``` + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-account-sas * - * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. - * The given expression must evaluate to true for a blob to be returned in the results. 
- * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; - * however, only a subset of the OData filter syntax is supported in the Blob service. - * @param options - Options to find blobs by tags. + * @param expiresOn - Optional. The time at which the shared access signature becomes invalid. Default to an hour later if not provided. + * @param permissions - Specifies the list of permissions to be associated with the SAS. + * @param resourceTypes - Specifies the resource types associated with the shared access signature. + * @param options - Optional parameters. + * @returns An account SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. */ - findBlobsByTags(tagFilterSqlExpression, options = {}) { - const listSegmentOptions = Object.assign({}, options); - const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions); - return { - /** - * The next method, part of the iteration protocol - */ - next() { - return iter.next(); - }, - /** - * The connection to the async iterator, part of the iteration protocol - */ - [Symbol.asyncIterator]() { - return this; - }, - /** - * Return an AsyncIterableIterator that works a page at a time - */ - byPage: /* @__PURE__ */ __name((settings = {}) => { - return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); - }, "byPage") - }; + generateAccountSasUrl(expiresOn2, permissions = AccountSASPermissions.parse("r"), resourceTypes = "sco", options = {}) { + if (!(this.credential instanceof StorageSharedKeyCredential)) { + throw RangeError("Can only generate the account SAS when the client is initialized with a shared key credential"); + } + if (expiresOn2 === void 0) { + const now = /* @__PURE__ */ new Date(); + expiresOn2 = new Date(now.getTime() + 3600 * 1e3); + } + const sas = generateAccountSASQueryParameters(Object.assign({ + permissions, + expiresOn: expiresOn2, + resourceTypes, + services: AccountSASServices.parse("b").toString() + }, options), this.credential).toString(); + return appendToURLQuery(this.url, sas); + } + }; + exports2.KnownEncryptionAlgorithmType = void 0; + (function(KnownEncryptionAlgorithmType) { + KnownEncryptionAlgorithmType["AES256"] = "AES256"; + })(exports2.KnownEncryptionAlgorithmType || (exports2.KnownEncryptionAlgorithmType = {})); + Object.defineProperty(exports2, "BaseRequestPolicy", { + enumerable: true, + get: /* @__PURE__ */ __name(function() { + return coreHttp.BaseRequestPolicy; + }, "get") + }); + Object.defineProperty(exports2, "HttpHeaders", { + enumerable: true, + get: /* @__PURE__ */ __name(function() { + return coreHttp.HttpHeaders; + }, "get") + }); + Object.defineProperty(exports2, "RequestPolicyOptions", { + enumerable: true, + get: /* @__PURE__ */ __name(function() { + return coreHttp.RequestPolicyOptions; + }, "get") + }); + Object.defineProperty(exports2, "RestError", { + enumerable: true, + get: /* @__PURE__ */ __name(function() { + return coreHttp.RestError; + }, "get") + }); + Object.defineProperty(exports2, "WebResource", { + enumerable: true, + get: /* @__PURE__ */ __name(function() { + return coreHttp.WebResource; + }, "get") + }); + Object.defineProperty(exports2, "deserializationPolicy", { + enumerable: true, + get: /* @__PURE__ */ __name(function() { + return coreHttp.deserializationPolicy; + }, "get") + }); + exports2.AccountSASPermissions = AccountSASPermissions; + 
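// A usage sketch for generateAccountSasUrl above, assuming the client was constructed with a StorageSharedKeyCredential (names are illustrative): + // const sasUrl = blobServiceClient.generateAccountSasUrl( + //   new Date(Date.now() + 3600 * 1000), // expires in one hour + //   AccountSASPermissions.parse("rl"), // read and list + //   "sco" // service, container and object resource types + // ); +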
exports2.AccountSASResourceTypes = AccountSASResourceTypes; + exports2.AccountSASServices = AccountSASServices; + exports2.AnonymousCredential = AnonymousCredential; + exports2.AnonymousCredentialPolicy = AnonymousCredentialPolicy; + exports2.AppendBlobClient = AppendBlobClient; + exports2.BlobBatch = BlobBatch; + exports2.BlobBatchClient = BlobBatchClient; + exports2.BlobClient = BlobClient; + exports2.BlobLeaseClient = BlobLeaseClient; + exports2.BlobSASPermissions = BlobSASPermissions; + exports2.BlobServiceClient = BlobServiceClient; + exports2.BlockBlobClient = BlockBlobClient; + exports2.ContainerClient = ContainerClient; + exports2.ContainerSASPermissions = ContainerSASPermissions; + exports2.Credential = Credential; + exports2.CredentialPolicy = CredentialPolicy; + exports2.PageBlobClient = PageBlobClient; + exports2.Pipeline = Pipeline; + exports2.SASQueryParameters = SASQueryParameters; + exports2.StorageBrowserPolicy = StorageBrowserPolicy; + exports2.StorageBrowserPolicyFactory = StorageBrowserPolicyFactory; + exports2.StorageOAuthScopes = StorageOAuthScopes; + exports2.StorageRetryPolicy = StorageRetryPolicy; + exports2.StorageRetryPolicyFactory = StorageRetryPolicyFactory; + exports2.StorageSharedKeyCredential = StorageSharedKeyCredential; + exports2.StorageSharedKeyCredentialPolicy = StorageSharedKeyCredentialPolicy; + exports2.generateAccountSASQueryParameters = generateAccountSASQueryParameters; + exports2.generateBlobSASQueryParameters = generateBlobSASQueryParameters; + exports2.getBlobServiceAccountAudience = getBlobServiceAccountAudience; + exports2.isPipelineLike = isPipelineLike; + exports2.logger = logger; + exports2.newPipeline = newPipeline; + } +}); + +// ../node_modules/@actions/cache/lib/internal/shared/errors.js +var require_errors2 = __commonJS({ + "../node_modules/@actions/cache/lib/internal/shared/errors.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.UsageError = exports2.NetworkError = exports2.GHESNotSupportedError = exports2.CacheNotFoundError = exports2.InvalidResponseError = exports2.FilesNotFoundError = void 0; + var FilesNotFoundError = class extends Error { + static { + __name(this, "FilesNotFoundError"); + } + constructor(files = []) { + let message = "No files were found to upload"; + if (files.length > 0) { + message += `: ${files.join(", ")}`; + } + super(message); + this.files = files; + this.name = "FilesNotFoundError"; + } + }; + exports2.FilesNotFoundError = FilesNotFoundError; + var InvalidResponseError = class extends Error { + static { + __name(this, "InvalidResponseError"); + } + constructor(message) { + super(message); + this.name = "InvalidResponseError"; + } + }; + exports2.InvalidResponseError = InvalidResponseError; + var CacheNotFoundError = class extends Error { + static { + __name(this, "CacheNotFoundError"); } - getContainerNameFromUrl() { - let containerName; - try { - const parsedUrl = coreHttp.URLBuilder.parse(this.url); - if (parsedUrl.getHost().split(".")[1] === "blob") { - containerName = parsedUrl.getPath().split("/")[1]; - } else if (isIpEndpointStyle(parsedUrl)) { - containerName = parsedUrl.getPath().split("/")[2]; - } else { - containerName = parsedUrl.getPath().split("/")[1]; + constructor(message = "Cache not found") { + super(message); + this.name = "CacheNotFoundError"; + } + }; + exports2.CacheNotFoundError = CacheNotFoundError; + var GHESNotSupportedError = class extends Error { + static { + __name(this, "GHESNotSupportedError"); + } + 
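// Signals that the newer cache service and the v4 cache actions are not supported on GitHub Enterprise Server. +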
constructor(message = "@actions/cache v4.1.4+, actions/cache/save@v4+ and actions/cache/restore@v4+ are not currently supported on GHES.") { + super(message); + this.name = "GHESNotSupportedError"; + } + }; + exports2.GHESNotSupportedError = GHESNotSupportedError; + var NetworkError = class extends Error { + static { + __name(this, "NetworkError"); + } + constructor(code) { + const message = `Unable to make request: ${code} +If you are using self-hosted runners, please make sure your runner has access to all GitHub endpoints: https://docs.github.com/en/actions/hosting-your-own-runners/managing-self-hosted-runners/about-self-hosted-runners#communication-between-self-hosted-runners-and-github`; + super(message); + this.code = code; + this.name = "NetworkError"; + } + }; + exports2.NetworkError = NetworkError; + NetworkError.isNetworkErrorCode = (code) => { + if (!code) + return false; + return [ + "ECONNRESET", + "ENOTFOUND", + "ETIMEDOUT", + "ECONNREFUSED", + "EHOSTUNREACH" + ].includes(code); + }; + var UsageError = class extends Error { + static { + __name(this, "UsageError"); + } + constructor() { + const message = `Cache storage quota has been hit. Unable to upload any new cache entries. Usage is recalculated every 6-12 hours. +More info on storage limits: https://docs.github.com/en/billing/managing-billing-for-github-actions/about-billing-for-github-actions#calculating-minute-and-storage-spending`; + super(message); + this.name = "UsageError"; + } + }; + exports2.UsageError = UsageError; + UsageError.isUsageErrorMessage = (msg) => { + if (!msg) + return false; + return msg.includes("insufficient usage"); + }; + } +}); + +// ../node_modules/@actions/cache/lib/internal/uploadUtils.js +var require_uploadUtils = __commonJS({ + "../node_modules/@actions/cache/lib/internal/uploadUtils.js"(exports2) { + "use strict"; + var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: /* @__PURE__ */ __name(function() { + return m[k]; + }, "get") }; + } + Object.defineProperty(o, k2, desc); + } : function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; + }); + var __setModuleDefault3 = exports2 && exports2.__setModuleDefault || (Object.create ? function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + } : function(o, v) { + o["default"] = v; + }); + var __importStar3 = exports2 && exports2.__importStar || function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding3(result, mod, k); + } + __setModuleDefault3(result, mod); + return result; + }; + var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? 
value : new P(function(resolve) { + resolve(value); + }); + } + __name(adopt, "adopt"); + return new (P || (P = Promise))(function(resolve, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); } - containerName = decodeURIComponent(containerName); - if (!containerName) { - throw new Error("Provided containerName is invalid."); + } + __name(fulfilled, "fulfilled"); + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); } - return containerName; - } catch (error) { - throw new Error("Unable to extract containerName with provided information."); } + __name(rejected, "rejected"); + function step(result) { + result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); + } + __name(step, "step"); + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.uploadCacheArchiveSDK = exports2.UploadProgress = void 0; + var core2 = __importStar3(require_core()); + var storage_blob_1 = require_dist4(); + var errors_1 = require_errors2(); + var UploadProgress = class { + static { + __name(this, "UploadProgress"); + } + constructor(contentLength) { + this.contentLength = contentLength; + this.sentBytes = 0; + this.displayedComplete = false; + this.startTime = Date.now(); } /** - * Only available for ContainerClient constructed with a shared key credential. - * - * Generates a Blob Container Service Shared Access Signature (SAS) URI based on the client properties - * and parameters passed in. The SAS is signed by the shared key credential of the client. - * - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas + * Sets the number of bytes sent * - * @param options - Optional parameters. - * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. + * @param sentBytes the number of bytes sent */ - generateSasUrl(options) { - return new Promise((resolve) => { - if (!(this.credential instanceof StorageSharedKeyCredential)) { - throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); - } - const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName }, options), this.credential).toString(); - resolve(appendToURLQuery(this.url, sas)); - }); + setSentBytes(sentBytes) { + this.sentBytes = sentBytes; } /** - * Creates a BlobBatchClient object to conduct batch operations. - * - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch - * - * @returns A new BlobBatchClient object for this container. + * Returns the total number of bytes transferred. */ - getBlobBatchClient() { - return new BlobBatchClient(this.url, this.pipeline); + getTransferredBytes() { + return this.sentBytes; } - }; - var AccountSASPermissions = class _AccountSASPermissions { - static { - __name(this, "AccountSASPermissions"); + /** + * Returns true if the upload is complete. 
+ */ + isDone() { + return this.getTransferredBytes() === this.contentLength; } - constructor() { - this.read = false; - this.write = false; - this.delete = false; - this.deleteVersion = false; - this.list = false; - this.add = false; - this.create = false; - this.update = false; - this.process = false; - this.tag = false; - this.filter = false; - this.setImmutabilityPolicy = false; - this.permanentDelete = false; + /** + * Prints the current upload stats. Once the upload completes, this will print one + * last line and then stop. + */ + display() { + if (this.displayedComplete) { + return; + } + const transferredBytes = this.sentBytes; + const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); + const elapsedTime = Date.now() - this.startTime; + const uploadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); + core2.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); + if (this.isDone()) { + this.displayedComplete = true; + } } /** - * Parse initializes the AccountSASPermissions fields from a string. + * Returns a function used to handle TransferProgressEvents. + */ + onProgress() { + return (progress) => { + this.setSentBytes(progress.loadedBytes); + }; + } + /** + * Starts the timer that displays the stats. * - * @param permissions - + * @param delayInMs the delay between each write */ - static parse(permissions) { - const accountSASPermissions = new _AccountSASPermissions(); - for (const c of permissions) { - switch (c) { - case "r": - accountSASPermissions.read = true; - break; - case "w": - accountSASPermissions.write = true; - break; - case "d": - accountSASPermissions.delete = true; - break; - case "x": - accountSASPermissions.deleteVersion = true; - break; - case "l": - accountSASPermissions.list = true; - break; - case "a": - accountSASPermissions.add = true; - break; - case "c": - accountSASPermissions.create = true; - break; - case "u": - accountSASPermissions.update = true; - break; - case "p": - accountSASPermissions.process = true; - break; - case "t": - accountSASPermissions.tag = true; - break; - case "f": - accountSASPermissions.filter = true; - break; - case "i": - accountSASPermissions.setImmutabilityPolicy = true; - break; - case "y": - accountSASPermissions.permanentDelete = true; - break; - default: - throw new RangeError(`Invalid permission character: ${c}`); + startDisplayTimer(delayInMs = 1e3) { + const displayCallback = /* @__PURE__ */ __name(() => { + this.display(); + if (!this.isDone()) { + this.timeoutHandle = setTimeout(displayCallback, delayInMs); } - } - return accountSASPermissions; + }, "displayCallback"); + this.timeoutHandle = setTimeout(displayCallback, delayInMs); } /** - * Creates a {@link AccountSASPermissions} from a raw object which contains same keys as it - * and boolean values for them. - * - * @param permissionLike - + * Stops the timer that displays the stats. As this typically indicates the upload + * is complete, this will display one last line, unless the last line has already + * been written. 
*/ - static from(permissionLike) { - const accountSASPermissions = new _AccountSASPermissions(); - if (permissionLike.read) { - accountSASPermissions.read = true; - } - if (permissionLike.write) { - accountSASPermissions.write = true; - } - if (permissionLike.delete) { - accountSASPermissions.delete = true; - } - if (permissionLike.deleteVersion) { - accountSASPermissions.deleteVersion = true; - } - if (permissionLike.filter) { - accountSASPermissions.filter = true; + stopDisplayTimer() { + if (this.timeoutHandle) { + clearTimeout(this.timeoutHandle); + this.timeoutHandle = void 0; } - if (permissionLike.tag) { - accountSASPermissions.tag = true; + this.display(); + } + }; + exports2.UploadProgress = UploadProgress; + function uploadCacheArchiveSDK(signedUploadURL, archivePath, options) { + var _a; + return __awaiter3(this, void 0, void 0, function* () { + const blobClient = new storage_blob_1.BlobClient(signedUploadURL); + const blockBlobClient = blobClient.getBlockBlobClient(); + const uploadProgress = new UploadProgress((_a = options === null || options === void 0 ? void 0 : options.archiveSizeBytes) !== null && _a !== void 0 ? _a : 0); + const uploadOptions = { + blockSize: options === null || options === void 0 ? void 0 : options.uploadChunkSize, + concurrency: options === null || options === void 0 ? void 0 : options.uploadConcurrency, + maxSingleShotSize: 128 * 1024 * 1024, + onProgress: uploadProgress.onProgress() + }; + try { + uploadProgress.startDisplayTimer(); + core2.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); + const response = yield blockBlobClient.uploadFile(archivePath, uploadOptions); + if (response._response.status >= 400) { + throw new errors_1.InvalidResponseError(`uploadCacheArchiveSDK: upload failed with status code ${response._response.status}`); + } + return response; + } catch (error) { + core2.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error.message}`); + throw error; + } finally { + uploadProgress.stopDisplayTimer(); } - if (permissionLike.list) { - accountSASPermissions.list = true; + }); + } + __name(uploadCacheArchiveSDK, "uploadCacheArchiveSDK"); + exports2.uploadCacheArchiveSDK = uploadCacheArchiveSDK; + } +}); + +// ../node_modules/@actions/cache/lib/internal/requestUtils.js +var require_requestUtils = __commonJS({ + "../node_modules/@actions/cache/lib/internal/requestUtils.js"(exports2) { + "use strict"; + var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: /* @__PURE__ */ __name(function() { + return m[k]; + }, "get") }; + } + Object.defineProperty(o, k2, desc); + } : function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; + }); + var __setModuleDefault3 = exports2 && exports2.__setModuleDefault || (Object.create ? 
function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + } : function(o, v) { + o["default"] = v; + }); + var __importStar3 = exports2 && exports2.__importStar || function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding3(result, mod, k); + } + __setModuleDefault3(result, mod); + return result; + }; + var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? value : new P(function(resolve) { + resolve(value); + }); + } + __name(adopt, "adopt"); + return new (P || (P = Promise))(function(resolve, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } } - if (permissionLike.add) { - accountSASPermissions.add = true; + __name(fulfilled, "fulfilled"); + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); + } } - if (permissionLike.create) { - accountSASPermissions.create = true; + __name(rejected, "rejected"); + function step(result) { + result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - if (permissionLike.update) { - accountSASPermissions.update = true; + __name(step, "step"); + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.retryHttpClientResponse = exports2.retryTypedResponse = exports2.retry = exports2.isRetryableStatusCode = exports2.isServerErrorStatusCode = exports2.isSuccessStatusCode = void 0; + var core2 = __importStar3(require_core()); + var http_client_1 = require_lib(); + var constants_1 = require_constants7(); + function isSuccessStatusCode(statusCode) { + if (!statusCode) { + return false; + } + return statusCode >= 200 && statusCode < 300; + } + __name(isSuccessStatusCode, "isSuccessStatusCode"); + exports2.isSuccessStatusCode = isSuccessStatusCode; + function isServerErrorStatusCode(statusCode) { + if (!statusCode) { + return true; + } + return statusCode >= 500; + } + __name(isServerErrorStatusCode, "isServerErrorStatusCode"); + exports2.isServerErrorStatusCode = isServerErrorStatusCode; + function isRetryableStatusCode(statusCode) { + if (!statusCode) { + return false; + } + const retryableStatusCodes = [ + http_client_1.HttpCodes.BadGateway, + http_client_1.HttpCodes.ServiceUnavailable, + http_client_1.HttpCodes.GatewayTimeout + ]; + return retryableStatusCodes.includes(statusCode); + } + __name(isRetryableStatusCode, "isRetryableStatusCode"); + exports2.isRetryableStatusCode = isRetryableStatusCode; + function sleep(milliseconds) { + return __awaiter3(this, void 0, void 0, function* () { + return new Promise((resolve) => setTimeout(resolve, milliseconds)); + }); + } + __name(sleep, "sleep"); + function retry(name, method, getStatusCode, maxAttempts = constants_1.DefaultRetryAttempts, delay = constants_1.DefaultRetryDelay, onError = void 0) { + return __awaiter3(this, void 0, void 0, function* () { + let errorMessage = ""; + let attempt = 1; + while (attempt <= maxAttempts) { + let response = void 0; + let statusCode = void 0; + let isRetryable = false; + try { + response = yield method(); + } catch (error) { + if (onError) { + response = onError(error); + } + isRetryable = true; + errorMessage = error.message; + } + if (response) { + statusCode = 
getStatusCode(response); + if (!isServerErrorStatusCode(statusCode)) { + return response; + } + } + if (statusCode) { + isRetryable = isRetryableStatusCode(statusCode); + errorMessage = `Cache service responded with ${statusCode}`; + } + core2.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); + if (!isRetryable) { + core2.debug(`${name} - Error is not retryable`); + break; + } + yield sleep(delay); + attempt++; } - if (permissionLike.process) { - accountSASPermissions.process = true; + throw Error(`${name} failed: ${errorMessage}`); + }); + } + __name(retry, "retry"); + exports2.retry = retry; + function retryTypedResponse(name, method, maxAttempts = constants_1.DefaultRetryAttempts, delay = constants_1.DefaultRetryDelay) { + return __awaiter3(this, void 0, void 0, function* () { + return yield retry( + name, + method, + (response) => response.statusCode, + maxAttempts, + delay, + // If the error object contains the statusCode property, extract it and return + // a TypedResponse so it can be processed by the retry logic. + (error) => { + if (error instanceof http_client_1.HttpClientError) { + return { + statusCode: error.statusCode, + result: null, + headers: {}, + error + }; + } else { + return void 0; + } + } + ); + }); + } + __name(retryTypedResponse, "retryTypedResponse"); + exports2.retryTypedResponse = retryTypedResponse; + function retryHttpClientResponse(name, method, maxAttempts = constants_1.DefaultRetryAttempts, delay = constants_1.DefaultRetryDelay) { + return __awaiter3(this, void 0, void 0, function* () { + return yield retry(name, method, (response) => response.message.statusCode, maxAttempts, delay); + }); + } + __name(retryHttpClientResponse, "retryHttpClientResponse"); + exports2.retryHttpClientResponse = retryHttpClientResponse; + } +}); + +// ../node_modules/@actions/cache/lib/internal/downloadUtils.js +var require_downloadUtils = __commonJS({ + "../node_modules/@actions/cache/lib/internal/downloadUtils.js"(exports2) { + "use strict"; + var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: /* @__PURE__ */ __name(function() { + return m[k]; + }, "get") }; + } + Object.defineProperty(o, k2, desc); + } : function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; + }); + var __setModuleDefault3 = exports2 && exports2.__setModuleDefault || (Object.create ? function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + } : function(o, v) { + o["default"] = v; + }); + var __importStar3 = exports2 && exports2.__importStar || function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding3(result, mod, k); + } + __setModuleDefault3(result, mod); + return result; + }; + var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? 
value : new P(function(resolve) { + resolve(value); + }); + } + __name(adopt, "adopt"); + return new (P || (P = Promise))(function(resolve, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } } - if (permissionLike.setImmutabilityPolicy) { - accountSASPermissions.setImmutabilityPolicy = true; + __name(fulfilled, "fulfilled"); + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); + } } - if (permissionLike.permanentDelete) { - accountSASPermissions.permanentDelete = true; + __name(rejected, "rejected"); + function step(result) { + result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - return accountSASPermissions; + __name(step, "step"); + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.downloadCacheStorageSDK = exports2.downloadCacheHttpClientConcurrent = exports2.downloadCacheHttpClient = exports2.DownloadProgress = void 0; + var core2 = __importStar3(require_core()); + var http_client_1 = require_lib(); + var storage_blob_1 = require_dist4(); + var buffer = __importStar3(require("buffer")); + var fs2 = __importStar3(require("fs")); + var stream = __importStar3(require("stream")); + var util = __importStar3(require("util")); + var utils = __importStar3(require_cacheUtils()); + var constants_1 = require_constants7(); + var requestUtils_1 = require_requestUtils(); + var abort_controller_1 = require_dist(); + function pipeResponseToStream(response, output) { + return __awaiter3(this, void 0, void 0, function* () { + const pipeline = util.promisify(stream.pipeline); + yield pipeline(response.message, output); + }); + } + __name(pipeResponseToStream, "pipeResponseToStream"); + var DownloadProgress = class { + static { + __name(this, "DownloadProgress"); + } + constructor(contentLength) { + this.contentLength = contentLength; + this.segmentIndex = 0; + this.segmentSize = 0; + this.segmentOffset = 0; + this.receivedBytes = 0; + this.displayedComplete = false; + this.startTime = Date.now(); } /** - * Produces the SAS permissions string for an Azure Storage account. - * Call this method to set AccountSASSignatureValues Permissions field. - * - * Using this method will guarantee the resource types are in - * an order accepted by the service. + * Progress to the next segment. Only call this method when the previous segment + * is complete. * - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas + * @param segmentSize the length of the next segment + */ + nextSegment(segmentSize) { + this.segmentOffset = this.segmentOffset + this.segmentSize; + this.segmentIndex = this.segmentIndex + 1; + this.segmentSize = segmentSize; + this.receivedBytes = 0; + core2.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); + } + /** + * Sets the number of bytes received for the current segment. 
* + * @param receivedBytes the number of bytes received */ - toString() { - const permissions = []; - if (this.read) { - permissions.push("r"); - } - if (this.write) { - permissions.push("w"); - } - if (this.delete) { - permissions.push("d"); - } - if (this.deleteVersion) { - permissions.push("x"); - } - if (this.filter) { - permissions.push("f"); - } - if (this.tag) { - permissions.push("t"); - } - if (this.list) { - permissions.push("l"); - } - if (this.add) { - permissions.push("a"); - } - if (this.create) { - permissions.push("c"); - } - if (this.update) { - permissions.push("u"); - } - if (this.process) { - permissions.push("p"); - } - if (this.setImmutabilityPolicy) { - permissions.push("i"); + setReceivedBytes(receivedBytes) { + this.receivedBytes = receivedBytes; + } + /** + * Returns the total number of bytes transferred. + */ + getTransferredBytes() { + return this.segmentOffset + this.receivedBytes; + } + /** + * Returns true if the download is complete. + */ + isDone() { + return this.getTransferredBytes() === this.contentLength; + } + /** + * Prints the current download stats. Once the download completes, this will print one + * last line and then stop. + */ + display() { + if (this.displayedComplete) { + return; } - if (this.permanentDelete) { - permissions.push("y"); + const transferredBytes = this.segmentOffset + this.receivedBytes; + const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); + const elapsedTime = Date.now() - this.startTime; + const downloadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); + core2.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); + if (this.isDone()) { + this.displayedComplete = true; } - return permissions.join(""); - } - }; - var AccountSASResourceTypes = class _AccountSASResourceTypes { - static { - __name(this, "AccountSASResourceTypes"); } - constructor() { - this.service = false; - this.container = false; - this.object = false; + /** + * Returns a function used to handle TransferProgressEvents. + */ + onProgress() { + return (progress) => { + this.setReceivedBytes(progress.loadedBytes); + }; } /** - * Creates an {@link AccountSASResourceTypes} from the specified resource types string. This method will throw an - * Error if it encounters a character that does not correspond to a valid resource type. + * Starts the timer that displays the stats. * - * @param resourceTypes - + * @param delayInMs the delay between each write */ - static parse(resourceTypes) { - const accountSASResourceTypes = new _AccountSASResourceTypes(); - for (const c of resourceTypes) { - switch (c) { - case "s": - accountSASResourceTypes.service = true; - break; - case "c": - accountSASResourceTypes.container = true; - break; - case "o": - accountSASResourceTypes.object = true; - break; - default: - throw new RangeError(`Invalid resource type: ${c}`); + startDisplayTimer(delayInMs = 1e3) { + const displayCallback = /* @__PURE__ */ __name(() => { + this.display(); + if (!this.isDone()) { + this.timeoutHandle = setTimeout(displayCallback, delayInMs); } - } - return accountSASResourceTypes; + }, "displayCallback"); + this.timeoutHandle = setTimeout(displayCallback, delayInMs); } /** - * Converts the given resource types to a string. - * - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas - * + * Stops the timer that displays the stats. 
As this typically indicates the download + * is complete, this will display one last line, unless the last line has already + * been written. */ - toString() { - const resourceTypes = []; - if (this.service) { - resourceTypes.push("s"); + stopDisplayTimer() { + if (this.timeoutHandle) { + clearTimeout(this.timeoutHandle); + this.timeoutHandle = void 0; } - if (this.container) { - resourceTypes.push("c"); + this.display(); + } + }; + exports2.DownloadProgress = DownloadProgress; + function downloadCacheHttpClient(archiveLocation, archivePath) { + return __awaiter3(this, void 0, void 0, function* () { + const writeStream = fs2.createWriteStream(archivePath); + const httpClient = new http_client_1.HttpClient("actions/cache"); + const downloadResponse = yield (0, requestUtils_1.retryHttpClientResponse)("downloadCache", () => __awaiter3(this, void 0, void 0, function* () { + return httpClient.get(archiveLocation); + })); + downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { + downloadResponse.message.destroy(); + core2.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); + }); + yield pipeResponseToStream(downloadResponse, writeStream); + const contentLengthHeader = downloadResponse.message.headers["content-length"]; + if (contentLengthHeader) { + const expectedLength = parseInt(contentLengthHeader); + const actualLength = utils.getArchiveFileSizeInBytes(archivePath); + if (actualLength !== expectedLength) { + throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`); + } + } else { + core2.debug("Unable to validate download, no Content-Length header"); } - if (this.object) { - resourceTypes.push("o"); + }); + } + __name(downloadCacheHttpClient, "downloadCacheHttpClient"); + exports2.downloadCacheHttpClient = downloadCacheHttpClient; + function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) { + var _a; + return __awaiter3(this, void 0, void 0, function* () { + const archiveDescriptor = yield fs2.promises.open(archivePath, "w"); + const httpClient = new http_client_1.HttpClient("actions/cache", void 0, { + socketTimeout: options.timeoutInMs, + keepAlive: true + }); + try { + const res = yield (0, requestUtils_1.retryHttpClientResponse)("downloadCacheMetadata", () => __awaiter3(this, void 0, void 0, function* () { + return yield httpClient.request("HEAD", archiveLocation, null, {}); + })); + const lengthHeader = res.message.headers["content-length"]; + if (lengthHeader === void 0 || lengthHeader === null) { + throw new Error("Content-Length not found on blob response"); + } + const length = parseInt(lengthHeader); + if (Number.isNaN(length)) { + throw new Error(`Could not interpret Content-Length: ${length}`); + } + const downloads = []; + const blockSize = 4 * 1024 * 1024; + for (let offset = 0; offset < length; offset += blockSize) { + const count = Math.min(blockSize, length - offset); + downloads.push({ + offset, + promiseGetter: /* @__PURE__ */ __name(() => __awaiter3(this, void 0, void 0, function* () { + return yield downloadSegmentRetry(httpClient, archiveLocation, offset, count); + }), "promiseGetter") + }); + } + downloads.reverse(); + let actives = 0; + let bytesDownloaded = 0; + const progress = new DownloadProgress(length); + progress.startDisplayTimer(); + const progressFn = progress.onProgress(); + const activeDownloads = []; + let nextDownload; + const waitAndWrite = /* @__PURE__ */ __name(() => __awaiter3(this, void 0, void 0, function* () { + 
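// Race the in-flight segment downloads; write whichever finishes first at its offset in the archive, then free its slot and report progress. +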
const segment = yield Promise.race(Object.values(activeDownloads)); + yield archiveDescriptor.write(segment.buffer, 0, segment.count, segment.offset); + actives--; + delete activeDownloads[segment.offset]; + bytesDownloaded += segment.count; + progressFn({ loadedBytes: bytesDownloaded }); + }), "waitAndWrite"); + while (nextDownload = downloads.pop()) { + activeDownloads[nextDownload.offset] = nextDownload.promiseGetter(); + actives++; + if (actives >= ((_a = options.downloadConcurrency) !== null && _a !== void 0 ? _a : 10)) { + yield waitAndWrite(); + } + } + while (actives > 0) { + yield waitAndWrite(); + } + } finally { + httpClient.dispose(); + yield archiveDescriptor.close(); } - return resourceTypes.join(""); + }); + } + __name(downloadCacheHttpClientConcurrent, "downloadCacheHttpClientConcurrent"); + exports2.downloadCacheHttpClientConcurrent = downloadCacheHttpClientConcurrent; + function downloadSegmentRetry(httpClient, archiveLocation, offset, count) { + return __awaiter3(this, void 0, void 0, function* () { + const retries = 5; + let failures = 0; + while (true) { + try { + const timeout = 3e4; + const result = yield promiseWithTimeout(timeout, downloadSegment(httpClient, archiveLocation, offset, count)); + if (typeof result === "string") { + throw new Error("downloadSegmentRetry failed due to timeout"); + } + return result; + } catch (err) { + if (failures >= retries) { + throw err; + } + failures++; + } + } + }); + } + __name(downloadSegmentRetry, "downloadSegmentRetry"); + function downloadSegment(httpClient, archiveLocation, offset, count) { + return __awaiter3(this, void 0, void 0, function* () { + const partRes = yield (0, requestUtils_1.retryHttpClientResponse)("downloadCachePart", () => __awaiter3(this, void 0, void 0, function* () { + return yield httpClient.get(archiveLocation, { + Range: `bytes=${offset}-${offset + count - 1}` + }); + })); + if (!partRes.readBodyBuffer) { + throw new Error("Expected HttpClientResponse to implement readBodyBuffer"); + } + return { + offset, + count, + buffer: yield partRes.readBodyBuffer() + }; + }); + } + __name(downloadSegment, "downloadSegment"); + function downloadCacheStorageSDK(archiveLocation, archivePath, options) { + var _a; + return __awaiter3(this, void 0, void 0, function* () { + const client = new storage_blob_1.BlockBlobClient(archiveLocation, void 0, { + retryOptions: { + // Override the timeout used when downloading each 4 MB chunk + // The default is 2 min / MB, which is way too slow + tryTimeoutInMs: options.timeoutInMs + } + }); + const properties = yield client.getProperties(); + const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? 
_a : -1; + if (contentLength < 0) { + core2.debug("Unable to determine content length, downloading file with http-client..."); + yield downloadCacheHttpClient(archiveLocation, archivePath); + } else { + const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); + const downloadProgress = new DownloadProgress(contentLength); + const fd = fs2.openSync(archivePath, "w"); + try { + downloadProgress.startDisplayTimer(); + const controller = new abort_controller_1.AbortController(); + const abortSignal = controller.signal; + while (!downloadProgress.isDone()) { + const segmentStart = downloadProgress.segmentOffset + downloadProgress.segmentSize; + const segmentSize = Math.min(maxSegmentSize, contentLength - segmentStart); + downloadProgress.nextSegment(segmentSize); + const result = yield promiseWithTimeout(options.segmentTimeoutInMs || 36e5, client.downloadToBuffer(segmentStart, segmentSize, { + abortSignal, + concurrency: options.downloadConcurrency, + onProgress: downloadProgress.onProgress() + })); + if (result === "timeout") { + controller.abort(); + throw new Error("Aborting cache download as the download time exceeded the timeout."); + } else if (Buffer.isBuffer(result)) { + fs2.writeFileSync(fd, result); + } + } + } finally { + downloadProgress.stopDisplayTimer(); + fs2.closeSync(fd); + } + } + }); + } + __name(downloadCacheStorageSDK, "downloadCacheStorageSDK"); + exports2.downloadCacheStorageSDK = downloadCacheStorageSDK; + var promiseWithTimeout = /* @__PURE__ */ __name((timeoutMs, promise) => __awaiter3(void 0, void 0, void 0, function* () { + let timeoutHandle; + const timeoutPromise = new Promise((resolve) => { + timeoutHandle = setTimeout(() => resolve("timeout"), timeoutMs); + }); + return Promise.race([promise, timeoutPromise]).then((result) => { + clearTimeout(timeoutHandle); + return result; + }); + }), "promiseWithTimeout"); + } +}); + +// ../node_modules/@actions/cache/lib/options.js +var require_options = __commonJS({ + "../node_modules/@actions/cache/lib/options.js"(exports2) { + "use strict"; + var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: /* @__PURE__ */ __name(function() { + return m[k]; + }, "get") }; } - }; - var AccountSASServices = class _AccountSASServices { - static { - __name(this, "AccountSASServices"); + Object.defineProperty(o, k2, desc); + } : function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; + }); + var __setModuleDefault3 = exports2 && exports2.__setModuleDefault || (Object.create ? 
function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + } : function(o, v) { + o["default"] = v; + }); + var __importStar3 = exports2 && exports2.__importStar || function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding3(result, mod, k); } - constructor() { - this.blob = false; - this.file = false; - this.queue = false; - this.table = false; + __setModuleDefault3(result, mod); + return result; + }; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.getDownloadOptions = exports2.getUploadOptions = void 0; + var core2 = __importStar3(require_core()); + function getUploadOptions(copy) { + const result = { + useAzureSdk: false, + uploadConcurrency: 4, + uploadChunkSize: 32 * 1024 * 1024 + }; + if (copy) { + if (typeof copy.useAzureSdk === "boolean") { + result.useAzureSdk = copy.useAzureSdk; + } + if (typeof copy.uploadConcurrency === "number") { + result.uploadConcurrency = copy.uploadConcurrency; + } + if (typeof copy.uploadChunkSize === "number") { + result.uploadChunkSize = copy.uploadChunkSize; + } } - /** - * Creates an {@link AccountSASServices} from the specified services string. This method will throw an - * Error if it encounters a character that does not correspond to a valid service. - * - * @param services - - */ - static parse(services) { - const accountSASServices = new _AccountSASServices(); - for (const c of services) { - switch (c) { - case "b": - accountSASServices.blob = true; - break; - case "f": - accountSASServices.file = true; - break; - case "q": - accountSASServices.queue = true; - break; - case "t": - accountSASServices.table = true; - break; - default: - throw new RangeError(`Invalid service character: ${c}`); - } + result.uploadConcurrency = !isNaN(Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) ? Math.min(32, Number(process.env["CACHE_UPLOAD_CONCURRENCY"])) : result.uploadConcurrency; + result.uploadChunkSize = !isNaN(Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"])) ? Math.min(128 * 1024 * 1024, Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"]) * 1024 * 1024) : result.uploadChunkSize; + core2.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core2.debug(`Upload concurrency: ${result.uploadConcurrency}`); + core2.debug(`Upload chunk size: ${result.uploadChunkSize}`); + return result; + } + __name(getUploadOptions, "getUploadOptions"); + exports2.getUploadOptions = getUploadOptions; + function getDownloadOptions(copy) { + const result = { + useAzureSdk: false, + concurrentBlobDownloads: true, + downloadConcurrency: 8, + timeoutInMs: 3e4, + segmentTimeoutInMs: 6e5, + lookupOnly: false + }; + if (copy) { + if (typeof copy.useAzureSdk === "boolean") { + result.useAzureSdk = copy.useAzureSdk; } - return accountSASServices; - } - /** - * Converts the given services to a string. 
- * - */ - toString() { - const services = []; - if (this.blob) { - services.push("b"); + if (typeof copy.concurrentBlobDownloads === "boolean") { + result.concurrentBlobDownloads = copy.concurrentBlobDownloads; } - if (this.table) { - services.push("t"); + if (typeof copy.downloadConcurrency === "number") { + result.downloadConcurrency = copy.downloadConcurrency; } - if (this.queue) { - services.push("q"); + if (typeof copy.timeoutInMs === "number") { + result.timeoutInMs = copy.timeoutInMs; } - if (this.file) { - services.push("f"); + if (typeof copy.segmentTimeoutInMs === "number") { + result.segmentTimeoutInMs = copy.segmentTimeoutInMs; + } + if (typeof copy.lookupOnly === "boolean") { + result.lookupOnly = copy.lookupOnly; } - return services.join(""); } - }; - function generateAccountSASQueryParameters(accountSASSignatureValues, sharedKeyCredential) { - const version5 = accountSASSignatureValues.version ? accountSASSignatureValues.version : SERVICE_VERSION; - if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.setImmutabilityPolicy && version5 < "2020-08-04") { - throw RangeError("'version' must be >= '2020-08-04' when provided 'i' permission."); + const segmentDownloadTimeoutMins = process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]; + if (segmentDownloadTimeoutMins && !isNaN(Number(segmentDownloadTimeoutMins)) && isFinite(Number(segmentDownloadTimeoutMins))) { + result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1e3; } - if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.deleteVersion && version5 < "2019-10-10") { - throw RangeError("'version' must be >= '2019-10-10' when provided 'x' permission."); + core2.debug(`Use Azure SDK: ${result.useAzureSdk}`); + core2.debug(`Download concurrency: ${result.downloadConcurrency}`); + core2.debug(`Request timeout (ms): ${result.timeoutInMs}`); + core2.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); + core2.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); + core2.debug(`Lookup only: ${result.lookupOnly}`); + return result; + } + __name(getDownloadOptions, "getDownloadOptions"); + exports2.getDownloadOptions = getDownloadOptions; + } +}); + +// ../node_modules/@actions/cache/lib/internal/config.js +var require_config = __commonJS({ + "../node_modules/@actions/cache/lib/internal/config.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.getCacheServiceURL = exports2.getCacheServiceVersion = exports2.isGhes = void 0; + function isGhes() { + const ghUrl = new URL(process.env["GITHUB_SERVER_URL"] || "https://github.com"); + const hostname = ghUrl.hostname.trimEnd().toUpperCase(); + const isGitHubHost = hostname === "GITHUB.COM"; + const isGheHost = hostname.endsWith(".GHE.COM"); + const isLocalHost = hostname.endsWith(".LOCALHOST"); + return !isGitHubHost && !isGheHost && !isLocalHost; + } + __name(isGhes, "isGhes"); + exports2.isGhes = isGhes; + function getCacheServiceVersion() { + if (isGhes()) + return "v1"; + return process.env["ACTIONS_CACHE_SERVICE_V2"] ? 
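The `isGhes()` check above classifies the host by exclusion: anything that is not `github.com`, a `*.ghe.com` host, or a `*.localhost` host is treated as GitHub Enterprise Server, which pins the cache service to `"v1"`. A sketch of that check (the name `looksLikeGhes` is hypothetical):

```typescript
// Hostname-by-exclusion check, mirroring the bundled isGhes() logic.
function looksLikeGhes(
  serverUrl: string = process.env["GITHUB_SERVER_URL"] || "https://github.com"
): boolean {
  const hostname = new URL(serverUrl).hostname.toUpperCase();
  return (
    hostname !== "GITHUB.COM" &&
    !hostname.endsWith(".GHE.COM") &&
    !hostname.endsWith(".LOCALHOST")
  );
}
```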
"v2" : "v1"; + } + __name(getCacheServiceVersion, "getCacheServiceVersion"); + exports2.getCacheServiceVersion = getCacheServiceVersion; + function getCacheServiceURL() { + const version3 = getCacheServiceVersion(); + switch (version3) { + case "v1": + return process.env["ACTIONS_CACHE_URL"] || process.env["ACTIONS_RESULTS_URL"] || ""; + case "v2": + return process.env["ACTIONS_RESULTS_URL"] || ""; + default: + throw new Error(`Unsupported cache service version: ${version3}`); } - if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.permanentDelete && version5 < "2019-10-10") { - throw RangeError("'version' must be >= '2019-10-10' when provided 'y' permission."); + } + __name(getCacheServiceURL, "getCacheServiceURL"); + exports2.getCacheServiceURL = getCacheServiceURL; + } +}); + +// ../node_modules/@actions/cache/package.json +var require_package = __commonJS({ + "../node_modules/@actions/cache/package.json"(exports2, module2) { + module2.exports = { + name: "@actions/cache", + version: "4.0.0", + preview: true, + description: "Actions cache lib", + keywords: [ + "github", + "actions", + "cache" + ], + homepage: "https://github.com/actions/toolkit/tree/main/packages/cache", + license: "MIT", + main: "lib/cache.js", + types: "lib/cache.d.ts", + directories: { + lib: "lib", + test: "__tests__" + }, + files: [ + "lib", + "!.DS_Store" + ], + publishConfig: { + access: "public" + }, + repository: { + type: "git", + url: "git+https://github.com/actions/toolkit.git", + directory: "packages/cache" + }, + scripts: { + "audit-moderate": "npm install && npm audit --json --audit-level=moderate > audit.json", + test: 'echo "Error: run tests from root" && exit 1', + tsc: "tsc" + }, + bugs: { + url: "https://github.com/actions/toolkit/issues" + }, + dependencies: { + "@actions/core": "^1.11.1", + "@actions/exec": "^1.0.1", + "@actions/glob": "^0.1.0", + "@actions/http-client": "^2.1.1", + "@actions/io": "^1.0.1", + "@azure/abort-controller": "^1.1.0", + "@azure/ms-rest-js": "^2.6.0", + "@azure/storage-blob": "^12.13.0", + "@protobuf-ts/plugin": "^2.9.4", + semver: "^6.3.1", + "twirp-ts": "^2.5.0" + }, + devDependencies: { + "@types/semver": "^6.0.0", + typescript: "^5.2.2" } - if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.tag && version5 < "2019-12-12") { - throw RangeError("'version' must be >= '2019-12-12' when provided 't' permission."); + }; + } +}); + +// ../node_modules/@actions/cache/lib/internal/shared/user-agent.js +var require_user_agent = __commonJS({ + "../node_modules/@actions/cache/lib/internal/shared/user-agent.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.getUserAgentString = void 0; + var packageJson = require_package(); + function getUserAgentString() { + return `@actions/cache-${packageJson.version}`; + } + __name(getUserAgentString, "getUserAgentString"); + exports2.getUserAgentString = getUserAgentString; + } +}); + +// ../node_modules/@actions/cache/lib/internal/cacheHttpClient.js +var require_cacheHttpClient = __commonJS({ + "../node_modules/@actions/cache/lib/internal/cacheHttpClient.js"(exports2) { + "use strict"; + var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: /* @__PURE__ */ __name(function() { + return m[k]; + }, "get") }; } - if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.filter && version5 < "2019-12-12") { - throw RangeError("'version' must be >= '2019-12-12' when provided 'f' permission."); + Object.defineProperty(o, k2, desc); + } : function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; + }); + var __setModuleDefault3 = exports2 && exports2.__setModuleDefault || (Object.create ? function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + } : function(o, v) { + o["default"] = v; + }); + var __importStar3 = exports2 && exports2.__importStar || function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding3(result, mod, k); } - if (accountSASSignatureValues.encryptionScope && version5 < "2020-12-06") { - throw RangeError("'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS."); + __setModuleDefault3(result, mod); + return result; + }; + var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? value : new P(function(resolve) { + resolve(value); + }); } - const parsedPermissions = AccountSASPermissions.parse(accountSASSignatureValues.permissions.toString()); - const parsedServices = AccountSASServices.parse(accountSASSignatureValues.services).toString(); - const parsedResourceTypes = AccountSASResourceTypes.parse(accountSASSignatureValues.resourceTypes).toString(); - let stringToSign; - if (version5 >= "2020-12-06") { - stringToSign = [ - sharedKeyCredential.accountName, - parsedPermissions, - parsedServices, - parsedResourceTypes, - accountSASSignatureValues.startsOn ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false) : "", - truncatedISO8061Date(accountSASSignatureValues.expiresOn, false), - accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : "", - accountSASSignatureValues.protocol ? accountSASSignatureValues.protocol : "", - version5, - accountSASSignatureValues.encryptionScope ? accountSASSignatureValues.encryptionScope : "", - "" - // Account SAS requires an additional newline character - ].join("\n"); - } else { - stringToSign = [ - sharedKeyCredential.accountName, - parsedPermissions, - parsedServices, - parsedResourceTypes, - accountSASSignatureValues.startsOn ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false) : "", - truncatedISO8061Date(accountSASSignatureValues.expiresOn, false), - accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : "", - accountSASSignatureValues.protocol ? accountSASSignatureValues.protocol : "", - version5, - "" - // Account SAS requires an additional newline character - ].join("\n"); + __name(adopt, "adopt"); + return new (P || (P = Promise))(function(resolve, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } + } + __name(fulfilled, "fulfilled"); + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); + } + } + __name(rejected, "rejected"); + function step(result) { + result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); + } + __name(step, "step"); + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.saveCache = exports2.reserveCache = exports2.downloadCache = exports2.getCacheEntry = void 0; + var core2 = __importStar3(require_core()); + var http_client_1 = require_lib(); + var auth_1 = require_auth(); + var fs2 = __importStar3(require("fs")); + var url_1 = require("url"); + var utils = __importStar3(require_cacheUtils()); + var uploadUtils_1 = require_uploadUtils(); + var downloadUtils_1 = require_downloadUtils(); + var options_1 = require_options(); + var requestUtils_1 = require_requestUtils(); + var config_1 = require_config(); + var user_agent_1 = require_user_agent(); + function getCacheApiUrl(resource) { + const baseUrl = (0, config_1.getCacheServiceURL)(); + if (!baseUrl) { + throw new Error("Cache Service Url not found, unable to restore cache."); } - const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); - return new SASQueryParameters(version5, signature, parsedPermissions.toString(), parsedServices, parsedResourceTypes, accountSASSignatureValues.protocol, accountSASSignatureValues.startsOn, accountSASSignatureValues.expiresOn, accountSASSignatureValues.ipRange, void 0, void 0, void 0, void 0, void 0, void 0, void 0, void 0, void 0, void 0, accountSASSignatureValues.encryptionScope); + const url = `${baseUrl}_apis/artifactcache/${resource}`; + core2.debug(`Resource Url: ${url}`); + return url; } - __name(generateAccountSASQueryParameters, "generateAccountSASQueryParameters"); - var BlobServiceClient = class _BlobServiceClient extends StorageClient { - static { - __name(this, "BlobServiceClient"); - } - constructor(url2, credentialOrPipeline, options) { - let pipeline; - if (isPipelineLike(credentialOrPipeline)) { - pipeline = credentialOrPipeline; - } else if (coreHttp.isNode && credentialOrPipeline instanceof StorageSharedKeyCredential || credentialOrPipeline instanceof AnonymousCredential || coreHttp.isTokenCredential(credentialOrPipeline)) { - pipeline = newPipeline(credentialOrPipeline, options); - } else { - pipeline = newPipeline(new AnonymousCredential(), options); + __name(getCacheApiUrl, "getCacheApiUrl"); + function createAcceptHeader(type, apiVersion) { + return `${type};api-version=${apiVersion}`; + } + __name(createAcceptHeader, "createAcceptHeader"); + function getRequestOptions() { + const requestOptions = { + headers: { + Accept: createAcceptHeader("application/json", "6.0-preview.1") } - super(url2, pipeline); - this.serviceContext = new Service(this.storageClientContext); - } - /** - * - * Creates an instance of BlobServiceClient from connection string. - * - * @param connectionString - Account connection string or a SAS connection string of an Azure storage account. - * [ Note - Account connection string can only be used in NODE.JS runtime. ] - * Account connection string example - - * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net` - * SAS connection string example - - * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` - * @param options - Optional. Options to configure the HTTP pipeline. 
- */ - static fromConnectionString(connectionString, options) { - options = options || {}; - const extractedCreds = extractConnectionStringParts(connectionString); - if (extractedCreds.kind === "AccountConnString") { - if (coreHttp.isNode) { - const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); - if (!options.proxyOptions) { - options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + }; + return requestOptions; + } + __name(getRequestOptions, "getRequestOptions"); + function createHttpClient() { + const token = process.env["ACTIONS_RUNTIME_TOKEN"] || ""; + const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token); + return new http_client_1.HttpClient((0, user_agent_1.getUserAgentString)(), [bearerCredentialHandler], getRequestOptions()); + } + __name(createHttpClient, "createHttpClient"); + function getCacheEntry(keys, paths, options) { + return __awaiter3(this, void 0, void 0, function* () { + const httpClient = createHttpClient(); + const version3 = utils.getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive); + const resource = `cache?keys=${encodeURIComponent(keys.join(","))}&version=${version3}`; + const response = yield (0, requestUtils_1.retryTypedResponse)("getCacheEntry", () => __awaiter3(this, void 0, void 0, function* () { + return httpClient.getJson(getCacheApiUrl(resource)); + })); + if (response.statusCode === 204) { + if (core2.isDebug()) { + yield printCachesListForDiagnostics(keys[0], httpClient, version3); + } + return null; + } + if (!(0, requestUtils_1.isSuccessStatusCode)(response.statusCode)) { + throw new Error(`Cache service responded with ${response.statusCode}`); + } + const cacheResult = response.result; + const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation; + if (!cacheDownloadUrl) { + throw new Error("Cache not found."); + } + core2.setSecret(cacheDownloadUrl); + core2.debug(`Cache Result:`); + core2.debug(JSON.stringify(cacheResult)); + return cacheResult; + }); + } + __name(getCacheEntry, "getCacheEntry"); + exports2.getCacheEntry = getCacheEntry; + function printCachesListForDiagnostics(key, httpClient, version3) { + return __awaiter3(this, void 0, void 0, function* () { + const resource = `caches?key=${encodeURIComponent(key)}`; + const response = yield (0, requestUtils_1.retryTypedResponse)("listCache", () => __awaiter3(this, void 0, void 0, function* () { + return httpClient.getJson(getCacheApiUrl(resource)); + })); + if (response.statusCode === 200) { + const cacheListResult = response.result; + const totalCount = cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.totalCount; + if (totalCount && totalCount > 0) { + core2.debug(`No matching cache found for cache key '${key}', version '${version3} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key +Other caches with similar key:`); + for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { + core2.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? 
void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); } - const pipeline = newPipeline(sharedKeyCredential, options); - return new _BlobServiceClient(extractedCreds.url, pipeline); + } + } + }); + } + __name(printCachesListForDiagnostics, "printCachesListForDiagnostics"); + function downloadCache(archiveLocation, archivePath, options) { + return __awaiter3(this, void 0, void 0, function* () { + const archiveUrl = new url_1.URL(archiveLocation); + const downloadOptions = (0, options_1.getDownloadOptions)(options); + if (archiveUrl.hostname.endsWith(".blob.core.windows.net")) { + if (downloadOptions.useAzureSdk) { + yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions); + } else if (downloadOptions.concurrentBlobDownloads) { + yield (0, downloadUtils_1.downloadCacheHttpClientConcurrent)(archiveLocation, archivePath, downloadOptions); } else { - throw new Error("Account connection string is only supported in Node.js environment"); + yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath); } - } else if (extractedCreds.kind === "SASConnString") { - const pipeline = newPipeline(new AnonymousCredential(), options); - return new _BlobServiceClient(extractedCreds.url + "?" + extractedCreds.accountSas, pipeline); } else { - throw new Error("Connection string must be either an Account connection string or a SAS connection string"); + yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath); } - } - /** - * Creates a {@link ContainerClient} object - * - * @param containerName - A container name - * @returns A new ContainerClient object for the given container name. - * - * Example usage: - * - * ```js - * const containerClient = blobServiceClient.getContainerClient(""); - * ``` - */ - getContainerClient(containerName) { - return new ContainerClient(appendToURLPath(this.url, encodeURIComponent(containerName)), this.pipeline); - } - /** - * Create a Blob container. @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container - * - * @param containerName - Name of the container to create. - * @param options - Options to configure Container Create operation. - * @returns Container creation response and the corresponding container client. - */ - async createContainer(containerName, options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-createContainer", options); - try { - const containerClient = this.getContainerClient(containerName); - const containerCreateResponse = await containerClient.create(updatedOptions); - return { - containerClient, - containerCreateResponse - }; - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); + }); + } + __name(downloadCache, "downloadCache"); + exports2.downloadCache = downloadCache; + function reserveCache(key, paths, options) { + return __awaiter3(this, void 0, void 0, function* () { + const httpClient = createHttpClient(); + const version3 = utils.getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? 
void 0 : options.enableCrossOsArchive); + const reserveCacheRequest = { + key, + version: version3, + cacheSize: options === null || options === void 0 ? void 0 : options.cacheSize + }; + const response = yield (0, requestUtils_1.retryTypedResponse)("reserveCache", () => __awaiter3(this, void 0, void 0, function* () { + return httpClient.postJson(getCacheApiUrl("caches"), reserveCacheRequest); + })); + return response; + }); + } + __name(reserveCache, "reserveCache"); + exports2.reserveCache = reserveCache; + function getContentRange(start, end) { + return `bytes ${start}-${end}/*`; + } + __name(getContentRange, "getContentRange"); + function uploadChunk(httpClient, resourceUrl, openStream, start, end) { + return __awaiter3(this, void 0, void 0, function* () { + core2.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); + const additionalHeaders = { + "Content-Type": "application/octet-stream", + "Content-Range": getContentRange(start, end) + }; + const uploadChunkResponse = yield (0, requestUtils_1.retryHttpClientResponse)(`uploadChunk (start: ${start}, end: ${end})`, () => __awaiter3(this, void 0, void 0, function* () { + return httpClient.sendStream("PATCH", resourceUrl, openStream(), additionalHeaders); + })); + if (!(0, requestUtils_1.isSuccessStatusCode)(uploadChunkResponse.message.statusCode)) { + throw new Error(`Cache service responded with ${uploadChunkResponse.message.statusCode} during upload chunk.`); } - } - /** - * Deletes a Blob container. - * - * @param containerName - Name of the container to delete. - * @param options - Options to configure Container Delete operation. - * @returns Container deletion response. - */ - async deleteContainer(containerName, options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-deleteContainer", options); + }); + } + __name(uploadChunk, "uploadChunk"); + function uploadFile(httpClient, cacheId, archivePath, options) { + return __awaiter3(this, void 0, void 0, function* () { + const fileSize = utils.getArchiveFileSizeInBytes(archivePath); + const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`); + const fd = fs2.openSync(archivePath, "r"); + const uploadOptions = (0, options_1.getUploadOptions)(options); + const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency); + const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize); + const parallelUploads = [...new Array(concurrency).keys()]; + core2.debug("Awaiting all uploads"); + let offset = 0; try { - const containerClient = this.getContainerClient(containerName); - return await containerClient.delete(updatedOptions); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; + yield Promise.all(parallelUploads.map(() => __awaiter3(this, void 0, void 0, function* () { + while (offset < fileSize) { + const chunkSize = Math.min(fileSize - offset, maxChunkSize); + const start = offset; + const end = offset + chunkSize - 1; + offset += maxChunkSize; + yield uploadChunk(httpClient, resourceUrl, () => fs2.createReadStream(archivePath, { + fd, + start, + end, + autoClose: false + }).on("error", (error) => { + throw new Error(`Cache upload failed because file read failed with ${error.message}`); + }), start, end); + } + }))); } finally { - span.end(); + fs2.closeSync(fd); } - } - /** - * Restore a previously deleted Blob container. 
- * This API is only functional if Container Soft Delete is enabled for the storage account associated with the container. - * - * @param deletedContainerName - Name of the previously deleted container. - * @param deletedContainerVersion - Version of the previously deleted container, used to uniquely identify the deleted container. - * @param options - Options to configure Container Restore operation. - * @returns Container deletion response. - */ - async undeleteContainer(deletedContainerName2, deletedContainerVersion2, options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-undeleteContainer", options); - try { - const containerClient = this.getContainerClient(options.destinationContainerName || deletedContainerName2); - const containerContext = new Container(containerClient["storageClientContext"]); - const containerUndeleteResponse = await containerContext.restore(Object.assign({ - deletedContainerName: deletedContainerName2, - deletedContainerVersion: deletedContainerVersion2 - }, updatedOptions)); - return { containerClient, containerUndeleteResponse }; - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); + return; + }); + } + __name(uploadFile, "uploadFile"); + function commitCache(httpClient, cacheId, filesize) { + return __awaiter3(this, void 0, void 0, function* () { + const commitCacheRequest = { size: filesize }; + return yield (0, requestUtils_1.retryTypedResponse)("commitCache", () => __awaiter3(this, void 0, void 0, function* () { + return httpClient.postJson(getCacheApiUrl(`caches/${cacheId.toString()}`), commitCacheRequest); + })); + }); + } + __name(commitCache, "commitCache"); + function saveCache(cacheId, archivePath, signedUploadURL, options) { + return __awaiter3(this, void 0, void 0, function* () { + const uploadOptions = (0, options_1.getUploadOptions)(options); + if (uploadOptions.useAzureSdk) { + if (!signedUploadURL) { + throw new Error("Azure Storage SDK can only be used when a signed URL is provided."); + } + yield (0, uploadUtils_1.uploadCacheArchiveSDK)(signedUploadURL, archivePath, options); + } else { + const httpClient = createHttpClient(); + core2.debug("Upload cache"); + yield uploadFile(httpClient, cacheId, archivePath, options); + core2.debug("Commiting cache"); + const cacheSize = utils.getArchiveFileSizeInBytes(archivePath); + core2.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); + const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize); + if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) { + throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`); + } + core2.info("Cache saved successfully"); } + }); + } + __name(saveCache, "saveCache"); + exports2.saveCache = saveCache; + } +}); + +// ../node_modules/twirp-ts/build/twirp/context.js +var require_context2 = __commonJS({ + "../node_modules/twirp-ts/build/twirp/context.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + } +}); + +// ../node_modules/twirp-ts/build/twirp/hooks.js +var require_hooks = __commonJS({ + "../node_modules/twirp-ts/build/twirp/hooks.js"(exports2) { + "use strict"; + var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? 
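The upload path above (`uploadFile`/`uploadChunk`) fans a single offset cursor out across `uploadConcurrency` workers; each worker claims the next chunk and PATCHes it with a `Content-Range: bytes start-end/*` header. A sketch of the scheduling idea, with `uploadRange` as a hypothetical stand-in for the retried HTTP call:

```typescript
// N workers share one offset cursor and claim chunks until the file is
// exhausted; each chunk is advertised via Content-Range.
async function uploadInChunks(
  fileSize: number,
  maxChunkSize: number,
  concurrency: number,
  uploadRange: (start: number, end: number, contentRange: string) => Promise<void>
): Promise<void> {
  let offset = 0;
  const workers = Array.from({ length: concurrency }, async () => {
    // The event loop is single-threaded, so reading and advancing `offset`
    // between awaits is race-free: each chunk is claimed exactly once.
    while (offset < fileSize) {
      const start = offset;
      const end = Math.min(offset + maxChunkSize, fileSize) - 1;
      offset += maxChunkSize;
      await uploadRange(start, end, `bytes ${start}-${end}/*`);
    }
  });
  await Promise.all(workers);
}
```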
value : new P(function(resolve) { + resolve(value); + }); } - /** - * Rename an existing Blob Container. - * - * @param sourceContainerName - The name of the source container. - * @param destinationContainerName - The new name of the container. - * @param options - Options to configure Container Rename operation. - */ - /* eslint-disable-next-line @typescript-eslint/ban-ts-comment */ - // @ts-ignore Need to hide this interface for now. Make it public and turn on the live tests for it when the service is ready. - async renameContainer(sourceContainerName2, destinationContainerName, options = {}) { - var _a; - const { span, updatedOptions } = createSpan("BlobServiceClient-renameContainer", options); - try { - const containerClient = this.getContainerClient(destinationContainerName); - const containerContext = new Container(containerClient["storageClientContext"]); - const containerRenameResponse = await containerContext.rename(sourceContainerName2, Object.assign(Object.assign({}, updatedOptions), { sourceLeaseId: (_a = options.sourceCondition) === null || _a === void 0 ? void 0 : _a.leaseId })); - return { containerClient, containerRenameResponse }; - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); + __name(adopt, "adopt"); + return new (P || (P = Promise))(function(resolve, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } } - } - /** - * Gets the properties of a storage account’s Blob service, including properties - * for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties - * - * @param options - Options to the Service Get Properties operation. - * @returns Response data for the Service Get Properties operation. - */ - async getProperties(options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-getProperties", options); - try { - return await this.serviceContext.getProperties(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); + __name(fulfilled, "fulfilled"); + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); + } } - } - /** - * Sets properties for a storage account’s Blob service endpoint, including properties - * for Storage Analytics, CORS (Cross-Origin Resource Sharing) rules and soft delete settings. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-service-properties - * - * @param properties - - * @param options - Options to the Service Set Properties operation. - * @returns Response data for the Service Set Properties operation. - */ - async setProperties(properties, options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-setProperties", options); - try { - return await this.serviceContext.setProperties(properties, Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); + __name(rejected, "rejected"); + function step(result) { + result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + __name(step, "step"); + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.isHook = exports2.chainHooks = void 0; + function chainHooks(...hooks) { + if (hooks.length === 0) { + return null; } - /** - * Retrieves statistics related to replication for the Blob service. It is only - * available on the secondary location endpoint when read-access geo-redundant - * replication is enabled for the storage account. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-stats - * - * @param options - Options to the Service Get Statistics operation. - * @returns Response data for the Service Get Statistics operation. - */ - async getStatistics(options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-getStatistics", options); - try { - return await this.serviceContext.getStatistics(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message - }); - throw e; - } finally { - span.end(); - } + if (hooks.length === 1) { + return hooks[0]; } - /** - * The Get Account Information operation returns the sku name and account kind - * for the specified account. - * The Get Account Information operation is available on service versions beginning - * with version 2018-03-28. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-account-information - * - * @param options - Options to the Service Get Account Info operation. - * @returns Response data for the Service Get Account Info operation. - */ - async getAccountInfo(options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-getAccountInfo", options); - try { - return await this.serviceContext.getAccountInfo(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + const serverHook = { + requestReceived(ctx) { + return __awaiter3(this, void 0, void 0, function* () { + for (const hook of hooks) { + if (!hook.requestReceived) { + continue; + } + yield hook.requestReceived(ctx); + } }); - throw e; - } finally { - span.end(); - } - } - /** - * Returns a list of the containers under the specified account. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/list-containers2 - * - * @param marker - A string value that identifies the portion of - * the list of containers to be returned with the next listing operation. The - * operation returns the continuationToken value within the response body if the - * listing operation did not return all containers remaining to be listed - * with the current page. The continuationToken value can be used as the value for - * the marker parameter in a subsequent call to request the next page of list - * items. The marker value is opaque to the client. - * @param options - Options to the Service List Container Segment operation. - * @returns Response data for the Service List Container Segment operation. 
- */ - async listContainersSegment(marker2, options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-listContainersSegment", options); - try { - return await this.serviceContext.listContainersSegment(Object.assign(Object.assign(Object.assign({ abortSignal: options.abortSignal, marker: marker2 }, options), { include: typeof options.include === "string" ? [options.include] : options.include }), convertTracingToRequestOptionsBase(updatedOptions))); - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + }, + requestPrepared(ctx) { + return __awaiter3(this, void 0, void 0, function* () { + for (const hook of hooks) { + if (!hook.requestPrepared) { + continue; + } + console.warn("hook requestPrepared is deprecated and will be removed in the next release. Please use responsePrepared instead."); + yield hook.requestPrepared(ctx); + } }); - throw e; - } finally { - span.end(); - } - } - /** - * The Filter Blobs operation enables callers to list blobs across all containers whose tags - * match a given search expression. Filter blobs searches across all containers within a - * storage account but can be scoped within the expression to a single container. - * - * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. - * The given expression must evaluate to true for a blob to be returned in the results. - * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; - * however, only a subset of the OData filter syntax is supported in the Blob service. - * @param marker - A string value that identifies the portion of - * the list of blobs to be returned with the next listing operation. The - * operation returns the continuationToken value within the response body if the - * listing operation did not return all blobs remaining to be listed - * with the current page. The continuationToken value can be used as the value for - * the marker parameter in a subsequent call to request the next page of list - * items. The marker value is opaque to the client. - * @param options - Options to find blobs by tags. - */ - async findBlobsByTagsSegment(tagFilterSqlExpression, marker2, options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-findBlobsByTagsSegment", options); - try { - const response = await this.serviceContext.filterBlobs(Object.assign({ abortSignal: options.abortSignal, where: tagFilterSqlExpression, marker: marker2, maxPageSize: options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); - const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, blobs: response.blobs.map((blob) => { - var _a; - let tagValue = ""; - if (((_a = blob.tags) === null || _a === void 0 ? void 0 : _a.blobTagSet.length) === 1) { - tagValue = blob.tags.blobTagSet[0].value; + }, + responsePrepared(ctx) { + return __awaiter3(this, void 0, void 0, function* () { + for (const hook of hooks) { + if (!hook.responsePrepared) { + continue; + } + yield hook.responsePrepared(ctx); } - return Object.assign(Object.assign({}, blob), { tags: toTags(blob.tags), tagValue }); - }) }); - return wrappedResponse; - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message }); - throw e; - } finally { - span.end(); - } - } - /** - * Returns an AsyncIterableIterator for ServiceFindBlobsByTagsSegmentResponse. 
- * - * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. - * The given expression must evaluate to true for a blob to be returned in the results. - * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; - * however, only a subset of the OData filter syntax is supported in the Blob service. - * @param marker - A string value that identifies the portion of - * the list of blobs to be returned with the next listing operation. The - * operation returns the continuationToken value within the response body if the - * listing operation did not return all blobs remaining to be listed - * with the current page. The continuationToken value can be used as the value for - * the marker parameter in a subsequent call to request the next page of list - * items. The marker value is opaque to the client. - * @param options - Options to find blobs by tags. - */ - findBlobsByTagsSegments(tagFilterSqlExpression, marker2, options = {}) { - return tslib.__asyncGenerator(this, arguments, /* @__PURE__ */ __name(function* findBlobsByTagsSegments_1() { - let response; - if (!!marker2 || marker2 === void 0) { - do { - response = yield tslib.__await(this.findBlobsByTagsSegment(tagFilterSqlExpression, marker2, options)); - response.blobs = response.blobs || []; - marker2 = response.continuationToken; - yield yield tslib.__await(response); - } while (marker2); - } - }, "findBlobsByTagsSegments_1")); - } - /** - * Returns an AsyncIterableIterator for blobs. - * - * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. - * The given expression must evaluate to true for a blob to be returned in the results. - * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; - * however, only a subset of the OData filter syntax is supported in the Blob service. - * @param options - Options to findBlobsByTagsItems. - */ - findBlobsByTagsItems(tagFilterSqlExpression, options = {}) { - return tslib.__asyncGenerator(this, arguments, /* @__PURE__ */ __name(function* findBlobsByTagsItems_1() { - var e_1, _a; - let marker2; - try { - for (var _b = tslib.__asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker2, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done; ) { - const segment = _c.value; - yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.blobs))); + }, + requestSent(ctx) { + return __awaiter3(this, void 0, void 0, function* () { + for (const hook of hooks) { + if (!hook.requestSent) { + continue; + } + console.warn("hook requestSent is deprecated and will be removed in the next release. 
Please use responseSent instead."); + yield hook.requestSent(ctx); } - } catch (e_1_1) { - e_1 = { error: e_1_1 }; - } finally { - try { - if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); - } finally { - if (e_1) throw e_1.error; + }); + }, + responseSent(ctx) { + return __awaiter3(this, void 0, void 0, function* () { + for (const hook of hooks) { + if (!hook.responseSent) { + continue; + } + yield hook.responseSent(ctx); } - } - }, "findBlobsByTagsItems_1")); + }); + }, + requestRouted(ctx) { + return __awaiter3(this, void 0, void 0, function* () { + for (const hook of hooks) { + if (!hook.requestRouted) { + continue; + } + yield hook.requestRouted(ctx); + } + }); + }, + error(ctx, err) { + return __awaiter3(this, void 0, void 0, function* () { + for (const hook of hooks) { + if (!hook.error) { + continue; + } + yield hook.error(ctx, err); + } + }); + } + }; + return serverHook; + } + __name(chainHooks, "chainHooks"); + exports2.chainHooks = chainHooks; + function isHook(object) { + return "requestReceived" in object || "requestPrepared" in object || "requestSent" in object || "requestRouted" in object || "responsePrepared" in object || "responseSent" in object || "error" in object; + } + __name(isHook, "isHook"); + exports2.isHook = isHook; + } +}); + +// ../node_modules/twirp-ts/build/twirp/errors.js +var require_errors3 = __commonJS({ + "../node_modules/twirp-ts/build/twirp/errors.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.isValidErrorCode = exports2.httpStatusFromErrorCode = exports2.TwirpErrorCode = exports2.BadRouteError = exports2.InternalServerErrorWith = exports2.InternalServerError = exports2.RequiredArgumentError = exports2.InvalidArgumentError = exports2.NotFoundError = exports2.TwirpError = void 0; + var TwirpError = class _TwirpError extends Error { + static { + __name(this, "TwirpError"); } - /** - * Returns an async iterable iterator to find all blobs with specified tag - * under the specified account. - * - * .byPage() returns an async iterable iterator to list the blobs in pages. 
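The `chainHooks()` combinator above folds any number of partial hook objects into one whose lifecycle methods invoke each registered hook in order, skipping hooks that do not implement a given event. A trimmed sketch covering two of the events:

```typescript
// Fold many optional hook objects into one composite hook that runs each
// implemented handler in registration order.
interface ServerHooks<Ctx> {
  requestReceived?(ctx: Ctx): Promise<void>;
  responseSent?(ctx: Ctx): Promise<void>;
}

function chainServerHooks<Ctx>(...hooks: ServerHooks<Ctx>[]): ServerHooks<Ctx> | null {
  if (hooks.length === 0) return null;
  if (hooks.length === 1) return hooks[0];
  return {
    async requestReceived(ctx) {
      for (const hook of hooks) await hook.requestReceived?.(ctx);
    },
    async responseSent(ctx) {
      for (const hook of hooks) await hook.responseSent?.(ctx);
    },
  };
}
```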
- * - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties - * - * Example using `for await` syntax: - * - * ```js - * let i = 1; - * for await (const blob of blobServiceClient.findBlobsByTags("tagkey='tagvalue'")) { - * console.log(`Blob ${i++}: ${container.name}`); - * } - * ``` - * - * Example using `iter.next()`: - * - * ```js - * let i = 1; - * const iter = blobServiceClient.findBlobsByTags("tagkey='tagvalue'"); - * let blobItem = await iter.next(); - * while (!blobItem.done) { - * console.log(`Blob ${i++}: ${blobItem.value.name}`); - * blobItem = await iter.next(); - * } - * ``` - * - * Example using `byPage()`: - * - * ```js - * // passing optional maxPageSize in the page settings - * let i = 1; - * for await (const response of blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) { - * if (response.blobs) { - * for (const blob of response.blobs) { - * console.log(`Blob ${i++}: ${blob.name}`); - * } - * } - * } - * ``` - * - * Example using paging with a marker: - * - * ```js - * let i = 1; - * let iterator = blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); - * let response = (await iterator.next()).value; - * - * // Prints 2 blob names - * if (response.blobs) { - * for (const blob of response.blobs) { - * console.log(`Blob ${i++}: ${blob.name}`); - * } - * } - * - * // Gets next marker - * let marker = response.continuationToken; - * // Passing next marker as continuationToken - * iterator = blobServiceClient - * .findBlobsByTags("tagkey='tagvalue'") - * .byPage({ continuationToken: marker, maxPageSize: 10 }); - * response = (await iterator.next()).value; - * - * // Prints blob names - * if (response.blobs) { - * for (const blob of response.blobs) { - * console.log(`Blob ${i++}: ${blob.name}`); - * } - * } - * ``` - * - * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. - * The given expression must evaluate to true for a blob to be returned in the results. - * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; - * however, only a subset of the OData filter syntax is supported in the Blob service. - * @param options - Options to find blobs by tags. - */ - findBlobsByTags(tagFilterSqlExpression, options = {}) { - const listSegmentOptions = Object.assign({}, options); - const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions); - return { - /** - * The next method, part of the iteration protocol - */ - next() { - return iter.next(); - }, - /** - * The connection to the async iterator, part of the iteration protocol - */ - [Symbol.asyncIterator]() { - return this; - }, - /** - * Return an AsyncIterableIterator that works a page at a time - */ - byPage: /* @__PURE__ */ __name((settings = {}) => { - return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); - }, "byPage") - }; + constructor(code, msg) { + super(msg); + this.code = TwirpErrorCode.Internal; + this.meta = {}; + this.code = code; + this.msg = msg; + Object.setPrototypeOf(this, _TwirpError.prototype); } /** - * Returns an AsyncIterableIterator for ServiceListContainersSegmentResponses - * - * @param marker - A string value that identifies the portion of - * the list of containers to be returned with the next listing operation. 
The - * operation returns the continuationToken value within the response body if the - * listing operation did not return all containers remaining to be listed - * with the current page. The continuationToken value can be used as the value for - * the marker parameter in a subsequent call to request the next page of list - * items. The marker value is opaque to the client. - * @param options - Options to list containers operation. + * Adds a metadata kv to the error + * @param key + * @param value */ - listSegments(marker2, options = {}) { - return tslib.__asyncGenerator(this, arguments, /* @__PURE__ */ __name(function* listSegments_1() { - let listContainersSegmentResponse; - if (!!marker2 || marker2 === void 0) { - do { - listContainersSegmentResponse = yield tslib.__await(this.listContainersSegment(marker2, options)); - listContainersSegmentResponse.containerItems = listContainersSegmentResponse.containerItems || []; - marker2 = listContainersSegmentResponse.continuationToken; - yield yield tslib.__await(yield tslib.__await(listContainersSegmentResponse)); - } while (marker2); - } - }, "listSegments_1")); + withMeta(key, value) { + this.meta[key] = value; + return this; } /** - * Returns an AsyncIterableIterator for Container Items - * - * @param options - Options to list containers operation. + * Returns a single metadata value + * return "" if not found + * @param key */ - listItems(options = {}) { - return tslib.__asyncGenerator(this, arguments, /* @__PURE__ */ __name(function* listItems_1() { - var e_2, _a; - let marker2; - try { - for (var _b = tslib.__asyncValues(this.listSegments(marker2, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done; ) { - const segment = _c.value; - yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.containerItems))); - } - } catch (e_2_1) { - e_2 = { error: e_2_1 }; - } finally { - try { - if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); - } finally { - if (e_2) throw e_2.error; - } - } - }, "listItems_1")); + getMeta(key) { + return this.meta[key] || ""; } /** - * Returns an async iterable iterator to list all the containers - * under the specified account. - * - * .byPage() returns an async iterable iterator to list the containers in pages. 
- * - * Example using `for await` syntax: - * - * ```js - * let i = 1; - * for await (const container of blobServiceClient.listContainers()) { - * console.log(`Container ${i++}: ${container.name}`); - * } - * ``` - * - * Example using `iter.next()`: - * - * ```js - * let i = 1; - * const iter = blobServiceClient.listContainers(); - * let containerItem = await iter.next(); - * while (!containerItem.done) { - * console.log(`Container ${i++}: ${containerItem.value.name}`); - * containerItem = await iter.next(); - * } - * ``` - * - * Example using `byPage()`: - * - * ```js - * // passing optional maxPageSize in the page settings - * let i = 1; - * for await (const response of blobServiceClient.listContainers().byPage({ maxPageSize: 20 })) { - * if (response.containerItems) { - * for (const container of response.containerItems) { - * console.log(`Container ${i++}: ${container.name}`); - * } - * } - * } - * ``` - * - * Example using paging with a marker: - * - * ```js - * let i = 1; - * let iterator = blobServiceClient.listContainers().byPage({ maxPageSize: 2 }); - * let response = (await iterator.next()).value; - * - * // Prints 2 container names - * if (response.containerItems) { - * for (const container of response.containerItems) { - * console.log(`Container ${i++}: ${container.name}`); - * } - * } - * - * // Gets next marker - * let marker = response.continuationToken; - * // Passing next marker as continuationToken - * iterator = blobServiceClient - * .listContainers() - * .byPage({ continuationToken: marker, maxPageSize: 10 }); - * response = (await iterator.next()).value; - * - * // Prints 10 container names - * if (response.containerItems) { - * for (const container of response.containerItems) { - * console.log(`Container ${i++}: ${container.name}`); - * } - * } - * ``` - * - * @param options - Options to list containers. - * @returns An asyncIterableIterator that supports paging. + * Add the original error cause + * @param err + * @param addMeta */ - listContainers(options = {}) { - if (options.prefix === "") { - options.prefix = void 0; - } - const include2 = []; - if (options.includeDeleted) { - include2.push("deleted"); - } - if (options.includeMetadata) { - include2.push("metadata"); - } - if (options.includeSystem) { - include2.push("system"); + withCause(err, addMeta = false) { + this._originalCause = err; + if (addMeta) { + this.withMeta("cause", err.message); } - const listSegmentOptions = Object.assign(Object.assign({}, options), include2.length > 0 ? { include: include2 } : {}); - const iter = this.listItems(listSegmentOptions); - return { - /** - * The next method, part of the iteration protocol - */ - next() { - return iter.next(); - }, - /** - * The connection to the async iterator, part of the iteration protocol - */ - [Symbol.asyncIterator]() { - return this; - }, - /** - * Return an AsyncIterableIterator that works a page at a time - */ - byPage: /* @__PURE__ */ __name((settings = {}) => { - return this.listSegments(settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); - }, "byPage") - }; + return this; + } + cause() { + return this._originalCause; } /** - * ONLY AVAILABLE WHEN USING BEARER TOKEN AUTHENTICATION (TokenCredential). - * - * Retrieves a user delegation key for the Blob service. This is only a valid operation when using - * bearer token authentication. 
- * - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-user-delegation-key - * - * @param startsOn - The start time for the user delegation SAS. Must be within 7 days of the current time - * @param expiresOn - The end time for the user delegation SAS. Must be within 7 days of the current time + * Returns the error representation to JSON */ - async getUserDelegationKey(startsOn, expiresOn2, options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-getUserDelegationKey", options); + toJSON() { try { - const response = await this.serviceContext.getUserDelegationKey({ - startsOn: truncatedISO8061Date(startsOn, false), - expiresOn: truncatedISO8061Date(expiresOn2, false) - }, Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); - const userDelegationKey = { - signedObjectId: response.signedObjectId, - signedTenantId: response.signedTenantId, - signedStartsOn: new Date(response.signedStartsOn), - signedExpiresOn: new Date(response.signedExpiresOn), - signedService: response.signedService, - signedVersion: response.signedVersion, - value: response.value - }; - const res = Object.assign({ _response: response._response, requestId: response.requestId, clientRequestId: response.clientRequestId, version: response.version, date: response.date, errorCode: response.errorCode }, userDelegationKey); - return res; - } catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message + return JSON.stringify({ + code: this.code, + msg: this.msg, + meta: this.meta }); - throw e; - } finally { - span.end(); + } catch (e) { + return `{"code": "internal", "msg": "There was an error but it could not be serialized into JSON"}`; } } /** - * Creates a BlobBatchClient object to conduct batch operations. - * - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch - * - * @returns A new BlobBatchClient object for this service. - */ - getBlobBatchClient() { - return new BlobBatchClient(this.url, this.pipeline); - } - /** - * Only available for BlobServiceClient constructed with a shared key credential. - * - * Generates a Blob account Shared Access Signature (SAS) URI based on the client properties - * and parameters passed in. The SAS is signed by the shared key credential of the client. - * - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-account-sas - * - * @param expiresOn - Optional. The time at which the shared access signature becomes invalid. Default to an hour later if not provided. - * @param permissions - Specifies the list of permissions to be associated with the SAS. - * @param resourceTypes - Specifies the resource types associated with the shared access signature. - * @param options - Optional parameters. - * @returns An account SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. 
+ * Create a twirp error from an object + * @param obj */ - generateAccountSasUrl(expiresOn2, permissions = AccountSASPermissions.parse("r"), resourceTypes = "sco", options = {}) { - if (!(this.credential instanceof StorageSharedKeyCredential)) { - throw RangeError("Can only generate the account SAS when the client is initialized with a shared key credential"); - } - if (expiresOn2 === void 0) { - const now = /* @__PURE__ */ new Date(); - expiresOn2 = new Date(now.getTime() + 3600 * 1e3); + static fromObject(obj) { + const code = obj["code"] || TwirpErrorCode.Unknown; + const msg = obj["msg"] || "unknown"; + const error = new _TwirpError(code, msg); + if (obj["meta"]) { + Object.keys(obj["meta"]).forEach((key) => { + error.withMeta(key, obj["meta"][key]); + }); } - const sas = generateAccountSASQueryParameters(Object.assign({ - permissions, - expiresOn: expiresOn2, - resourceTypes, - services: AccountSASServices.parse("b").toString() - }, options), this.credential).toString(); - return appendToURLQuery(this.url, sas); + return error; } }; - exports2.KnownEncryptionAlgorithmType = void 0; - (function(KnownEncryptionAlgorithmType) { - KnownEncryptionAlgorithmType["AES256"] = "AES256"; - })(exports2.KnownEncryptionAlgorithmType || (exports2.KnownEncryptionAlgorithmType = {})); - Object.defineProperty(exports2, "BaseRequestPolicy", { - enumerable: true, - get: /* @__PURE__ */ __name(function() { - return coreHttp.BaseRequestPolicy; - }, "get") - }); - Object.defineProperty(exports2, "HttpHeaders", { - enumerable: true, - get: /* @__PURE__ */ __name(function() { - return coreHttp.HttpHeaders; - }, "get") - }); - Object.defineProperty(exports2, "RequestPolicyOptions", { - enumerable: true, - get: /* @__PURE__ */ __name(function() { - return coreHttp.RequestPolicyOptions; - }, "get") - }); - Object.defineProperty(exports2, "RestError", { - enumerable: true, - get: /* @__PURE__ */ __name(function() { - return coreHttp.RestError; - }, "get") - }); - Object.defineProperty(exports2, "WebResource", { - enumerable: true, - get: /* @__PURE__ */ __name(function() { - return coreHttp.WebResource; - }, "get") - }); - Object.defineProperty(exports2, "deserializationPolicy", { - enumerable: true, - get: /* @__PURE__ */ __name(function() { - return coreHttp.deserializationPolicy; - }, "get") - }); - exports2.AccountSASPermissions = AccountSASPermissions; - exports2.AccountSASResourceTypes = AccountSASResourceTypes; - exports2.AccountSASServices = AccountSASServices; - exports2.AnonymousCredential = AnonymousCredential; - exports2.AnonymousCredentialPolicy = AnonymousCredentialPolicy; - exports2.AppendBlobClient = AppendBlobClient; - exports2.BlobBatch = BlobBatch; - exports2.BlobBatchClient = BlobBatchClient; - exports2.BlobClient = BlobClient; - exports2.BlobLeaseClient = BlobLeaseClient; - exports2.BlobSASPermissions = BlobSASPermissions; - exports2.BlobServiceClient = BlobServiceClient; - exports2.BlockBlobClient = BlockBlobClient; - exports2.ContainerClient = ContainerClient; - exports2.ContainerSASPermissions = ContainerSASPermissions; - exports2.Credential = Credential; - exports2.CredentialPolicy = CredentialPolicy; - exports2.PageBlobClient = PageBlobClient; - exports2.Pipeline = Pipeline; - exports2.SASQueryParameters = SASQueryParameters; - exports2.StorageBrowserPolicy = StorageBrowserPolicy; - exports2.StorageBrowserPolicyFactory = StorageBrowserPolicyFactory; - exports2.StorageOAuthScopes = StorageOAuthScopes; - exports2.StorageRetryPolicy = StorageRetryPolicy; - 
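// --- Editor's note (illustration, not part of the diff): a minimal usage sketch
// of the twirp-ts TwirpError API added above, assuming the module is bound as
// `errors_1` like elsewhere in this bundle and that withMeta(key, value) is the
// fluent metadata setter the code above relies on.
const notFound = new errors_1.TwirpError(errors_1.TwirpErrorCode.NotFound, "hat not found");
notFound.withMeta("hat_id", "42");
notFound.withCause(new Error("row missing"), true); // addMeta=true also records meta.cause
console.log(notFound.toJSON());
// roughly: {"code":"not_found","msg":"hat not found","meta":{"hat_id":"42","cause":"row missing"}}
const roundTripped = errors_1.TwirpError.fromObject(JSON.parse(notFound.toJSON()));
console.log(roundTripped.code === errors_1.TwirpErrorCode.NotFound); // true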
exports2.StorageRetryPolicyFactory = StorageRetryPolicyFactory; - exports2.StorageSharedKeyCredential = StorageSharedKeyCredential; - exports2.StorageSharedKeyCredentialPolicy = StorageSharedKeyCredentialPolicy; - exports2.generateAccountSASQueryParameters = generateAccountSASQueryParameters; - exports2.generateBlobSASQueryParameters = generateBlobSASQueryParameters; - exports2.getBlobServiceAccountAudience = getBlobServiceAccountAudience; - exports2.isPipelineLike = isPipelineLike; - exports2.logger = logger; - exports2.newPipeline = newPipeline; + exports2.TwirpError = TwirpError; + var NotFoundError = class extends TwirpError { + static { + __name(this, "NotFoundError"); + } + constructor(msg) { + super(TwirpErrorCode.NotFound, msg); + } + }; + exports2.NotFoundError = NotFoundError; + var InvalidArgumentError = class extends TwirpError { + static { + __name(this, "InvalidArgumentError"); + } + constructor(argument, validationMsg) { + super(TwirpErrorCode.InvalidArgument, argument + " " + validationMsg); + this.withMeta("argument", argument); + } + }; + exports2.InvalidArgumentError = InvalidArgumentError; + var RequiredArgumentError = class extends InvalidArgumentError { + static { + __name(this, "RequiredArgumentError"); + } + constructor(argument) { + super(argument, "is required"); + } + }; + exports2.RequiredArgumentError = RequiredArgumentError; + var InternalServerError = class extends TwirpError { + static { + __name(this, "InternalServerError"); + } + constructor(msg) { + super(TwirpErrorCode.Internal, msg); + } + }; + exports2.InternalServerError = InternalServerError; + var InternalServerErrorWith = class extends InternalServerError { + static { + __name(this, "InternalServerErrorWith"); + } + constructor(err) { + super(err.message); + this.withMeta("cause", err.name); + this.withCause(err); + } + }; + exports2.InternalServerErrorWith = InternalServerErrorWith; + var BadRouteError = class extends TwirpError { + static { + __name(this, "BadRouteError"); + } + constructor(msg, method, url) { + super(TwirpErrorCode.BadRoute, msg); + this.withMeta("twirp_invalid_route", method + " " + url); + } + }; + exports2.BadRouteError = BadRouteError; + var TwirpErrorCode; + (function(TwirpErrorCode2) { + TwirpErrorCode2["Canceled"] = "canceled"; + TwirpErrorCode2["Unknown"] = "unknown"; + TwirpErrorCode2["InvalidArgument"] = "invalid_argument"; + TwirpErrorCode2["Malformed"] = "malformed"; + TwirpErrorCode2["DeadlineExceeded"] = "deadline_exceeded"; + TwirpErrorCode2["NotFound"] = "not_found"; + TwirpErrorCode2["BadRoute"] = "bad_route"; + TwirpErrorCode2["AlreadyExists"] = "already_exists"; + TwirpErrorCode2["PermissionDenied"] = "permission_denied"; + TwirpErrorCode2["Unauthenticated"] = "unauthenticated"; + TwirpErrorCode2["ResourceExhausted"] = "resource_exhausted"; + TwirpErrorCode2["FailedPrecondition"] = "failed_precondition"; + TwirpErrorCode2["Aborted"] = "aborted"; + TwirpErrorCode2["OutOfRange"] = "out_of_range"; + TwirpErrorCode2["Unimplemented"] = "unimplemented"; + TwirpErrorCode2["Internal"] = "internal"; + TwirpErrorCode2["Unavailable"] = "unavailable"; + TwirpErrorCode2["DataLoss"] = "data_loss"; + })(TwirpErrorCode = exports2.TwirpErrorCode || (exports2.TwirpErrorCode = {})); + function httpStatusFromErrorCode(code) { + switch (code) { + case TwirpErrorCode.Canceled: + return 408; + // RequestTimeout + case TwirpErrorCode.Unknown: + return 500; + // Internal Server Error + case TwirpErrorCode.InvalidArgument: + return 400; + // BadRequest + case TwirpErrorCode.Malformed: 
+ return 400; + // BadRequest + case TwirpErrorCode.DeadlineExceeded: + return 408; + // RequestTimeout + case TwirpErrorCode.NotFound: + return 404; + // Not Found + case TwirpErrorCode.BadRoute: + return 404; + // Not Found + case TwirpErrorCode.AlreadyExists: + return 409; + // Conflict + case TwirpErrorCode.PermissionDenied: + return 403; + // Forbidden + case TwirpErrorCode.Unauthenticated: + return 401; + // Unauthorized + case TwirpErrorCode.ResourceExhausted: + return 429; + // Too Many Requests + case TwirpErrorCode.FailedPrecondition: + return 412; + // Precondition Failed + case TwirpErrorCode.Aborted: + return 409; + // Conflict + case TwirpErrorCode.OutOfRange: + return 400; + // Bad Request + case TwirpErrorCode.Unimplemented: + return 501; + // Not Implemented + case TwirpErrorCode.Internal: + return 500; + // Internal Server Error + case TwirpErrorCode.Unavailable: + return 503; + // Service Unavailable + case TwirpErrorCode.DataLoss: + return 500; + // Internal Server Error + default: + return 0; + } + } + __name(httpStatusFromErrorCode, "httpStatusFromErrorCode"); + exports2.httpStatusFromErrorCode = httpStatusFromErrorCode; + function isValidErrorCode(code) { + return httpStatusFromErrorCode(code) != 0; + } + __name(isValidErrorCode, "isValidErrorCode"); + exports2.isValidErrorCode = isValidErrorCode; } }); -// ../node_modules/@actions/cache/lib/internal/requestUtils.js -var require_requestUtils = __commonJS({ - "../node_modules/@actions/cache/lib/internal/requestUtils.js"(exports2) { +// ../node_modules/twirp-ts/build/twirp/request.js +var require_request3 = __commonJS({ + "../node_modules/twirp-ts/build/twirp/request.js"(exports2) { "use strict"; - var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: /* @__PURE__ */ __name(function() { - return m[k]; - }, "get") }; - } - Object.defineProperty(o, k2, desc); - } : function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - o[k2] = m[k]; - }); - var __setModuleDefault3 = exports2 && exports2.__setModuleDefault || (Object.create ? function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - } : function(o, v) { - o["default"] = v; - }); - var __importStar3 = exports2 && exports2.__importStar || function(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding3(result, mod, k); - } - __setModuleDefault3(result, mod); - return result; - }; var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? 
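// --- Editor's note (illustration, not part of the diff): how the error
// subclasses and the httpStatusFromErrorCode() mapping above combine, sketched
// with the same `errors_1` binding the bundle uses.
const bad = new errors_1.BadRouteError("no handler for path /x", "GET", "/x");
console.log(errors_1.httpStatusFromErrorCode(bad.code)); // 404 (code "bad_route")
const wrapped = new errors_1.InternalServerErrorWith(new TypeError("boom"));
console.log(errors_1.httpStatusFromErrorCode(wrapped.code)); // 500 (code "internal")
console.log(errors_1.isValidErrorCode("not_a_twirp_code")); // false: unknown codes map to 0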
value : new P(function(resolve) { @@ -89964,153 +91298,100 @@ var require_requestUtils = __commonJS({ }); }; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.retryHttpClientResponse = exports2.retryTypedResponse = exports2.retry = exports2.isRetryableStatusCode = exports2.isServerErrorStatusCode = exports2.isSuccessStatusCode = void 0; - var core2 = __importStar3(require_core()); - var http_client_1 = require_lib(); - var constants_1 = require_constants7(); - function isSuccessStatusCode(statusCode) { - if (!statusCode) { - return false; + exports2.parseTwirpPath = exports2.getRequestData = exports2.validateRequest = exports2.getContentType = exports2.TwirpContentType = void 0; + var errors_1 = require_errors3(); + var TwirpContentType; + (function(TwirpContentType2) { + TwirpContentType2[TwirpContentType2["Protobuf"] = 0] = "Protobuf"; + TwirpContentType2[TwirpContentType2["JSON"] = 1] = "JSON"; + TwirpContentType2[TwirpContentType2["Unknown"] = 2] = "Unknown"; + })(TwirpContentType = exports2.TwirpContentType || (exports2.TwirpContentType = {})); + function getContentType(mimeType) { + switch (mimeType) { + case "application/protobuf": + return TwirpContentType.Protobuf; + case "application/json": + return TwirpContentType.JSON; + default: + return TwirpContentType.Unknown; } - return statusCode >= 200 && statusCode < 300; } - __name(isSuccessStatusCode, "isSuccessStatusCode"); - exports2.isSuccessStatusCode = isSuccessStatusCode; - function isServerErrorStatusCode(statusCode) { - if (!statusCode) { - return true; + __name(getContentType, "getContentType"); + exports2.getContentType = getContentType; + function validateRequest(ctx, request, pathPrefix) { + if (request.method !== "POST") { + const msg = `unsupported method ${request.method} (only POST is allowed)`; + throw new errors_1.BadRouteError(msg, request.method || "", request.url || ""); } - return statusCode >= 500; - } - __name(isServerErrorStatusCode, "isServerErrorStatusCode"); - exports2.isServerErrorStatusCode = isServerErrorStatusCode; - function isRetryableStatusCode(statusCode) { - if (!statusCode) { - return false; + const path2 = parseTwirpPath(request.url || ""); + if (path2.pkgService !== (ctx.packageName ? ctx.packageName + "." 
: "") + ctx.serviceName) { + const msg = `no handler for path ${request.url}`; + throw new errors_1.BadRouteError(msg, request.method || "", request.url || ""); } - const retryableStatusCodes = [ - http_client_1.HttpCodes.BadGateway, - http_client_1.HttpCodes.ServiceUnavailable, - http_client_1.HttpCodes.GatewayTimeout - ]; - return retryableStatusCodes.includes(statusCode); - } - __name(isRetryableStatusCode, "isRetryableStatusCode"); - exports2.isRetryableStatusCode = isRetryableStatusCode; - function sleep(milliseconds) { - return __awaiter3(this, void 0, void 0, function* () { - return new Promise((resolve) => setTimeout(resolve, milliseconds)); - }); + if (path2.prefix !== pathPrefix) { + const msg = `invalid path prefix ${path2.prefix}, expected ${pathPrefix}, on path ${request.url}`; + throw new errors_1.BadRouteError(msg, request.method || "", request.url || ""); + } + const mimeContentType = request.headers["content-type"] || ""; + if (ctx.contentType === TwirpContentType.Unknown) { + const msg = `unexpected Content-Type: ${request.headers["content-type"]}`; + throw new errors_1.BadRouteError(msg, request.method || "", request.url || ""); + } + return Object.assign(Object.assign({}, path2), { mimeContentType, contentType: ctx.contentType }); } - __name(sleep, "sleep"); - function retry(name, method, getStatusCode, maxAttempts = constants_1.DefaultRetryAttempts, delay = constants_1.DefaultRetryDelay, onError = void 0) { - return __awaiter3(this, void 0, void 0, function* () { - let errorMessage = ""; - let attempt = 1; - while (attempt <= maxAttempts) { - let response = void 0; - let statusCode = void 0; - let isRetryable = false; - try { - response = yield method(); - } catch (error) { - if (onError) { - response = onError(error); - } - isRetryable = true; - errorMessage = error.message; - } - if (response) { - statusCode = getStatusCode(response); - if (!isServerErrorStatusCode(statusCode)) { - return response; - } - } - if (statusCode) { - isRetryable = isRetryableStatusCode(statusCode); - errorMessage = `Cache service responded with ${statusCode}`; - } - core2.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); - if (!isRetryable) { - core2.debug(`${name} - Error is not retryable`); - break; - } - yield sleep(delay); - attempt++; + __name(validateRequest, "validateRequest"); + exports2.validateRequest = validateRequest; + function getRequestData(req) { + return new Promise((resolve, reject) => { + const reqWithRawBody = req; + if (reqWithRawBody.rawBody instanceof Buffer) { + resolve(reqWithRawBody.rawBody); + return; } - throw Error(`${name} failed: ${errorMessage}`); - }); - } - __name(retry, "retry"); - exports2.retry = retry; - function retryTypedResponse(name, method, maxAttempts = constants_1.DefaultRetryAttempts, delay = constants_1.DefaultRetryDelay) { - return __awaiter3(this, void 0, void 0, function* () { - return yield retry( - name, - method, - (response) => response.statusCode, - maxAttempts, - delay, - // If the error object contains the statusCode property, extract it and return - // an TypedResponse so it can be processed by the retry logic. 
- (error) => { - if (error instanceof http_client_1.HttpClientError) { - return { - statusCode: error.statusCode, - result: null, - headers: {}, - error - }; - } else { - return void 0; - } + const chunks = []; + req.on("data", (chunk) => chunks.push(chunk)); + req.on("end", () => __awaiter3(this, void 0, void 0, function* () { + const data = Buffer.concat(chunks); + resolve(data); + })); + req.on("error", (err) => { + if (req.aborted) { + reject(new errors_1.TwirpError(errors_1.TwirpErrorCode.DeadlineExceeded, "failed to read request: deadline exceeded")); + } else { + reject(new errors_1.TwirpError(errors_1.TwirpErrorCode.Malformed, err.message).withCause(err)); } - ); + }); + req.on("close", () => { + reject(new errors_1.TwirpError(errors_1.TwirpErrorCode.Canceled, "failed to read request: context canceled")); + }); }); } - __name(retryTypedResponse, "retryTypedResponse"); - exports2.retryTypedResponse = retryTypedResponse; - function retryHttpClientResponse(name, method, maxAttempts = constants_1.DefaultRetryAttempts, delay = constants_1.DefaultRetryDelay) { - return __awaiter3(this, void 0, void 0, function* () { - return yield retry(name, method, (response) => response.message.statusCode, maxAttempts, delay); - }); + __name(getRequestData, "getRequestData"); + exports2.getRequestData = getRequestData; + function parseTwirpPath(path2) { + const parts = path2.split("/"); + if (parts.length < 2) { + return { + pkgService: "", + method: "", + prefix: "" + }; + } + return { + method: parts[parts.length - 1], + pkgService: parts[parts.length - 2], + prefix: parts.slice(0, parts.length - 2).join("/") + }; } - __name(retryHttpClientResponse, "retryHttpClientResponse"); - exports2.retryHttpClientResponse = retryHttpClientResponse; + __name(parseTwirpPath, "parseTwirpPath"); + exports2.parseTwirpPath = parseTwirpPath; } }); -// ../node_modules/@actions/cache/lib/internal/downloadUtils.js -var require_downloadUtils = __commonJS({ - "../node_modules/@actions/cache/lib/internal/downloadUtils.js"(exports2) { +// ../node_modules/twirp-ts/build/twirp/server.js +var require_server = __commonJS({ + "../node_modules/twirp-ts/build/twirp/server.js"(exports2) { "use strict"; - var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: /* @__PURE__ */ __name(function() { - return m[k]; - }, "get") }; - } - Object.defineProperty(o, k2, desc); - } : function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - o[k2] = m[k]; - }); - var __setModuleDefault3 = exports2 && exports2.__setModuleDefault || (Object.create ? function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - } : function(o, v) { - o["default"] = v; - }); - var __importStar3 = exports2 && exports2.__importStar || function(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding3(result, mod, k); - } - __setModuleDefault3(result, mod); - return result; - }; var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? 
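// --- Editor's note (illustration, not part of the diff): the body-reading
// contract of getRequestData() and the path split of parseTwirpPath(), sketched
// with both functions assumed in scope. getRequestData() returns req.rawBody
// untouched when a middleware already buffered it; otherwise it concatenates
// the stream. Stream failures map to Twirp codes: deadline_exceeded when the
// request was aborted, malformed (with the original error as cause) for other
// stream errors, canceled on premature close.
getRequestData(req)
  .then((body) => console.log("read", body.length, "bytes"))
  .catch((e) => console.error(e.code, e.msg));
console.log(parseTwirpPath("/twirp/example.Haberdasher/MakeHat"));
// => { method: "MakeHat", pkgService: "example.Haberdasher", prefix: "/twirp" }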
value : new P(function(resolve) { @@ -90143,446 +91424,185 @@ var require_downloadUtils = __commonJS({ }); }; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.downloadCacheStorageSDK = exports2.downloadCacheHttpClientConcurrent = exports2.downloadCacheHttpClient = exports2.DownloadProgress = void 0; - var core2 = __importStar3(require_core()); - var http_client_1 = require_lib(); - var storage_blob_1 = require_dist4(); - var buffer = __importStar3(require("buffer")); - var fs2 = __importStar3(require("fs")); - var stream = __importStar3(require("stream")); - var util = __importStar3(require("util")); - var utils = __importStar3(require_cacheUtils()); - var constants_1 = require_constants7(); - var requestUtils_1 = require_requestUtils(); - var abort_controller_1 = require_dist(); - function pipeResponseToStream(response, output) { - return __awaiter3(this, void 0, void 0, function* () { - const pipeline = util.promisify(stream.pipeline); - yield pipeline(response.message, output); - }); - } - __name(pipeResponseToStream, "pipeResponseToStream"); - var DownloadProgress = class { + exports2.writeError = exports2.TwirpServer = void 0; + var hooks_1 = require_hooks(); + var request_1 = require_request3(); + var errors_1 = require_errors3(); + var TwirpServer = class { static { - __name(this, "DownloadProgress"); - } - constructor(contentLength) { - this.contentLength = contentLength; - this.segmentIndex = 0; - this.segmentSize = 0; - this.segmentOffset = 0; - this.receivedBytes = 0; - this.displayedComplete = false; - this.startTime = Date.now(); + __name(this, "TwirpServer"); } - /** - * Progress to the next segment. Only call this method when the previous segment - * is complete. - * - * @param segmentSize the length of the next segment - */ - nextSegment(segmentSize) { - this.segmentOffset = this.segmentOffset + this.segmentSize; - this.segmentIndex = this.segmentIndex + 1; - this.segmentSize = segmentSize; - this.receivedBytes = 0; - core2.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); + constructor(options) { + this.pathPrefix = "/twirp"; + this.hooks = []; + this.interceptors = []; + this.packageName = options.packageName; + this.serviceName = options.serviceName; + this.methodList = options.methodList; + this.matchRoute = options.matchRoute; + this.service = options.service; } /** - * Sets the number of bytes received for the current segment. - * - * @param receivedBytes the number of bytes received + * Returns the prefix for this server */ - setReceivedBytes(receivedBytes) { - this.receivedBytes = receivedBytes; + get prefix() { + return this.pathPrefix; } /** - * Returns the total number of bytes transferred. + * The http handler for twirp complaint endpoints + * @param options */ - getTransferredBytes() { - return this.segmentOffset + this.receivedBytes; + httpHandler(options) { + return (req, resp) => { + if ((options === null || options === void 0 ? void 0 : options.prefix) !== void 0) { + this.withPrefix(options.prefix); + } + return this._httpHandler(req, resp); + }; } /** - * Returns true if the download is complete. 
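// --- Editor's note (illustration, not part of the diff): wiring the
// TwirpServer above into a plain Node HTTP server. The matchRoute function and
// service implementation normally come from twirp-ts generated code, so the
// names below are hypothetical placeholders.
const http = require("http");
const server = new TwirpServer({
  packageName: "example",
  serviceName: "Haberdasher",
  methodList: ["MakeHat"],
  matchRoute: generatedMatchRoute,    // hypothetical: produced by codegen
  service: haberdasherImplementation  // hypothetical: your handler object
});
http.createServer(server.httpHandler()).listen(8080);
// Calls are now served under server.baseURI(): /twirp/example.Haberdasher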
+ * Adds interceptors or hooks to the request stack + * @param middlewares */ - isDone() { - return this.getTransferredBytes() === this.contentLength; + use(...middlewares) { + middlewares.forEach((middleware) => { + if (hooks_1.isHook(middleware)) { + this.hooks.push(middleware); + return this; + } + this.interceptors.push(middleware); + }); + return this; } /** - * Prints the current download stats. Once the download completes, this will print one - * last line and then stop. + * Adds a prefix to the service url path + * @param prefix */ - display() { - if (this.displayedComplete) { - return; - } - const transferredBytes = this.segmentOffset + this.receivedBytes; - const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); - const elapsedTime = Date.now() - this.startTime; - const downloadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1e3)).toFixed(1); - core2.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); - if (this.isDone()) { - this.displayedComplete = true; + withPrefix(prefix) { + if (prefix === false) { + this.pathPrefix = ""; + } else { + this.pathPrefix = prefix; } + return this; } /** - * Returns a function used to handle TransferProgressEvents. + * Returns the regex matching path for this twirp server */ - onProgress() { - return (progress) => { - this.setReceivedBytes(progress.loadedBytes); - }; + matchingPath() { + const baseRegex = this.baseURI().replace(/\./g, "\\."); + return new RegExp(`${baseRegex}/(${this.methodList.join("|")})`); } /** - * Starts the timer that displays the stats. - * - * @param delayInMs the delay between each write + * Returns the base URI for this twirp server */ - startDisplayTimer(delayInMs = 1e3) { - const displayCallback = /* @__PURE__ */ __name(() => { - this.display(); - if (!this.isDone()) { - this.timeoutHandle = setTimeout(displayCallback, delayInMs); - } - }, "displayCallback"); - this.timeoutHandle = setTimeout(displayCallback, delayInMs); + baseURI() { + return `${this.pathPrefix}/${this.packageName ? this.packageName + "." : ""}${this.serviceName}`; } /** - * Stops the timer that displays the stats. As this typically indicates the download - * is complete, this will display one last line, unless the last line has already - * been written. 
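// --- Editor's note (illustration, not part of the diff, continuing the sketch
// above): use() accepts both server hooks and interceptors and tells them apart
// with isHook(), whose exact detection is an assumption here; both middleware
// shapes below are hypothetical.
server
  .withPrefix("/api") // serve under /api instead of the default /twirp
  .use({ requestReceived: async (ctx) => console.log("incoming", ctx.req.url) })
  .use(async (ctx, request, next) => { // interceptor: wraps the handler call
    const started = Date.now();
    const response = await next(ctx, request);
    console.log("handled in", Date.now() - started, "ms");
    return response;
  });
console.log(server.matchingPath()); // roughly /\/api\/example\.Haberdasher\/(MakeHat)/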
+ * Create a twirp context + * @param req + * @param res + * @private */ - stopDisplayTimer() { - if (this.timeoutHandle) { - clearTimeout(this.timeoutHandle); - this.timeoutHandle = void 0; - } - this.display(); - } - }; - exports2.DownloadProgress = DownloadProgress; - function downloadCacheHttpClient(archiveLocation, archivePath) { - return __awaiter3(this, void 0, void 0, function* () { - const writeStream = fs2.createWriteStream(archivePath); - const httpClient = new http_client_1.HttpClient("actions/cache"); - const downloadResponse = yield (0, requestUtils_1.retryHttpClientResponse)("downloadCache", () => __awaiter3(this, void 0, void 0, function* () { - return httpClient.get(archiveLocation); - })); - downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { - downloadResponse.message.destroy(); - core2.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); - }); - yield pipeResponseToStream(downloadResponse, writeStream); - const contentLengthHeader = downloadResponse.message.headers["content-length"]; - if (contentLengthHeader) { - const expectedLength = parseInt(contentLengthHeader); - const actualLength = utils.getArchiveFileSizeInBytes(archivePath); - if (actualLength !== expectedLength) { - throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`); - } - } else { - core2.debug("Unable to validate download, no Content-Length header"); - } - }); - } - __name(downloadCacheHttpClient, "downloadCacheHttpClient"); - exports2.downloadCacheHttpClient = downloadCacheHttpClient; - function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) { - var _a; - return __awaiter3(this, void 0, void 0, function* () { - const archiveDescriptor = yield fs2.promises.open(archivePath, "w"); - const httpClient = new http_client_1.HttpClient("actions/cache", void 0, { - socketTimeout: options.timeoutInMs, - keepAlive: true - }); - try { - const res = yield (0, requestUtils_1.retryHttpClientResponse)("downloadCacheMetadata", () => __awaiter3(this, void 0, void 0, function* () { - return yield httpClient.request("HEAD", archiveLocation, null, {}); - })); - const lengthHeader = res.message.headers["content-length"]; - if (lengthHeader === void 0 || lengthHeader === null) { - throw new Error("Content-Length not found on blob response"); - } - const length = parseInt(lengthHeader); - if (Number.isNaN(length)) { - throw new Error(`Could not interpret Content-Length: ${length}`); - } - const downloads = []; - const blockSize = 4 * 1024 * 1024; - for (let offset = 0; offset < length; offset += blockSize) { - const count = Math.min(blockSize, length - offset); - downloads.push({ - offset, - promiseGetter: /* @__PURE__ */ __name(() => __awaiter3(this, void 0, void 0, function* () { - return yield downloadSegmentRetry(httpClient, archiveLocation, offset, count); - }), "promiseGetter") - }); - } - downloads.reverse(); - let actives = 0; - let bytesDownloaded = 0; - const progress = new DownloadProgress(length); - progress.startDisplayTimer(); - const progressFn = progress.onProgress(); - const activeDownloads = []; - let nextDownload; - const waitAndWrite = /* @__PURE__ */ __name(() => __awaiter3(this, void 0, void 0, function* () { - const segment = yield Promise.race(Object.values(activeDownloads)); - yield archiveDescriptor.write(segment.buffer, 0, segment.count, segment.offset); - actives--; - delete activeDownloads[segment.offset]; - bytesDownloaded += segment.count; - progressFn({ 
loadedBytes: bytesDownloaded }); - }), "waitAndWrite"); - while (nextDownload = downloads.pop()) { - activeDownloads[nextDownload.offset] = nextDownload.promiseGetter(); - actives++; - if (actives >= ((_a = options.downloadConcurrency) !== null && _a !== void 0 ? _a : 10)) { - yield waitAndWrite(); - } - } - while (actives > 0) { - yield waitAndWrite(); - } - } finally { - httpClient.dispose(); - yield archiveDescriptor.close(); - } - }); - } - __name(downloadCacheHttpClientConcurrent, "downloadCacheHttpClientConcurrent"); - exports2.downloadCacheHttpClientConcurrent = downloadCacheHttpClientConcurrent; - function downloadSegmentRetry(httpClient, archiveLocation, offset, count) { - return __awaiter3(this, void 0, void 0, function* () { - const retries = 5; - let failures = 0; - while (true) { - try { - const timeout = 3e4; - const result = yield promiseWithTimeout(timeout, downloadSegment(httpClient, archiveLocation, offset, count)); - if (typeof result === "string") { - throw new Error("downloadSegmentRetry failed due to timeout"); - } - return result; - } catch (err) { - if (failures >= retries) { - throw err; - } - failures++; - } - } - }); - } - __name(downloadSegmentRetry, "downloadSegmentRetry"); - function downloadSegment(httpClient, archiveLocation, offset, count) { - return __awaiter3(this, void 0, void 0, function* () { - const partRes = yield (0, requestUtils_1.retryHttpClientResponse)("downloadCachePart", () => __awaiter3(this, void 0, void 0, function* () { - return yield httpClient.get(archiveLocation, { - Range: `bytes=${offset}-${offset + count - 1}` - }); - })); - if (!partRes.readBodyBuffer) { - throw new Error("Expected HttpClientResponse to implement readBodyBuffer"); - } + createContext(req, res) { return { - offset, - count, - buffer: yield partRes.readBodyBuffer() + packageName: this.packageName, + serviceName: this.serviceName, + methodName: "", + contentType: request_1.getContentType(req.headers["content-type"]), + req, + res }; - }); - } - __name(downloadSegment, "downloadSegment"); - function downloadCacheStorageSDK(archiveLocation, archivePath, options) { - var _a; - return __awaiter3(this, void 0, void 0, function* () { - const client = new storage_blob_1.BlockBlobClient(archiveLocation, void 0, { - retryOptions: { - // Override the timeout used when downloading each 4 MB chunk - // The default is 2 min / MB, which is way too slow - tryTimeoutInMs: options.timeoutInMs - } - }); - const properties = yield client.getProperties(); - const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? 
_a : -1; - if (contentLength < 0) { - core2.debug("Unable to determine content length, downloading file with http-client..."); - yield downloadCacheHttpClient(archiveLocation, archivePath); - } else { - const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); - const downloadProgress = new DownloadProgress(contentLength); - const fd = fs2.openSync(archivePath, "w"); + } + /** + * Twrip server http handler implementation + * @param req + * @param resp + * @private + */ + _httpHandler(req, resp) { + return __awaiter3(this, void 0, void 0, function* () { + const ctx = this.createContext(req, resp); try { - downloadProgress.startDisplayTimer(); - const controller = new abort_controller_1.AbortController(); - const abortSignal = controller.signal; - while (!downloadProgress.isDone()) { - const segmentStart = downloadProgress.segmentOffset + downloadProgress.segmentSize; - const segmentSize = Math.min(maxSegmentSize, contentLength - segmentStart); - downloadProgress.nextSegment(segmentSize); - const result = yield promiseWithTimeout(options.segmentTimeoutInMs || 36e5, client.downloadToBuffer(segmentStart, segmentSize, { - abortSignal, - concurrency: options.downloadConcurrency, - onProgress: downloadProgress.onProgress() - })); - if (result === "timeout") { - controller.abort(); - throw new Error("Aborting cache download as the download time exceeded the timeout."); - } else if (Buffer.isBuffer(result)) { - fs2.writeFileSync(fd, result); - } + yield this.invokeHook("requestReceived", ctx); + const { method, mimeContentType } = request_1.validateRequest(ctx, req, this.pathPrefix || ""); + const handler = this.matchRoute(method, { + onMatch: /* @__PURE__ */ __name((ctx2) => { + return this.invokeHook("requestRouted", ctx2); + }, "onMatch"), + onNotFound: /* @__PURE__ */ __name(() => { + const msg = `no handler for path ${req.url}`; + throw new errors_1.BadRouteError(msg, req.method || "", req.url || ""); + }, "onNotFound") + }); + const body = yield request_1.getRequestData(req); + const response = yield handler(ctx, this.service, body, this.interceptors); + yield Promise.all([ + this.invokeHook("responsePrepared", ctx), + // keep backwards compatibility till next release + this.invokeHook("requestPrepared", ctx) + ]); + resp.statusCode = 200; + resp.setHeader("Content-Type", mimeContentType); + resp.end(response); + } catch (e) { + yield this.invokeHook("error", ctx, mustBeTwirpError(e)); + if (!resp.headersSent) { + writeError(resp, e); } } finally { - downloadProgress.stopDisplayTimer(); - fs2.closeSync(fd); + yield Promise.all([ + this.invokeHook("responseSent", ctx), + // keep backwards compatibility till next release + this.invokeHook("requestSent", ctx) + ]); } - } - }); - } - __name(downloadCacheStorageSDK, "downloadCacheStorageSDK"); - exports2.downloadCacheStorageSDK = downloadCacheStorageSDK; - var promiseWithTimeout = /* @__PURE__ */ __name((timeoutMs, promise) => __awaiter3(void 0, void 0, void 0, function* () { - let timeoutHandle; - const timeoutPromise = new Promise((resolve) => { - timeoutHandle = setTimeout(() => resolve("timeout"), timeoutMs); - }); - return Promise.race([promise, timeoutPromise]).then((result) => { - clearTimeout(timeoutHandle); - return result; - }); - }), "promiseWithTimeout"); - } -}); - -// ../node_modules/@actions/cache/lib/options.js -var require_options = __commonJS({ - "../node_modules/@actions/cache/lib/options.js"(exports2) { - "use strict"; - var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? 
function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: /* @__PURE__ */ __name(function() { - return m[k]; - }, "get") }; + }); } - Object.defineProperty(o, k2, desc); - } : function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - o[k2] = m[k]; - }); - var __setModuleDefault3 = exports2 && exports2.__setModuleDefault || (Object.create ? function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - } : function(o, v) { - o["default"] = v; - }); - var __importStar3 = exports2 && exports2.__importStar || function(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding3(result, mod, k); + /** + * Invoke a hook + * @param hookName + * @param ctx + * @param err + * @protected + */ + invokeHook(hookName, ctx, err) { + return __awaiter3(this, void 0, void 0, function* () { + if (this.hooks.length === 0) { + return; + } + const chainedHooks = hooks_1.chainHooks(...this.hooks); + const hook = chainedHooks === null || chainedHooks === void 0 ? void 0 : chainedHooks[hookName]; + if (hook) { + yield hook(ctx, err || new errors_1.InternalServerError("internal server error")); + } + }); } - __setModuleDefault3(result, mod); - return result; }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.getDownloadOptions = exports2.getUploadOptions = void 0; - var core2 = __importStar3(require_core()); - function getUploadOptions(copy) { - const result = { - uploadConcurrency: 4, - uploadChunkSize: 32 * 1024 * 1024 - }; - if (copy) { - if (typeof copy.uploadConcurrency === "number") { - result.uploadConcurrency = copy.uploadConcurrency; - } - if (typeof copy.uploadChunkSize === "number") { - result.uploadChunkSize = copy.uploadChunkSize; - } - } - core2.debug(`Upload concurrency: ${result.uploadConcurrency}`); - core2.debug(`Upload chunk size: ${result.uploadChunkSize}`); - return result; + exports2.TwirpServer = TwirpServer; + function writeError(res, error) { + const twirpError = mustBeTwirpError(error); + res.setHeader("Content-Type", "application/json"); + res.statusCode = errors_1.httpStatusFromErrorCode(twirpError.code); + res.end(twirpError.toJSON()); } - __name(getUploadOptions, "getUploadOptions"); - exports2.getUploadOptions = getUploadOptions; - function getDownloadOptions(copy) { - const result = { - useAzureSdk: false, - concurrentBlobDownloads: true, - downloadConcurrency: 8, - timeoutInMs: 3e4, - segmentTimeoutInMs: 6e5, - lookupOnly: false - }; - if (copy) { - if (typeof copy.useAzureSdk === "boolean") { - result.useAzureSdk = copy.useAzureSdk; - } - if (typeof copy.concurrentBlobDownloads === "boolean") { - result.concurrentBlobDownloads = copy.concurrentBlobDownloads; - } - if (typeof copy.downloadConcurrency === "number") { - result.downloadConcurrency = copy.downloadConcurrency; - } - if (typeof copy.timeoutInMs === "number") { - result.timeoutInMs = copy.timeoutInMs; - } - if (typeof copy.segmentTimeoutInMs === "number") { - result.segmentTimeoutInMs = copy.segmentTimeoutInMs; - } - if (typeof copy.lookupOnly === "boolean") { - result.lookupOnly = copy.lookupOnly; - } - } - const segmentDownloadTimeoutMins = process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]; - if (segmentDownloadTimeoutMins && !isNaN(Number(segmentDownloadTimeoutMins)) && 
isFinite(Number(segmentDownloadTimeoutMins))) { - result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1e3; + __name(writeError, "writeError"); + exports2.writeError = writeError; + function mustBeTwirpError(err) { + if (err instanceof errors_1.TwirpError) { + return err; } - core2.debug(`Use Azure SDK: ${result.useAzureSdk}`); - core2.debug(`Download concurrency: ${result.downloadConcurrency}`); - core2.debug(`Request timeout (ms): ${result.timeoutInMs}`); - core2.debug(`Cache segment download timeout mins env var: ${process.env["SEGMENT_DOWNLOAD_TIMEOUT_MINS"]}`); - core2.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); - core2.debug(`Lookup only: ${result.lookupOnly}`); - return result; + return new errors_1.InternalServerErrorWith(err); } - __name(getDownloadOptions, "getDownloadOptions"); - exports2.getDownloadOptions = getDownloadOptions; + __name(mustBeTwirpError, "mustBeTwirpError"); } }); -// ../node_modules/@actions/cache/lib/internal/cacheHttpClient.js -var require_cacheHttpClient = __commonJS({ - "../node_modules/@actions/cache/lib/internal/cacheHttpClient.js"(exports2) { +// ../node_modules/twirp-ts/build/twirp/interceptors.js +var require_interceptors = __commonJS({ + "../node_modules/twirp-ts/build/twirp/interceptors.js"(exports2) { "use strict"; - var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: /* @__PURE__ */ __name(function() { - return m[k]; - }, "get") }; - } - Object.defineProperty(o, k2, desc); - } : function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - o[k2] = m[k]; - }); - var __setModuleDefault3 = exports2 && exports2.__setModuleDefault || (Object.create ? function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - } : function(o, v) { - o["default"] = v; - }); - var __importStar3 = exports2 && exports2.__importStar || function(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding3(result, mod, k); - } - __setModuleDefault3(result, mod); - return result; - }; var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? 
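// --- Editor's note (illustration, not part of the diff): the lifecycle that
// _httpHandler() drives and what writeError() emits, summarized as comments.
// Success: requestReceived -> requestRouted -> handler -> responsePrepared
//   (plus requestPrepared for backwards compatibility) -> 200 with the
//   negotiated Content-Type -> responseSent (plus requestSent).
// Failure: the "error" hook fires with a TwirpError; non-Twirp errors are
//   wrapped by mustBeTwirpError() into InternalServerErrorWith. writeError()
//   then answers with application/json, the status from
//   httpStatusFromErrorCode(), and the error's toJSON() body, e.g. for a
//   NotFoundError: HTTP 404 with roughly {"code":"not_found","msg":"..."}.
writeError(resp, new errors_1.NotFoundError("no such hat")); // resp: a ServerResponse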
value : new P(function(resolve) { @@ -90615,243 +91635,408 @@ var require_cacheHttpClient = __commonJS({ }); }; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.saveCache = exports2.reserveCache = exports2.downloadCache = exports2.getCacheEntry = exports2.getCacheVersion = void 0; - var core2 = __importStar3(require_core()); - var http_client_1 = require_lib(); - var auth_1 = require_auth(); - var crypto7 = __importStar3(require("crypto")); - var fs2 = __importStar3(require("fs")); - var url_1 = require("url"); - var utils = __importStar3(require_cacheUtils()); - var downloadUtils_1 = require_downloadUtils(); - var options_1 = require_options(); - var requestUtils_1 = require_requestUtils(); - var versionSalt = "1.0"; - function getCacheApiUrl(resource) { - const baseUrl = process.env["ACTIONS_CACHE_URL"] || ""; - if (!baseUrl) { - throw new Error("Cache Service Url not found, unable to restore cache."); + exports2.chainInterceptors = void 0; + function chainInterceptors(...interceptors) { + if (interceptors.length === 0) { + return; } - const url = `${baseUrl}_apis/artifactcache/${resource}`; - core2.debug(`Resource Url: ${url}`); - return url; - } - __name(getCacheApiUrl, "getCacheApiUrl"); - function createAcceptHeader(type, apiVersion) { - return `${type};api-version=${apiVersion}`; + if (interceptors.length === 1) { + return interceptors[0]; + } + const first = interceptors[0]; + return (ctx, request, handler) => __awaiter3(this, void 0, void 0, function* () { + let next = handler; + for (let i = interceptors.length - 1; i > 0; i--) { + next = /* @__PURE__ */ ((next2) => (ctx2, typedRequest) => { + return interceptors[i](ctx2, typedRequest, next2); + })(next); + } + return first(ctx, request, next); + }); } - __name(createAcceptHeader, "createAcceptHeader"); - function getRequestOptions() { - const requestOptions = { - headers: { - Accept: createAcceptHeader("application/json", "6.0-preview.1") + __name(chainInterceptors, "chainInterceptors"); + exports2.chainInterceptors = chainInterceptors; + } +}); + +// ../node_modules/dot-object/index.js +var require_dot_object = __commonJS({ + "../node_modules/dot-object/index.js"(exports2, module2) { + "use strict"; + function _process(v, mod) { + var i; + var r; + if (typeof mod === "function") { + r = mod(v); + if (r !== void 0) { + v = r; } - }; - return requestOptions; + } else if (Array.isArray(mod)) { + for (i = 0; i < mod.length; i++) { + r = mod[i](v); + if (r !== void 0) { + v = r; + } + } + } + return v; } - __name(getRequestOptions, "getRequestOptions"); - function createHttpClient() { - const token = process.env["ACTIONS_RUNTIME_TOKEN"] || ""; - const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token); - return new http_client_1.HttpClient("actions/cache", [bearerCredentialHandler], getRequestOptions()); + __name(_process, "_process"); + function parseKey(key, val) { + if (key[0] === "-" && Array.isArray(val) && /^-\d+$/.test(key)) { + return val.length + parseInt(key, 10); + } + return key; } - __name(createHttpClient, "createHttpClient"); - function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) { - const components = paths.slice(); - if (compressionMethod) { - components.push(compressionMethod); + __name(parseKey, "parseKey"); + function isIndex(k) { + return /^\d+$/.test(k); + } + __name(isIndex, "isIndex"); + function isObject(val) { + return Object.prototype.toString.call(val) === "[object Object]"; + } + __name(isObject, "isObject"); + function 
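// --- Editor's note (illustration, not part of the diff): chainInterceptors()
// composes interceptors onion-style, with the first argument outermost. A
// sketch with two toy interceptors.
const logging = (ctx, request, next) => {
  console.log("-> entering handler");
  return next(ctx, request);
};
const timing = async (ctx, request, next) => {
  const started = Date.now();
  const response = await next(ctx, request);
  console.log("took", Date.now() - started, "ms");
  return response;
};
const chained = chainInterceptors(logging, timing);
// chained(ctx, request, handler) runs logging -> timing -> handler and
// unwinds in reverse order on the way back out.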
isArrayOrObject(val) { + return Object(val) === val; + } + __name(isArrayOrObject, "isArrayOrObject"); + function isEmptyObject(val) { + return Object.keys(val).length === 0; + } + __name(isEmptyObject, "isEmptyObject"); + var blacklist = ["__proto__", "prototype", "constructor"]; + var blacklistFilter = /* @__PURE__ */ __name(function(part) { + return blacklist.indexOf(part) === -1; + }, "blacklistFilter"); + function parsePath(path2, sep) { + if (path2.indexOf("[") >= 0) { + path2 = path2.replace(/\[/g, sep).replace(/]/g, ""); } - if (process.platform === "win32" && !enableCrossOsArchive) { - components.push("windows-only"); + var parts = path2.split(sep); + var check = parts.filter(blacklistFilter); + if (check.length !== parts.length) { + throw Error("Refusing to update blacklisted property " + path2); } - components.push(versionSalt); - return crypto7.createHash("sha256").update(components.join("|")).digest("hex"); + return parts; } - __name(getCacheVersion, "getCacheVersion"); - exports2.getCacheVersion = getCacheVersion; - function getCacheEntry(keys, paths, options) { - return __awaiter3(this, void 0, void 0, function* () { - const httpClient = createHttpClient(); - const version4 = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive); - const resource = `cache?keys=${encodeURIComponent(keys.join(","))}&version=${version4}`; - const response = yield (0, requestUtils_1.retryTypedResponse)("getCacheEntry", () => __awaiter3(this, void 0, void 0, function* () { - return httpClient.getJson(getCacheApiUrl(resource)); - })); - if (response.statusCode === 204) { - if (core2.isDebug()) { - yield printCachesListForDiagnostics(keys[0], httpClient, version4); + __name(parsePath, "parsePath"); + var hasOwnProperty = Object.prototype.hasOwnProperty; + function DotObject(separator, override, useArray, useBrackets) { + if (!(this instanceof DotObject)) { + return new DotObject(separator, override, useArray, useBrackets); + } + if (typeof override === "undefined") override = false; + if (typeof useArray === "undefined") useArray = true; + if (typeof useBrackets === "undefined") useBrackets = true; + this.separator = separator || "."; + this.override = override; + this.useArray = useArray; + this.useBrackets = useBrackets; + this.keepArray = false; + this.cleanup = []; + } + __name(DotObject, "DotObject"); + var dotDefault = new DotObject(".", false, true, true); + function wrap(method) { + return function() { + return dotDefault[method].apply(dotDefault, arguments); + }; + } + __name(wrap, "wrap"); + DotObject.prototype._fill = function(a, obj, v, mod) { + var k = a.shift(); + if (a.length > 0) { + obj[k] = obj[k] || (this.useArray && isIndex(a[0]) ? 
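// --- Editor's note (illustration, not part of the diff): every dot-object path
// is funneled through parsePath(), which rejects __proto__, prototype and
// constructor segments so untrusted keys cannot pollute Object.prototype.
parsePath("a.b.c", ".");       // => ["a", "b", "c"]
parsePath("a[0].b", ".");      // bracket indexes normalize to ["a", "0", "b"]
parsePath("__proto__.x", "."); // throws "Refusing to update blacklisted property __proto__.x"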
[] : {}); + if (!isArrayOrObject(obj[k])) { + if (this.override) { + obj[k] = {}; + } else { + if (!(isArrayOrObject(v) && isEmptyObject(v))) { + throw new Error( + "Trying to redefine `" + k + "` which is a " + typeof obj[k] + ); + } + return; } - return null; } - if (!(0, requestUtils_1.isSuccessStatusCode)(response.statusCode)) { - throw new Error(`Cache service responded with ${response.statusCode}`); + this._fill(a, obj[k], v, mod); + } else { + if (!this.override && isArrayOrObject(obj[k]) && !isEmptyObject(obj[k])) { + if (!(isArrayOrObject(v) && isEmptyObject(v))) { + throw new Error("Trying to redefine non-empty obj['" + k + "']"); + } + return; } - const cacheResult = response.result; - const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation; - if (!cacheDownloadUrl) { - throw new Error("Cache not found."); + obj[k] = _process(v, mod); + } + }; + DotObject.prototype.object = function(obj, mods) { + var self2 = this; + Object.keys(obj).forEach(function(k) { + var mod = mods === void 0 ? null : mods[k]; + var ok = parsePath(k, self2.separator).join(self2.separator); + if (ok.indexOf(self2.separator) !== -1) { + self2._fill(ok.split(self2.separator), obj, obj[k], mod); + delete obj[k]; + } else { + obj[k] = _process(obj[k], mod); } - core2.setSecret(cacheDownloadUrl); - core2.debug(`Cache Result:`); - core2.debug(JSON.stringify(cacheResult)); - return cacheResult; }); - } - __name(getCacheEntry, "getCacheEntry"); - exports2.getCacheEntry = getCacheEntry; - function printCachesListForDiagnostics(key, httpClient, version4) { - return __awaiter3(this, void 0, void 0, function* () { - const resource = `caches?key=${encodeURIComponent(key)}`; - const response = yield (0, requestUtils_1.retryTypedResponse)("listCache", () => __awaiter3(this, void 0, void 0, function* () { - return httpClient.getJson(getCacheApiUrl(resource)); - })); - if (response.statusCode === 200) { - const cacheListResult = response.result; - const totalCount = cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.totalCount; - if (totalCount && totalCount > 0) { - core2.debug(`No matching cache found for cache key '${key}', version '${version4} and scope ${process.env["GITHUB_REF"]}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key -Other caches with similar key:`); - for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { - core2.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? 
void 0 : cacheEntry.creationTime}`); + return obj; + }; + DotObject.prototype.str = function(path2, v, obj, mod) { + var ok = parsePath(path2, this.separator).join(this.separator); + if (path2.indexOf(this.separator) !== -1) { + this._fill(ok.split(this.separator), obj, v, mod); + } else { + obj[path2] = _process(v, mod); + } + return obj; + }; + DotObject.prototype.pick = function(path2, obj, remove, reindexArray) { + var i; + var keys; + var val; + var key; + var cp; + keys = parsePath(path2, this.separator); + for (i = 0; i < keys.length; i++) { + key = parseKey(keys[i], obj); + if (obj && typeof obj === "object" && key in obj) { + if (i === keys.length - 1) { + if (remove) { + val = obj[key]; + if (reindexArray && Array.isArray(obj)) { + obj.splice(key, 1); + } else { + delete obj[key]; + } + if (Array.isArray(obj)) { + cp = keys.slice(0, -1).join("."); + if (this.cleanup.indexOf(cp) === -1) { + this.cleanup.push(cp); + } + } + return val; + } else { + return obj[key]; } - } - } - }); - } - __name(printCachesListForDiagnostics, "printCachesListForDiagnostics"); - function downloadCache(archiveLocation, archivePath, options) { - return __awaiter3(this, void 0, void 0, function* () { - const archiveUrl = new url_1.URL(archiveLocation); - const downloadOptions = (0, options_1.getDownloadOptions)(options); - if (archiveUrl.hostname.endsWith(".blob.core.windows.net")) { - if (downloadOptions.useAzureSdk) { - yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions); - } else if (downloadOptions.concurrentBlobDownloads) { - yield (0, downloadUtils_1.downloadCacheHttpClientConcurrent)(archiveLocation, archivePath, downloadOptions); } else { - yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath); + obj = obj[key]; } } else { - yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath); + return void 0; } - }); - } - __name(downloadCache, "downloadCache"); - exports2.downloadCache = downloadCache; - function reserveCache(key, paths, options) { - return __awaiter3(this, void 0, void 0, function* () { - const httpClient = createHttpClient(); - const version4 = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive); - const reserveCacheRequest = { - key, - version: version4, - cacheSize: options === null || options === void 0 ? 
void 0 : options.cacheSize - }; - const response = yield (0, requestUtils_1.retryTypedResponse)("reserveCache", () => __awaiter3(this, void 0, void 0, function* () { - return httpClient.postJson(getCacheApiUrl("caches"), reserveCacheRequest); - })); - return response; - }); - } - __name(reserveCache, "reserveCache"); - exports2.reserveCache = reserveCache; - function getContentRange(start, end) { - return `bytes ${start}-${end}/*`; - } - __name(getContentRange, "getContentRange"); - function uploadChunk(httpClient, resourceUrl, openStream, start, end) { - return __awaiter3(this, void 0, void 0, function* () { - core2.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); - const additionalHeaders = { - "Content-Type": "application/octet-stream", - "Content-Range": getContentRange(start, end) - }; - const uploadChunkResponse = yield (0, requestUtils_1.retryHttpClientResponse)(`uploadChunk (start: ${start}, end: ${end})`, () => __awaiter3(this, void 0, void 0, function* () { - return httpClient.sendStream("PATCH", resourceUrl, openStream(), additionalHeaders); - })); - if (!(0, requestUtils_1.isSuccessStatusCode)(uploadChunkResponse.message.statusCode)) { - throw new Error(`Cache service responded with ${uploadChunkResponse.message.statusCode} during upload chunk.`); + } + if (remove && Array.isArray(obj)) { + obj = obj.filter(function(n) { + return n !== void 0; + }); + } + return obj; + }; + DotObject.prototype.delete = function(path2, obj) { + return this.remove(path2, obj, true); + }; + DotObject.prototype.remove = function(path2, obj, reindexArray) { + var i; + this.cleanup = []; + if (Array.isArray(path2)) { + for (i = 0; i < path2.length; i++) { + this.pick(path2[i], obj, true, reindexArray); } - }); - } - __name(uploadChunk, "uploadChunk"); - function uploadFile(httpClient, cacheId, archivePath, options) { - return __awaiter3(this, void 0, void 0, function* () { - const fileSize = utils.getArchiveFileSizeInBytes(archivePath); - const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`); - const fd = fs2.openSync(archivePath, "r"); - const uploadOptions = (0, options_1.getUploadOptions)(options); - const concurrency = utils.assertDefined("uploadConcurrency", uploadOptions.uploadConcurrency); - const maxChunkSize = utils.assertDefined("uploadChunkSize", uploadOptions.uploadChunkSize); - const parallelUploads = [...new Array(concurrency).keys()]; - core2.debug("Awaiting all uploads"); - let offset = 0; - try { - yield Promise.all(parallelUploads.map(() => __awaiter3(this, void 0, void 0, function* () { - while (offset < fileSize) { - const chunkSize = Math.min(fileSize - offset, maxChunkSize); - const start = offset; - const end = offset + chunkSize - 1; - offset += maxChunkSize; - yield uploadChunk(httpClient, resourceUrl, () => fs2.createReadStream(archivePath, { - fd, - start, - end, - autoClose: false - }).on("error", (error) => { - throw new Error(`Cache upload failed because file read failed with ${error.message}`); - }), start, end); + if (!reindexArray) { + this._cleanup(obj); + } + return obj; + } else { + return this.pick(path2, obj, true, reindexArray); + } + }; + DotObject.prototype._cleanup = function(obj) { + var ret; + var i; + var keys; + var root; + if (this.cleanup.length) { + for (i = 0; i < this.cleanup.length; i++) { + keys = this.cleanup[i].split("."); + root = keys.splice(0, -1).join("."); + ret = root ? 
this.pick(root, obj) : obj; + ret = ret[keys[0]].filter(function(v) { + return v !== void 0; + }); + this.set(this.cleanup[i], ret, obj); + } + this.cleanup = []; + } + }; + DotObject.prototype.del = DotObject.prototype.remove; + DotObject.prototype.move = function(source, target, obj, mods, merge) { + if (typeof mods === "function" || Array.isArray(mods)) { + this.set(target, _process(this.pick(source, obj, true), mods), obj, merge); + } else { + merge = mods; + this.set(target, this.pick(source, obj, true), obj, merge); + } + return obj; + }; + DotObject.prototype.transfer = function(source, target, obj1, obj2, mods, merge) { + if (typeof mods === "function" || Array.isArray(mods)) { + this.set( + target, + _process(this.pick(source, obj1, true), mods), + obj2, + merge + ); + } else { + merge = mods; + this.set(target, this.pick(source, obj1, true), obj2, merge); + } + return obj2; + }; + DotObject.prototype.copy = function(source, target, obj1, obj2, mods, merge) { + if (typeof mods === "function" || Array.isArray(mods)) { + this.set( + target, + _process( + // clone what is picked + JSON.parse(JSON.stringify(this.pick(source, obj1, false))), + mods + ), + obj2, + merge + ); + } else { + merge = mods; + this.set(target, this.pick(source, obj1, false), obj2, merge); + } + return obj2; + }; + DotObject.prototype.set = function(path2, val, obj, merge) { + var i; + var k; + var keys; + var key; + if (typeof val === "undefined") { + return obj; + } + keys = parsePath(path2, this.separator); + for (i = 0; i < keys.length; i++) { + key = keys[i]; + if (i === keys.length - 1) { + if (merge && isObject(val) && isObject(obj[key])) { + for (k in val) { + if (hasOwnProperty.call(val, k)) { + obj[key][k] = val[k]; + } } - }))); - } finally { - fs2.closeSync(fd); + } else if (merge && Array.isArray(obj[key]) && Array.isArray(val)) { + for (var j = 0; j < val.length; j++) { + obj[keys[i]].push(val[j]); + } + } else { + obj[key] = val; + } + } else if ( + // force the value to be an object + !hasOwnProperty.call(obj, key) || !isObject(obj[key]) && !Array.isArray(obj[key]) + ) { + if (/^\d+$/.test(keys[i + 1])) { + obj[key] = []; + } else { + obj[key] = {}; + } } - return; - }); - } - __name(uploadFile, "uploadFile"); - function commitCache(httpClient, cacheId, filesize) { - return __awaiter3(this, void 0, void 0, function* () { - const commitCacheRequest = { size: filesize }; - return yield (0, requestUtils_1.retryTypedResponse)("commitCache", () => __awaiter3(this, void 0, void 0, function* () { - return httpClient.postJson(getCacheApiUrl(`caches/${cacheId.toString()}`), commitCacheRequest); - })); + obj = obj[key]; + } + return obj; + }; + DotObject.prototype.transform = function(recipe, obj, tgt) { + obj = obj || {}; + tgt = tgt || {}; + Object.keys(recipe).forEach( + function(key) { + this.set(recipe[key], this.pick(key, obj), tgt); + }.bind(this) + ); + return tgt; + }; + DotObject.prototype.dot = function(obj, tgt, path2) { + tgt = tgt || {}; + path2 = path2 || []; + var isArray = Array.isArray(obj); + Object.keys(obj).forEach( + function(key) { + var index = isArray && this.useBrackets ? 
"[" + key + "]" : key; + if (isArrayOrObject(obj[key]) && (isObject(obj[key]) && !isEmptyObject(obj[key]) || Array.isArray(obj[key]) && !this.keepArray && obj[key].length !== 0)) { + if (isArray && this.useBrackets) { + var previousKey = path2[path2.length - 1] || ""; + return this.dot( + obj[key], + tgt, + path2.slice(0, -1).concat(previousKey + index) + ); + } else { + return this.dot(obj[key], tgt, path2.concat(index)); + } + } else { + if (isArray && this.useBrackets) { + tgt[path2.join(this.separator).concat("[" + key + "]")] = obj[key]; + } else { + tgt[path2.concat(index).join(this.separator)] = obj[key]; + } + } + }.bind(this) + ); + return tgt; + }; + DotObject.pick = wrap("pick"); + DotObject.move = wrap("move"); + DotObject.transfer = wrap("transfer"); + DotObject.transform = wrap("transform"); + DotObject.copy = wrap("copy"); + DotObject.object = wrap("object"); + DotObject.str = wrap("str"); + DotObject.set = wrap("set"); + DotObject.delete = wrap("delete"); + DotObject.del = DotObject.remove = wrap("remove"); + DotObject.dot = wrap("dot"); + ["override", "overwrite"].forEach(function(prop) { + Object.defineProperty(DotObject, prop, { + get: /* @__PURE__ */ __name(function() { + return dotDefault.override; + }, "get"), + set: /* @__PURE__ */ __name(function(val) { + dotDefault.override = !!val; + }, "set") }); - } - __name(commitCache, "commitCache"); - function saveCache(cacheId, archivePath, options) { - return __awaiter3(this, void 0, void 0, function* () { - const httpClient = createHttpClient(); - core2.debug("Upload cache"); - yield uploadFile(httpClient, cacheId, archivePath, options); - core2.debug("Commiting cache"); - const cacheSize = utils.getArchiveFileSizeInBytes(archivePath); - core2.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); - const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize); - if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) { - throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`); - } - core2.info("Cache saved successfully"); + }); + ["useArray", "keepArray", "useBrackets"].forEach(function(prop) { + Object.defineProperty(DotObject, prop, { + get: /* @__PURE__ */ __name(function() { + return dotDefault[prop]; + }, "get"), + set: /* @__PURE__ */ __name(function(val) { + dotDefault[prop] = val; + }, "set") }); - } - __name(saveCache, "saveCache"); - exports2.saveCache = saveCache; + }); + DotObject._process = _process; + module2.exports = DotObject; } }); -// ../node_modules/@actions/cache/lib/internal/tar.js -var require_tar = __commonJS({ - "../node_modules/@actions/cache/lib/internal/tar.js"(exports2) { +// ../node_modules/twirp-ts/build/twirp/http.client.js +var require_http_client = __commonJS({ + "../node_modules/twirp-ts/build/twirp/http.client.js"(exports2) { "use strict"; var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? function(o, m, k, k2) { if (k2 === void 0) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: /* @__PURE__ */ __name(function() { - return m[k]; - }, "get") }; - } - Object.defineProperty(o, k2, desc); + Object.defineProperty(o, k2, { enumerable: true, get: /* @__PURE__ */ __name(function() { + return m[k]; + }, "get") }); } : function(o, m, k, k2) { if (k2 === void 0) k2 = k; o[k2] = m[k]; @@ -90902,213 +92087,84 @@ var require_tar = __commonJS({ }); }; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.createTar = exports2.extractTar = exports2.listTar = void 0; - var exec_1 = require_exec(); - var io2 = __importStar3(require_io()); - var fs_1 = require("fs"); - var path2 = __importStar3(require("path")); - var utils = __importStar3(require_cacheUtils()); - var constants_1 = require_constants7(); - var IS_WINDOWS = process.platform === "win32"; - function getTarPath() { - return __awaiter3(this, void 0, void 0, function* () { - switch (process.platform) { - case "win32": { - const gnuTar = yield utils.getGnuTarPathOnWindows(); - const systemTar = constants_1.SystemTarPathOnWindows; - if (gnuTar) { - return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; - } else if ((0, fs_1.existsSync)(systemTar)) { - return { path: systemTar, type: constants_1.ArchiveToolType.BSD }; - } - break; - } - case "darwin": { - const gnuTar = yield io2.which("gtar", false); - if (gnuTar) { - return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; - } else { - return { - path: yield io2.which("tar", true), - type: constants_1.ArchiveToolType.BSD - }; - } - } - default: - break; - } - return { - path: yield io2.which("tar", true), - type: constants_1.ArchiveToolType.GNU - }; - }); - } - __name(getTarPath, "getTarPath"); - function getTarArgs(tarPath, compressionMethod, type, archivePath = "") { - return __awaiter3(this, void 0, void 0, function* () { - const args = [`"${tarPath.path}"`]; - const cacheFileName = utils.getCacheFileName(compressionMethod); - const tarFile = "cache.tar"; - const workingDirectory = getWorkingDirectory(); - const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; - switch (type) { - case "create": - args.push("--posix", "-cf", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path2.sep}`, "g"), "/"), "--exclude", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path2.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path2.sep}`, "g"), "/"), "--files-from", constants_1.ManifestFilename); - break; - case "extract": - args.push("-xf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path2.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path2.sep}`, "g"), "/")); - break; - case "list": - args.push("-tf", BSD_TAR_ZSTD ? 
tarFile : archivePath.replace(new RegExp(`\\${path2.sep}`, "g"), "/"), "-P"); - break; - } - if (tarPath.type === constants_1.ArchiveToolType.GNU) { - switch (process.platform) { - case "win32": - args.push("--force-local"); - break; - case "darwin": - args.push("--delay-directory-restore"); - break; + exports2.FetchRPC = exports2.wrapErrorResponseToTwirpError = exports2.NodeHttpRPC = void 0; + var http = __importStar3(require("http")); + var https = __importStar3(require("https")); + var url_1 = require("url"); + var errors_1 = require_errors3(); + var NodeHttpRPC = /* @__PURE__ */ __name((options) => ({ + request(service, method, contentType, data) { + let client; + return new Promise((resolve, rejected) => { + const responseChunks = []; + const requestData = contentType === "application/protobuf" ? Buffer.from(data) : JSON.stringify(data); + const url = new url_1.URL(options.baseUrl); + const isHttps = url.protocol === "https:"; + if (isHttps) { + client = https; + } else { + client = http; } - } - return args; - }); - } - __name(getTarArgs, "getTarArgs"); - function getCommands(compressionMethod, type, archivePath = "") { - return __awaiter3(this, void 0, void 0, function* () { - let args; - const tarPath = yield getTarPath(); - const tarArgs = yield getTarArgs(tarPath, compressionMethod, type, archivePath); - const compressionArgs = type !== "create" ? yield getDecompressionProgram(tarPath, compressionMethod, archivePath) : yield getCompressionProgram(tarPath, compressionMethod); - const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; - if (BSD_TAR_ZSTD && type !== "create") { - args = [[...compressionArgs].join(" "), [...tarArgs].join(" ")]; - } else { - args = [[...tarArgs].join(" "), [...compressionArgs].join(" ")]; - } - if (BSD_TAR_ZSTD) { - return args; - } - return [args.join(" ")]; - }); - } - __name(getCommands, "getCommands"); - function getWorkingDirectory() { - var _a; - return (_a = process.env["GITHUB_WORKSPACE"]) !== null && _a !== void 0 ? _a : process.cwd(); - } - __name(getWorkingDirectory, "getWorkingDirectory"); - function getDecompressionProgram(tarPath, compressionMethod, archivePath) { - return __awaiter3(this, void 0, void 0, function* () { - const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; - switch (compressionMethod) { - case constants_1.CompressionMethod.Zstd: - return BSD_TAR_ZSTD ? [ - "zstd -d --long=30 --force -o", - constants_1.TarFilename, - archivePath.replace(new RegExp(`\\${path2.sep}`, "g"), "/") - ] : [ - "--use-compress-program", - IS_WINDOWS ? '"zstd -d --long=30"' : "unzstd --long=30" - ]; - case constants_1.CompressionMethod.ZstdWithoutLong: - return BSD_TAR_ZSTD ? [ - "zstd -d --force -o", - constants_1.TarFilename, - archivePath.replace(new RegExp(`\\${path2.sep}`, "g"), "/") - ] : ["--use-compress-program", IS_WINDOWS ? 
'"zstd -d"' : "unzstd"]; - default: - return ["-z"]; - } - }); - } - __name(getDecompressionProgram, "getDecompressionProgram"); - function getCompressionProgram(tarPath, compressionMethod) { - return __awaiter3(this, void 0, void 0, function* () { - const cacheFileName = utils.getCacheFileName(compressionMethod); - const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; - switch (compressionMethod) { - case constants_1.CompressionMethod.Zstd: - return BSD_TAR_ZSTD ? [ - "zstd -T0 --long=30 --force -o", - cacheFileName.replace(new RegExp(`\\${path2.sep}`, "g"), "/"), - constants_1.TarFilename - ] : [ - "--use-compress-program", - IS_WINDOWS ? '"zstd -T0 --long=30"' : "zstdmt --long=30" - ]; - case constants_1.CompressionMethod.ZstdWithoutLong: - return BSD_TAR_ZSTD ? [ - "zstd -T0 --force -o", - cacheFileName.replace(new RegExp(`\\${path2.sep}`, "g"), "/"), - constants_1.TarFilename - ] : ["--use-compress-program", IS_WINDOWS ? '"zstd -T0"' : "zstdmt"]; - default: - return ["-z"]; - } - }); - } - __name(getCompressionProgram, "getCompressionProgram"); - function execCommands(commands, cwd) { - return __awaiter3(this, void 0, void 0, function* () { - for (const command of commands) { - try { - yield (0, exec_1.exec)(command, void 0, { - cwd, - env: Object.assign(Object.assign({}, process.env), { MSYS: "winsymlinks:nativestrict" }) + const prefix = url.pathname !== "/" ? url.pathname : ""; + const req = client.request(Object.assign(Object.assign({}, options ? options : {}), { method: "POST", protocol: url.protocol, host: url.hostname, port: url.port ? url.port : isHttps ? 443 : 80, path: `${prefix}/${service}/${method}`, headers: Object.assign(Object.assign({}, options.headers ? options.headers : {}), { "Content-Type": contentType, "Content-Length": contentType === "application/protobuf" ? Buffer.byteLength(requestData) : Buffer.from(requestData).byteLength }) }), (res) => { + res.on("data", (chunk) => responseChunks.push(chunk)); + res.on("end", () => { + const data2 = Buffer.concat(responseChunks); + if (res.statusCode != 200) { + rejected(wrapErrorResponseToTwirpError(data2.toString())); + } else { + if (contentType === "application/json") { + resolve(JSON.parse(data2.toString())); + } else { + resolve(data2); + } + } }); - } catch (error) { - throw new Error(`${command.split(" ")[0]} failed with error: ${error === null || error === void 0 ? 
void 0 : error.message}`); - } - } - }); - } - __name(execCommands, "execCommands"); - function listTar(archivePath, compressionMethod) { - return __awaiter3(this, void 0, void 0, function* () { - const commands = yield getCommands(compressionMethod, "list", archivePath); - yield execCommands(commands); - }); - } - __name(listTar, "listTar"); - exports2.listTar = listTar; - function extractTar(archivePath, compressionMethod) { - return __awaiter3(this, void 0, void 0, function* () { - const workingDirectory = getWorkingDirectory(); - yield io2.mkdirP(workingDirectory); - const commands = yield getCommands(compressionMethod, "extract", archivePath); - yield execCommands(commands); - }); - } - __name(extractTar, "extractTar"); - exports2.extractTar = extractTar; - function createTar(archiveFolder, sourceDirectories, compressionMethod) { - return __awaiter3(this, void 0, void 0, function* () { - (0, fs_1.writeFileSync)(path2.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join("\n")); - const commands = yield getCommands(compressionMethod, "create"); - yield execCommands(commands, archiveFolder); - }); + res.on("error", (err) => { + rejected(err); + }); + }).on("error", (err) => { + rejected(err); + }); + req.end(requestData); + }); + } + }), "NodeHttpRPC"); + exports2.NodeHttpRPC = NodeHttpRPC; + function wrapErrorResponseToTwirpError(errorResponse) { + return errors_1.TwirpError.fromObject(JSON.parse(errorResponse)); } - __name(createTar, "createTar"); - exports2.createTar = createTar; + __name(wrapErrorResponseToTwirpError, "wrapErrorResponseToTwirpError"); + exports2.wrapErrorResponseToTwirpError = wrapErrorResponseToTwirpError; + var FetchRPC = /* @__PURE__ */ __name((options) => ({ + request(service, method, contentType, data) { + return __awaiter3(this, void 0, void 0, function* () { + const headers = new Headers(options.headers); + headers.set("content-type", contentType); + const response = yield fetch(`${options.baseUrl}/${service}/${method}`, Object.assign(Object.assign({}, options), { method: "POST", headers, body: data instanceof Uint8Array ? data : JSON.stringify(data) })); + if (response.status === 200) { + if (contentType === "application/json") { + return yield response.json(); + } + return new Uint8Array(yield response.arrayBuffer()); + } + throw errors_1.TwirpError.fromObject(yield response.json()); + }); + } + }), "FetchRPC"); + exports2.FetchRPC = FetchRPC; } }); -// ../node_modules/@actions/cache/lib/cache.js -var require_cache2 = __commonJS({ - "../node_modules/@actions/cache/lib/cache.js"(exports2) { +// ../node_modules/twirp-ts/build/twirp/gateway.js +var require_gateway = __commonJS({ + "../node_modules/twirp-ts/build/twirp/gateway.js"(exports2) { "use strict"; var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? function(o, m, k, k2) { if (k2 === void 0) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: /* @__PURE__ */ __name(function() { - return m[k]; - }, "get") }; - } - Object.defineProperty(o, k2, desc); + Object.defineProperty(o, k2, { enumerable: true, get: /* @__PURE__ */ __name(function() { + return m[k]; + }, "get") }); } : function(o, m, k, k2) { if (k2 === void 0) k2 = k; o[k2] = m[k]; @@ -91158,6623 +92214,8596 @@ var require_cache2 = __commonJS({ step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.saveCache = exports2.restoreCache = exports2.isFeatureAvailable = exports2.ReserveCacheError = exports2.ValidationError = void 0; - var core2 = __importStar3(require_core()); - var path2 = __importStar3(require("path")); - var utils = __importStar3(require_cacheUtils()); - var cacheHttpClient = __importStar3(require_cacheHttpClient()); - var tar_1 = require_tar(); - var ValidationError = class _ValidationError extends Error { - static { - __name(this, "ValidationError"); - } - constructor(message) { - super(message); - this.name = "ValidationError"; - Object.setPrototypeOf(this, _ValidationError.prototype); - } + var __rest2 = exports2 && exports2.__rest || function(s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; }; - exports2.ValidationError = ValidationError; - var ReserveCacheError = class _ReserveCacheError extends Error { + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.Gateway = exports2.Pattern = void 0; + var querystring_1 = require("querystring"); + var dotObject = __importStar3(require_dot_object()); + var request_1 = require_request3(); + var errors_1 = require_errors3(); + var http_client_1 = require_http_client(); + var server_1 = require_server(); + var Pattern; + (function(Pattern2) { + Pattern2["POST"] = "post"; + Pattern2["GET"] = "get"; + Pattern2["PATCH"] = "patch"; + Pattern2["PUT"] = "put"; + Pattern2["DELETE"] = "delete"; + })(Pattern = exports2.Pattern || (exports2.Pattern = {})); + var Gateway = class { static { - __name(this, "ReserveCacheError"); - } - constructor(message) { - super(message); - this.name = "ReserveCacheError"; - Object.setPrototypeOf(this, _ReserveCacheError.prototype); - } - }; - exports2.ReserveCacheError = ReserveCacheError; - function checkPaths(paths) { - if (!paths || paths.length === 0) { - throw new ValidationError(`Path Validation Error: At least one directory or file path is required`); - } - } - __name(checkPaths, "checkPaths"); - function checkKey(key) { - if (key.length > 512) { - throw new ValidationError(`Key Validation Error: ${key} cannot be larger than 512 characters.`); + __name(this, "Gateway"); } - const regex = /^[^,]*$/; - if (!regex.test(key)) { - throw new ValidationError(`Key Validation Error: ${key} cannot contain commas.`); + constructor(routes) { + this.routes = routes; } - } - __name(checkKey, "checkKey"); - function isFeatureAvailable() { - return !!process.env["ACTIONS_CACHE_URL"]; - } - __name(isFeatureAvailable, "isFeatureAvailable"); - exports2.isFeatureAvailable = isFeatureAvailable; - function restoreCache(paths, primaryKey, restoreKeys, options, 
enableCrossOsArchive = false) { - return __awaiter3(this, void 0, void 0, function* () { - checkPaths(paths); - restoreKeys = restoreKeys || []; - const keys = [primaryKey, ...restoreKeys]; - core2.debug("Resolved Keys:"); - core2.debug(JSON.stringify(keys)); - if (keys.length > 10) { - throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); - } - for (const key of keys) { - checkKey(key); - } - const compressionMethod = yield utils.getCompressionMethod(); - let archivePath = ""; - try { - const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { - compressionMethod, - enableCrossOsArchive + /** + * Middleware that rewrite the current request + * to a Twirp compliant request + */ + twirpRewrite(prefix = "/twirp") { + return (req, resp, next) => { + this.rewrite(req, resp, prefix).then(() => next()).catch((e) => { + if (e instanceof errors_1.TwirpError) { + if (e.code !== errors_1.TwirpErrorCode.NotFound) { + server_1.writeError(resp, e); + } else { + next(); + } + } }); - if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { - return void 0; - } - if (options === null || options === void 0 ? void 0 : options.lookupOnly) { - core2.info("Lookup only - skipping download"); - return cacheEntry.cacheKey; - } - archivePath = path2.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); - core2.debug(`Archive Path: ${archivePath}`); - yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); - if (core2.isDebug()) { - yield (0, tar_1.listTar)(archivePath, compressionMethod); - } - const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core2.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); - yield (0, tar_1.extractTar)(archivePath, compressionMethod); - core2.info("Cache restored successfully"); - return cacheEntry.cacheKey; - } catch (error) { - const typedError = error; - if (typedError.name === ValidationError.name) { - throw error; - } else { - core2.warning(`Failed to restore: ${error.message}`); + }; + } + /** + * Rewrite an incoming request to a Twirp compliant request + * @param req + * @param resp + * @param prefix + */ + rewrite(req, resp, prefix = "/twirp") { + return __awaiter3(this, void 0, void 0, function* () { + const [match, route] = this.matchRoute(req); + const body = yield this.prepareTwirpBody(req, match, route); + const twirpUrl = `${prefix}/${route.packageName}.${route.serviceName}/${route.methodName}`; + req.url = twirpUrl; + req.originalUrl = twirpUrl; + req.method = "POST"; + req.headers["content-type"] = "application/json"; + req.rawBody = Buffer.from(JSON.stringify(body)); + if (route.responseBodyKey) { + const endFn = resp.end.bind(resp); + resp.end = function(chunk) { + if (resp.statusCode === 200) { + endFn(`{ "${route.responseBodyKey}": ${chunk} }`); + } else { + endFn(chunk); + } + }; } - } finally { + }); + } + /** + * Create a reverse proxy handler to + * proxy http requests to Twirp Compliant handlers + * @param httpClientOption + */ + reverseProxy(httpClientOption) { + const client = http_client_1.NodeHttpRPC(httpClientOption); + return (req, res) => __awaiter3(this, void 0, void 0, function* () { try { - yield utils.unlinkFile(archivePath); - } catch (error) { - core2.debug(`Failed to delete archive: ${error}`); - } - } - return void 0; - }); - } - __name(restoreCache, "restoreCache"); - exports2.restoreCache = restoreCache; - function saveCache(paths, key, 
options, enableCrossOsArchive = false) { - var _a, _b, _c, _d, _e; - return __awaiter3(this, void 0, void 0, function* () { - checkPaths(paths); - checkKey(key); - const compressionMethod = yield utils.getCompressionMethod(); - let cacheId = -1; - const cachePaths = yield utils.resolvePaths(paths); - core2.debug("Cache Paths:"); - core2.debug(`${JSON.stringify(cachePaths)}`); - if (cachePaths.length === 0) { - throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); - } - const archiveFolder = yield utils.createTempDirectory(); - const archivePath = path2.join(archiveFolder, utils.getCacheFileName(compressionMethod)); - core2.debug(`Archive Path: ${archivePath}`); - try { - yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); - if (core2.isDebug()) { - yield (0, tar_1.listTar)(archivePath, compressionMethod); - } - const fileSizeLimit = 10 * 1024 * 1024 * 1024; - const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); - core2.debug(`File Size: ${archiveFileSize}`); - if (archiveFileSize > fileSizeLimit && !utils.isGhes()) { - throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); + const [match, route] = this.matchRoute(req); + const body = yield this.prepareTwirpBody(req, match, route); + const response = yield client.request(`${route.packageName}.${route.serviceName}`, route.methodName, "application/json", body); + res.statusCode = 200; + res.setHeader("content-type", "application/json"); + let jsonResponse; + if (route.responseBodyKey) { + jsonResponse = JSON.stringify({ [route.responseBodyKey]: response }); + } else { + jsonResponse = JSON.stringify(response); + } + res.end(jsonResponse); + } catch (e) { + server_1.writeError(res, e); } - core2.debug("Reserving Cache"); - const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, { - compressionMethod, - enableCrossOsArchive, - cacheSize: archiveFileSize - }); - if ((_a = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _a === void 0 ? void 0 : _a.cacheId) { - cacheId = (_b = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _b === void 0 ? void 0 : _b.cacheId; - } else if ((reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.statusCode) === 400) { - throw new Error((_d = (_c = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _c === void 0 ? void 0 : _c.message) !== null && _d !== void 0 ? _d : `Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.`); - } else { - throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? 
void 0 : _e.message}`); + }); + } + /** + * Prepares twirp body requests using http.google.annotions + * compliant spec + * + * @param req + * @param match + * @param route + * @protected + */ + prepareTwirpBody(req, match, route) { + return __awaiter3(this, void 0, void 0, function* () { + const _a = match.params, { query_string } = _a, params = __rest2(_a, ["query_string"]); + let requestBody = Object.assign({}, params); + if (query_string && route.bodyKey !== "*") { + const queryParams = this.parseQueryString(query_string); + requestBody = Object.assign(Object.assign({}, queryParams), requestBody); } - core2.debug(`Saving Cache (ID: ${cacheId})`); - yield cacheHttpClient.saveCache(cacheId, archivePath, options); - } catch (error) { - const typedError = error; - if (typedError.name === ValidationError.name) { - throw error; - } else if (typedError.name === ReserveCacheError.name) { - core2.info(`Failed to save: ${typedError.message}`); - } else { - core2.warning(`Failed to save: ${typedError.message}`); + let body = {}; + if (route.bodyKey) { + const data = yield request_1.getRequestData(req); + try { + const jsonBody = JSON.parse(data.toString() || "{}"); + if (route.bodyKey === "*") { + body = jsonBody; + } else { + body[route.bodyKey] = jsonBody; + } + } catch (e) { + const msg = "the json request could not be decoded"; + throw new errors_1.TwirpError(errors_1.TwirpErrorCode.Malformed, msg).withCause(e, true); + } } - } finally { - try { - yield utils.unlinkFile(archivePath); - } catch (error) { - core2.debug(`Failed to delete archive: ${error}`); + return Object.assign(Object.assign({}, body), requestBody); + }); + } + /** + * Matches a route + * @param req + */ + matchRoute(req) { + var _a; + const httpMethod = (_a = req.method) === null || _a === void 0 ? 
void 0 : _a.toLowerCase(); + if (!httpMethod) { + throw new errors_1.BadRouteError(`method not allowed`, req.method || "", req.url || ""); + } + const routes = this.routes[httpMethod]; + for (const route of routes) { + const match = route.matcher(req.url || "/"); + if (match) { + return [match, route]; } } - return cacheId; - }); - } - __name(saveCache, "saveCache"); - exports2.saveCache = saveCache; + throw new errors_1.NotFoundError(`url ${req.url} not found`); + } + /** + * Parse query string + * @param queryString + */ + parseQueryString(queryString) { + const queryParams = querystring_1.parse(queryString.replace("?", "")); + return dotObject.object(queryParams); + } + }; + exports2.Gateway = Gateway; } }); -// ../node_modules/@actions/tool-cache/node_modules/semver/semver.js -var require_semver2 = __commonJS({ - "../node_modules/@actions/tool-cache/node_modules/semver/semver.js"(exports2, module2) { - exports2 = module2.exports = SemVer; - var debug; - if (typeof process === "object" && process.env && process.env.NODE_DEBUG && /\bsemver\b/i.test(process.env.NODE_DEBUG)) { - debug = /* @__PURE__ */ __name(function() { - var args = Array.prototype.slice.call(arguments, 0); - args.unshift("SEMVER"); - console.log.apply(console, args); - }, "debug"); - } else { - debug = /* @__PURE__ */ __name(function() { - }, "debug"); - } - exports2.SEMVER_SPEC_VERSION = "2.0.0"; - var MAX_LENGTH = 256; - var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || /* istanbul ignore next */ - 9007199254740991; - var MAX_SAFE_COMPONENT_LENGTH = 16; - var MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6; - var re2 = exports2.re = []; - var safeRe = exports2.safeRe = []; - var src = exports2.src = []; - var t = exports2.tokens = {}; - var R = 0; - function tok(n) { - t[n] = R++; - } - __name(tok, "tok"); - var LETTERDASHNUMBER = "[a-zA-Z0-9-]"; - var safeRegexReplacements = [ - ["\\s", 1], - ["\\d", MAX_LENGTH], - [LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH] - ]; - function makeSafeRe(value) { - for (var i2 = 0; i2 < safeRegexReplacements.length; i2++) { - var token = safeRegexReplacements[i2][0]; - var max = safeRegexReplacements[i2][1]; - value = value.split(token + "*").join(token + "{0," + max + "}").split(token + "+").join(token + "{1," + max + "}"); +// ../node_modules/twirp-ts/build/twirp/index.js +var require_twirp = __commonJS({ + "../node_modules/twirp-ts/build/twirp/index.js"(exports2) { + "use strict"; + var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? 
function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: /* @__PURE__ */ __name(function() { + return m[k]; + }, "get") }); + } : function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; + }); + var __exportStar2 = exports2 && exports2.__exportStar || function(m, exports3) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports3, p)) __createBinding3(exports3, m, p); + }; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.TwirpContentType = void 0; + __exportStar2(require_context2(), exports2); + __exportStar2(require_server(), exports2); + __exportStar2(require_interceptors(), exports2); + __exportStar2(require_hooks(), exports2); + __exportStar2(require_errors3(), exports2); + __exportStar2(require_gateway(), exports2); + __exportStar2(require_http_client(), exports2); + var request_1 = require_request3(); + Object.defineProperty(exports2, "TwirpContentType", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return request_1.TwirpContentType; + }, "get") }); + } +}); + +// ../node_modules/@protobuf-ts/runtime/build/commonjs/json-typings.js +var require_json_typings = __commonJS({ + "../node_modules/@protobuf-ts/runtime/build/commonjs/json-typings.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.isJsonObject = exports2.typeofJsonValue = void 0; + function typeofJsonValue(value) { + let t = typeof value; + if (t == "object") { + if (Array.isArray(value)) + return "array"; + if (value === null) + return "null"; } - return value; + return t; } - __name(makeSafeRe, "makeSafeRe"); - tok("NUMERICIDENTIFIER"); - src[t.NUMERICIDENTIFIER] = "0|[1-9]\\d*"; - tok("NUMERICIDENTIFIERLOOSE"); - src[t.NUMERICIDENTIFIERLOOSE] = "\\d+"; - tok("NONNUMERICIDENTIFIER"); - src[t.NONNUMERICIDENTIFIER] = "\\d*[a-zA-Z-]" + LETTERDASHNUMBER + "*"; - tok("MAINVERSION"); - src[t.MAINVERSION] = "(" + src[t.NUMERICIDENTIFIER] + ")\\.(" + src[t.NUMERICIDENTIFIER] + ")\\.(" + src[t.NUMERICIDENTIFIER] + ")"; - tok("MAINVERSIONLOOSE"); - src[t.MAINVERSIONLOOSE] = "(" + src[t.NUMERICIDENTIFIERLOOSE] + ")\\.(" + src[t.NUMERICIDENTIFIERLOOSE] + ")\\.(" + src[t.NUMERICIDENTIFIERLOOSE] + ")"; - tok("PRERELEASEIDENTIFIER"); - src[t.PRERELEASEIDENTIFIER] = "(?:" + src[t.NUMERICIDENTIFIER] + "|" + src[t.NONNUMERICIDENTIFIER] + ")"; - tok("PRERELEASEIDENTIFIERLOOSE"); - src[t.PRERELEASEIDENTIFIERLOOSE] = "(?:" + src[t.NUMERICIDENTIFIERLOOSE] + "|" + src[t.NONNUMERICIDENTIFIER] + ")"; - tok("PRERELEASE"); - src[t.PRERELEASE] = "(?:-(" + src[t.PRERELEASEIDENTIFIER] + "(?:\\." + src[t.PRERELEASEIDENTIFIER] + ")*))"; - tok("PRERELEASELOOSE"); - src[t.PRERELEASELOOSE] = "(?:-?(" + src[t.PRERELEASEIDENTIFIERLOOSE] + "(?:\\." + src[t.PRERELEASEIDENTIFIERLOOSE] + ")*))"; - tok("BUILDIDENTIFIER"); - src[t.BUILDIDENTIFIER] = LETTERDASHNUMBER + "+"; - tok("BUILD"); - src[t.BUILD] = "(?:\\+(" + src[t.BUILDIDENTIFIER] + "(?:\\." + src[t.BUILDIDENTIFIER] + ")*))"; - tok("FULL"); - tok("FULLPLAIN"); - src[t.FULLPLAIN] = "v?" + src[t.MAINVERSION] + src[t.PRERELEASE] + "?" + src[t.BUILD] + "?"; - src[t.FULL] = "^" + src[t.FULLPLAIN] + "$"; - tok("LOOSEPLAIN"); - src[t.LOOSEPLAIN] = "[v=\\s]*" + src[t.MAINVERSIONLOOSE] + src[t.PRERELEASELOOSE] + "?" 
+ src[t.BUILD] + "?"; - tok("LOOSE"); - src[t.LOOSE] = "^" + src[t.LOOSEPLAIN] + "$"; - tok("GTLT"); - src[t.GTLT] = "((?:<|>)?=?)"; - tok("XRANGEIDENTIFIERLOOSE"); - src[t.XRANGEIDENTIFIERLOOSE] = src[t.NUMERICIDENTIFIERLOOSE] + "|x|X|\\*"; - tok("XRANGEIDENTIFIER"); - src[t.XRANGEIDENTIFIER] = src[t.NUMERICIDENTIFIER] + "|x|X|\\*"; - tok("XRANGEPLAIN"); - src[t.XRANGEPLAIN] = "[v=\\s]*(" + src[t.XRANGEIDENTIFIER] + ")(?:\\.(" + src[t.XRANGEIDENTIFIER] + ")(?:\\.(" + src[t.XRANGEIDENTIFIER] + ")(?:" + src[t.PRERELEASE] + ")?" + src[t.BUILD] + "?)?)?"; - tok("XRANGEPLAINLOOSE"); - src[t.XRANGEPLAINLOOSE] = "[v=\\s]*(" + src[t.XRANGEIDENTIFIERLOOSE] + ")(?:\\.(" + src[t.XRANGEIDENTIFIERLOOSE] + ")(?:\\.(" + src[t.XRANGEIDENTIFIERLOOSE] + ")(?:" + src[t.PRERELEASELOOSE] + ")?" + src[t.BUILD] + "?)?)?"; - tok("XRANGE"); - src[t.XRANGE] = "^" + src[t.GTLT] + "\\s*" + src[t.XRANGEPLAIN] + "$"; - tok("XRANGELOOSE"); - src[t.XRANGELOOSE] = "^" + src[t.GTLT] + "\\s*" + src[t.XRANGEPLAINLOOSE] + "$"; - tok("COERCE"); - src[t.COERCE] = "(^|[^\\d])(\\d{1," + MAX_SAFE_COMPONENT_LENGTH + "})(?:\\.(\\d{1," + MAX_SAFE_COMPONENT_LENGTH + "}))?(?:\\.(\\d{1," + MAX_SAFE_COMPONENT_LENGTH + "}))?(?:$|[^\\d])"; - tok("COERCERTL"); - re2[t.COERCERTL] = new RegExp(src[t.COERCE], "g"); - safeRe[t.COERCERTL] = new RegExp(makeSafeRe(src[t.COERCE]), "g"); - tok("LONETILDE"); - src[t.LONETILDE] = "(?:~>?)"; - tok("TILDETRIM"); - src[t.TILDETRIM] = "(\\s*)" + src[t.LONETILDE] + "\\s+"; - re2[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], "g"); - safeRe[t.TILDETRIM] = new RegExp(makeSafeRe(src[t.TILDETRIM]), "g"); - var tildeTrimReplace = "$1~"; - tok("TILDE"); - src[t.TILDE] = "^" + src[t.LONETILDE] + src[t.XRANGEPLAIN] + "$"; - tok("TILDELOOSE"); - src[t.TILDELOOSE] = "^" + src[t.LONETILDE] + src[t.XRANGEPLAINLOOSE] + "$"; - tok("LONECARET"); - src[t.LONECARET] = "(?:\\^)"; - tok("CARETTRIM"); - src[t.CARETTRIM] = "(\\s*)" + src[t.LONECARET] + "\\s+"; - re2[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], "g"); - safeRe[t.CARETTRIM] = new RegExp(makeSafeRe(src[t.CARETTRIM]), "g"); - var caretTrimReplace = "$1^"; - tok("CARET"); - src[t.CARET] = "^" + src[t.LONECARET] + src[t.XRANGEPLAIN] + "$"; - tok("CARETLOOSE"); - src[t.CARETLOOSE] = "^" + src[t.LONECARET] + src[t.XRANGEPLAINLOOSE] + "$"; - tok("COMPARATORLOOSE"); - src[t.COMPARATORLOOSE] = "^" + src[t.GTLT] + "\\s*(" + src[t.LOOSEPLAIN] + ")$|^$"; - tok("COMPARATOR"); - src[t.COMPARATOR] = "^" + src[t.GTLT] + "\\s*(" + src[t.FULLPLAIN] + ")$|^$"; - tok("COMPARATORTRIM"); - src[t.COMPARATORTRIM] = "(\\s*)" + src[t.GTLT] + "\\s*(" + src[t.LOOSEPLAIN] + "|" + src[t.XRANGEPLAIN] + ")"; - re2[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], "g"); - safeRe[t.COMPARATORTRIM] = new RegExp(makeSafeRe(src[t.COMPARATORTRIM]), "g"); - var comparatorTrimReplace = "$1$2$3"; - tok("HYPHENRANGE"); - src[t.HYPHENRANGE] = "^\\s*(" + src[t.XRANGEPLAIN] + ")\\s+-\\s+(" + src[t.XRANGEPLAIN] + ")\\s*$"; - tok("HYPHENRANGELOOSE"); - src[t.HYPHENRANGELOOSE] = "^\\s*(" + src[t.XRANGEPLAINLOOSE] + ")\\s+-\\s+(" + src[t.XRANGEPLAINLOOSE] + ")\\s*$"; - tok("STAR"); - src[t.STAR] = "(<|>)?=?\\s*\\*"; - for (i = 0; i < R; i++) { - debug(i, src[i]); - if (!re2[i]) { - re2[i] = new RegExp(src[i]); - safeRe[i] = new RegExp(makeSafeRe(src[i])); - } + __name(typeofJsonValue, "typeofJsonValue"); + exports2.typeofJsonValue = typeofJsonValue; + function isJsonObject(value) { + return value !== null && typeof value == "object" && !Array.isArray(value); } - var i; - exports2.parse = parse3; - function 
parse3(version4, options) { - if (!options || typeof options !== "object") { - options = { - loose: !!options, - includePrerelease: false - }; - } - if (version4 instanceof SemVer) { - return version4; + __name(isJsonObject, "isJsonObject"); + exports2.isJsonObject = isJsonObject; + } +}); + +// ../node_modules/@protobuf-ts/runtime/build/commonjs/base64.js +var require_base642 = __commonJS({ + "../node_modules/@protobuf-ts/runtime/build/commonjs/base64.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.base64encode = exports2.base64decode = void 0; + var encTable = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".split(""); + var decTable = []; + for (let i = 0; i < encTable.length; i++) + decTable[encTable[i].charCodeAt(0)] = i; + decTable["-".charCodeAt(0)] = encTable.indexOf("+"); + decTable["_".charCodeAt(0)] = encTable.indexOf("/"); + function base64decode(base64Str) { + let es = base64Str.length * 3 / 4; + if (base64Str[base64Str.length - 2] == "=") + es -= 2; + else if (base64Str[base64Str.length - 1] == "=") + es -= 1; + let bytes = new Uint8Array(es), bytePos = 0, groupPos = 0, b, p = 0; + for (let i = 0; i < base64Str.length; i++) { + b = decTable[base64Str.charCodeAt(i)]; + if (b === void 0) { + switch (base64Str[i]) { + case "=": + groupPos = 0; + // reset state when padding found + case "\n": + case "\r": + case " ": + case " ": + continue; + // skip white-space, and padding + default: + throw Error(`invalid base64 string.`); + } + } + switch (groupPos) { + case 0: + p = b; + groupPos = 1; + break; + case 1: + bytes[bytePos++] = p << 2 | (b & 48) >> 4; + p = b; + groupPos = 2; + break; + case 2: + bytes[bytePos++] = (p & 15) << 4 | (b & 60) >> 2; + p = b; + groupPos = 3; + break; + case 3: + bytes[bytePos++] = (p & 3) << 6 | b; + groupPos = 0; + break; + } } - if (typeof version4 !== "string") { - return null; + if (groupPos == 1) + throw Error(`invalid base64 string.`); + return bytes.subarray(0, bytePos); + } + __name(base64decode, "base64decode"); + exports2.base64decode = base64decode; + function base64encode(bytes) { + let base64 = "", groupPos = 0, b, p = 0; + for (let i = 0; i < bytes.length; i++) { + b = bytes[i]; + switch (groupPos) { + case 0: + base64 += encTable[b >> 2]; + p = (b & 3) << 4; + groupPos = 1; + break; + case 1: + base64 += encTable[p | b >> 4]; + p = (b & 15) << 2; + groupPos = 2; + break; + case 2: + base64 += encTable[p | b >> 6]; + base64 += encTable[b & 63]; + groupPos = 0; + break; + } } - if (version4.length > MAX_LENGTH) { - return null; + if (groupPos) { + base64 += encTable[p]; + base64 += "="; + if (groupPos == 1) + base64 += "="; } - var r = options.loose ? 
safeRe[t.LOOSE] : safeRe[t.FULL]; - if (!r.test(version4)) { - return null; + return base64; + } + __name(base64encode, "base64encode"); + exports2.base64encode = base64encode; + } +}); + +// ../node_modules/@protobuf-ts/runtime/build/commonjs/protobufjs-utf8.js +var require_protobufjs_utf8 = __commonJS({ + "../node_modules/@protobuf-ts/runtime/build/commonjs/protobufjs-utf8.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.utf8read = void 0; + var fromCharCodes = /* @__PURE__ */ __name((chunk) => String.fromCharCode.apply(String, chunk), "fromCharCodes"); + function utf8read(bytes) { + if (bytes.length < 1) + return ""; + let pos = 0, parts = [], chunk = [], i = 0, t; + let len = bytes.length; + while (pos < len) { + t = bytes[pos++]; + if (t < 128) + chunk[i++] = t; + else if (t > 191 && t < 224) + chunk[i++] = (t & 31) << 6 | bytes[pos++] & 63; + else if (t > 239 && t < 365) { + t = ((t & 7) << 18 | (bytes[pos++] & 63) << 12 | (bytes[pos++] & 63) << 6 | bytes[pos++] & 63) - 65536; + chunk[i++] = 55296 + (t >> 10); + chunk[i++] = 56320 + (t & 1023); + } else + chunk[i++] = (t & 15) << 12 | (bytes[pos++] & 63) << 6 | bytes[pos++] & 63; + if (i > 8191) { + parts.push(fromCharCodes(chunk)); + i = 0; + } } - try { - return new SemVer(version4, options); - } catch (er) { - return null; + if (parts.length) { + if (i) + parts.push(fromCharCodes(chunk.slice(0, i))); + return parts.join(""); } + return fromCharCodes(chunk.slice(0, i)); } - __name(parse3, "parse"); - exports2.valid = valid; - function valid(version4, options) { - var v = parse3(version4, options); - return v ? v.version : null; - } - __name(valid, "valid"); - exports2.clean = clean; - function clean(version4, options) { - var s = parse3(version4.trim().replace(/^[=v]+/, ""), options); - return s ? s.version : null; + __name(utf8read, "utf8read"); + exports2.utf8read = utf8read; + } +}); + +// ../node_modules/@protobuf-ts/runtime/build/commonjs/binary-format-contract.js +var require_binary_format_contract = __commonJS({ + "../node_modules/@protobuf-ts/runtime/build/commonjs/binary-format-contract.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.WireType = exports2.mergeBinaryOptions = exports2.UnknownFieldHandler = void 0; + var UnknownFieldHandler; + (function(UnknownFieldHandler2) { + UnknownFieldHandler2.symbol = Symbol.for("protobuf-ts/unknown"); + UnknownFieldHandler2.onRead = (typeName, message, fieldNo, wireType, data) => { + let container = is(message) ? message[UnknownFieldHandler2.symbol] : message[UnknownFieldHandler2.symbol] = []; + container.push({ no: fieldNo, wireType, data }); + }; + UnknownFieldHandler2.onWrite = (typeName, message, writer) => { + for (let { no, wireType, data } of UnknownFieldHandler2.list(message)) + writer.tag(no, wireType).raw(data); + }; + UnknownFieldHandler2.list = (message, fieldNo) => { + if (is(message)) { + let all = message[UnknownFieldHandler2.symbol]; + return fieldNo ? 
all.filter((uf) => uf.no == fieldNo) : all; + } + return []; + }; + UnknownFieldHandler2.last = (message, fieldNo) => UnknownFieldHandler2.list(message, fieldNo).slice(-1)[0]; + const is = /* @__PURE__ */ __name((message) => message && Array.isArray(message[UnknownFieldHandler2.symbol]), "is"); + })(UnknownFieldHandler = exports2.UnknownFieldHandler || (exports2.UnknownFieldHandler = {})); + function mergeBinaryOptions(a, b) { + return Object.assign(Object.assign({}, a), b); } - __name(clean, "clean"); - exports2.SemVer = SemVer; - function SemVer(version4, options) { - if (!options || typeof options !== "object") { - options = { - loose: !!options, - includePrerelease: false - }; - } - if (version4 instanceof SemVer) { - if (version4.loose === options.loose) { - return version4; - } else { - version4 = version4.version; + __name(mergeBinaryOptions, "mergeBinaryOptions"); + exports2.mergeBinaryOptions = mergeBinaryOptions; + var WireType; + (function(WireType2) { + WireType2[WireType2["Varint"] = 0] = "Varint"; + WireType2[WireType2["Bit64"] = 1] = "Bit64"; + WireType2[WireType2["LengthDelimited"] = 2] = "LengthDelimited"; + WireType2[WireType2["StartGroup"] = 3] = "StartGroup"; + WireType2[WireType2["EndGroup"] = 4] = "EndGroup"; + WireType2[WireType2["Bit32"] = 5] = "Bit32"; + })(WireType = exports2.WireType || (exports2.WireType = {})); + } +}); + +// ../node_modules/@protobuf-ts/runtime/build/commonjs/goog-varint.js +var require_goog_varint = __commonJS({ + "../node_modules/@protobuf-ts/runtime/build/commonjs/goog-varint.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.varint32read = exports2.varint32write = exports2.int64toString = exports2.int64fromString = exports2.varint64write = exports2.varint64read = void 0; + function varint64read() { + let lowBits = 0; + let highBits = 0; + for (let shift = 0; shift < 28; shift += 7) { + let b = this.buf[this.pos++]; + lowBits |= (b & 127) << shift; + if ((b & 128) == 0) { + this.assertBounds(); + return [lowBits, highBits]; } - } else if (typeof version4 !== "string") { - throw new TypeError("Invalid Version: " + version4); - } - if (version4.length > MAX_LENGTH) { - throw new TypeError("version is longer than " + MAX_LENGTH + " characters"); - } - if (!(this instanceof SemVer)) { - return new SemVer(version4, options); - } - debug("SemVer", version4, options); - this.options = options; - this.loose = !!options.loose; - var m = version4.trim().match(options.loose ? 
safeRe[t.LOOSE] : safeRe[t.FULL]); - if (!m) { - throw new TypeError("Invalid Version: " + version4); - } - this.raw = version4; - this.major = +m[1]; - this.minor = +m[2]; - this.patch = +m[3]; - if (this.major > MAX_SAFE_INTEGER || this.major < 0) { - throw new TypeError("Invalid major version"); } - if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) { - throw new TypeError("Invalid minor version"); - } - if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) { - throw new TypeError("Invalid patch version"); + let middleByte = this.buf[this.pos++]; + lowBits |= (middleByte & 15) << 28; + highBits = (middleByte & 112) >> 4; + if ((middleByte & 128) == 0) { + this.assertBounds(); + return [lowBits, highBits]; } - if (!m[4]) { - this.prerelease = []; - } else { - this.prerelease = m[4].split(".").map(function(id) { - if (/^[0-9]+$/.test(id)) { - var num = +id; - if (num >= 0 && num < MAX_SAFE_INTEGER) { - return num; - } - } - return id; - }); + for (let shift = 3; shift <= 31; shift += 7) { + let b = this.buf[this.pos++]; + highBits |= (b & 127) << shift; + if ((b & 128) == 0) { + this.assertBounds(); + return [lowBits, highBits]; + } } - this.build = m[5] ? m[5].split(".") : []; - this.format(); + throw new Error("invalid varint"); } - __name(SemVer, "SemVer"); - SemVer.prototype.format = function() { - this.version = this.major + "." + this.minor + "." + this.patch; - if (this.prerelease.length) { - this.version += "-" + this.prerelease.join("."); - } - return this.version; - }; - SemVer.prototype.toString = function() { - return this.version; - }; - SemVer.prototype.compare = function(other) { - debug("SemVer.compare", this.version, this.options, other); - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options); - } - return this.compareMain(other) || this.comparePre(other); - }; - SemVer.prototype.compareMain = function(other) { - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options); - } - return compareIdentifiers(this.major, other.major) || compareIdentifiers(this.minor, other.minor) || compareIdentifiers(this.patch, other.patch); - }; - SemVer.prototype.comparePre = function(other) { - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options); + __name(varint64read, "varint64read"); + exports2.varint64read = varint64read; + function varint64write(lo, hi, bytes) { + for (let i = 0; i < 28; i = i + 7) { + const shift = lo >>> i; + const hasNext = !(shift >>> 7 == 0 && hi == 0); + const byte = (hasNext ? shift | 128 : shift) & 255; + bytes.push(byte); + if (!hasNext) { + return; + } } - if (this.prerelease.length && !other.prerelease.length) { - return -1; - } else if (!this.prerelease.length && other.prerelease.length) { - return 1; - } else if (!this.prerelease.length && !other.prerelease.length) { - return 0; + const splitBits = lo >>> 28 & 15 | (hi & 7) << 4; + const hasMoreBits = !(hi >> 3 == 0); + bytes.push((hasMoreBits ? splitBits | 128 : splitBits) & 255); + if (!hasMoreBits) { + return; } - var i2 = 0; - do { - var a = this.prerelease[i2]; - var b = other.prerelease[i2]; - debug("prerelease compare", i2, a, b); - if (a === void 0 && b === void 0) { - return 0; - } else if (b === void 0) { - return 1; - } else if (a === void 0) { - return -1; - } else if (a === b) { - continue; - } else { - return compareIdentifiers(a, b); + for (let i = 3; i < 31; i = i + 7) { + const shift = hi >>> i; + const hasNext = !(shift >>> 7 == 0); + const byte = (hasNext ? 
shift | 128 : shift) & 255; + bytes.push(byte); + if (!hasNext) { + return; } - } while (++i2); - }; - SemVer.prototype.compareBuild = function(other) { - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options); } - var i2 = 0; - do { - var a = this.build[i2]; - var b = other.build[i2]; - debug("prerelease compare", i2, a, b); - if (a === void 0 && b === void 0) { - return 0; - } else if (b === void 0) { - return 1; - } else if (a === void 0) { - return -1; - } else if (a === b) { - continue; - } else { - return compareIdentifiers(a, b); + bytes.push(hi >>> 31 & 1); + } + __name(varint64write, "varint64write"); + exports2.varint64write = varint64write; + var TWO_PWR_32_DBL = (1 << 16) * (1 << 16); + function int64fromString(dec) { + let minus = dec[0] == "-"; + if (minus) + dec = dec.slice(1); + const base = 1e6; + let lowBits = 0; + let highBits = 0; + function add1e6digit(begin, end) { + const digit1e6 = Number(dec.slice(begin, end)); + highBits *= base; + lowBits = lowBits * base + digit1e6; + if (lowBits >= TWO_PWR_32_DBL) { + highBits = highBits + (lowBits / TWO_PWR_32_DBL | 0); + lowBits = lowBits % TWO_PWR_32_DBL; } - } while (++i2); - }; - SemVer.prototype.inc = function(release, identifier) { - switch (release) { - case "premajor": - this.prerelease.length = 0; - this.patch = 0; - this.minor = 0; - this.major++; - this.inc("pre", identifier); - break; - case "preminor": - this.prerelease.length = 0; - this.patch = 0; - this.minor++; - this.inc("pre", identifier); - break; - case "prepatch": - this.prerelease.length = 0; - this.inc("patch", identifier); - this.inc("pre", identifier); - break; - // If the input is a non-prerelease version, this acts the same as - // prepatch. - case "prerelease": - if (this.prerelease.length === 0) { - this.inc("patch", identifier); - } - this.inc("pre", identifier); - break; - case "major": - if (this.minor !== 0 || this.patch !== 0 || this.prerelease.length === 0) { - this.major++; - } - this.minor = 0; - this.patch = 0; - this.prerelease = []; - break; - case "minor": - if (this.patch !== 0 || this.prerelease.length === 0) { - this.minor++; - } - this.patch = 0; - this.prerelease = []; - break; - case "patch": - if (this.prerelease.length === 0) { - this.patch++; - } - this.prerelease = []; - break; - // This probably shouldn't be used publicly. - // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction. 
- case "pre": - if (this.prerelease.length === 0) { - this.prerelease = [0]; - } else { - var i2 = this.prerelease.length; - while (--i2 >= 0) { - if (typeof this.prerelease[i2] === "number") { - this.prerelease[i2]++; - i2 = -2; - } - } - if (i2 === -1) { - this.prerelease.push(0); - } - } - if (identifier) { - if (this.prerelease[0] === identifier) { - if (isNaN(this.prerelease[1])) { - this.prerelease = [identifier, 0]; - } - } else { - this.prerelease = [identifier, 0]; - } - } - break; - default: - throw new Error("invalid increment argument: " + release); } - this.format(); - this.raw = this.version; - return this; - }; - exports2.inc = inc; - function inc(version4, release, loose, identifier) { - if (typeof loose === "string") { - identifier = loose; - loose = void 0; + __name(add1e6digit, "add1e6digit"); + add1e6digit(-24, -18); + add1e6digit(-18, -12); + add1e6digit(-12, -6); + add1e6digit(-6); + return [minus, lowBits, highBits]; + } + __name(int64fromString, "int64fromString"); + exports2.int64fromString = int64fromString; + function int64toString(bitsLow, bitsHigh) { + if (bitsHigh >>> 0 <= 2097151) { + return "" + (TWO_PWR_32_DBL * bitsHigh + (bitsLow >>> 0)); + } + let low = bitsLow & 16777215; + let mid = (bitsLow >>> 24 | bitsHigh << 8) >>> 0 & 16777215; + let high = bitsHigh >> 16 & 65535; + let digitA = low + mid * 6777216 + high * 6710656; + let digitB = mid + high * 8147497; + let digitC = high * 2; + let base = 1e7; + if (digitA >= base) { + digitB += Math.floor(digitA / base); + digitA %= base; + } + if (digitB >= base) { + digitC += Math.floor(digitB / base); + digitB %= base; } - try { - return new SemVer(version4, loose).inc(release, identifier).version; - } catch (er) { - return null; + function decimalFrom1e7(digit1e7, needLeadingZeros) { + let partial = digit1e7 ? String(digit1e7) : ""; + if (needLeadingZeros) { + return "0000000".slice(partial.length) + partial; + } + return partial; } + __name(decimalFrom1e7, "decimalFrom1e7"); + return decimalFrom1e7( + digitC, + /*needLeadingZeros=*/ + 0 + ) + decimalFrom1e7( + digitB, + /*needLeadingZeros=*/ + digitC + ) + // If the final 1e7 digit didn't need leading zeros, we would have + // returned via the trivial code path at the top. 
+ decimalFrom1e7( + digitA, + /*needLeadingZeros=*/ + 1 + ); } - __name(inc, "inc"); - exports2.diff = diff; - function diff(version1, version22) { - if (eq(version1, version22)) { - return null; - } else { - var v13 = parse3(version1); - var v2 = parse3(version22); - var prefix = ""; - if (v13.prerelease.length || v2.prerelease.length) { - prefix = "pre"; - var defaultResult = "prerelease"; + __name(int64toString, "int64toString"); + exports2.int64toString = int64toString; + function varint32write(value, bytes) { + if (value >= 0) { + while (value > 127) { + bytes.push(value & 127 | 128); + value = value >>> 7; } - for (var key in v13) { - if (key === "major" || key === "minor" || key === "patch") { - if (v13[key] !== v2[key]) { - return prefix + key; - } - } + bytes.push(value); + } else { + for (let i = 0; i < 9; i++) { + bytes.push(value & 127 | 128); + value = value >> 7; } - return defaultResult; + bytes.push(1); } } - __name(diff, "diff"); - exports2.compareIdentifiers = compareIdentifiers; - var numeric = /^[0-9]+$/; - function compareIdentifiers(a, b) { - var anum = numeric.test(a); - var bnum = numeric.test(b); - if (anum && bnum) { - a = +a; - b = +b; + __name(varint32write, "varint32write"); + exports2.varint32write = varint32write; + function varint32read() { + let b = this.buf[this.pos++]; + let result = b & 127; + if ((b & 128) == 0) { + this.assertBounds(); + return result; } - return a === b ? 0 : anum && !bnum ? -1 : bnum && !anum ? 1 : a < b ? -1 : 1; - } - __name(compareIdentifiers, "compareIdentifiers"); - exports2.rcompareIdentifiers = rcompareIdentifiers; - function rcompareIdentifiers(a, b) { - return compareIdentifiers(b, a); - } - __name(rcompareIdentifiers, "rcompareIdentifiers"); - exports2.major = major2; - function major2(a, loose) { - return new SemVer(a, loose).major; - } - __name(major2, "major"); - exports2.minor = minor; - function minor(a, loose) { - return new SemVer(a, loose).minor; - } - __name(minor, "minor"); - exports2.patch = patch; - function patch(a, loose) { - return new SemVer(a, loose).patch; - } - __name(patch, "patch"); - exports2.compare = compare; - function compare(a, b, loose) { - return new SemVer(a, loose).compare(new SemVer(b, loose)); - } - __name(compare, "compare"); - exports2.compareLoose = compareLoose; - function compareLoose(a, b) { - return compare(a, b, true); - } - __name(compareLoose, "compareLoose"); - exports2.compareBuild = compareBuild; - function compareBuild(a, b, loose) { - var versionA = new SemVer(a, loose); - var versionB = new SemVer(b, loose); - return versionA.compare(versionB) || versionA.compareBuild(versionB); - } - __name(compareBuild, "compareBuild"); - exports2.rcompare = rcompare; - function rcompare(a, b, loose) { - return compare(b, a, loose); - } - __name(rcompare, "rcompare"); - exports2.sort = sort; - function sort(list, loose) { - return list.sort(function(a, b) { - return exports2.compareBuild(a, b, loose); - }); - } - __name(sort, "sort"); - exports2.rsort = rsort; - function rsort(list, loose) { - return list.sort(function(a, b) { - return exports2.compareBuild(b, a, loose); - }); - } - __name(rsort, "rsort"); - exports2.gt = gt; - function gt(a, b, loose) { - return compare(a, b, loose) > 0; - } - __name(gt, "gt"); - exports2.lt = lt; - function lt(a, b, loose) { - return compare(a, b, loose) < 0; - } - __name(lt, "lt"); - exports2.eq = eq; - function eq(a, b, loose) { - return compare(a, b, loose) === 0; - } - __name(eq, "eq"); - exports2.neq = neq; - function neq(a, b, loose) { - return 
compare(a, b, loose) !== 0; + b = this.buf[this.pos++]; + result |= (b & 127) << 7; + if ((b & 128) == 0) { + this.assertBounds(); + return result; + } + b = this.buf[this.pos++]; + result |= (b & 127) << 14; + if ((b & 128) == 0) { + this.assertBounds(); + return result; + } + b = this.buf[this.pos++]; + result |= (b & 127) << 21; + if ((b & 128) == 0) { + this.assertBounds(); + return result; + } + b = this.buf[this.pos++]; + result |= (b & 15) << 28; + for (let readBytes = 5; (b & 128) !== 0 && readBytes < 10; readBytes++) + b = this.buf[this.pos++]; + if ((b & 128) != 0) + throw new Error("invalid varint"); + this.assertBounds(); + return result >>> 0; } - __name(neq, "neq"); - exports2.gte = gte; - function gte(a, b, loose) { - return compare(a, b, loose) >= 0; + __name(varint32read, "varint32read"); + exports2.varint32read = varint32read; + } +}); + +// ../node_modules/@protobuf-ts/runtime/build/commonjs/pb-long.js +var require_pb_long = __commonJS({ + "../node_modules/@protobuf-ts/runtime/build/commonjs/pb-long.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.PbLong = exports2.PbULong = exports2.detectBi = void 0; + var goog_varint_1 = require_goog_varint(); + var BI; + function detectBi() { + const dv = new DataView(new ArrayBuffer(8)); + const ok = globalThis.BigInt !== void 0 && typeof dv.getBigInt64 === "function" && typeof dv.getBigUint64 === "function" && typeof dv.setBigInt64 === "function" && typeof dv.setBigUint64 === "function"; + BI = ok ? { + MIN: BigInt("-9223372036854775808"), + MAX: BigInt("9223372036854775807"), + UMIN: BigInt("0"), + UMAX: BigInt("18446744073709551615"), + C: BigInt, + V: dv + } : void 0; } - __name(gte, "gte"); - exports2.lte = lte; - function lte(a, b, loose) { - return compare(a, b, loose) <= 0; + __name(detectBi, "detectBi"); + exports2.detectBi = detectBi; + detectBi(); + function assertBi(bi) { + if (!bi) + throw new Error("BigInt unavailable, see https://github.com/timostamm/protobuf-ts/blob/v1.0.8/MANUAL.md#bigint-support"); } - __name(lte, "lte"); - exports2.cmp = cmp; - function cmp(a, op, b, loose) { - switch (op) { - case "===": - if (typeof a === "object") - a = a.version; - if (typeof b === "object") - b = b.version; - return a === b; - case "!==": - if (typeof a === "object") - a = a.version; - if (typeof b === "object") - b = b.version; - return a !== b; - case "": - case "=": - case "==": - return eq(a, b, loose); - case "!=": - return neq(a, b, loose); - case ">": - return gt(a, b, loose); - case ">=": - return gte(a, b, loose); - case "<": - return lt(a, b, loose); - case "<=": - return lte(a, b, loose); - default: - throw new TypeError("Invalid operator: " + op); + __name(assertBi, "assertBi"); + var RE_DECIMAL_STR = /^-?[0-9]+$/; + var TWO_PWR_32_DBL = 4294967296; + var HALF_2_PWR_32 = 2147483648; + var SharedPbLong = class { + static { + __name(this, "SharedPbLong"); } - } - __name(cmp, "cmp"); - exports2.Comparator = Comparator; - function Comparator(comp, options) { - if (!options || typeof options !== "object") { - options = { - loose: !!options, - includePrerelease: false - }; + /** + * Create a new instance with the given bits. + */ + constructor(lo, hi) { + this.lo = lo | 0; + this.hi = hi | 0; } - if (comp instanceof Comparator) { - if (comp.loose === !!options.loose) { - return comp; - } else { - comp = comp.value; - } + /** + * Is this instance equal to 0? 
+ */ + isZero() { + return this.lo == 0 && this.hi == 0; } - if (!(this instanceof Comparator)) { - return new Comparator(comp, options); + /** + * Convert to a native number. + */ + toNumber() { + let result = this.hi * TWO_PWR_32_DBL + (this.lo >>> 0); + if (!Number.isSafeInteger(result)) + throw new Error("cannot convert to safe number"); + return result; } - comp = comp.trim().split(/\s+/).join(" "); - debug("comparator", comp, options); - this.options = options; - this.loose = !!options.loose; - this.parse(comp); - if (this.semver === ANY) { - this.value = ""; - } else { - this.value = this.operator + this.semver.version; + }; + var PbULong = class _PbULong extends SharedPbLong { + static { + __name(this, "PbULong"); } - debug("comp", this); - } - __name(Comparator, "Comparator"); - var ANY = {}; - Comparator.prototype.parse = function(comp) { - var r = this.options.loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR]; - var m = comp.match(r); - if (!m) { - throw new TypeError("Invalid comparator: " + comp); + /** + * Create instance from a `string`, `number` or `bigint`. + */ + static from(value) { + if (BI) + switch (typeof value) { + case "string": + if (value == "0") + return this.ZERO; + if (value == "") + throw new Error("string is no integer"); + value = BI.C(value); + case "number": + if (value === 0) + return this.ZERO; + value = BI.C(value); + case "bigint": + if (!value) + return this.ZERO; + if (value < BI.UMIN) + throw new Error("signed value for ulong"); + if (value > BI.UMAX) + throw new Error("ulong too large"); + BI.V.setBigUint64(0, value, true); + return new _PbULong(BI.V.getInt32(0, true), BI.V.getInt32(4, true)); + } + else + switch (typeof value) { + case "string": + if (value == "0") + return this.ZERO; + value = value.trim(); + if (!RE_DECIMAL_STR.test(value)) + throw new Error("string is no integer"); + let [minus, lo, hi] = goog_varint_1.int64fromString(value); + if (minus) + throw new Error("signed value for ulong"); + return new _PbULong(lo, hi); + case "number": + if (value == 0) + return this.ZERO; + if (!Number.isSafeInteger(value)) + throw new Error("number is no integer"); + if (value < 0) + throw new Error("signed value for ulong"); + return new _PbULong(value, value / TWO_PWR_32_DBL); + } + throw new Error("unknown value " + typeof value); } - this.operator = m[1] !== void 0 ? m[1] : ""; - if (this.operator === "=") { - this.operator = ""; + /** + * Convert to decimal string. + */ + toString() { + return BI ? this.toBigInt().toString() : goog_varint_1.int64toString(this.lo, this.hi); } - if (!m[2]) { - this.semver = ANY; - } else { - this.semver = new SemVer(m[2], this.options.loose); + /** + * Convert to native bigint. + */ + toBigInt() { + assertBi(BI); + BI.V.setInt32(0, this.lo, true); + BI.V.setInt32(4, this.hi, true); + return BI.V.getBigUint64(0, true); } }; - Comparator.prototype.toString = function() { - return this.value; - }; - Comparator.prototype.test = function(version4) { - debug("Comparator.test", version4, this.options.loose); - if (this.semver === ANY || version4 === ANY) { - return true; + exports2.PbULong = PbULong; + PbULong.ZERO = new PbULong(0, 0); + var PbLong = class _PbLong extends SharedPbLong { + static { + __name(this, "PbLong"); } - if (typeof version4 === "string") { - try { - version4 = new SemVer(version4, this.options); - } catch (er) { - return false; - } + /** + * Create instance from a `string`, `number` or `bigint`. 
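// A worked example (a sketch, assuming access to the bundled PbULong above):
// from() splits the value into two 32-bit halves, and toNumber() recombines
// them as hi * 2^32 + unsigned lo.
const u = PbULong.from("4294967296"); // 2^32
u.lo;         // 0
u.hi;         // 1
u.toNumber(); // 4294967296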
+ */ + static from(value) { + if (BI) + switch (typeof value) { + case "string": + if (value == "0") + return this.ZERO; + if (value == "") + throw new Error("string is no integer"); + value = BI.C(value); + case "number": + if (value === 0) + return this.ZERO; + value = BI.C(value); + case "bigint": + if (!value) + return this.ZERO; + if (value < BI.MIN) + throw new Error("signed long too small"); + if (value > BI.MAX) + throw new Error("signed long too large"); + BI.V.setBigInt64(0, value, true); + return new _PbLong(BI.V.getInt32(0, true), BI.V.getInt32(4, true)); + } + else + switch (typeof value) { + case "string": + if (value == "0") + return this.ZERO; + value = value.trim(); + if (!RE_DECIMAL_STR.test(value)) + throw new Error("string is no integer"); + let [minus, lo, hi] = goog_varint_1.int64fromString(value); + if (minus) { + if (hi > HALF_2_PWR_32 || hi == HALF_2_PWR_32 && lo != 0) + throw new Error("signed long too small"); + } else if (hi >= HALF_2_PWR_32) + throw new Error("signed long too large"); + let pbl = new _PbLong(lo, hi); + return minus ? pbl.negate() : pbl; + case "number": + if (value == 0) + return this.ZERO; + if (!Number.isSafeInteger(value)) + throw new Error("number is no integer"); + return value > 0 ? new _PbLong(value, value / TWO_PWR_32_DBL) : new _PbLong(-value, -value / TWO_PWR_32_DBL).negate(); + } + throw new Error("unknown value " + typeof value); } - return cmp(version4, this.operator, this.semver, this.options); - }; - Comparator.prototype.intersects = function(comp, options) { - if (!(comp instanceof Comparator)) { - throw new TypeError("a Comparator is required"); + /** + * Do we have a minus sign? + */ + isNegative() { + return (this.hi & HALF_2_PWR_32) !== 0; } - if (!options || typeof options !== "object") { - options = { - loose: !!options, - includePrerelease: false - }; + /** + * Negate two's complement. + * Invert all the bits and add one to the result. + */ + negate() { + let hi = ~this.hi, lo = this.lo; + if (lo) + lo = ~lo + 1; + else + hi += 1; + return new _PbLong(lo, hi); } - var rangeTmp; - if (this.operator === "") { - if (this.value === "") { - return true; - } - rangeTmp = new Range(comp.value, options); - return satisfies(this.value, rangeTmp, options); - } else if (comp.operator === "") { - if (comp.value === "") { - return true; + /** + * Convert to decimal string. + */ + toString() { + if (BI) + return this.toBigInt().toString(); + if (this.isNegative()) { + let n = this.negate(); + return "-" + goog_varint_1.int64toString(n.lo, n.hi); } - rangeTmp = new Range(this.value, options); - return satisfies(comp.semver, rangeTmp, options); + return goog_varint_1.int64toString(this.lo, this.hi); + } + /** + * Convert to native bigint. 
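// Negative values are stored in two's complement across both halves. A small
// sketch using the bundled PbLong above:
const m = PbLong.from(-1);
(m.lo >>> 0).toString(16); // "ffffffff" (the hi half is all-ones too)
m.isNegative();            // true
m.negate().toNumber();     // 1 (invert all bits, add one)
m.toString();              // "-1"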
+ */ + toBigInt() { + assertBi(BI); + BI.V.setInt32(0, this.lo, true); + BI.V.setInt32(4, this.hi, true); + return BI.V.getBigInt64(0, true); } - var sameDirectionIncreasing = (this.operator === ">=" || this.operator === ">") && (comp.operator === ">=" || comp.operator === ">"); - var sameDirectionDecreasing = (this.operator === "<=" || this.operator === "<") && (comp.operator === "<=" || comp.operator === "<"); - var sameSemVer = this.semver.version === comp.semver.version; - var differentDirectionsInclusive = (this.operator === ">=" || this.operator === "<=") && (comp.operator === ">=" || comp.operator === "<="); - var oppositeDirectionsLessThan = cmp(this.semver, "<", comp.semver, options) && ((this.operator === ">=" || this.operator === ">") && (comp.operator === "<=" || comp.operator === "<")); - var oppositeDirectionsGreaterThan = cmp(this.semver, ">", comp.semver, options) && ((this.operator === "<=" || this.operator === "<") && (comp.operator === ">=" || comp.operator === ">")); - return sameDirectionIncreasing || sameDirectionDecreasing || sameSemVer && differentDirectionsInclusive || oppositeDirectionsLessThan || oppositeDirectionsGreaterThan; }; - exports2.Range = Range; - function Range(range, options) { - if (!options || typeof options !== "object") { - options = { - loose: !!options, - includePrerelease: false - }; + exports2.PbLong = PbLong; + PbLong.ZERO = new PbLong(0, 0); + } +}); + +// ../node_modules/@protobuf-ts/runtime/build/commonjs/binary-reader.js +var require_binary_reader = __commonJS({ + "../node_modules/@protobuf-ts/runtime/build/commonjs/binary-reader.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.BinaryReader = exports2.binaryReadOptions = void 0; + var binary_format_contract_1 = require_binary_format_contract(); + var pb_long_1 = require_pb_long(); + var goog_varint_1 = require_goog_varint(); + var defaultsRead = { + readUnknownField: true, + readerFactory: /* @__PURE__ */ __name((bytes) => new BinaryReader(bytes), "readerFactory") + }; + function binaryReadOptions(options) { + return options ? Object.assign(Object.assign({}, defaultsRead), options) : defaultsRead; + } + __name(binaryReadOptions, "binaryReadOptions"); + exports2.binaryReadOptions = binaryReadOptions; + var BinaryReader = class { + static { + __name(this, "BinaryReader"); } - if (range instanceof Range) { - if (range.loose === !!options.loose && range.includePrerelease === !!options.includePrerelease) { - return range; - } else { - return new Range(range.raw, options); + constructor(buf, textDecoder) { + this.varint64 = goog_varint_1.varint64read; + this.uint32 = goog_varint_1.varint32read; + this.buf = buf; + this.len = buf.length; + this.pos = 0; + this.view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength); + this.textDecoder = textDecoder !== null && textDecoder !== void 0 ? textDecoder : new TextDecoder("utf-8", { + fatal: true, + ignoreBOM: true + }); + } + /** + * Reads a tag - field number and wire type. + */ + tag() { + let tag = this.uint32(), fieldNo = tag >>> 3, wireType = tag & 7; + if (fieldNo <= 0 || wireType < 0 || wireType > 5) + throw new Error("illegal tag: field no " + fieldNo + " wire type " + wireType); + return [fieldNo, wireType]; + } + /** + * Skip one element on the wire and return the skipped data. + * Supports WireType.StartGroup since v2.0.0-alpha.23. 
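// Tag arithmetic in one line: the field number lives in the upper bits, the
// wire type in the lower three. For example, the single byte 0x08:
0x08 >>> 3; // 1 (field number)
0x08 & 7;   // 0 (WireType.Varint)
// and field 2 as length-delimited data: (2 << 3) | 2 === 0x12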
+ */ + skip(wireType) { + let start = this.pos; + switch (wireType) { + case binary_format_contract_1.WireType.Varint: + while (this.buf[this.pos++] & 128) { + } + break; + case binary_format_contract_1.WireType.Bit64: + this.pos += 4; + case binary_format_contract_1.WireType.Bit32: + this.pos += 4; + break; + case binary_format_contract_1.WireType.LengthDelimited: + let len = this.uint32(); + this.pos += len; + break; + case binary_format_contract_1.WireType.StartGroup: + let t; + while ((t = this.tag()[1]) !== binary_format_contract_1.WireType.EndGroup) { + this.skip(t); + } + break; + default: + throw new Error("cant skip wire type " + wireType); } + this.assertBounds(); + return this.buf.subarray(start, this.pos); } - if (range instanceof Comparator) { - return new Range(range.value, options); + /** + * Throws error if position in byte array is out of range. + */ + assertBounds() { + if (this.pos > this.len) + throw new RangeError("premature EOF"); } - if (!(this instanceof Range)) { - return new Range(range, options); + /** + * Read a `int32` field, a signed 32 bit varint. + */ + int32() { + return this.uint32() | 0; } - this.options = options; - this.loose = !!options.loose; - this.includePrerelease = !!options.includePrerelease; - this.raw = range.trim().split(/\s+/).join(" "); - this.set = this.raw.split("||").map(function(range2) { - return this.parseRange(range2.trim()); - }, this).filter(function(c) { - return c.length; - }); - if (!this.set.length) { - throw new TypeError("Invalid SemVer Range: " + this.raw); + /** + * Read a `sint32` field, a signed, zigzag-encoded 32-bit varint. + */ + sint32() { + let zze = this.uint32(); + return zze >>> 1 ^ -(zze & 1); } - this.format(); - } - __name(Range, "Range"); - Range.prototype.format = function() { - this.range = this.set.map(function(comps) { - return comps.join(" ").trim(); - }).join("||").trim(); - return this.range; - }; - Range.prototype.toString = function() { - return this.range; - }; - Range.prototype.parseRange = function(range) { - var loose = this.options.loose; - var hr = loose ? safeRe[t.HYPHENRANGELOOSE] : safeRe[t.HYPHENRANGE]; - range = range.replace(hr, hyphenReplace); - debug("hyphen replace", range); - range = range.replace(safeRe[t.COMPARATORTRIM], comparatorTrimReplace); - debug("comparator trim", range, safeRe[t.COMPARATORTRIM]); - range = range.replace(safeRe[t.TILDETRIM], tildeTrimReplace); - range = range.replace(safeRe[t.CARETTRIM], caretTrimReplace); - range = range.split(/\s+/).join(" "); - var compRe = loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR]; - var set = range.split(" ").map(function(comp) { - return parseComparator(comp, this.options); - }, this).join(" ").split(/\s+/); - if (this.options.loose) { - set = set.filter(function(comp) { - return !!comp.match(compRe); - }); + /** + * Read a `int64` field, a signed 64-bit varint. + */ + int64() { + return new pb_long_1.PbLong(...this.varint64()); } - set = set.map(function(comp) { - return new Comparator(comp, this.options); - }, this); - return set; - }; - Range.prototype.intersects = function(range, options) { - if (!(range instanceof Range)) { - throw new TypeError("a Range is required"); + /** + * Read a `uint64` field, an unsigned 64-bit varint. + */ + uint64() { + return new pb_long_1.PbULong(...this.varint64()); + } + /** + * Read a `sint64` field, a signed, zig-zag-encoded 64-bit varint. 
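// The sint32 decode above is the zigzag inverse: even wire values map to
// non-negative numbers, odd ones to negatives, so small magnitudes of either
// sign stay one byte on the wire. A standalone sketch:
const unzig = (zze) => (zze >>> 1) ^ -(zze & 1);
[0, 1, 2, 3, 4].map(unzig); // [0, -1, 1, -2, 2]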
+ */ + sint64() { + let [lo, hi] = this.varint64(); + let s = -(lo & 1); + lo = (lo >>> 1 | (hi & 1) << 31) ^ s; + hi = hi >>> 1 ^ s; + return new pb_long_1.PbLong(lo, hi); + } + /** + * Read a `bool` field, a variant. + */ + bool() { + let [lo, hi] = this.varint64(); + return lo !== 0 || hi !== 0; + } + /** + * Read a `fixed32` field, an unsigned, fixed-length 32-bit integer. + */ + fixed32() { + return this.view.getUint32((this.pos += 4) - 4, true); + } + /** + * Read a `sfixed32` field, a signed, fixed-length 32-bit integer. + */ + sfixed32() { + return this.view.getInt32((this.pos += 4) - 4, true); + } + /** + * Read a `fixed64` field, an unsigned, fixed-length 64 bit integer. + */ + fixed64() { + return new pb_long_1.PbULong(this.sfixed32(), this.sfixed32()); + } + /** + * Read a `fixed64` field, a signed, fixed-length 64-bit integer. + */ + sfixed64() { + return new pb_long_1.PbLong(this.sfixed32(), this.sfixed32()); + } + /** + * Read a `float` field, 32-bit floating point number. + */ + float() { + return this.view.getFloat32((this.pos += 4) - 4, true); + } + /** + * Read a `double` field, a 64-bit floating point number. + */ + double() { + return this.view.getFloat64((this.pos += 8) - 8, true); + } + /** + * Read a `bytes` field, length-delimited arbitrary data. + */ + bytes() { + let len = this.uint32(); + let start = this.pos; + this.pos += len; + this.assertBounds(); + return this.buf.subarray(start, start + len); + } + /** + * Read a `string` field, length-delimited data converted to UTF-8 text. + */ + string() { + return this.textDecoder.decode(this.bytes()); } - return this.set.some(function(thisComparators) { - return isSatisfiable(thisComparators, options) && range.set.some(function(rangeComparators) { - return isSatisfiable(rangeComparators, options) && thisComparators.every(function(thisComparator) { - return rangeComparators.every(function(rangeComparator) { - return thisComparator.intersects(rangeComparator, options); - }); - }); - }); - }); }; - function isSatisfiable(comparators, options) { - var result = true; - var remainingComparators = comparators.slice(); - var testComparator = remainingComparators.pop(); - while (result && remainingComparators.length) { - result = remainingComparators.every(function(otherComparator) { - return testComparator.intersects(otherComparator, options); - }); - testComparator = remainingComparators.pop(); + exports2.BinaryReader = BinaryReader; + } +}); + +// ../node_modules/@protobuf-ts/runtime/build/commonjs/assert.js +var require_assert = __commonJS({ + "../node_modules/@protobuf-ts/runtime/build/commonjs/assert.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.assertFloat32 = exports2.assertUInt32 = exports2.assertInt32 = exports2.assertNever = exports2.assert = void 0; + function assert(condition, msg) { + if (!condition) { + throw new Error(msg); } - return result; - } - __name(isSatisfiable, "isSatisfiable"); - exports2.toComparators = toComparators; - function toComparators(range, options) { - return new Range(range, options).set.map(function(comp) { - return comp.map(function(c) { - return c.value; - }).join(" ").trim().split(" "); - }); - } - __name(toComparators, "toComparators"); - function parseComparator(comp, options) { - debug("comp", comp, options); - comp = replaceCarets(comp, options); - debug("caret", comp); - comp = replaceTildes(comp, options); - debug("tildes", comp); - comp = replaceXRanges(comp, options); - debug("xrange", comp); - comp = 
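// Putting the reader together: a hedged sketch decoding the canonical
// protobuf example bytes 08 96 01 (field 1, varint 150), assuming access to
// the bundled BinaryReader above:
const r = new BinaryReader(new Uint8Array([0x08, 0x96, 0x01]));
const [fieldNo, wireType] = r.tag(); // [1, 0]
r.int32();                           // 150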
replaceStars(comp, options); - debug("stars", comp); - return comp; - } - __name(parseComparator, "parseComparator"); - function isX(id) { - return !id || id.toLowerCase() === "x" || id === "*"; - } - __name(isX, "isX"); - function replaceTildes(comp, options) { - return comp.trim().split(/\s+/).map(function(comp2) { - return replaceTilde(comp2, options); - }).join(" "); - } - __name(replaceTildes, "replaceTildes"); - function replaceTilde(comp, options) { - var r = options.loose ? safeRe[t.TILDELOOSE] : safeRe[t.TILDE]; - return comp.replace(r, function(_2, M, m, p, pr) { - debug("tilde", comp, _2, M, m, p, pr); - var ret; - if (isX(M)) { - ret = ""; - } else if (isX(m)) { - ret = ">=" + M + ".0.0 <" + (+M + 1) + ".0.0"; - } else if (isX(p)) { - ret = ">=" + M + "." + m + ".0 <" + M + "." + (+m + 1) + ".0"; - } else if (pr) { - debug("replaceTilde pr", pr); - ret = ">=" + M + "." + m + "." + p + "-" + pr + " <" + M + "." + (+m + 1) + ".0"; - } else { - ret = ">=" + M + "." + m + "." + p + " <" + M + "." + (+m + 1) + ".0"; - } - debug("tilde return", ret); - return ret; - }); - } - __name(replaceTilde, "replaceTilde"); - function replaceCarets(comp, options) { - return comp.trim().split(/\s+/).map(function(comp2) { - return replaceCaret(comp2, options); - }).join(" "); } - __name(replaceCarets, "replaceCarets"); - function replaceCaret(comp, options) { - debug("caret", comp, options); - var r = options.loose ? safeRe[t.CARETLOOSE] : safeRe[t.CARET]; - return comp.replace(r, function(_2, M, m, p, pr) { - debug("caret", comp, _2, M, m, p, pr); - var ret; - if (isX(M)) { - ret = ""; - } else if (isX(m)) { - ret = ">=" + M + ".0.0 <" + (+M + 1) + ".0.0"; - } else if (isX(p)) { - if (M === "0") { - ret = ">=" + M + "." + m + ".0 <" + M + "." + (+m + 1) + ".0"; - } else { - ret = ">=" + M + "." + m + ".0 <" + (+M + 1) + ".0.0"; - } - } else if (pr) { - debug("replaceCaret pr", pr); - if (M === "0") { - if (m === "0") { - ret = ">=" + M + "." + m + "." + p + "-" + pr + " <" + M + "." + m + "." + (+p + 1); - } else { - ret = ">=" + M + "." + m + "." + p + "-" + pr + " <" + M + "." + (+m + 1) + ".0"; - } - } else { - ret = ">=" + M + "." + m + "." + p + "-" + pr + " <" + (+M + 1) + ".0.0"; - } - } else { - debug("no pr"); - if (M === "0") { - if (m === "0") { - ret = ">=" + M + "." + m + "." + p + " <" + M + "." + m + "." + (+p + 1); - } else { - ret = ">=" + M + "." + m + "." + p + " <" + M + "." + (+m + 1) + ".0"; - } - } else { - ret = ">=" + M + "." + m + "." + p + " <" + (+M + 1) + ".0.0"; - } - } - debug("caret return", ret); - return ret; - }); + __name(assert, "assert"); + exports2.assert = assert; + function assertNever(value, msg) { + throw new Error(msg !== null && msg !== void 0 ? 
msg : "Unexpected object: " + value); } - __name(replaceCaret, "replaceCaret"); - function replaceXRanges(comp, options) { - debug("replaceXRanges", comp, options); - return comp.split(/\s+/).map(function(comp2) { - return replaceXRange(comp2, options); - }).join(" "); + __name(assertNever, "assertNever"); + exports2.assertNever = assertNever; + var FLOAT32_MAX = 34028234663852886e22; + var FLOAT32_MIN = -34028234663852886e22; + var UINT32_MAX = 4294967295; + var INT32_MAX = 2147483647; + var INT32_MIN = -2147483648; + function assertInt32(arg) { + if (typeof arg !== "number") + throw new Error("invalid int 32: " + typeof arg); + if (!Number.isInteger(arg) || arg > INT32_MAX || arg < INT32_MIN) + throw new Error("invalid int 32: " + arg); } - __name(replaceXRanges, "replaceXRanges"); - function replaceXRange(comp, options) { - comp = comp.trim(); - var r = options.loose ? safeRe[t.XRANGELOOSE] : safeRe[t.XRANGE]; - return comp.replace(r, function(ret, gtlt, M, m, p, pr) { - debug("xRange", comp, ret, gtlt, M, m, p, pr); - var xM = isX(M); - var xm = xM || isX(m); - var xp = xm || isX(p); - var anyX = xp; - if (gtlt === "=" && anyX) { - gtlt = ""; - } - pr = options.includePrerelease ? "-0" : ""; - if (xM) { - if (gtlt === ">" || gtlt === "<") { - ret = "<0.0.0-0"; - } else { - ret = "*"; - } - } else if (gtlt && anyX) { - if (xm) { - m = 0; - } - p = 0; - if (gtlt === ">") { - gtlt = ">="; - if (xm) { - M = +M + 1; - m = 0; - p = 0; - } else { - m = +m + 1; - p = 0; - } - } else if (gtlt === "<=") { - gtlt = "<"; - if (xm) { - M = +M + 1; - } else { - m = +m + 1; - } - } - ret = gtlt + M + "." + m + "." + p + pr; - } else if (xm) { - ret = ">=" + M + ".0.0" + pr + " <" + (+M + 1) + ".0.0" + pr; - } else if (xp) { - ret = ">=" + M + "." + m + ".0" + pr + " <" + M + "." + (+m + 1) + ".0" + pr; - } - debug("xRange return", ret); - return ret; - }); + __name(assertInt32, "assertInt32"); + exports2.assertInt32 = assertInt32; + function assertUInt32(arg) { + if (typeof arg !== "number") + throw new Error("invalid uint 32: " + typeof arg); + if (!Number.isInteger(arg) || arg > UINT32_MAX || arg < 0) + throw new Error("invalid uint 32: " + arg); } - __name(replaceXRange, "replaceXRange"); - function replaceStars(comp, options) { - debug("replaceStars", comp, options); - return comp.trim().replace(safeRe[t.STAR], ""); + __name(assertUInt32, "assertUInt32"); + exports2.assertUInt32 = assertUInt32; + function assertFloat32(arg) { + if (typeof arg !== "number") + throw new Error("invalid float 32: " + typeof arg); + if (!Number.isFinite(arg)) + return; + if (arg > FLOAT32_MAX || arg < FLOAT32_MIN) + throw new Error("invalid float 32: " + arg); } - __name(replaceStars, "replaceStars"); - function hyphenReplace($0, from, fM, fm, fp, fpr, fb, to, tM, tm, tp, tpr, tb) { - if (isX(fM)) { - from = ""; - } else if (isX(fm)) { - from = ">=" + fM + ".0.0"; - } else if (isX(fp)) { - from = ">=" + fM + "." + fm + ".0"; - } else { - from = ">=" + from; - } - if (isX(tM)) { - to = ""; - } else if (isX(tm)) { - to = "<" + (+tM + 1) + ".0.0"; - } else if (isX(tp)) { - to = "<" + tM + "." + (+tm + 1) + ".0"; - } else if (tpr) { - to = "<=" + tM + "." + tm + "." 
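// For reference, the bundled semver helpers being removed above desugar
// shorthand ranges into plain comparator pairs; the rules encoded in
// replaceTilde/replaceCaret/replaceXRange/hyphenReplace boil down to:
//   ~1.2.3         ->  >=1.2.3 <1.3.0   (patch-level changes only)
//   ^1.2.3         ->  >=1.2.3 <2.0.0   (no breaking major)
//   ^0.2.3         ->  >=0.2.3 <0.3.0   (leading zero: minor is the breaking slot)
//   1.2.x          ->  >=1.2.0 <1.3.0
//   1.2.3 - 2.3.4  ->  >=1.2.3 <=2.3.4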
+ tp + "-" + tpr; - } else { - to = "<=" + to; - } - return (from + " " + to).trim(); + __name(assertFloat32, "assertFloat32"); + exports2.assertFloat32 = assertFloat32; + } +}); + +// ../node_modules/@protobuf-ts/runtime/build/commonjs/binary-writer.js +var require_binary_writer = __commonJS({ + "../node_modules/@protobuf-ts/runtime/build/commonjs/binary-writer.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.BinaryWriter = exports2.binaryWriteOptions = void 0; + var pb_long_1 = require_pb_long(); + var goog_varint_1 = require_goog_varint(); + var assert_1 = require_assert(); + var defaultsWrite = { + writeUnknownFields: true, + writerFactory: /* @__PURE__ */ __name(() => new BinaryWriter(), "writerFactory") + }; + function binaryWriteOptions(options) { + return options ? Object.assign(Object.assign({}, defaultsWrite), options) : defaultsWrite; } - __name(hyphenReplace, "hyphenReplace"); - Range.prototype.test = function(version4) { - if (!version4) { - return false; - } - if (typeof version4 === "string") { - try { - version4 = new SemVer(version4, this.options); - } catch (er) { - return false; - } + __name(binaryWriteOptions, "binaryWriteOptions"); + exports2.binaryWriteOptions = binaryWriteOptions; + var BinaryWriter = class { + static { + __name(this, "BinaryWriter"); } - for (var i2 = 0; i2 < this.set.length; i2++) { - if (testSet(this.set[i2], version4, this.options)) { - return true; - } + constructor(textEncoder) { + this.stack = []; + this.textEncoder = textEncoder !== null && textEncoder !== void 0 ? textEncoder : new TextEncoder(); + this.chunks = []; + this.buf = []; } - return false; - }; - function testSet(set, version4, options) { - for (var i2 = 0; i2 < set.length; i2++) { - if (!set[i2].test(version4)) { - return false; + /** + * Return all bytes written and reset this writer. + */ + finish() { + this.chunks.push(new Uint8Array(this.buf)); + let len = 0; + for (let i = 0; i < this.chunks.length; i++) + len += this.chunks[i].length; + let bytes = new Uint8Array(len); + let offset = 0; + for (let i = 0; i < this.chunks.length; i++) { + bytes.set(this.chunks[i], offset); + offset += this.chunks[i].length; } + this.chunks = []; + return bytes; } - if (version4.prerelease.length && !options.includePrerelease) { - for (i2 = 0; i2 < set.length; i2++) { - debug(set[i2].semver); - if (set[i2].semver === ANY) { - continue; - } - if (set[i2].semver.prerelease.length > 0) { - var allowed = set[i2].semver; - if (allowed.major === version4.major && allowed.minor === version4.minor && allowed.patch === version4.patch) { - return true; - } - } - } - return false; + /** + * Start a new fork for length-delimited data like a message + * or a packed repeated field. + * + * Must be joined later with `join()`. + */ + fork() { + this.stack.push({ chunks: this.chunks, buf: this.buf }); + this.chunks = []; + this.buf = []; + return this; } - return true; - } - __name(testSet, "testSet"); - exports2.satisfies = satisfies; - function satisfies(version4, range, options) { - try { - range = new Range(range, options); - } catch (er) { - return false; + /** + * Join the last fork. Write its length and bytes, then + * return to the previous state. 
+ */ + join() { + let chunk = this.finish(); + let prev = this.stack.pop(); + if (!prev) + throw new Error("invalid state, fork stack empty"); + this.chunks = prev.chunks; + this.buf = prev.buf; + this.uint32(chunk.byteLength); + return this.raw(chunk); } - return range.test(version4); - } - __name(satisfies, "satisfies"); - exports2.maxSatisfying = maxSatisfying; - function maxSatisfying(versions, range, options) { - var max = null; - var maxSV = null; - try { - var rangeObj = new Range(range, options); - } catch (er) { - return null; + /** + * Writes a tag (field number and wire type). + * + * Equivalent to `uint32( (fieldNo << 3 | type) >>> 0 )`. + * + * Generated code should compute the tag ahead of time and call `uint32()`. + */ + tag(fieldNo, type) { + return this.uint32((fieldNo << 3 | type) >>> 0); } - versions.forEach(function(v) { - if (rangeObj.test(v)) { - if (!max || maxSV.compare(v) === -1) { - max = v; - maxSV = new SemVer(max, options); - } + /** + * Write a chunk of raw bytes. + */ + raw(chunk) { + if (this.buf.length) { + this.chunks.push(new Uint8Array(this.buf)); + this.buf = []; } - }); - return max; - } - __name(maxSatisfying, "maxSatisfying"); - exports2.minSatisfying = minSatisfying; - function minSatisfying(versions, range, options) { - var min = null; - var minSV = null; - try { - var rangeObj = new Range(range, options); - } catch (er) { - return null; + this.chunks.push(chunk); + return this; } - versions.forEach(function(v) { - if (rangeObj.test(v)) { - if (!min || minSV.compare(v) === 1) { - min = v; - minSV = new SemVer(min, options); - } + /** + * Write a `uint32` value, an unsigned 32 bit varint. + */ + uint32(value) { + assert_1.assertUInt32(value); + while (value > 127) { + this.buf.push(value & 127 | 128); + value = value >>> 7; } - }); - return min; - } - __name(minSatisfying, "minSatisfying"); - exports2.minVersion = minVersion; - function minVersion(range, loose) { - range = new Range(range, loose); - var minver = new SemVer("0.0.0"); - if (range.test(minver)) { - return minver; + this.buf.push(value); + return this; } - minver = new SemVer("0.0.0-0"); - if (range.test(minver)) { - return minver; + /** + * Write a `int32` value, a signed 32 bit varint. + */ + int32(value) { + assert_1.assertInt32(value); + goog_varint_1.varint32write(value, this.buf); + return this; } - minver = null; - for (var i2 = 0; i2 < range.set.length; ++i2) { - var comparators = range.set[i2]; - comparators.forEach(function(comparator) { - var compver = new SemVer(comparator.semver.version); - switch (comparator.operator) { - case ">": - if (compver.prerelease.length === 0) { - compver.patch++; - } else { - compver.prerelease.push(0); - } - compver.raw = compver.format(); - /* fallthrough */ - case "": - case ">=": - if (!minver || gt(minver, compver)) { - minver = compver; - } - break; - case "<": - case "<=": - break; - /* istanbul ignore next */ - default: - throw new Error("Unexpected operation: " + comparator.operator); - } - }); + /** + * Write a `bool` value, a variant. + */ + bool(value) { + this.buf.push(value ? 1 : 0); + return this; } - if (minver && range.test(minver)) { - return minver; + /** + * Write a `bytes` value, length-delimited arbitrary data. 
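// fork()/join() is how nested length-delimited records are written without
// knowing their size up front: fork() stashes the current output, join()
// measures everything written since, then emits length + bytes. A usage
// sketch (assuming access to the bundled BinaryWriter above):
new BinaryWriter()
  .tag(1, 2)    // outer field 1, WireType.LengthDelimited (2)
  .fork()       // start the nested payload
  .tag(1, 0)    // inner field 1, WireType.Varint (0)
  .uint32(150)
  .join()       // prepends the payload length
  .finish();    // Uint8Array [0x0a, 0x03, 0x08, 0x96, 0x01]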
+ */ + bytes(value) { + this.uint32(value.byteLength); + return this.raw(value); } - return null; - } - __name(minVersion, "minVersion"); - exports2.validRange = validRange; - function validRange(range, options) { - try { - return new Range(range, options).range || "*"; - } catch (er) { - return null; + /** + * Write a `string` value, length-delimited data converted to UTF-8 text. + */ + string(value) { + let chunk = this.textEncoder.encode(value); + this.uint32(chunk.byteLength); + return this.raw(chunk); } - } - __name(validRange, "validRange"); - exports2.ltr = ltr; - function ltr(version4, range, options) { - return outside(version4, range, "<", options); - } - __name(ltr, "ltr"); - exports2.gtr = gtr; - function gtr(version4, range, options) { - return outside(version4, range, ">", options); - } - __name(gtr, "gtr"); - exports2.outside = outside; - function outside(version4, range, hilo, options) { - version4 = new SemVer(version4, options); - range = new Range(range, options); - var gtfn, ltefn, ltfn, comp, ecomp; - switch (hilo) { - case ">": - gtfn = gt; - ltefn = lte; - ltfn = lt; - comp = ">"; - ecomp = ">="; - break; - case "<": - gtfn = lt; - ltefn = gte; - ltfn = gt; - comp = "<"; - ecomp = "<="; - break; - default: - throw new TypeError('Must provide a hilo val of "<" or ">"'); + /** + * Write a `float` value, 32-bit floating point number. + */ + float(value) { + assert_1.assertFloat32(value); + let chunk = new Uint8Array(4); + new DataView(chunk.buffer).setFloat32(0, value, true); + return this.raw(chunk); } - if (satisfies(version4, range, options)) { - return false; + /** + * Write a `double` value, a 64-bit floating point number. + */ + double(value) { + let chunk = new Uint8Array(8); + new DataView(chunk.buffer).setFloat64(0, value, true); + return this.raw(chunk); } - for (var i2 = 0; i2 < range.set.length; ++i2) { - var comparators = range.set[i2]; - var high = null; - var low = null; - comparators.forEach(function(comparator) { - if (comparator.semver === ANY) { - comparator = new Comparator(">=0.0.0"); - } - high = high || comparator; - low = low || comparator; - if (gtfn(comparator.semver, high.semver, options)) { - high = comparator; - } else if (ltfn(comparator.semver, low.semver, options)) { - low = comparator; - } - }); - if (high.operator === comp || high.operator === ecomp) { - return false; - } - if ((!low.operator || low.operator === comp) && ltefn(version4, low.semver)) { - return false; - } else if (low.operator === ecomp && ltfn(version4, low.semver)) { - return false; - } + /** + * Write a `fixed32` value, an unsigned, fixed-length 32-bit integer. + */ + fixed32(value) { + assert_1.assertUInt32(value); + let chunk = new Uint8Array(4); + new DataView(chunk.buffer).setUint32(0, value, true); + return this.raw(chunk); } - return true; - } - __name(outside, "outside"); - exports2.prerelease = prerelease; - function prerelease(version4, options) { - var parsed = parse3(version4, options); - return parsed && parsed.prerelease.length ? parsed.prerelease : null; - } - __name(prerelease, "prerelease"); - exports2.intersects = intersects; - function intersects(r1, r2, options) { - r1 = new Range(r1, options); - r2 = new Range(r2, options); - return r1.intersects(r2); - } - __name(intersects, "intersects"); - exports2.coerce = coerce; - function coerce(version4, options) { - if (version4 instanceof SemVer) { - return version4; + /** + * Write a `sfixed32` value, a signed, fixed-length 32-bit integer. 
+ */ + sfixed32(value) { + assert_1.assertInt32(value); + let chunk = new Uint8Array(4); + new DataView(chunk.buffer).setInt32(0, value, true); + return this.raw(chunk); } - if (typeof version4 === "number") { - version4 = String(version4); + /** + * Write a `sint32` value, a signed, zigzag-encoded 32-bit varint. + */ + sint32(value) { + assert_1.assertInt32(value); + value = (value << 1 ^ value >> 31) >>> 0; + goog_varint_1.varint32write(value, this.buf); + return this; } - if (typeof version4 !== "string") { - return null; + /** + * Write a `fixed64` value, a signed, fixed-length 64-bit integer. + */ + sfixed64(value) { + let chunk = new Uint8Array(8); + let view = new DataView(chunk.buffer); + let long = pb_long_1.PbLong.from(value); + view.setInt32(0, long.lo, true); + view.setInt32(4, long.hi, true); + return this.raw(chunk); } - options = options || {}; - var match = null; - if (!options.rtl) { - match = version4.match(safeRe[t.COERCE]); - } else { - var next; - while ((next = safeRe[t.COERCERTL].exec(version4)) && (!match || match.index + match[0].length !== version4.length)) { - if (!match || next.index + next[0].length !== match.index + match[0].length) { - match = next; - } - safeRe[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length; - } - safeRe[t.COERCERTL].lastIndex = -1; + /** + * Write a `fixed64` value, an unsigned, fixed-length 64 bit integer. + */ + fixed64(value) { + let chunk = new Uint8Array(8); + let view = new DataView(chunk.buffer); + let long = pb_long_1.PbULong.from(value); + view.setInt32(0, long.lo, true); + view.setInt32(4, long.hi, true); + return this.raw(chunk); } - if (match === null) { - return null; + /** + * Write a `int64` value, a signed 64-bit varint. + */ + int64(value) { + let long = pb_long_1.PbLong.from(value); + goog_varint_1.varint64write(long.lo, long.hi, this.buf); + return this; } - return parse3(match[2] + "." + (match[3] || "0") + "." + (match[4] || "0"), options); - } - __name(coerce, "coerce"); + /** + * Write a `sint64` value, a signed, zig-zag-encoded 64-bit varint. + */ + sint64(value) { + let long = pb_long_1.PbLong.from(value), sign = long.hi >> 31, lo = long.lo << 1 ^ sign, hi = (long.hi << 1 | long.lo >>> 31) ^ sign; + goog_varint_1.varint64write(lo, hi, this.buf); + return this; + } + /** + * Write a `uint64` value, an unsigned 64-bit varint. + */ + uint64(value) { + let long = pb_long_1.PbULong.from(value); + goog_varint_1.varint64write(long.lo, long.hi, this.buf); + return this; + } + }; + exports2.BinaryWriter = BinaryWriter; } }); -// ../node_modules/@actions/tool-cache/lib/manifest.js -var require_manifest = __commonJS({ - "../node_modules/@actions/tool-cache/lib/manifest.js"(exports2, module2) { +// ../node_modules/@protobuf-ts/runtime/build/commonjs/json-format-contract.js +var require_json_format_contract = __commonJS({ + "../node_modules/@protobuf-ts/runtime/build/commonjs/json-format-contract.js"(exports2) { "use strict"; - var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: /* @__PURE__ */ __name(function() { - return m[k]; - }, "get") }); - } : function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - o[k2] = m[k]; - }); - var __setModuleDefault3 = exports2 && exports2.__setModuleDefault || (Object.create ? 
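// The sint32 write above applies the zigzag transform before the varint,
// the mirror image of the reader's decode. As a standalone one-liner:
const zig = (v) => ((v << 1) ^ (v >> 31)) >>> 0;
[0, -1, 1, -2, 2].map(zig); // [0, 1, 2, 3, 4]
// sint64 does the same on the lo/hi pair, carrying lo's top bit into hi.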
function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - } : function(o, v) { - o["default"] = v; - }); - var __importStar3 = exports2 && exports2.__importStar || function(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding3(result, mod, k); - } - __setModuleDefault3(result, mod); - return result; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.mergeJsonOptions = exports2.jsonWriteOptions = exports2.jsonReadOptions = void 0; + var defaultsWrite = { + emitDefaultValues: false, + enumAsInteger: false, + useProtoFieldName: false, + prettySpaces: 0 }; - var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? value : new P(function(resolve) { - resolve(value); - }); - } - __name(adopt, "adopt"); - return new (P || (P = Promise))(function(resolve, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); - } - } - __name(fulfilled, "fulfilled"); - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); - } - } - __name(rejected, "rejected"); - function step(result) { - result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); - } - __name(step, "step"); - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); + var defaultsRead = { + ignoreUnknownFields: false }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2._readLinuxVersionFile = exports2._getOsVersion = exports2._findMatch = void 0; - var semver = __importStar3(require_semver2()); - var core_1 = require_core(); - var os = require("os"); - var cp = require("child_process"); - var fs2 = require("fs"); - function _findMatch(versionSpec, stable, candidates, archFilter) { - return __awaiter3(this, void 0, void 0, function* () { - const platFilter = os.platform(); - let result; - let match; - let file; - for (const candidate of candidates) { - const version4 = candidate.version; - core_1.debug(`check ${version4} satisfies ${versionSpec}`); - if (semver.satisfies(version4, versionSpec) && (!stable || candidate.stable === stable)) { - file = candidate.files.find((item) => { - core_1.debug(`${item.arch}===${archFilter} && ${item.platform}===${platFilter}`); - let chk = item.arch === archFilter && item.platform === platFilter; - if (chk && item.platform_version) { - const osVersion = module2.exports._getOsVersion(); - if (osVersion === item.platform_version) { - chk = true; - } else { - chk = semver.satisfies(osVersion, item.platform_version); - } - } - return chk; - }); - if (file) { - core_1.debug(`matched ${candidate.version}`); - match = candidate; - break; - } - } - } - if (match && file) { - result = Object.assign({}, match); - result.files = [file]; - } - return result; - }); + function jsonReadOptions(options) { + return options ? 
Object.assign(Object.assign({}, defaultsRead), options) : defaultsRead; } - __name(_findMatch, "_findMatch"); - exports2._findMatch = _findMatch; - function _getOsVersion() { - const plat = os.platform(); - let version4 = ""; - if (plat === "darwin") { - version4 = cp.execSync("sw_vers -productVersion").toString(); - } else if (plat === "linux") { - const lsbContents = module2.exports._readLinuxVersionFile(); - if (lsbContents) { - const lines = lsbContents.split("\n"); - for (const line of lines) { - const parts = line.split("="); - if (parts.length === 2 && (parts[0].trim() === "VERSION_ID" || parts[0].trim() === "DISTRIB_RELEASE")) { - version4 = parts[1].trim().replace(/^"/, "").replace(/"$/, ""); - break; - } - } - } - } - return version4; + __name(jsonReadOptions, "jsonReadOptions"); + exports2.jsonReadOptions = jsonReadOptions; + function jsonWriteOptions(options) { + return options ? Object.assign(Object.assign({}, defaultsWrite), options) : defaultsWrite; } - __name(_getOsVersion, "_getOsVersion"); - exports2._getOsVersion = _getOsVersion; - function _readLinuxVersionFile() { - const lsbReleaseFile = "/etc/lsb-release"; - const osReleaseFile = "/etc/os-release"; - let contents = ""; - if (fs2.existsSync(lsbReleaseFile)) { - contents = fs2.readFileSync(lsbReleaseFile).toString(); - } else if (fs2.existsSync(osReleaseFile)) { - contents = fs2.readFileSync(osReleaseFile).toString(); - } - return contents; + __name(jsonWriteOptions, "jsonWriteOptions"); + exports2.jsonWriteOptions = jsonWriteOptions; + function mergeJsonOptions(a, b) { + var _a, _b; + let c = Object.assign(Object.assign({}, a), b); + c.typeRegistry = [...(_a = a === null || a === void 0 ? void 0 : a.typeRegistry) !== null && _a !== void 0 ? _a : [], ...(_b = b === null || b === void 0 ? void 0 : b.typeRegistry) !== null && _b !== void 0 ? _b : []]; + return c; } - __name(_readLinuxVersionFile, "_readLinuxVersionFile"); - exports2._readLinuxVersionFile = _readLinuxVersionFile; + __name(mergeJsonOptions, "mergeJsonOptions"); + exports2.mergeJsonOptions = mergeJsonOptions; } }); -// ../node_modules/@actions/tool-cache/lib/retry-helper.js -var require_retry_helper = __commonJS({ - "../node_modules/@actions/tool-cache/lib/retry-helper.js"(exports2) { +// ../node_modules/@protobuf-ts/runtime/build/commonjs/message-type-contract.js +var require_message_type_contract = __commonJS({ + "../node_modules/@protobuf-ts/runtime/build/commonjs/message-type-contract.js"(exports2) { "use strict"; - var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: /* @__PURE__ */ __name(function() { - return m[k]; - }, "get") }); - } : function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - o[k2] = m[k]; - }); - var __setModuleDefault3 = exports2 && exports2.__setModuleDefault || (Object.create ? function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - } : function(o, v) { - o["default"] = v; - }); - var __importStar3 = exports2 && exports2.__importStar || function(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding3(result, mod, k); - } - __setModuleDefault3(result, mod); - return result; - }; - var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? 
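// mergeJsonOptions() spreads b over a, except that typeRegistry is
// concatenated rather than replaced. A sketch with hypothetical stand-ins
// for two generated message types:
const TypeA = {}, TypeB = {}; // hypothetical message types
mergeJsonOptions(
  { emitDefaultValues: true, typeRegistry: [TypeA] },
  { prettySpaces: 2, typeRegistry: [TypeB] }
);
// -> { emitDefaultValues: true, prettySpaces: 2, typeRegistry: [TypeA, TypeB] }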
value : new P(function(resolve) { - resolve(value); - }); - } - __name(adopt, "adopt"); - return new (P || (P = Promise))(function(resolve, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); - } - } - __name(fulfilled, "fulfilled"); - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); - } - } - __name(rejected, "rejected"); - function step(result) { - result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); - } - __name(step, "step"); - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); - }; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.RetryHelper = void 0; - var core2 = __importStar3(require_core()); - var RetryHelper = class { - static { - __name(this, "RetryHelper"); - } - constructor(maxAttempts, minSeconds, maxSeconds) { - if (maxAttempts < 1) { - throw new Error("max attempts should be greater than or equal to 1"); - } - this.maxAttempts = maxAttempts; - this.minSeconds = Math.floor(minSeconds); - this.maxSeconds = Math.floor(maxSeconds); - if (this.minSeconds > this.maxSeconds) { - throw new Error("min seconds should be less than or equal to max seconds"); - } - } - execute(action, isRetryable) { - return __awaiter3(this, void 0, void 0, function* () { - let attempt = 1; - while (attempt < this.maxAttempts) { - try { - return yield action(); - } catch (err) { - if (isRetryable && !isRetryable(err)) { - throw err; - } - core2.info(err.message); - } - const seconds = this.getSleepAmount(); - core2.info(`Waiting ${seconds} seconds before trying again`); - yield this.sleep(seconds); - attempt++; - } - return yield action(); - }); - } - getSleepAmount() { - return Math.floor(Math.random() * (this.maxSeconds - this.minSeconds + 1)) + this.minSeconds; - } - sleep(seconds) { - return __awaiter3(this, void 0, void 0, function* () { - return new Promise((resolve) => setTimeout(resolve, seconds * 1e3)); - }); - } - }; - exports2.RetryHelper = RetryHelper; + exports2.MESSAGE_TYPE = void 0; + exports2.MESSAGE_TYPE = Symbol.for("protobuf-ts/message-type"); } }); -// ../node_modules/@actions/tool-cache/lib/tool-cache.js -var require_tool_cache = __commonJS({ - "../node_modules/@actions/tool-cache/lib/tool-cache.js"(exports2) { +// ../node_modules/@protobuf-ts/runtime/build/commonjs/lower-camel-case.js +var require_lower_camel_case = __commonJS({ + "../node_modules/@protobuf-ts/runtime/build/commonjs/lower-camel-case.js"(exports2) { "use strict"; - var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: /* @__PURE__ */ __name(function() { - return m[k]; - }, "get") }); - } : function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - o[k2] = m[k]; - }); - var __setModuleDefault3 = exports2 && exports2.__setModuleDefault || (Object.create ? 
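// The (removed) RetryHelper above runs an action up to maxAttempts times,
// sleeping a uniformly random minSeconds..maxSeconds between tries; only the
// final attempt's error propagates, and isRetryable can veto retrying early.
// A hedged usage sketch with a hypothetical fetchOnce() and url:
const retry = new RetryHelper(3, 10, 20); // 3 attempts, 10-20s pauses
const result = await retry.execute(
  () => fetchOnce(url),                   // hypothetical action
  (err) => err.code !== "EACCES"          // non-retryable errors fail fast
);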
function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - } : function(o, v) { - o["default"] = v; - }); - var __importStar3 = exports2 && exports2.__importStar || function(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding3(result, mod, k); - } - __setModuleDefault3(result, mod); - return result; - }; - var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? value : new P(function(resolve) { - resolve(value); - }); - } - __name(adopt, "adopt"); - return new (P || (P = Promise))(function(resolve, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); - } - } - __name(fulfilled, "fulfilled"); - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); - } - } - __name(rejected, "rejected"); - function step(result) { - result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); - } - __name(step, "step"); - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); - }; - var __importDefault2 = exports2 && exports2.__importDefault || function(mod) { - return mod && mod.__esModule ? mod : { "default": mod }; - }; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.evaluateVersions = exports2.isExplicitVersion = exports2.findFromManifest = exports2.getManifestFromRepo = exports2.findAllVersions = exports2.find = exports2.cacheFile = exports2.cacheDir = exports2.extractZip = exports2.extractXar = exports2.extractTar = exports2.extract7z = exports2.downloadTool = exports2.HTTPError = void 0; - var core2 = __importStar3(require_core()); - var io2 = __importStar3(require_io()); - var fs2 = __importStar3(require("fs")); - var mm = __importStar3(require_manifest()); - var os = __importStar3(require("os")); - var path2 = __importStar3(require("path")); - var httpm = __importStar3(require_lib()); - var semver = __importStar3(require_semver2()); - var stream = __importStar3(require("stream")); - var util = __importStar3(require("util")); - var assert_1 = require("assert"); - var v4_1 = __importDefault2(require_v4()); - var exec_1 = require_exec(); - var retry_helper_1 = require_retry_helper(); - var HTTPError = class extends Error { - static { - __name(this, "HTTPError"); - } - constructor(httpStatusCode) { - super(`Unexpected HTTP response: ${httpStatusCode}`); - this.httpStatusCode = httpStatusCode; - Object.setPrototypeOf(this, new.target.prototype); - } - }; - exports2.HTTPError = HTTPError; - var IS_WINDOWS = process.platform === "win32"; - var IS_MAC = process.platform === "darwin"; - var userAgent = "actions/tool-cache"; - function downloadTool(url, dest, auth, headers) { - return __awaiter3(this, void 0, void 0, function* () { - dest = dest || path2.join(_getTempDirectory(), v4_1.default()); - yield io2.mkdirP(path2.dirname(dest)); - core2.debug(`Downloading ${url}`); - core2.debug(`Destination ${dest}`); - const maxAttempts = 3; - const minSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS", 10); - const maxSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS", 20); - const retryHelper = new retry_helper_1.RetryHelper(maxAttempts, minSeconds, maxSeconds); - return yield retryHelper.execute(() => __awaiter3(this, void 0, void 0, function* () { - return yield 
downloadToolAttempt(url, dest || "", auth, headers); - }), (err) => { - if (err instanceof HTTPError && err.httpStatusCode) { - if (err.httpStatusCode < 500 && err.httpStatusCode !== 408 && err.httpStatusCode !== 429) { - return false; - } - } - return true; - }); - }); - } - __name(downloadTool, "downloadTool"); - exports2.downloadTool = downloadTool; - function downloadToolAttempt(url, dest, auth, headers) { - return __awaiter3(this, void 0, void 0, function* () { - if (fs2.existsSync(dest)) { - throw new Error(`Destination file path ${dest} already exists`); - } - const http = new httpm.HttpClient(userAgent, [], { - allowRetries: false - }); - if (auth) { - core2.debug("set auth"); - if (headers === void 0) { - headers = {}; - } - headers.authorization = auth; - } - const response = yield http.get(url, headers); - if (response.message.statusCode !== 200) { - const err = new HTTPError(response.message.statusCode); - core2.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); - throw err; - } - const pipeline = util.promisify(stream.pipeline); - const responseMessageFactory = _getGlobal("TEST_DOWNLOAD_TOOL_RESPONSE_MESSAGE_FACTORY", () => response.message); - const readStream = responseMessageFactory(); - let succeeded = false; - try { - yield pipeline(readStream, fs2.createWriteStream(dest)); - core2.debug("download complete"); - succeeded = true; - return dest; - } finally { - if (!succeeded) { - core2.debug("download failed"); - try { - yield io2.rmRF(dest); - } catch (err) { - core2.debug(`Failed to delete '${dest}'. ${err.message}`); - } - } - } - }); - } - __name(downloadToolAttempt, "downloadToolAttempt"); - function extract7z(file, dest, _7zPath) { - return __awaiter3(this, void 0, void 0, function* () { - assert_1.ok(IS_WINDOWS, "extract7z() not supported on current OS"); - assert_1.ok(file, 'parameter "file" is required'); - dest = yield _createExtractFolder(dest); - const originalCwd = process.cwd(); - process.chdir(dest); - if (_7zPath) { - try { - const logLevel = core2.isDebug() ? 
"-bb1" : "-bb0"; - const args = [ - "x", - logLevel, - "-bd", - "-sccUTF-8", - file - ]; - const options = { - silent: true - }; - yield exec_1.exec(`"${_7zPath}"`, args, options); - } finally { - process.chdir(originalCwd); - } - } else { - const escapedScript = path2.join(__dirname, "..", "scripts", "Invoke-7zdec.ps1").replace(/'/g, "''").replace(/"|\n|\r/g, ""); - const escapedFile = file.replace(/'/g, "''").replace(/"|\n|\r/g, ""); - const escapedTarget = dest.replace(/'/g, "''").replace(/"|\n|\r/g, ""); - const command = `& '${escapedScript}' -Source '${escapedFile}' -Target '${escapedTarget}'`; - const args = [ - "-NoLogo", - "-Sta", - "-NoProfile", - "-NonInteractive", - "-ExecutionPolicy", - "Unrestricted", - "-Command", - command - ]; - const options = { - silent: true - }; - try { - const powershellPath = yield io2.which("powershell", true); - yield exec_1.exec(`"${powershellPath}"`, args, options); - } finally { - process.chdir(originalCwd); - } - } - return dest; - }); - } - __name(extract7z, "extract7z"); - exports2.extract7z = extract7z; - function extractTar(file, dest, flags = "xz") { - return __awaiter3(this, void 0, void 0, function* () { - if (!file) { - throw new Error("parameter 'file' is required"); - } - dest = yield _createExtractFolder(dest); - core2.debug("Checking tar --version"); - let versionOutput = ""; - yield exec_1.exec("tar --version", [], { - ignoreReturnCode: true, - silent: true, - listeners: { - stdout: /* @__PURE__ */ __name((data) => versionOutput += data.toString(), "stdout"), - stderr: /* @__PURE__ */ __name((data) => versionOutput += data.toString(), "stderr") - } - }); - core2.debug(versionOutput.trim()); - const isGnuTar = versionOutput.toUpperCase().includes("GNU TAR"); - let args; - if (flags instanceof Array) { - args = flags; - } else { - args = [flags]; - } - if (core2.isDebug() && !flags.includes("v")) { - args.push("-v"); - } - let destArg = dest; - let fileArg = file; - if (IS_WINDOWS && isGnuTar) { - args.push("--force-local"); - destArg = dest.replace(/\\/g, "/"); - fileArg = file.replace(/\\/g, "/"); - } - if (isGnuTar) { - args.push("--warning=no-unknown-keyword"); - args.push("--overwrite"); - } - args.push("-C", destArg, "-f", fileArg); - yield exec_1.exec(`tar`, args); - return dest; - }); - } - __name(extractTar, "extractTar"); - exports2.extractTar = extractTar; - function extractXar(file, dest, flags = []) { - return __awaiter3(this, void 0, void 0, function* () { - assert_1.ok(IS_MAC, "extractXar() not supported on current OS"); - assert_1.ok(file, 'parameter "file" is required'); - dest = yield _createExtractFolder(dest); - let args; - if (flags instanceof Array) { - args = flags; - } else { - args = [flags]; - } - args.push("-x", "-C", dest, "-f", file); - if (core2.isDebug()) { - args.push("-v"); - } - const xarPath = yield io2.which("xar", true); - yield exec_1.exec(`"${xarPath}"`, _unique(args)); - return dest; - }); - } - __name(extractXar, "extractXar"); - exports2.extractXar = extractXar; - function extractZip(file, dest) { - return __awaiter3(this, void 0, void 0, function* () { - if (!file) { - throw new Error("parameter 'file' is required"); - } - dest = yield _createExtractFolder(dest); - if (IS_WINDOWS) { - yield extractZipWin(file, dest); + exports2.lowerCamelCase = void 0; + function lowerCamelCase(snakeCase) { + let capNext = false; + const sb = []; + for (let i = 0; i < snakeCase.length; i++) { + let next = snakeCase.charAt(i); + if (next == "_") { + capNext = true; + } else if (/\d/.test(next)) { + 
sb.push(next); + capNext = true; + } else if (capNext) { + sb.push(next.toUpperCase()); + capNext = false; + } else if (i == 0) { + sb.push(next.toLowerCase()); } else { - yield extractZipNix(file, dest); + sb.push(next); } - return dest; - }); + } + return sb.join(""); } - __name(extractZip, "extractZip"); - exports2.extractZip = extractZip; - function extractZipWin(file, dest) { - return __awaiter3(this, void 0, void 0, function* () { - const escapedFile = file.replace(/'/g, "''").replace(/"|\n|\r/g, ""); - const escapedDest = dest.replace(/'/g, "''").replace(/"|\n|\r/g, ""); - const pwshPath = yield io2.which("pwsh", false); - if (pwshPath) { - const pwshCommand = [ - `$ErrorActionPreference = 'Stop' ;`, - `try { Add-Type -AssemblyName System.IO.Compression.ZipFile } catch { } ;`, - `try { [System.IO.Compression.ZipFile]::ExtractToDirectory('${escapedFile}', '${escapedDest}', $true) }`, - `catch { if (($_.Exception.GetType().FullName -eq 'System.Management.Automation.MethodException') -or ($_.Exception.GetType().FullName -eq 'System.Management.Automation.RuntimeException') ){ Expand-Archive -LiteralPath '${escapedFile}' -DestinationPath '${escapedDest}' -Force } else { throw $_ } } ;` - ].join(" "); - const args = [ - "-NoLogo", - "-NoProfile", - "-NonInteractive", - "-ExecutionPolicy", - "Unrestricted", - "-Command", - pwshCommand - ]; - core2.debug(`Using pwsh at path: ${pwshPath}`); - yield exec_1.exec(`"${pwshPath}"`, args); - } else { - const powershellCommand = [ - `$ErrorActionPreference = 'Stop' ;`, - `try { Add-Type -AssemblyName System.IO.Compression.FileSystem } catch { } ;`, - `if ((Get-Command -Name Expand-Archive -Module Microsoft.PowerShell.Archive -ErrorAction Ignore)) { Expand-Archive -LiteralPath '${escapedFile}' -DestinationPath '${escapedDest}' -Force }`, - `else {[System.IO.Compression.ZipFile]::ExtractToDirectory('${escapedFile}', '${escapedDest}', $true) }` - ].join(" "); - const args = [ - "-NoLogo", - "-Sta", - "-NoProfile", - "-NonInteractive", - "-ExecutionPolicy", - "Unrestricted", - "-Command", - powershellCommand - ]; - const powershellPath = yield io2.which("powershell", true); - core2.debug(`Using powershell at path: ${powershellPath}`); - yield exec_1.exec(`"${powershellPath}"`, args); - } - }); + __name(lowerCamelCase, "lowerCamelCase"); + exports2.lowerCamelCase = lowerCamelCase; + } +}); + +// ../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-info.js +var require_reflection_info = __commonJS({ + "../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-info.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.readMessageOption = exports2.readFieldOption = exports2.readFieldOptions = exports2.normalizeFieldInfo = exports2.RepeatType = exports2.LongType = exports2.ScalarType = void 0; + var lower_camel_case_1 = require_lower_camel_case(); + var ScalarType; + (function(ScalarType2) { + ScalarType2[ScalarType2["DOUBLE"] = 1] = "DOUBLE"; + ScalarType2[ScalarType2["FLOAT"] = 2] = "FLOAT"; + ScalarType2[ScalarType2["INT64"] = 3] = "INT64"; + ScalarType2[ScalarType2["UINT64"] = 4] = "UINT64"; + ScalarType2[ScalarType2["INT32"] = 5] = "INT32"; + ScalarType2[ScalarType2["FIXED64"] = 6] = "FIXED64"; + ScalarType2[ScalarType2["FIXED32"] = 7] = "FIXED32"; + ScalarType2[ScalarType2["BOOL"] = 8] = "BOOL"; + ScalarType2[ScalarType2["STRING"] = 9] = "STRING"; + ScalarType2[ScalarType2["BYTES"] = 12] = "BYTES"; + ScalarType2[ScalarType2["UINT32"] = 13] = "UINT32"; + 
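// lowerCamelCase() walks the snake_case name once: underscores capitalize
// the next character, digits are kept and also capitalize what follows, and
// the very first character is always lowered. For example:
lowerCamelCase("user_id");     // "userId"
lowerCamelCase("field_2name"); // "field2Name"
lowerCamelCase("Snake");       // "snake"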
ScalarType2[ScalarType2["SFIXED32"] = 15] = "SFIXED32"; + ScalarType2[ScalarType2["SFIXED64"] = 16] = "SFIXED64"; + ScalarType2[ScalarType2["SINT32"] = 17] = "SINT32"; + ScalarType2[ScalarType2["SINT64"] = 18] = "SINT64"; + })(ScalarType = exports2.ScalarType || (exports2.ScalarType = {})); + var LongType; + (function(LongType2) { + LongType2[LongType2["BIGINT"] = 0] = "BIGINT"; + LongType2[LongType2["STRING"] = 1] = "STRING"; + LongType2[LongType2["NUMBER"] = 2] = "NUMBER"; + })(LongType = exports2.LongType || (exports2.LongType = {})); + var RepeatType; + (function(RepeatType2) { + RepeatType2[RepeatType2["NO"] = 0] = "NO"; + RepeatType2[RepeatType2["PACKED"] = 1] = "PACKED"; + RepeatType2[RepeatType2["UNPACKED"] = 2] = "UNPACKED"; + })(RepeatType = exports2.RepeatType || (exports2.RepeatType = {})); + function normalizeFieldInfo(field) { + var _a, _b, _c, _d; + field.localName = (_a = field.localName) !== null && _a !== void 0 ? _a : lower_camel_case_1.lowerCamelCase(field.name); + field.jsonName = (_b = field.jsonName) !== null && _b !== void 0 ? _b : lower_camel_case_1.lowerCamelCase(field.name); + field.repeat = (_c = field.repeat) !== null && _c !== void 0 ? _c : RepeatType.NO; + field.opt = (_d = field.opt) !== null && _d !== void 0 ? _d : field.repeat ? false : field.oneof ? false : field.kind == "message"; + return field; } - __name(extractZipWin, "extractZipWin"); - function extractZipNix(file, dest) { - return __awaiter3(this, void 0, void 0, function* () { - const unzipPath = yield io2.which("unzip", true); - const args = [file]; - if (!core2.isDebug()) { - args.unshift("-q"); - } - args.unshift("-o"); - yield exec_1.exec(`"${unzipPath}"`, args, { cwd: dest }); - }); + __name(normalizeFieldInfo, "normalizeFieldInfo"); + exports2.normalizeFieldInfo = normalizeFieldInfo; + function readFieldOptions(messageType, fieldName, extensionName, extensionType) { + var _a; + const options = (_a = messageType.fields.find((m, i) => m.localName == fieldName || i == fieldName)) === null || _a === void 0 ? void 0 : _a.options; + return options && options[extensionName] ? extensionType.fromJson(options[extensionName]) : void 0; } - __name(extractZipNix, "extractZipNix"); - function cacheDir(sourceDir, tool, version4, arch) { - return __awaiter3(this, void 0, void 0, function* () { - version4 = semver.clean(version4) || version4; - arch = arch || os.arch(); - core2.debug(`Caching tool ${tool} ${version4} ${arch}`); - core2.debug(`source dir: ${sourceDir}`); - if (!fs2.statSync(sourceDir).isDirectory()) { - throw new Error("sourceDir is not a directory"); - } - const destPath = yield _createToolPath(tool, version4, arch); - for (const itemName of fs2.readdirSync(sourceDir)) { - const s = path2.join(sourceDir, itemName); - yield io2.cp(s, destPath, { recursive: true }); - } - _completeToolPath(tool, version4, arch); - return destPath; - }); + __name(readFieldOptions, "readFieldOptions"); + exports2.readFieldOptions = readFieldOptions; + function readFieldOption(messageType, fieldName, extensionName, extensionType) { + var _a; + const options = (_a = messageType.fields.find((m, i) => m.localName == fieldName || i == fieldName)) === null || _a === void 0 ? void 0 : _a.options; + if (!options) { + return void 0; + } + const optionVal = options[extensionName]; + if (optionVal === void 0) { + return optionVal; + } + return extensionType ? 
extensionType.fromJson(optionVal) : optionVal; } - __name(cacheDir, "cacheDir"); - exports2.cacheDir = cacheDir; - function cacheFile(sourceFile, targetFile, tool, version4, arch) { - return __awaiter3(this, void 0, void 0, function* () { - version4 = semver.clean(version4) || version4; - arch = arch || os.arch(); - core2.debug(`Caching tool ${tool} ${version4} ${arch}`); - core2.debug(`source file: ${sourceFile}`); - if (!fs2.statSync(sourceFile).isFile()) { - throw new Error("sourceFile is not a file"); - } - const destFolder = yield _createToolPath(tool, version4, arch); - const destPath = path2.join(destFolder, targetFile); - core2.debug(`destination file ${destPath}`); - yield io2.cp(sourceFile, destPath); - _completeToolPath(tool, version4, arch); - return destFolder; - }); + __name(readFieldOption, "readFieldOption"); + exports2.readFieldOption = readFieldOption; + function readMessageOption(messageType, extensionName, extensionType) { + const options = messageType.options; + const optionVal = options[extensionName]; + if (optionVal === void 0) { + return optionVal; + } + return extensionType ? extensionType.fromJson(optionVal) : optionVal; } - __name(cacheFile, "cacheFile"); - exports2.cacheFile = cacheFile; - function find(toolName, versionSpec, arch) { - if (!toolName) { - throw new Error("toolName parameter is required"); + __name(readMessageOption, "readMessageOption"); + exports2.readMessageOption = readMessageOption; + } +}); + +// ../node_modules/@protobuf-ts/runtime/build/commonjs/oneof.js +var require_oneof = __commonJS({ + "../node_modules/@protobuf-ts/runtime/build/commonjs/oneof.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.getSelectedOneofValue = exports2.clearOneofValue = exports2.setUnknownOneofValue = exports2.setOneofValue = exports2.getOneofValue = exports2.isOneofGroup = void 0; + function isOneofGroup(any) { + if (typeof any != "object" || any === null || !any.hasOwnProperty("oneofKind")) { + return false; } - if (!versionSpec) { - throw new Error("versionSpec parameter is required"); + switch (typeof any.oneofKind) { + case "string": + if (any[any.oneofKind] === void 0) + return false; + return Object.keys(any).length == 2; + case "undefined": + return Object.keys(any).length == 1; + default: + return false; } - arch = arch || os.arch(); - if (!isExplicitVersion(versionSpec)) { - const localVersions = findAllVersions(toolName, arch); - const match = evaluateVersions(localVersions, versionSpec); - versionSpec = match; + } + __name(isOneofGroup, "isOneofGroup"); + exports2.isOneofGroup = isOneofGroup; + function getOneofValue(oneof, kind) { + return oneof[kind]; + } + __name(getOneofValue, "getOneofValue"); + exports2.getOneofValue = getOneofValue; + function setOneofValue(oneof, kind, value) { + if (oneof.oneofKind !== void 0) { + delete oneof[oneof.oneofKind]; } - let toolPath = ""; - if (versionSpec) { - versionSpec = semver.clean(versionSpec) || ""; - const cachePath = path2.join(_getCacheDirectory(), toolName, versionSpec, arch); - core2.debug(`checking cache: ${cachePath}`); - if (fs2.existsSync(cachePath) && fs2.existsSync(`${cachePath}.complete`)) { - core2.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch}`); - toolPath = cachePath; - } else { - core2.debug("not found"); - } + oneof.oneofKind = kind; + if (value !== void 0) { + oneof[kind] = value; } - return toolPath; } - __name(find, "find"); - exports2.find = find; - function findAllVersions(toolName, arch) { - const versions = 
[]; - arch = arch || os.arch(); - const toolPath = path2.join(_getCacheDirectory(), toolName); - if (fs2.existsSync(toolPath)) { - const children2 = fs2.readdirSync(toolPath); - for (const child of children2) { - if (isExplicitVersion(child)) { - const fullPath = path2.join(toolPath, child, arch || ""); - if (fs2.existsSync(fullPath) && fs2.existsSync(`${fullPath}.complete`)) { - versions.push(child); + __name(setOneofValue, "setOneofValue"); + exports2.setOneofValue = setOneofValue; + function setUnknownOneofValue(oneof, kind, value) { + if (oneof.oneofKind !== void 0) { + delete oneof[oneof.oneofKind]; + } + oneof.oneofKind = kind; + if (value !== void 0 && kind !== void 0) { + oneof[kind] = value; + } + } + __name(setUnknownOneofValue, "setUnknownOneofValue"); + exports2.setUnknownOneofValue = setUnknownOneofValue; + function clearOneofValue(oneof) { + if (oneof.oneofKind !== void 0) { + delete oneof[oneof.oneofKind]; + } + oneof.oneofKind = void 0; + } + __name(clearOneofValue, "clearOneofValue"); + exports2.clearOneofValue = clearOneofValue; + function getSelectedOneofValue(oneof) { + if (oneof.oneofKind === void 0) { + return void 0; + } + return oneof[oneof.oneofKind]; + } + __name(getSelectedOneofValue, "getSelectedOneofValue"); + exports2.getSelectedOneofValue = getSelectedOneofValue; + } +}); + +// ../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-type-check.js +var require_reflection_type_check = __commonJS({ + "../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-type-check.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.ReflectionTypeCheck = void 0; + var reflection_info_1 = require_reflection_info(); + var oneof_1 = require_oneof(); + var ReflectionTypeCheck = class { + static { + __name(this, "ReflectionTypeCheck"); + } + constructor(info) { + var _a; + this.fields = (_a = info.fields) !== null && _a !== void 0 ? _a : []; + } + prepare() { + if (this.data) + return; + const req = [], known = [], oneofs = []; + for (let field of this.fields) { + if (field.oneof) { + if (!oneofs.includes(field.oneof)) { + oneofs.push(field.oneof); + req.push(field.oneof); + known.push(field.oneof); + } + } else { + known.push(field.localName); + switch (field.kind) { + case "scalar": + case "enum": + if (!field.opt || field.repeat) + req.push(field.localName); + break; + case "message": + if (field.repeat) + req.push(field.localName); + break; + case "map": + req.push(field.localName); + break; } } } + this.data = { req, known, oneofs: Object.values(oneofs) }; } - return versions; - } - __name(findAllVersions, "findAllVersions"); - exports2.findAllVersions = findAllVersions; - function getManifestFromRepo(owner, repo, auth, branch = "master") { - return __awaiter3(this, void 0, void 0, function* () { - let releases = []; - const treeUrl = `https://api.github.com/repos/${owner}/${repo}/git/trees/${branch}`; - const http = new httpm.HttpClient("tool-cache"); - const headers = {}; - if (auth) { - core2.debug("set auth"); - headers.authorization = auth; + /** + * Is the argument a valid message as specified by the + * reflection information? + * + * Checks all field types recursively. The `depth` + * specifies how deep into the structure the check will be. + * + * With a depth of 0, only the presence of fields + * is checked. + * + * With a depth of 1 or more, the field types are checked. + * + * With a depth of 2 or more, the members of map, repeated + * and message fields are checked. 
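The oneof helpers above operate on plain objects discriminated by `oneofKind`. A short sketch of the invariants they maintain, assuming the `UnknownOneofGroup` type and these helpers are exported from the package root:

```typescript
import {
  isOneofGroup,
  setUnknownOneofValue,
  clearOneofValue,
  getSelectedOneofValue,
  type UnknownOneofGroup,
} from "@protobuf-ts/runtime";

const group: UnknownOneofGroup = { oneofKind: undefined };
console.log(isOneofGroup(group));          // true: a valid group with no member selected
setUnknownOneofValue(group, "ok", "done"); // selects "ok", removing any previous member
console.log(getSelectedOneofValue(group)); // "done"
clearOneofValue(group);                    // back to { oneofKind: undefined }
```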
+ * + * Message fields will be checked recursively with depth - 1. + * + * The number of map entries / repeated values being checked + * is < depth. + */ + is(message, depth, allowExcessProperties = false) { + if (depth < 0) + return true; + if (message === null || message === void 0 || typeof message != "object") + return false; + this.prepare(); + let keys = Object.keys(message), data = this.data; + if (keys.length < data.req.length || data.req.some((n) => !keys.includes(n))) + return false; + if (!allowExcessProperties) { + if (keys.some((k) => !data.known.includes(k))) + return false; } - const response = yield http.getJson(treeUrl, headers); - if (!response.result) { - return releases; + if (depth < 1) { + return true; } - let manifestUrl = ""; - for (const item of response.result.tree) { - if (item.path === "versions-manifest.json") { - manifestUrl = item.url; + for (const name of data.oneofs) { + const group = message[name]; + if (!oneof_1.isOneofGroup(group)) + return false; + if (group.oneofKind === void 0) + continue; + const field = this.fields.find((f) => f.localName === group.oneofKind); + if (!field) + return false; + if (!this.field(group[group.oneofKind], field, allowExcessProperties, depth)) + return false; + } + for (const field of this.fields) { + if (field.oneof !== void 0) + continue; + if (!this.field(message[field.localName], field, allowExcessProperties, depth)) + return false; + } + return true; + } + field(arg, field, allowExcessProperties, depth) { + let repeated = field.repeat; + switch (field.kind) { + case "scalar": + if (arg === void 0) + return field.opt; + if (repeated) + return this.scalars(arg, field.T, depth, field.L); + return this.scalar(arg, field.T, field.L); + case "enum": + if (arg === void 0) + return field.opt; + if (repeated) + return this.scalars(arg, reflection_info_1.ScalarType.INT32, depth); + return this.scalar(arg, reflection_info_1.ScalarType.INT32); + case "message": + if (arg === void 0) + return true; + if (repeated) + return this.messages(arg, field.T(), allowExcessProperties, depth); + return this.message(arg, field.T(), allowExcessProperties, depth); + case "map": + if (typeof arg != "object" || arg === null) + return false; + if (depth < 2) + return true; + if (!this.mapKeys(arg, field.K, depth)) + return false; + switch (field.V.kind) { + case "scalar": + return this.scalars(Object.values(arg), field.V.T, depth, field.V.L); + case "enum": + return this.scalars(Object.values(arg), reflection_info_1.ScalarType.INT32, depth); + case "message": + return this.messages(Object.values(arg), field.V.T(), allowExcessProperties, depth); + } break; - } } - headers["accept"] = "application/vnd.github.VERSION.raw"; - let versionsRaw = yield (yield http.get(manifestUrl, headers)).readBody(); - if (versionsRaw) { - versionsRaw = versionsRaw.replace(/^\uFEFF/, ""); - try { - releases = JSON.parse(versionsRaw); - } catch (_a) { - core2.debug("Invalid json"); - } + return true; + } + message(arg, type, allowExcessProperties, depth) { + if (allowExcessProperties) { + return type.isAssignable(arg, depth); } - return releases; - }); - } - __name(getManifestFromRepo, "getManifestFromRepo"); - exports2.getManifestFromRepo = getManifestFromRepo; - function findFromManifest(versionSpec, stable, manifest, archFilter = os.arch()) { - return __awaiter3(this, void 0, void 0, function* () { - const match = yield mm._findMatch(versionSpec, stable, manifest, archFilter); - return match; - }); - } - __name(findFromManifest, "findFromManifest"); - 
exports2.findFromManifest = findFromManifest; - function _createExtractFolder(dest) { - return __awaiter3(this, void 0, void 0, function* () { - if (!dest) { - dest = path2.join(_getTempDirectory(), v4_1.default()); + return type.is(arg, depth); + } + messages(arg, type, allowExcessProperties, depth) { + if (!Array.isArray(arg)) + return false; + if (depth < 2) + return true; + if (allowExcessProperties) { + for (let i = 0; i < arg.length && i < depth; i++) + if (!type.isAssignable(arg[i], depth - 1)) + return false; + } else { + for (let i = 0; i < arg.length && i < depth; i++) + if (!type.is(arg[i], depth - 1)) + return false; } - yield io2.mkdirP(dest); - return dest; - }); - } - __name(_createExtractFolder, "_createExtractFolder"); - function _createToolPath(tool, version4, arch) { - return __awaiter3(this, void 0, void 0, function* () { - const folderPath = path2.join(_getCacheDirectory(), tool, semver.clean(version4) || version4, arch || ""); - core2.debug(`destination ${folderPath}`); - const markerPath = `${folderPath}.complete`; - yield io2.rmRF(folderPath); - yield io2.rmRF(markerPath); - yield io2.mkdirP(folderPath); - return folderPath; - }); - } - __name(_createToolPath, "_createToolPath"); - function _completeToolPath(tool, version4, arch) { - const folderPath = path2.join(_getCacheDirectory(), tool, semver.clean(version4) || version4, arch || ""); - const markerPath = `${folderPath}.complete`; - fs2.writeFileSync(markerPath, ""); - core2.debug("finished caching tool"); - } - __name(_completeToolPath, "_completeToolPath"); - function isExplicitVersion(versionSpec) { - const c = semver.clean(versionSpec) || ""; - core2.debug(`isExplicit: ${c}`); - const valid = semver.valid(c) != null; - core2.debug(`explicit? ${valid}`); - return valid; - } - __name(isExplicitVersion, "isExplicitVersion"); - exports2.isExplicitVersion = isExplicitVersion; - function evaluateVersions(versions, versionSpec) { - let version4 = ""; - core2.debug(`evaluating ${versions.length} versions`); - versions = versions.sort((a, b) => { - if (semver.gt(a, b)) { - return 1; + return true; + } + scalar(arg, type, longType) { + let argType = typeof arg; + switch (type) { + case reflection_info_1.ScalarType.UINT64: + case reflection_info_1.ScalarType.FIXED64: + case reflection_info_1.ScalarType.INT64: + case reflection_info_1.ScalarType.SFIXED64: + case reflection_info_1.ScalarType.SINT64: + switch (longType) { + case reflection_info_1.LongType.BIGINT: + return argType == "bigint"; + case reflection_info_1.LongType.NUMBER: + return argType == "number" && !isNaN(arg); + default: + return argType == "string"; + } + case reflection_info_1.ScalarType.BOOL: + return argType == "boolean"; + case reflection_info_1.ScalarType.STRING: + return argType == "string"; + case reflection_info_1.ScalarType.BYTES: + return arg instanceof Uint8Array; + case reflection_info_1.ScalarType.DOUBLE: + case reflection_info_1.ScalarType.FLOAT: + return argType == "number" && !isNaN(arg); + default: + return argType == "number" && Number.isInteger(arg); } - return -1; - }); - for (let i = versions.length - 1; i >= 0; i--) { - const potential = versions[i]; - const satisfied = semver.satisfies(potential, versionSpec); - if (satisfied) { - version4 = potential; - break; + } + scalars(arg, type, depth, longType) { + if (!Array.isArray(arg)) + return false; + if (depth < 2) + return true; + if (Array.isArray(arg)) { + for (let i = 0; i < arg.length && i < depth; i++) + if (!this.scalar(arg[i], type, longType)) + return false; } + return 
true; } - if (version4) { - core2.debug(`matched: ${version4}`); - } else { - core2.debug("match not found"); + mapKeys(map, type, depth) { + let keys = Object.keys(map); + switch (type) { + case reflection_info_1.ScalarType.INT32: + case reflection_info_1.ScalarType.FIXED32: + case reflection_info_1.ScalarType.SFIXED32: + case reflection_info_1.ScalarType.SINT32: + case reflection_info_1.ScalarType.UINT32: + return this.scalars(keys.slice(0, depth).map((k) => parseInt(k)), type, depth); + case reflection_info_1.ScalarType.BOOL: + return this.scalars(keys.slice(0, depth).map((k) => k == "true" ? true : k == "false" ? false : k), type, depth); + default: + return this.scalars(keys, type, depth, reflection_info_1.LongType.STRING); + } } - return version4; - } - __name(evaluateVersions, "evaluateVersions"); - exports2.evaluateVersions = evaluateVersions; - function _getCacheDirectory() { - const cacheDirectory = process.env["RUNNER_TOOL_CACHE"] || ""; - assert_1.ok(cacheDirectory, "Expected RUNNER_TOOL_CACHE to be defined"); - return cacheDirectory; - } - __name(_getCacheDirectory, "_getCacheDirectory"); - function _getTempDirectory() { - const tempDirectory = process.env["RUNNER_TEMP"] || ""; - assert_1.ok(tempDirectory, "Expected RUNNER_TEMP to be defined"); - return tempDirectory; - } - __name(_getTempDirectory, "_getTempDirectory"); - function _getGlobal(key, defaultValue) { - const value = global[key]; - return value !== void 0 ? value : defaultValue; - } - __name(_getGlobal, "_getGlobal"); - function _unique(values) { - return Array.from(new Set(values)); - } - __name(_unique, "_unique"); + }; + exports2.ReflectionTypeCheck = ReflectionTypeCheck; } }); -// ../node_modules/@actions/artifact/lib/internal/shared/config.js -var require_config = __commonJS({ - "../node_modules/@actions/artifact/lib/internal/shared/config.js"(exports2) { +// ../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-long-convert.js +var require_reflection_long_convert = __commonJS({ + "../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-long-convert.js"(exports2) { "use strict"; - var __importDefault2 = exports2 && exports2.__importDefault || function(mod) { - return mod && mod.__esModule ? 
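`ReflectionTypeCheck` backs `MessageType.is()`. The depth semantics documented above can be exercised with a hand-built message type; the `example.Point` type below is hypothetical (generated code normally supplies these field descriptors):

```typescript
import { MessageType, ScalarType, RepeatType } from "@protobuf-ts/runtime";

interface Point {
  x: number;
  y: number;
  tags: string[];
}

const Point = new MessageType<Point>("example.Point", [
  { no: 1, name: "x", kind: "scalar", T: ScalarType.INT32 },
  { no: 2, name: "y", kind: "scalar", T: ScalarType.INT32 },
  { no: 3, name: "tags", kind: "scalar", T: ScalarType.STRING, repeat: RepeatType.UNPACKED },
]);

const candidate = { x: 1, y: 2, tags: ["a"] };
console.log(Point.is(candidate, 0)); // true: depth 0 checks field presence only
console.log(Point.is(candidate, 2)); // true: depth >= 1 checks field types as well
console.log(Point.is({ x: "no", y: 2, tags: [] }, 2)); // false: x is not a number
```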
mod : { "default": mod }; - }; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.getUploadChunkTimeout = exports2.getConcurrency = exports2.getGitHubWorkspaceDir = exports2.isGhes = exports2.getResultsServiceUrl = exports2.getRuntimeToken = exports2.getUploadChunkSize = void 0; - var os_1 = __importDefault2(require("os")); - function getUploadChunkSize() { - return 8 * 1024 * 1024; - } - __name(getUploadChunkSize, "getUploadChunkSize"); - exports2.getUploadChunkSize = getUploadChunkSize; - function getRuntimeToken() { - const token = process.env["ACTIONS_RUNTIME_TOKEN"]; - if (!token) { - throw new Error("Unable to get the ACTIONS_RUNTIME_TOKEN env variable"); - } - return token; - } - __name(getRuntimeToken, "getRuntimeToken"); - exports2.getRuntimeToken = getRuntimeToken; - function getResultsServiceUrl() { - const resultsUrl = process.env["ACTIONS_RESULTS_URL"]; - if (!resultsUrl) { - throw new Error("Unable to get the ACTIONS_RESULTS_URL env variable"); - } - return new URL(resultsUrl).origin; - } - __name(getResultsServiceUrl, "getResultsServiceUrl"); - exports2.getResultsServiceUrl = getResultsServiceUrl; - function isGhes() { - const ghUrl = new URL(process.env["GITHUB_SERVER_URL"] || "https://github.com"); - const hostname = ghUrl.hostname.trimEnd().toUpperCase(); - const isGitHubHost = hostname === "GITHUB.COM"; - const isGheHost = hostname.endsWith(".GHE.COM"); - const isLocalHost = hostname.endsWith(".LOCALHOST"); - return !isGitHubHost && !isGheHost && !isLocalHost; - } - __name(isGhes, "isGhes"); - exports2.isGhes = isGhes; - function getGitHubWorkspaceDir() { - const ghWorkspaceDir = process.env["GITHUB_WORKSPACE"]; - if (!ghWorkspaceDir) { - throw new Error("Unable to get the GITHUB_WORKSPACE env variable"); - } - return ghWorkspaceDir; - } - __name(getGitHubWorkspaceDir, "getGitHubWorkspaceDir"); - exports2.getGitHubWorkspaceDir = getGitHubWorkspaceDir; - function getConcurrency() { - const numCPUs = os_1.default.cpus().length; - if (numCPUs <= 4) { - return 32; + exports2.reflectionLongConvert = void 0; + var reflection_info_1 = require_reflection_info(); + function reflectionLongConvert(long, type) { + switch (type) { + case reflection_info_1.LongType.BIGINT: + return long.toBigInt(); + case reflection_info_1.LongType.NUMBER: + return long.toNumber(); + default: + return long.toString(); } - const concurrency = 16 * numCPUs; - return concurrency > 300 ? 
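`reflectionLongConvert` is the single point where a decoded 64-bit value is surfaced in whichever representation the field declared. A sketch, assuming `reflectionLongConvert`, `LongType`, `PbLong` and `PbULong` are all re-exported from the package root:

```typescript
import { reflectionLongConvert, LongType, PbLong, PbULong } from "@protobuf-ts/runtime";

console.log(reflectionLongConvert(PbULong.from("18446744073709551615"), LongType.STRING));
// "18446744073709551615" (the default; safe in every environment)
console.log(reflectionLongConvert(PbLong.from(-42), LongType.NUMBER));
// -42 (only valid while the value stays a safe JS integer)
console.log(reflectionLongConvert(PbLong.from("9007199254740993"), LongType.BIGINT));
// 9007199254740993n (requires BigInt support)
```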
300 : concurrency; - } - __name(getConcurrency, "getConcurrency"); - exports2.getConcurrency = getConcurrency; - function getUploadChunkTimeout() { - return 3e5; } - __name(getUploadChunkTimeout, "getUploadChunkTimeout"); - exports2.getUploadChunkTimeout = getUploadChunkTimeout; + __name(reflectionLongConvert, "reflectionLongConvert"); + exports2.reflectionLongConvert = reflectionLongConvert; } }); -// ../node_modules/@protobuf-ts/runtime/build/commonjs/json-typings.js -var require_json_typings = __commonJS({ - "../node_modules/@protobuf-ts/runtime/build/commonjs/json-typings.js"(exports2) { +// ../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-json-reader.js +var require_reflection_json_reader = __commonJS({ + "../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-json-reader.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.isJsonObject = exports2.typeofJsonValue = void 0; - function typeofJsonValue(value) { - let t = typeof value; - if (t == "object") { - if (Array.isArray(value)) - return "array"; - if (value === null) - return "null"; + exports2.ReflectionJsonReader = void 0; + var json_typings_1 = require_json_typings(); + var base64_1 = require_base642(); + var reflection_info_1 = require_reflection_info(); + var pb_long_1 = require_pb_long(); + var assert_1 = require_assert(); + var reflection_long_convert_1 = require_reflection_long_convert(); + var ReflectionJsonReader = class { + static { + __name(this, "ReflectionJsonReader"); } - return t; - } - __name(typeofJsonValue, "typeofJsonValue"); - exports2.typeofJsonValue = typeofJsonValue; - function isJsonObject(value) { - return value !== null && typeof value == "object" && !Array.isArray(value); - } - __name(isJsonObject, "isJsonObject"); - exports2.isJsonObject = isJsonObject; + constructor(info) { + this.info = info; + } + prepare() { + var _a; + if (this.fMap === void 0) { + this.fMap = {}; + const fieldsInput = (_a = this.info.fields) !== null && _a !== void 0 ? _a : []; + for (const field of fieldsInput) { + this.fMap[field.name] = field; + this.fMap[field.jsonName] = field; + this.fMap[field.localName] = field; + } + } + } + // Cannot parse JSON for #. + assert(condition, fieldName, jsonValue) { + if (!condition) { + let what = json_typings_1.typeofJsonValue(jsonValue); + if (what == "number" || what == "boolean") + what = jsonValue.toString(); + throw new Error(`Cannot parse JSON ${what} for ${this.info.typeName}#${fieldName}`); + } + } + /** + * Reads a message from canonical JSON format into the target message. + * + * Repeated fields are appended. Map entries are added, overwriting + * existing keys. + * + * If a message field is already present, it will be merged with the + * new data. + */ + read(input, message, options) { + this.prepare(); + const oneofsHandled = []; + for (const [jsonKey, jsonValue] of Object.entries(input)) { + const field = this.fMap[jsonKey]; + if (!field) { + if (!options.ignoreUnknownFields) + throw new Error(`Found unknown field while reading ${this.info.typeName} from JSON format. 
JSON key: ${jsonKey}`); + continue; + } + const localName = field.localName; + let target; + if (field.oneof) { + if (jsonValue === null && (field.kind !== "enum" || field.T()[0] !== "google.protobuf.NullValue")) { + continue; + } + if (oneofsHandled.includes(field.oneof)) + throw new Error(`Multiple members of the oneof group "${field.oneof}" of ${this.info.typeName} are present in JSON.`); + oneofsHandled.push(field.oneof); + target = message[field.oneof] = { + oneofKind: localName + }; + } else { + target = message; + } + if (field.kind == "map") { + if (jsonValue === null) { + continue; + } + this.assert(json_typings_1.isJsonObject(jsonValue), field.name, jsonValue); + const fieldObj = target[localName]; + for (const [jsonObjKey, jsonObjValue] of Object.entries(jsonValue)) { + this.assert(jsonObjValue !== null, field.name + " map value", null); + let val; + switch (field.V.kind) { + case "message": + val = field.V.T().internalJsonRead(jsonObjValue, options); + break; + case "enum": + val = this.enum(field.V.T(), jsonObjValue, field.name, options.ignoreUnknownFields); + if (val === false) + continue; + break; + case "scalar": + val = this.scalar(jsonObjValue, field.V.T, field.V.L, field.name); + break; + } + this.assert(val !== void 0, field.name + " map value", jsonObjValue); + let key = jsonObjKey; + if (field.K == reflection_info_1.ScalarType.BOOL) + key = key == "true" ? true : key == "false" ? false : key; + key = this.scalar(key, field.K, reflection_info_1.LongType.STRING, field.name).toString(); + fieldObj[key] = val; + } + } else if (field.repeat) { + if (jsonValue === null) + continue; + this.assert(Array.isArray(jsonValue), field.name, jsonValue); + const fieldArr = target[localName]; + for (const jsonItem of jsonValue) { + this.assert(jsonItem !== null, field.name, null); + let val; + switch (field.kind) { + case "message": + val = field.T().internalJsonRead(jsonItem, options); + break; + case "enum": + val = this.enum(field.T(), jsonItem, field.name, options.ignoreUnknownFields); + if (val === false) + continue; + break; + case "scalar": + val = this.scalar(jsonItem, field.T, field.L, field.name); + break; + } + this.assert(val !== void 0, field.name, jsonValue); + fieldArr.push(val); + } + } else { + switch (field.kind) { + case "message": + if (jsonValue === null && field.T().typeName != "google.protobuf.Value") { + this.assert(field.oneof === void 0, field.name + " (oneof member)", null); + continue; + } + target[localName] = field.T().internalJsonRead(jsonValue, options, target[localName]); + break; + case "enum": + let val = this.enum(field.T(), jsonValue, field.name, options.ignoreUnknownFields); + if (val === false) + continue; + target[localName] = val; + break; + case "scalar": + target[localName] = this.scalar(jsonValue, field.T, field.L, field.name); + break; + } + } + } + } + /** + * Returns `false` for unrecognized string representations. + * + * google.protobuf.NullValue accepts only JSON `null` (or the old `"NULL_VALUE"`). 
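The merge semantics documented on `read()` (repeated fields appended, map entries overwritten, message fields merged) surface through `MessageType.fromJson` and `internalJsonRead`. A sketch reusing the hypothetical `Point` type from the type-check example, and assuming `jsonReadOptions()` is the exported helper that fills in default read options:

```typescript
import { jsonReadOptions } from "@protobuf-ts/runtime";

const a = Point.fromJson({ x: 1, tags: ["a"] }); // absent fields keep their defaults
console.log(a); // { x: 1, y: 0, tags: ["a"] }

// Reading into an existing message merges: repeated fields are appended.
Point.internalJsonRead({ y: 2, tags: ["b"] }, jsonReadOptions(), a);
console.log(a); // { x: 1, y: 2, tags: ["a", "b"] }

// Unknown JSON keys throw unless explicitly ignored.
Point.fromJson({ nope: true }, { ignoreUnknownFields: true }); // ok, key is skipped
```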
+ */ + enum(type, json, fieldName, ignoreUnknownFields) { + if (type[0] == "google.protobuf.NullValue") + assert_1.assert(json === null || json === "NULL_VALUE", `Unable to parse field ${this.info.typeName}#${fieldName}, enum ${type[0]} only accepts null.`); + if (json === null) + return 0; + switch (typeof json) { + case "number": + assert_1.assert(Number.isInteger(json), `Unable to parse field ${this.info.typeName}#${fieldName}, enum can only be integral number, got ${json}.`); + return json; + case "string": + let localEnumName = json; + if (type[2] && json.substring(0, type[2].length) === type[2]) + localEnumName = json.substring(type[2].length); + let enumNumber = type[1][localEnumName]; + if (typeof enumNumber === "undefined" && ignoreUnknownFields) { + return false; + } + assert_1.assert(typeof enumNumber == "number", `Unable to parse field ${this.info.typeName}#${fieldName}, enum ${type[0]} has no value for "${json}".`); + return enumNumber; + } + assert_1.assert(false, `Unable to parse field ${this.info.typeName}#${fieldName}, cannot parse enum value from ${typeof json}".`); + } + scalar(json, type, longType, fieldName) { + let e; + try { + switch (type) { + // float, double: JSON value will be a number or one of the special string values "NaN", "Infinity", and "-Infinity". + // Either numbers or strings are accepted. Exponent notation is also accepted. + case reflection_info_1.ScalarType.DOUBLE: + case reflection_info_1.ScalarType.FLOAT: + if (json === null) + return 0; + if (json === "NaN") + return Number.NaN; + if (json === "Infinity") + return Number.POSITIVE_INFINITY; + if (json === "-Infinity") + return Number.NEGATIVE_INFINITY; + if (json === "") { + e = "empty string"; + break; + } + if (typeof json == "string" && json.trim().length !== json.length) { + e = "extra whitespace"; + break; + } + if (typeof json != "string" && typeof json != "number") { + break; + } + let float = Number(json); + if (Number.isNaN(float)) { + e = "not a number"; + break; + } + if (!Number.isFinite(float)) { + e = "too large or small"; + break; + } + if (type == reflection_info_1.ScalarType.FLOAT) + assert_1.assertFloat32(float); + return float; + // int32, fixed32, uint32: JSON value will be a decimal number. Either numbers or strings are accepted. + case reflection_info_1.ScalarType.INT32: + case reflection_info_1.ScalarType.FIXED32: + case reflection_info_1.ScalarType.SFIXED32: + case reflection_info_1.ScalarType.SINT32: + case reflection_info_1.ScalarType.UINT32: + if (json === null) + return 0; + let int32; + if (typeof json == "number") + int32 = json; + else if (json === "") + e = "empty string"; + else if (typeof json == "string") { + if (json.trim().length !== json.length) + e = "extra whitespace"; + else + int32 = Number(json); + } + if (int32 === void 0) + break; + if (type == reflection_info_1.ScalarType.UINT32) + assert_1.assertUInt32(int32); + else + assert_1.assertInt32(int32); + return int32; + // int64, fixed64, uint64: JSON value will be a decimal string. Either numbers or strings are accepted. 
+ case reflection_info_1.ScalarType.INT64: + case reflection_info_1.ScalarType.SFIXED64: + case reflection_info_1.ScalarType.SINT64: + if (json === null) + return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbLong.ZERO, longType); + if (typeof json != "number" && typeof json != "string") + break; + return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbLong.from(json), longType); + case reflection_info_1.ScalarType.FIXED64: + case reflection_info_1.ScalarType.UINT64: + if (json === null) + return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbULong.ZERO, longType); + if (typeof json != "number" && typeof json != "string") + break; + return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbULong.from(json), longType); + // bool: + case reflection_info_1.ScalarType.BOOL: + if (json === null) + return false; + if (typeof json !== "boolean") + break; + return json; + // string: + case reflection_info_1.ScalarType.STRING: + if (json === null) + return ""; + if (typeof json !== "string") { + e = "extra whitespace"; + break; + } + try { + encodeURIComponent(json); + } catch (e2) { + e2 = "invalid UTF8"; + break; + } + return json; + // bytes: JSON value will be the data encoded as a string using standard base64 encoding with paddings. + // Either standard or URL-safe base64 encoding with/without paddings are accepted. + case reflection_info_1.ScalarType.BYTES: + if (json === null || json === "") + return new Uint8Array(0); + if (typeof json !== "string") + break; + return base64_1.base64decode(json); + } + } catch (error) { + e = error.message; + } + this.assert(false, fieldName + (e ? " - " + e : ""), json); + } + }; + exports2.ReflectionJsonReader = ReflectionJsonReader; } }); -// ../node_modules/@protobuf-ts/runtime/build/commonjs/base64.js -var require_base642 = __commonJS({ - "../node_modules/@protobuf-ts/runtime/build/commonjs/base64.js"(exports2) { +// ../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-json-writer.js +var require_reflection_json_writer = __commonJS({ + "../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-json-writer.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.base64encode = exports2.base64decode = void 0; - var encTable = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/".split(""); - var decTable = []; - for (let i = 0; i < encTable.length; i++) - decTable[encTable[i].charCodeAt(0)] = i; - decTable["-".charCodeAt(0)] = encTable.indexOf("+"); - decTable["_".charCodeAt(0)] = encTable.indexOf("/"); - function base64decode(base64Str) { - let es = base64Str.length * 3 / 4; - if (base64Str[base64Str.length - 2] == "=") - es -= 2; - else if (base64Str[base64Str.length - 1] == "=") - es -= 1; - let bytes = new Uint8Array(es), bytePos = 0, groupPos = 0, b, p = 0; - for (let i = 0; i < base64Str.length; i++) { - b = decTable[base64Str.charCodeAt(i)]; - if (b === void 0) { - switch (base64Str[i]) { - case "=": - groupPos = 0; - // reset state when padding found - case "\n": - case "\r": - case " ": - case " ": - continue; - // skip white-space, and padding - default: - throw Error(`invalid base64 string.`); + exports2.ReflectionJsonWriter = void 0; + var base64_1 = require_base642(); + var pb_long_1 = require_pb_long(); + var reflection_info_1 = require_reflection_info(); + var assert_1 = require_assert(); + var ReflectionJsonWriter = class { + static { + __name(this, "ReflectionJsonWriter"); + } + constructor(info) { + var _a; + 
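The scalar branches above implement the canonical proto3 JSON rules: floats accept numbers, numeric strings and the sentinels `"NaN"`, `"Infinity"` and `"-Infinity"`; 64-bit integers accept numbers or decimal strings and surface as decimal strings by default. A sketch with a hypothetical `example.Metrics` type:

```typescript
import { MessageType, ScalarType } from "@protobuf-ts/runtime";

interface Metrics {
  ratio: number; // double
  total: string; // int64, surfaced as a decimal string by default
}

const Metrics = new MessageType<Metrics>("example.Metrics", [
  { no: 1, name: "ratio", kind: "scalar", T: ScalarType.DOUBLE },
  { no: 2, name: "total", kind: "scalar", T: ScalarType.INT64 },
]);

console.log(Metrics.fromJson({ ratio: "NaN", total: 5 }));
// { ratio: NaN, total: "5" }
console.log(Metrics.fromJson({ ratio: "2.5e3", total: "77" }));
// { ratio: 2500, total: "77" }
// Values like " 1" (extra whitespace) or "" make fromJson throw.
```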
this.fields = (_a = info.fields) !== null && _a !== void 0 ? _a : [];
+ }
+ /**
+ * Converts the message to a JSON object, based on the field descriptors.
+ */
+ write(message, options) {
+ const json = {}, source = message;
+ for (const field of this.fields) {
+ if (!field.oneof) {
+ let jsonValue2 = this.field(field, source[field.localName], options);
+ if (jsonValue2 !== void 0)
+ json[options.useProtoFieldName ? field.name : field.jsonName] = jsonValue2;
+ continue;
+ }
+ const group = source[field.oneof];
+ if (group.oneofKind !== field.localName)
+ continue;
+ const opt = field.kind == "scalar" || field.kind == "enum" ? Object.assign(Object.assign({}, options), { emitDefaultValues: true }) : options;
+ let jsonValue = this.field(field, group[field.localName], opt);
+ assert_1.assert(jsonValue !== void 0);
+ json[options.useProtoFieldName ? field.name : field.jsonName] = jsonValue;
+ }
+ return json;
+ }
+ field(field, value, options) {
+ let jsonValue = void 0;
+ if (field.kind == "map") {
+ assert_1.assert(typeof value == "object" && value !== null);
+ const jsonObj = {};
+ switch (field.V.kind) {
+ case "scalar":
+ for (const [entryKey, entryValue] of Object.entries(value)) {
+ const val = this.scalar(field.V.T, entryValue, field.name, false, true);
+ assert_1.assert(val !== void 0);
+ jsonObj[entryKey.toString()] = val;
+ }
+ break;
+ case "message":
+ const messageType = field.V.T();
+ for (const [entryKey, entryValue] of Object.entries(value)) {
+ const val = this.message(messageType, entryValue, field.name, options);
+ assert_1.assert(val !== void 0);
+ jsonObj[entryKey.toString()] = val;
+ }
+ break;
+ case "enum":
+ const enumInfo = field.V.T();
+ for (const [entryKey, entryValue] of Object.entries(value)) {
+ assert_1.assert(entryValue === void 0 || typeof entryValue == "number");
+ const val = this.enum(enumInfo, entryValue, field.name, false, true, options.enumAsInteger);
+ assert_1.assert(val !== void 0);
+ jsonObj[entryKey.toString()] = val;
+ }
+ break;
+ }
+ if (options.emitDefaultValues || Object.keys(jsonObj).length > 0)
+ jsonValue = jsonObj;
+ } else if (field.repeat) {
+ assert_1.assert(Array.isArray(value));
+ const jsonArr = [];
+ switch (field.kind) {
+ case "scalar":
+ for (let i = 0; i < value.length; i++) {
+ const val = this.scalar(field.T, value[i], field.name, field.opt, true);
+ assert_1.assert(val !== void 0);
+ jsonArr.push(val);
+ }
+ break;
+ case "enum":
+ const enumInfo = field.T();
+ for (let i = 0; i < value.length; i++) {
+ assert_1.assert(value[i] === void 0 || typeof value[i] == "number");
+ const val = this.enum(enumInfo, value[i], field.name, field.opt, true, options.enumAsInteger);
+ assert_1.assert(val !== void 0);
+ jsonArr.push(val);
+ }
+ break;
+ case "message":
+ const messageType = field.T();
+ for (let i = 0; i < value.length; i++) {
+ const val = this.message(messageType, value[i], field.name, options);
+ assert_1.assert(val !== void 0);
+ jsonArr.push(val);
+ }
+ break;
+ }
+ if (options.emitDefaultValues || jsonArr.length > 0)
+ jsonValue = jsonArr;
+ } else {
+ switch (field.kind) {
+ case "scalar":
+ jsonValue = this.scalar(field.T, value, field.name, field.opt, options.emitDefaultValues);
+ break;
+ case "enum":
+ jsonValue = this.enum(field.T(), value, field.name, field.opt, options.emitDefaultValues, options.enumAsInteger);
+ break;
+ case "message":
+ jsonValue = this.message(field.T(), value, field.name, options);
+ break;
+ }
+ }
- switch (groupPos) {
- case 0:
- p = b;
- groupPos = 1;
- break;
- 
case 1: - bytes[bytePos++] = p << 2 | (b & 48) >> 4; - p = b; - groupPos = 2; - break; - case 2: - bytes[bytePos++] = (p & 15) << 4 | (b & 60) >> 2; - p = b; - groupPos = 3; - break; - case 3: - bytes[bytePos++] = (p & 3) << 6 | b; - groupPos = 0; - break; - } + return jsonValue; } - if (groupPos == 1) - throw Error(`invalid base64 string.`); - return bytes.subarray(0, bytePos); - } - __name(base64decode, "base64decode"); - exports2.base64decode = base64decode; - function base64encode(bytes) { - let base64 = "", groupPos = 0, b, p = 0; - for (let i = 0; i < bytes.length; i++) { - b = bytes[i]; - switch (groupPos) { - case 0: - base64 += encTable[b >> 2]; - p = (b & 3) << 4; - groupPos = 1; - break; - case 1: - base64 += encTable[p | b >> 4]; - p = (b & 15) << 2; - groupPos = 2; - break; - case 2: - base64 += encTable[p | b >> 6]; - base64 += encTable[b & 63]; - groupPos = 0; - break; + /** + * Returns `null` as the default for google.protobuf.NullValue. + */ + enum(type, value, fieldName, optional, emitDefaultValues, enumAsInteger) { + if (type[0] == "google.protobuf.NullValue") + return !emitDefaultValues && !optional ? void 0 : null; + if (value === void 0) { + assert_1.assert(optional); + return void 0; } + if (value === 0 && !emitDefaultValues && !optional) + return void 0; + assert_1.assert(typeof value == "number"); + assert_1.assert(Number.isInteger(value)); + if (enumAsInteger || !type[1].hasOwnProperty(value)) + return value; + if (type[2]) + return type[2] + type[1][value]; + return type[1][value]; } - if (groupPos) { - base64 += encTable[p]; - base64 += "="; - if (groupPos == 1) - base64 += "="; + message(type, value, fieldName, options) { + if (value === void 0) + return options.emitDefaultValues ? null : void 0; + return type.internalJsonWrite(value, options); } - return base64; - } - __name(base64encode, "base64encode"); - exports2.base64encode = base64encode; - } -}); - -// ../node_modules/@protobuf-ts/runtime/build/commonjs/protobufjs-utf8.js -var require_protobufjs_utf8 = __commonJS({ - "../node_modules/@protobuf-ts/runtime/build/commonjs/protobufjs-utf8.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.utf8read = void 0; - var fromCharCodes = /* @__PURE__ */ __name((chunk) => String.fromCharCode.apply(String, chunk), "fromCharCodes"); - function utf8read(bytes) { - if (bytes.length < 1) - return ""; - let pos = 0, parts = [], chunk = [], i = 0, t; - let len = bytes.length; - while (pos < len) { - t = bytes[pos++]; - if (t < 128) - chunk[i++] = t; - else if (t > 191 && t < 224) - chunk[i++] = (t & 31) << 6 | bytes[pos++] & 63; - else if (t > 239 && t < 365) { - t = ((t & 7) << 18 | (bytes[pos++] & 63) << 12 | (bytes[pos++] & 63) << 6 | bytes[pos++] & 63) - 65536; - chunk[i++] = 55296 + (t >> 10); - chunk[i++] = 56320 + (t & 1023); - } else - chunk[i++] = (t & 15) << 12 | (bytes[pos++] & 63) << 6 | bytes[pos++] & 63; - if (i > 8191) { - parts.push(fromCharCodes(chunk)); - i = 0; + scalar(type, value, fieldName, optional, emitDefaultValues) { + if (value === void 0) { + assert_1.assert(optional); + return void 0; + } + const ed = emitDefaultValues || optional; + switch (type) { + // int32, fixed32, uint32: JSON value will be a decimal number. Either numbers or strings are accepted. + case reflection_info_1.ScalarType.INT32: + case reflection_info_1.ScalarType.SFIXED32: + case reflection_info_1.ScalarType.SINT32: + if (value === 0) + return ed ? 
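On the writing side, the `emitDefaultValues` plumbing above is what lets `MessageType.toJson` omit zero values unless asked otherwise. Reusing the hypothetical `Point` type:

```typescript
const p = Point.create({ x: 5 }); // y defaults to 0, tags to []
console.log(Point.toJson(p));
// { x: 5 } (default values are omitted)
console.log(Point.toJson(p, { emitDefaultValues: true }));
// { x: 5, y: 0, tags: [] }
```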
0 : void 0; + assert_1.assertInt32(value); + return value; + case reflection_info_1.ScalarType.FIXED32: + case reflection_info_1.ScalarType.UINT32: + if (value === 0) + return ed ? 0 : void 0; + assert_1.assertUInt32(value); + return value; + // float, double: JSON value will be a number or one of the special string values "NaN", "Infinity", and "-Infinity". + // Either numbers or strings are accepted. Exponent notation is also accepted. + case reflection_info_1.ScalarType.FLOAT: + assert_1.assertFloat32(value); + case reflection_info_1.ScalarType.DOUBLE: + if (value === 0) + return ed ? 0 : void 0; + assert_1.assert(typeof value == "number"); + if (Number.isNaN(value)) + return "NaN"; + if (value === Number.POSITIVE_INFINITY) + return "Infinity"; + if (value === Number.NEGATIVE_INFINITY) + return "-Infinity"; + return value; + // string: + case reflection_info_1.ScalarType.STRING: + if (value === "") + return ed ? "" : void 0; + assert_1.assert(typeof value == "string"); + return value; + // bool: + case reflection_info_1.ScalarType.BOOL: + if (value === false) + return ed ? false : void 0; + assert_1.assert(typeof value == "boolean"); + return value; + // JSON value will be a decimal string. Either numbers or strings are accepted. + case reflection_info_1.ScalarType.UINT64: + case reflection_info_1.ScalarType.FIXED64: + assert_1.assert(typeof value == "number" || typeof value == "string" || typeof value == "bigint"); + let ulong = pb_long_1.PbULong.from(value); + if (ulong.isZero() && !ed) + return void 0; + return ulong.toString(); + // JSON value will be a decimal string. Either numbers or strings are accepted. + case reflection_info_1.ScalarType.INT64: + case reflection_info_1.ScalarType.SFIXED64: + case reflection_info_1.ScalarType.SINT64: + assert_1.assert(typeof value == "number" || typeof value == "string" || typeof value == "bigint"); + let long = pb_long_1.PbLong.from(value); + if (long.isZero() && !ed) + return void 0; + return long.toString(); + // bytes: JSON value will be the data encoded as a string using standard base64 encoding with paddings. + // Either standard or URL-safe base64 encoding with/without paddings are accepted. + case reflection_info_1.ScalarType.BYTES: + assert_1.assert(value instanceof Uint8Array); + if (!value.byteLength) + return ed ? "" : void 0; + return base64_1.base64encode(value); } } - if (parts.length) { - if (i) - parts.push(fromCharCodes(chunk.slice(0, i))); - return parts.join(""); - } - return fromCharCodes(chunk.slice(0, i)); - } - __name(utf8read, "utf8read"); - exports2.utf8read = utf8read; + }; + exports2.ReflectionJsonWriter = ReflectionJsonWriter; } }); -// ../node_modules/@protobuf-ts/runtime/build/commonjs/binary-format-contract.js -var require_binary_format_contract = __commonJS({ - "../node_modules/@protobuf-ts/runtime/build/commonjs/binary-format-contract.js"(exports2) { +// ../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-scalar-default.js +var require_reflection_scalar_default = __commonJS({ + "../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-scalar-default.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.WireType = exports2.mergeBinaryOptions = exports2.UnknownFieldHandler = void 0; - var UnknownFieldHandler; - (function(UnknownFieldHandler2) { - UnknownFieldHandler2.symbol = Symbol.for("protobuf-ts/unknown"); - UnknownFieldHandler2.onRead = (typeName, message, fieldNo, wireType, data) => { - let container = is(message) ? 
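`reflectionScalarDefault` supplies the zero value used when a field or map entry is absent; the 64-bit types honor the requested `LongType`. A sketch, assuming the function is re-exported from the package root:

```typescript
import { reflectionScalarDefault, ScalarType, LongType } from "@protobuf-ts/runtime";

console.log(reflectionScalarDefault(ScalarType.STRING)); // ""
console.log(reflectionScalarDefault(ScalarType.BOOL));   // false
console.log(reflectionScalarDefault(ScalarType.BYTES));  // Uint8Array(0)
console.log(reflectionScalarDefault(ScalarType.INT64));  // "0" (LongType.STRING is the default)
console.log(reflectionScalarDefault(ScalarType.UINT64, LongType.BIGINT)); // 0n
```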
message[UnknownFieldHandler2.symbol] : message[UnknownFieldHandler2.symbol] = []; - container.push({ no: fieldNo, wireType, data }); - }; - UnknownFieldHandler2.onWrite = (typeName, message, writer) => { - for (let { no, wireType, data } of UnknownFieldHandler2.list(message)) - writer.tag(no, wireType).raw(data); - }; - UnknownFieldHandler2.list = (message, fieldNo) => { - if (is(message)) { - let all = message[UnknownFieldHandler2.symbol]; - return fieldNo ? all.filter((uf) => uf.no == fieldNo) : all; - } - return []; - }; - UnknownFieldHandler2.last = (message, fieldNo) => UnknownFieldHandler2.list(message, fieldNo).slice(-1)[0]; - const is = /* @__PURE__ */ __name((message) => message && Array.isArray(message[UnknownFieldHandler2.symbol]), "is"); - })(UnknownFieldHandler = exports2.UnknownFieldHandler || (exports2.UnknownFieldHandler = {})); - function mergeBinaryOptions(a, b) { - return Object.assign(Object.assign({}, a), b); + exports2.reflectionScalarDefault = void 0; + var reflection_info_1 = require_reflection_info(); + var reflection_long_convert_1 = require_reflection_long_convert(); + var pb_long_1 = require_pb_long(); + function reflectionScalarDefault(type, longType = reflection_info_1.LongType.STRING) { + switch (type) { + case reflection_info_1.ScalarType.BOOL: + return false; + case reflection_info_1.ScalarType.UINT64: + case reflection_info_1.ScalarType.FIXED64: + return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbULong.ZERO, longType); + case reflection_info_1.ScalarType.INT64: + case reflection_info_1.ScalarType.SFIXED64: + case reflection_info_1.ScalarType.SINT64: + return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbLong.ZERO, longType); + case reflection_info_1.ScalarType.DOUBLE: + case reflection_info_1.ScalarType.FLOAT: + return 0; + case reflection_info_1.ScalarType.BYTES: + return new Uint8Array(0); + case reflection_info_1.ScalarType.STRING: + return ""; + default: + return 0; + } } - __name(mergeBinaryOptions, "mergeBinaryOptions"); - exports2.mergeBinaryOptions = mergeBinaryOptions; - var WireType; - (function(WireType2) { - WireType2[WireType2["Varint"] = 0] = "Varint"; - WireType2[WireType2["Bit64"] = 1] = "Bit64"; - WireType2[WireType2["LengthDelimited"] = 2] = "LengthDelimited"; - WireType2[WireType2["StartGroup"] = 3] = "StartGroup"; - WireType2[WireType2["EndGroup"] = 4] = "EndGroup"; - WireType2[WireType2["Bit32"] = 5] = "Bit32"; - })(WireType = exports2.WireType || (exports2.WireType = {})); + __name(reflectionScalarDefault, "reflectionScalarDefault"); + exports2.reflectionScalarDefault = reflectionScalarDefault; } }); -// ../node_modules/@protobuf-ts/runtime/build/commonjs/goog-varint.js -var require_goog_varint = __commonJS({ - "../node_modules/@protobuf-ts/runtime/build/commonjs/goog-varint.js"(exports2) { +// ../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-binary-reader.js +var require_reflection_binary_reader = __commonJS({ + "../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-binary-reader.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.varint32read = exports2.varint32write = exports2.int64toString = exports2.int64fromString = exports2.varint64write = exports2.varint64read = void 0; - function varint64read() { - let lowBits = 0; - let highBits = 0; - for (let shift = 0; shift < 28; shift += 7) { - let b = this.buf[this.pos++]; - lowBits |= (b & 127) << shift; - if ((b & 128) == 0) { - this.assertBounds(); - return [lowBits, highBits]; 
- } - } - let middleByte = this.buf[this.pos++]; - lowBits |= (middleByte & 15) << 28; - highBits = (middleByte & 112) >> 4; - if ((middleByte & 128) == 0) { - this.assertBounds(); - return [lowBits, highBits]; + exports2.ReflectionBinaryReader = void 0; + var binary_format_contract_1 = require_binary_format_contract(); + var reflection_info_1 = require_reflection_info(); + var reflection_long_convert_1 = require_reflection_long_convert(); + var reflection_scalar_default_1 = require_reflection_scalar_default(); + var ReflectionBinaryReader = class { + static { + __name(this, "ReflectionBinaryReader"); } - for (let shift = 3; shift <= 31; shift += 7) { - let b = this.buf[this.pos++]; - highBits |= (b & 127) << shift; - if ((b & 128) == 0) { - this.assertBounds(); - return [lowBits, highBits]; - } + constructor(info) { + this.info = info; } - throw new Error("invalid varint"); - } - __name(varint64read, "varint64read"); - exports2.varint64read = varint64read; - function varint64write(lo, hi, bytes) { - for (let i = 0; i < 28; i = i + 7) { - const shift = lo >>> i; - const hasNext = !(shift >>> 7 == 0 && hi == 0); - const byte = (hasNext ? shift | 128 : shift) & 255; - bytes.push(byte); - if (!hasNext) { - return; + prepare() { + var _a; + if (!this.fieldNoToField) { + const fieldsInput = (_a = this.info.fields) !== null && _a !== void 0 ? _a : []; + this.fieldNoToField = new Map(fieldsInput.map((field) => [field.no, field])); } } - const splitBits = lo >>> 28 & 15 | (hi & 7) << 4; - const hasMoreBits = !(hi >> 3 == 0); - bytes.push((hasMoreBits ? splitBits | 128 : splitBits) & 255); - if (!hasMoreBits) { - return; - } - for (let i = 3; i < 31; i = i + 7) { - const shift = hi >>> i; - const hasNext = !(shift >>> 7 == 0); - const byte = (hasNext ? shift | 128 : shift) & 255; - bytes.push(byte); - if (!hasNext) { - return; + /** + * Reads a message from binary format into the target message. + * + * Repeated fields are appended. Map entries are added, overwriting + * existing keys. + * + * If a message field is already present, it will be merged with the + * new data. + */ + read(reader, message, options, length) { + this.prepare(); + const end = length === void 0 ? reader.len : reader.pos + length; + while (reader.pos < end) { + const [fieldNo, wireType] = reader.tag(), field = this.fieldNoToField.get(fieldNo); + if (!field) { + let u = options.readUnknownField; + if (u == "throw") + throw new Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.info.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? binary_format_contract_1.UnknownFieldHandler.onRead : u)(this.info.typeName, message, fieldNo, wireType, d); + continue; + } + let target = message, repeated = field.repeat, localName = field.localName; + if (field.oneof) { + target = target[field.oneof]; + if (target.oneofKind !== localName) + target = message[field.oneof] = { + oneofKind: localName + }; + } + switch (field.kind) { + case "scalar": + case "enum": + let T = field.kind == "enum" ? reflection_info_1.ScalarType.INT32 : field.T; + let L = field.kind == "scalar" ? 
field.L : void 0; + if (repeated) { + let arr = target[localName]; + if (wireType == binary_format_contract_1.WireType.LengthDelimited && T != reflection_info_1.ScalarType.STRING && T != reflection_info_1.ScalarType.BYTES) { + let e = reader.uint32() + reader.pos; + while (reader.pos < e) + arr.push(this.scalar(reader, T, L)); + } else + arr.push(this.scalar(reader, T, L)); + } else + target[localName] = this.scalar(reader, T, L); + break; + case "message": + if (repeated) { + let arr = target[localName]; + let msg = field.T().internalBinaryRead(reader, reader.uint32(), options); + arr.push(msg); + } else + target[localName] = field.T().internalBinaryRead(reader, reader.uint32(), options, target[localName]); + break; + case "map": + let [mapKey, mapVal] = this.mapEntry(field, reader, options); + target[localName][mapKey] = mapVal; + break; + } } } - bytes.push(hi >>> 31 & 1); - } - __name(varint64write, "varint64write"); - exports2.varint64write = varint64write; - var TWO_PWR_32_DBL = (1 << 16) * (1 << 16); - function int64fromString(dec) { - let minus = dec[0] == "-"; - if (minus) - dec = dec.slice(1); - const base = 1e6; - let lowBits = 0; - let highBits = 0; - function add1e6digit(begin, end) { - const digit1e6 = Number(dec.slice(begin, end)); - highBits *= base; - lowBits = lowBits * base + digit1e6; - if (lowBits >= TWO_PWR_32_DBL) { - highBits = highBits + (lowBits / TWO_PWR_32_DBL | 0); - lowBits = lowBits % TWO_PWR_32_DBL; + /** + * Read a map field, expecting key field = 1, value field = 2 + */ + mapEntry(field, reader, options) { + let length = reader.uint32(); + let end = reader.pos + length; + let key = void 0; + let val = void 0; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case 1: + if (field.K == reflection_info_1.ScalarType.BOOL) + key = reader.bool().toString(); + else + key = this.scalar(reader, field.K, reflection_info_1.LongType.STRING); + break; + case 2: + switch (field.V.kind) { + case "scalar": + val = this.scalar(reader, field.V.T, field.V.L); + break; + case "enum": + val = reader.int32(); + break; + case "message": + val = field.V.T().internalBinaryRead(reader, reader.uint32(), options); + break; + } + break; + default: + throw new Error(`Unknown field ${fieldNo} (wire type ${wireType}) in map entry for ${this.info.typeName}#${field.name}`); + } } - } - __name(add1e6digit, "add1e6digit"); - add1e6digit(-24, -18); - add1e6digit(-18, -12); - add1e6digit(-12, -6); - add1e6digit(-6); - return [minus, lowBits, highBits]; - } - __name(int64fromString, "int64fromString"); - exports2.int64fromString = int64fromString; - function int64toString(bitsLow, bitsHigh) { - if (bitsHigh >>> 0 <= 2097151) { - return "" + (TWO_PWR_32_DBL * bitsHigh + (bitsLow >>> 0)); - } - let low = bitsLow & 16777215; - let mid = (bitsLow >>> 24 | bitsHigh << 8) >>> 0 & 16777215; - let high = bitsHigh >> 16 & 65535; - let digitA = low + mid * 6777216 + high * 6710656; - let digitB = mid + high * 8147497; - let digitC = high * 2; - let base = 1e7; - if (digitA >= base) { - digitB += Math.floor(digitA / base); - digitA %= base; - } - if (digitB >= base) { - digitC += Math.floor(digitB / base); - digitB %= base; - } - function decimalFrom1e7(digit1e7, needLeadingZeros) { - let partial = digit1e7 ? String(digit1e7) : ""; - if (needLeadingZeros) { - return "0000000".slice(partial.length) + partial; + if (key === void 0) { + let keyRaw = reflection_scalar_default_1.reflectionScalarDefault(field.K); + key = field.K == reflection_info_1.ScalarType.BOOL ? 
keyRaw.toString() : keyRaw; } - return partial; + if (val === void 0) + switch (field.V.kind) { + case "scalar": + val = reflection_scalar_default_1.reflectionScalarDefault(field.V.T, field.V.L); + break; + case "enum": + val = 0; + break; + case "message": + val = field.V.T().create(); + break; + } + return [key, val]; } - __name(decimalFrom1e7, "decimalFrom1e7"); - return decimalFrom1e7( - digitC, - /*needLeadingZeros=*/ - 0 - ) + decimalFrom1e7( - digitB, - /*needLeadingZeros=*/ - digitC - ) + // If the final 1e7 digit didn't need leading zeros, we would have - // returned via the trivial code path at the top. - decimalFrom1e7( - digitA, - /*needLeadingZeros=*/ - 1 - ); - } - __name(int64toString, "int64toString"); - exports2.int64toString = int64toString; - function varint32write(value, bytes) { - if (value >= 0) { - while (value > 127) { - bytes.push(value & 127 | 128); - value = value >>> 7; - } - bytes.push(value); - } else { - for (let i = 0; i < 9; i++) { - bytes.push(value & 127 | 128); - value = value >> 7; + scalar(reader, type, longType) { + switch (type) { + case reflection_info_1.ScalarType.INT32: + return reader.int32(); + case reflection_info_1.ScalarType.STRING: + return reader.string(); + case reflection_info_1.ScalarType.BOOL: + return reader.bool(); + case reflection_info_1.ScalarType.DOUBLE: + return reader.double(); + case reflection_info_1.ScalarType.FLOAT: + return reader.float(); + case reflection_info_1.ScalarType.INT64: + return reflection_long_convert_1.reflectionLongConvert(reader.int64(), longType); + case reflection_info_1.ScalarType.UINT64: + return reflection_long_convert_1.reflectionLongConvert(reader.uint64(), longType); + case reflection_info_1.ScalarType.FIXED64: + return reflection_long_convert_1.reflectionLongConvert(reader.fixed64(), longType); + case reflection_info_1.ScalarType.FIXED32: + return reader.fixed32(); + case reflection_info_1.ScalarType.BYTES: + return reader.bytes(); + case reflection_info_1.ScalarType.UINT32: + return reader.uint32(); + case reflection_info_1.ScalarType.SFIXED32: + return reader.sfixed32(); + case reflection_info_1.ScalarType.SFIXED64: + return reflection_long_convert_1.reflectionLongConvert(reader.sfixed64(), longType); + case reflection_info_1.ScalarType.SINT32: + return reader.sint32(); + case reflection_info_1.ScalarType.SINT64: + return reflection_long_convert_1.reflectionLongConvert(reader.sint64(), longType); } - bytes.push(1); - } - } - __name(varint32write, "varint32write"); - exports2.varint32write = varint32write; - function varint32read() { - let b = this.buf[this.pos++]; - let result = b & 127; - if ((b & 128) == 0) { - this.assertBounds(); - return result; - } - b = this.buf[this.pos++]; - result |= (b & 127) << 7; - if ((b & 128) == 0) { - this.assertBounds(); - return result; - } - b = this.buf[this.pos++]; - result |= (b & 127) << 14; - if ((b & 128) == 0) { - this.assertBounds(); - return result; - } - b = this.buf[this.pos++]; - result |= (b & 127) << 21; - if ((b & 128) == 0) { - this.assertBounds(); - return result; } - b = this.buf[this.pos++]; - result |= (b & 15) << 28; - for (let readBytes = 5; (b & 128) !== 0 && readBytes < 10; readBytes++) - b = this.buf[this.pos++]; - if ((b & 128) != 0) - throw new Error("invalid varint"); - this.assertBounds(); - return result >>> 0; - } - __name(varint32read, "varint32read"); - exports2.varint32read = varint32read; + }; + exports2.ReflectionBinaryReader = ReflectionBinaryReader; } }); -// ../node_modules/@protobuf-ts/runtime/build/commonjs/pb-long.js 
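`ReflectionBinaryReader` backs `MessageType.fromBinary`; the `readUnknownField` option seen in `read()` decides whether unknown field numbers are retained (the default), dropped, handed to a callback, or rejected. A round-trip sketch reusing the hypothetical `Point` type:

```typescript
const bytes = Point.toBinary(Point.create({ x: 7, tags: ["a", "b"] }));
console.log(Point.fromBinary(bytes)); // { x: 7, y: 0, tags: ["a", "b"] }

// Field number 15 (varint, value 1) is not declared on Point:
const unknownBytes = new Uint8Array([0x78, 0x01]); // tag byte = (15 << 3) | wire type 0
Point.fromBinary(unknownBytes); // ok: the unknown field is skipped and retained
try {
  Point.fromBinary(unknownBytes, { readUnknownField: "throw" });
} catch (e) {
  console.log((e as Error).message); // "Unknown field 15 (wire type 0) for example.Point"
}
```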
-var require_pb_long = __commonJS({ - "../node_modules/@protobuf-ts/runtime/build/commonjs/pb-long.js"(exports2) { +// ../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-binary-writer.js +var require_reflection_binary_writer = __commonJS({ + "../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-binary-writer.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.PbLong = exports2.PbULong = exports2.detectBi = void 0; - var goog_varint_1 = require_goog_varint(); - var BI; - function detectBi() { - const dv = new DataView(new ArrayBuffer(8)); - const ok = globalThis.BigInt !== void 0 && typeof dv.getBigInt64 === "function" && typeof dv.getBigUint64 === "function" && typeof dv.setBigInt64 === "function" && typeof dv.setBigUint64 === "function"; - BI = ok ? { - MIN: BigInt("-9223372036854775808"), - MAX: BigInt("9223372036854775807"), - UMIN: BigInt("0"), - UMAX: BigInt("18446744073709551615"), - C: BigInt, - V: dv - } : void 0; - } - __name(detectBi, "detectBi"); - exports2.detectBi = detectBi; - detectBi(); - function assertBi(bi) { - if (!bi) - throw new Error("BigInt unavailable, see https://github.com/timostamm/protobuf-ts/blob/v1.0.8/MANUAL.md#bigint-support"); - } - __name(assertBi, "assertBi"); - var RE_DECIMAL_STR = /^-?[0-9]+$/; - var TWO_PWR_32_DBL = 4294967296; - var HALF_2_PWR_32 = 2147483648; - var SharedPbLong = class { + exports2.ReflectionBinaryWriter = void 0; + var binary_format_contract_1 = require_binary_format_contract(); + var reflection_info_1 = require_reflection_info(); + var assert_1 = require_assert(); + var pb_long_1 = require_pb_long(); + var ReflectionBinaryWriter = class { static { - __name(this, "SharedPbLong"); - } - /** - * Create a new instance with the given bits. - */ - constructor(lo, hi) { - this.lo = lo | 0; - this.hi = hi | 0; - } - /** - * Is this instance equal to 0? - */ - isZero() { - return this.lo == 0 && this.hi == 0; + __name(this, "ReflectionBinaryWriter"); } - /** - * Convert to a native number. - */ - toNumber() { - let result = this.hi * TWO_PWR_32_DBL + (this.lo >>> 0); - if (!Number.isSafeInteger(result)) - throw new Error("cannot convert to safe number"); - return result; + constructor(info) { + this.info = info; } - }; - var PbULong = class _PbULong extends SharedPbLong { - static { - __name(this, "PbULong"); + prepare() { + if (!this.fields) { + const fieldsInput = this.info.fields ? this.info.fields.concat() : []; + this.fields = fieldsInput.sort((a, b) => a.no - b.no); + } } /** - * Create instance from a `string`, `number` or `bigint`. + * Writes the message to binary format. 
*/ - static from(value) { - if (BI) - switch (typeof value) { - case "string": - if (value == "0") - return this.ZERO; - if (value == "") - throw new Error("string is no integer"); - value = BI.C(value); - case "number": - if (value === 0) - return this.ZERO; - value = BI.C(value); - case "bigint": - if (!value) - return this.ZERO; - if (value < BI.UMIN) - throw new Error("signed value for ulong"); - if (value > BI.UMAX) - throw new Error("ulong too large"); - BI.V.setBigUint64(0, value, true); - return new _PbULong(BI.V.getInt32(0, true), BI.V.getInt32(4, true)); + write(message, writer, options) { + this.prepare(); + for (const field of this.fields) { + let value, emitDefault, repeated = field.repeat, localName = field.localName; + if (field.oneof) { + const group = message[field.oneof]; + if (group.oneofKind !== localName) + continue; + value = group[localName]; + emitDefault = true; + } else { + value = message[localName]; + emitDefault = false; } - else - switch (typeof value) { - case "string": - if (value == "0") - return this.ZERO; - value = value.trim(); - if (!RE_DECIMAL_STR.test(value)) - throw new Error("string is no integer"); - let [minus, lo, hi] = goog_varint_1.int64fromString(value); - if (minus) - throw new Error("signed value for ulong"); - return new _PbULong(lo, hi); - case "number": - if (value == 0) - return this.ZERO; - if (!Number.isSafeInteger(value)) - throw new Error("number is no integer"); - if (value < 0) - throw new Error("signed value for ulong"); - return new _PbULong(value, value / TWO_PWR_32_DBL); + switch (field.kind) { + case "scalar": + case "enum": + let T = field.kind == "enum" ? reflection_info_1.ScalarType.INT32 : field.T; + if (repeated) { + assert_1.assert(Array.isArray(value)); + if (repeated == reflection_info_1.RepeatType.PACKED) + this.packed(writer, T, field.no, value); + else + for (const item of value) + this.scalar(writer, T, field.no, item, true); + } else if (value === void 0) + assert_1.assert(field.opt); + else + this.scalar(writer, T, field.no, value, emitDefault || field.opt); + break; + case "message": + if (repeated) { + assert_1.assert(Array.isArray(value)); + for (const item of value) + this.message(writer, options, field.T(), field.no, item); + } else { + this.message(writer, options, field.T(), field.no, value); + } + break; + case "map": + assert_1.assert(typeof value == "object" && value !== null); + for (const [key, val] of Object.entries(value)) + this.mapEntry(writer, options, field, key, val); + break; } - throw new Error("unknown value " + typeof value); - } - /** - * Convert to decimal string. - */ - toString() { - return BI ? this.toBigInt().toString() : goog_varint_1.int64toString(this.lo, this.hi); + } + let u = options.writeUnknownFields; + if (u !== false) + (u === true ? binary_format_contract_1.UnknownFieldHandler.onWrite : u)(this.info.typeName, message, writer); } - /** - * Convert to native bigint. 
- */ - toBigInt() { - assertBi(BI); - BI.V.setInt32(0, this.lo, true); - BI.V.setInt32(4, this.hi, true); - return BI.V.getBigUint64(0, true); + mapEntry(writer, options, field, key, value) { + writer.tag(field.no, binary_format_contract_1.WireType.LengthDelimited); + writer.fork(); + let keyValue = key; + switch (field.K) { + case reflection_info_1.ScalarType.INT32: + case reflection_info_1.ScalarType.FIXED32: + case reflection_info_1.ScalarType.UINT32: + case reflection_info_1.ScalarType.SFIXED32: + case reflection_info_1.ScalarType.SINT32: + keyValue = Number.parseInt(key); + break; + case reflection_info_1.ScalarType.BOOL: + assert_1.assert(key == "true" || key == "false"); + keyValue = key == "true"; + break; + } + this.scalar(writer, field.K, 1, keyValue, true); + switch (field.V.kind) { + case "scalar": + this.scalar(writer, field.V.T, 2, value, true); + break; + case "enum": + this.scalar(writer, reflection_info_1.ScalarType.INT32, 2, value, true); + break; + case "message": + this.message(writer, options, field.V.T(), 2, value); + break; + } + writer.join(); } - }; - exports2.PbULong = PbULong; - PbULong.ZERO = new PbULong(0, 0); - var PbLong = class _PbLong extends SharedPbLong { - static { - __name(this, "PbLong"); + message(writer, options, handler, fieldNo, value) { + if (value === void 0) + return; + handler.internalBinaryWrite(value, writer.tag(fieldNo, binary_format_contract_1.WireType.LengthDelimited).fork(), options); + writer.join(); } /** - * Create instance from a `string`, `number` or `bigint`. + * Write a single scalar value. */ - static from(value) { - if (BI) - switch (typeof value) { - case "string": - if (value == "0") - return this.ZERO; - if (value == "") - throw new Error("string is no integer"); - value = BI.C(value); - case "number": - if (value === 0) - return this.ZERO; - value = BI.C(value); - case "bigint": - if (!value) - return this.ZERO; - if (value < BI.MIN) - throw new Error("signed long too small"); - if (value > BI.MAX) - throw new Error("signed long too large"); - BI.V.setBigInt64(0, value, true); - return new _PbLong(BI.V.getInt32(0, true), BI.V.getInt32(4, true)); - } - else - switch (typeof value) { - case "string": - if (value == "0") - return this.ZERO; - value = value.trim(); - if (!RE_DECIMAL_STR.test(value)) - throw new Error("string is no integer"); - let [minus, lo, hi] = goog_varint_1.int64fromString(value); - if (minus) { - if (hi > HALF_2_PWR_32 || hi == HALF_2_PWR_32 && lo != 0) - throw new Error("signed long too small"); - } else if (hi >= HALF_2_PWR_32) - throw new Error("signed long too large"); - let pbl = new _PbLong(lo, hi); - return minus ? pbl.negate() : pbl; - case "number": - if (value == 0) - return this.ZERO; - if (!Number.isSafeInteger(value)) - throw new Error("number is no integer"); - return value > 0 ? new _PbLong(value, value / TWO_PWR_32_DBL) : new _PbLong(-value, -value / TWO_PWR_32_DBL).negate(); - } - throw new Error("unknown value " + typeof value); + scalar(writer, type, fieldNo, value, emitDefault) { + let [wireType, method, isDefault] = this.scalarInfo(type, value); + if (!isDefault || emitDefault) { + writer.tag(fieldNo, wireType); + writer[method](value); + } } /** - * Do we have a minus sign? + * Write an array of scalar values in packed format. 
*/ - isNegative() { - return (this.hi & HALF_2_PWR_32) !== 0; + packed(writer, type, fieldNo, value) { + if (!value.length) + return; + assert_1.assert(type !== reflection_info_1.ScalarType.BYTES && type !== reflection_info_1.ScalarType.STRING); + writer.tag(fieldNo, binary_format_contract_1.WireType.LengthDelimited); + writer.fork(); + let [, method] = this.scalarInfo(type); + for (let i = 0; i < value.length; i++) + writer[method](value[i]); + writer.join(); } /** - * Negate two's complement. - * Invert all the bits and add one to the result. + * Get information for writing a scalar value. + * + * Returns tuple: + * [0]: appropriate WireType + * [1]: name of the appropriate method of IBinaryWriter + * [2]: whether the given value is a default value + * + * If argument `value` is omitted, [2] is always false. */ - negate() { - let hi = ~this.hi, lo = this.lo; - if (lo) - lo = ~lo + 1; + scalarInfo(type, value) { + let t = binary_format_contract_1.WireType.Varint; + let m; + let i = value === void 0; + let d = value === 0; + switch (type) { + case reflection_info_1.ScalarType.INT32: + m = "int32"; + break; + case reflection_info_1.ScalarType.STRING: + d = i || !value.length; + t = binary_format_contract_1.WireType.LengthDelimited; + m = "string"; + break; + case reflection_info_1.ScalarType.BOOL: + d = value === false; + m = "bool"; + break; + case reflection_info_1.ScalarType.UINT32: + m = "uint32"; + break; + case reflection_info_1.ScalarType.DOUBLE: + t = binary_format_contract_1.WireType.Bit64; + m = "double"; + break; + case reflection_info_1.ScalarType.FLOAT: + t = binary_format_contract_1.WireType.Bit32; + m = "float"; + break; + case reflection_info_1.ScalarType.INT64: + d = i || pb_long_1.PbLong.from(value).isZero(); + m = "int64"; + break; + case reflection_info_1.ScalarType.UINT64: + d = i || pb_long_1.PbULong.from(value).isZero(); + m = "uint64"; + break; + case reflection_info_1.ScalarType.FIXED64: + d = i || pb_long_1.PbULong.from(value).isZero(); + t = binary_format_contract_1.WireType.Bit64; + m = "fixed64"; + break; + case reflection_info_1.ScalarType.BYTES: + d = i || !value.byteLength; + t = binary_format_contract_1.WireType.LengthDelimited; + m = "bytes"; + break; + case reflection_info_1.ScalarType.FIXED32: + t = binary_format_contract_1.WireType.Bit32; + m = "fixed32"; + break; + case reflection_info_1.ScalarType.SFIXED32: + t = binary_format_contract_1.WireType.Bit32; + m = "sfixed32"; + break; + case reflection_info_1.ScalarType.SFIXED64: + d = i || pb_long_1.PbLong.from(value).isZero(); + t = binary_format_contract_1.WireType.Bit64; + m = "sfixed64"; + break; + case reflection_info_1.ScalarType.SINT32: + m = "sint32"; + break; + case reflection_info_1.ScalarType.SINT64: + d = i || pb_long_1.PbLong.from(value).isZero(); + m = "sint64"; + break; + } + return [t, m, i || d]; + } + }; + exports2.ReflectionBinaryWriter = ReflectionBinaryWriter; + } +}); + +// ../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-create.js +var require_reflection_create = __commonJS({ + "../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-create.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.reflectionCreate = void 0; + var reflection_scalar_default_1 = require_reflection_scalar_default(); + var message_type_contract_1 = require_message_type_contract(); + function reflectionCreate(type) { + const msg = type.messagePrototype ? 
Object.create(type.messagePrototype) : Object.defineProperty({}, message_type_contract_1.MESSAGE_TYPE, { value: type }); + for (let field of type.fields) { + let name = field.localName; + if (field.opt) + continue; + if (field.oneof) + msg[field.oneof] = { oneofKind: void 0 }; + else if (field.repeat) + msg[name] = []; else - hi += 1; - return new _PbLong(lo, hi); + switch (field.kind) { + case "scalar": + msg[name] = reflection_scalar_default_1.reflectionScalarDefault(field.T, field.L); + break; + case "enum": + msg[name] = 0; + break; + case "map": + msg[name] = {}; + break; + } } - /** - * Convert to decimal string. - */ - toString() { - if (BI) - return this.toBigInt().toString(); - if (this.isNegative()) { - let n = this.negate(); - return "-" + goog_varint_1.int64toString(n.lo, n.hi); + return msg; + } + __name(reflectionCreate, "reflectionCreate"); + exports2.reflectionCreate = reflectionCreate; + } +}); + +// ../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-merge-partial.js +var require_reflection_merge_partial = __commonJS({ + "../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-merge-partial.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.reflectionMergePartial = void 0; + function reflectionMergePartial(info, target, source) { + let fieldValue, input = source, output; + for (let field of info.fields) { + let name = field.localName; + if (field.oneof) { + const group = input[field.oneof]; + if ((group === null || group === void 0 ? void 0 : group.oneofKind) == void 0) { + continue; + } + fieldValue = group[name]; + output = target[field.oneof]; + output.oneofKind = group.oneofKind; + if (fieldValue == void 0) { + delete output[name]; + continue; + } + } else { + fieldValue = input[name]; + output = target; + if (fieldValue == void 0) { + continue; + } + } + if (field.repeat) + output[name].length = fieldValue.length; + switch (field.kind) { + case "scalar": + case "enum": + if (field.repeat) + for (let i = 0; i < fieldValue.length; i++) + output[name][i] = fieldValue[i]; + else + output[name] = fieldValue; + break; + case "message": + let T = field.T(); + if (field.repeat) + for (let i = 0; i < fieldValue.length; i++) + output[name][i] = T.create(fieldValue[i]); + else if (output[name] === void 0) + output[name] = T.create(fieldValue); + else + T.mergePartial(output[name], fieldValue); + break; + case "map": + switch (field.V.kind) { + case "scalar": + case "enum": + Object.assign(output[name], fieldValue); + break; + case "message": + let T2 = field.V.T(); + for (let k of Object.keys(fieldValue)) + output[name][k] = T2.create(fieldValue[k]); + break; + } + break; } - return goog_varint_1.int64toString(this.lo, this.hi); - } - /** - * Convert to native bigint. 
- */ - toBigInt() { - assertBi(BI); - BI.V.setInt32(0, this.lo, true); - BI.V.setInt32(4, this.hi, true); - return BI.V.getBigInt64(0, true); } - }; - exports2.PbLong = PbLong; - PbLong.ZERO = new PbLong(0, 0); + } + __name(reflectionMergePartial, "reflectionMergePartial"); + exports2.reflectionMergePartial = reflectionMergePartial; } }); -// ../node_modules/@protobuf-ts/runtime/build/commonjs/binary-reader.js -var require_binary_reader = __commonJS({ - "../node_modules/@protobuf-ts/runtime/build/commonjs/binary-reader.js"(exports2) { +// ../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-equals.js +var require_reflection_equals = __commonJS({ + "../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-equals.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.BinaryReader = exports2.binaryReadOptions = void 0; - var binary_format_contract_1 = require_binary_format_contract(); - var pb_long_1 = require_pb_long(); - var goog_varint_1 = require_goog_varint(); - var defaultsRead = { - readUnknownField: true, - readerFactory: /* @__PURE__ */ __name((bytes) => new BinaryReader(bytes), "readerFactory") - }; - function binaryReadOptions(options) { - return options ? Object.assign(Object.assign({}, defaultsRead), options) : defaultsRead; + exports2.reflectionEquals = void 0; + var reflection_info_1 = require_reflection_info(); + function reflectionEquals(info, a, b) { + if (a === b) + return true; + if (!a || !b) + return false; + for (let field of info.fields) { + let localName = field.localName; + let val_a = field.oneof ? a[field.oneof][localName] : a[localName]; + let val_b = field.oneof ? b[field.oneof][localName] : b[localName]; + switch (field.kind) { + case "enum": + case "scalar": + let t = field.kind == "enum" ? reflection_info_1.ScalarType.INT32 : field.T; + if (!(field.repeat ? repeatedPrimitiveEq(t, val_a, val_b) : primitiveEq(t, val_a, val_b))) + return false; + break; + case "map": + if (!(field.V.kind == "message" ? repeatedMsgEq(field.V.T(), objectValues(val_a), objectValues(val_b)) : repeatedPrimitiveEq(field.V.kind == "enum" ? reflection_info_1.ScalarType.INT32 : field.V.T, objectValues(val_a), objectValues(val_b)))) + return false; + break; + case "message": + let T = field.T(); + if (!(field.repeat ? 
repeatedMsgEq(T, val_a, val_b) : T.equals(val_a, val_b))) + return false; + break; + } + } + return true; } - __name(binaryReadOptions, "binaryReadOptions"); - exports2.binaryReadOptions = binaryReadOptions; - var BinaryReader = class { + __name(reflectionEquals, "reflectionEquals"); + exports2.reflectionEquals = reflectionEquals; + var objectValues = Object.values; + function primitiveEq(type, a, b) { + if (a === b) + return true; + if (type !== reflection_info_1.ScalarType.BYTES) + return false; + let ba = a; + let bb = b; + if (ba.length !== bb.length) + return false; + for (let i = 0; i < ba.length; i++) + if (ba[i] != bb[i]) + return false; + return true; + } + __name(primitiveEq, "primitiveEq"); + function repeatedPrimitiveEq(type, a, b) { + if (a.length !== b.length) + return false; + for (let i = 0; i < a.length; i++) + if (!primitiveEq(type, a[i], b[i])) + return false; + return true; + } + __name(repeatedPrimitiveEq, "repeatedPrimitiveEq"); + function repeatedMsgEq(type, a, b) { + if (a.length !== b.length) + return false; + for (let i = 0; i < a.length; i++) + if (!type.equals(a[i], b[i])) + return false; + return true; + } + __name(repeatedMsgEq, "repeatedMsgEq"); + } +}); + +// ../node_modules/@protobuf-ts/runtime/build/commonjs/message-type.js +var require_message_type = __commonJS({ + "../node_modules/@protobuf-ts/runtime/build/commonjs/message-type.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.MessageType = void 0; + var message_type_contract_1 = require_message_type_contract(); + var reflection_info_1 = require_reflection_info(); + var reflection_type_check_1 = require_reflection_type_check(); + var reflection_json_reader_1 = require_reflection_json_reader(); + var reflection_json_writer_1 = require_reflection_json_writer(); + var reflection_binary_reader_1 = require_reflection_binary_reader(); + var reflection_binary_writer_1 = require_reflection_binary_writer(); + var reflection_create_1 = require_reflection_create(); + var reflection_merge_partial_1 = require_reflection_merge_partial(); + var json_typings_1 = require_json_typings(); + var json_format_contract_1 = require_json_format_contract(); + var reflection_equals_1 = require_reflection_equals(); + var binary_writer_1 = require_binary_writer(); + var binary_reader_1 = require_binary_reader(); + var baseDescriptors = Object.getOwnPropertyDescriptors(Object.getPrototypeOf({})); + var MessageType = class { static { - __name(this, "BinaryReader"); - } - constructor(buf, textDecoder) { - this.varint64 = goog_varint_1.varint64read; - this.uint32 = goog_varint_1.varint32read; - this.buf = buf; - this.len = buf.length; - this.pos = 0; - this.view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength); - this.textDecoder = textDecoder !== null && textDecoder !== void 0 ? textDecoder : new TextDecoder("utf-8", { - fatal: true, - ignoreBOM: true - }); + __name(this, "MessageType"); } - /** - * Reads a tag - field number and wire type. - */ - tag() { - let tag = this.uint32(), fieldNo = tag >>> 3, wireType = tag & 7; - if (fieldNo <= 0 || wireType < 0 || wireType > 5) - throw new Error("illegal tag: field no " + fieldNo + " wire type " + wireType); - return [fieldNo, wireType]; + constructor(name, fields, options) { + this.defaultCheckDepth = 16; + this.typeName = name; + this.fields = fields.map(reflection_info_1.normalizeFieldInfo); + this.options = options !== null && options !== void 0 ? 
options : {}; + this.messagePrototype = Object.create(null, Object.assign(Object.assign({}, baseDescriptors), { [message_type_contract_1.MESSAGE_TYPE]: { value: this } })); + this.refTypeCheck = new reflection_type_check_1.ReflectionTypeCheck(this); + this.refJsonReader = new reflection_json_reader_1.ReflectionJsonReader(this); + this.refJsonWriter = new reflection_json_writer_1.ReflectionJsonWriter(this); + this.refBinReader = new reflection_binary_reader_1.ReflectionBinaryReader(this); + this.refBinWriter = new reflection_binary_writer_1.ReflectionBinaryWriter(this); } - /** - * Skip one element on the wire and return the skipped data. - * Supports WireType.StartGroup since v2.0.0-alpha.23. - */ - skip(wireType) { - let start = this.pos; - switch (wireType) { - case binary_format_contract_1.WireType.Varint: - while (this.buf[this.pos++] & 128) { - } - break; - case binary_format_contract_1.WireType.Bit64: - this.pos += 4; - case binary_format_contract_1.WireType.Bit32: - this.pos += 4; - break; - case binary_format_contract_1.WireType.LengthDelimited: - let len = this.uint32(); - this.pos += len; - break; - case binary_format_contract_1.WireType.StartGroup: - let t; - while ((t = this.tag()[1]) !== binary_format_contract_1.WireType.EndGroup) { - this.skip(t); - } - break; - default: - throw new Error("cant skip wire type " + wireType); + create(value) { + let message = reflection_create_1.reflectionCreate(this); + if (value !== void 0) { + reflection_merge_partial_1.reflectionMergePartial(this, message, value); } - this.assertBounds(); - return this.buf.subarray(start, this.pos); + return message; } /** - * Throws error if position in byte array is out of range. + * Clone the message. + * + * Unknown fields are discarded. */ - assertBounds() { - if (this.pos > this.len) - throw new RangeError("premature EOF"); + clone(message) { + let copy = this.create(); + reflection_merge_partial_1.reflectionMergePartial(this, copy, message); + return copy; } /** - * Read a `int32` field, a signed 32 bit varint. + * Determines whether two messages of the same type have the same field values. + * Checks for deep equality, traversing repeated fields, oneof groups, maps + * and messages recursively. + * Will also return true if both messages are `undefined`. */ - int32() { - return this.uint32() | 0; + equals(a, b) { + return reflection_equals_1.reflectionEquals(this, a, b); } /** - * Read a `sint32` field, a signed, zigzag-encoded 32-bit varint. + * Is the given value assignable to our message type + * and does it contain no [excess properties](https://www.typescriptlang.org/docs/handbook/interfaces.html#excess-property-checks)? */ - sint32() { - let zze = this.uint32(); - return zze >>> 1 ^ -(zze & 1); + is(arg, depth = this.defaultCheckDepth) { + return this.refTypeCheck.is(arg, depth, false); } /** - * Read a `int64` field, a signed 64-bit varint. + * Is the given value assignable to our message type, + * regardless of [excess properties](https://www.typescriptlang.org/docs/handbook/interfaces.html#excess-property-checks)? */ - int64() { - return new pb_long_1.PbLong(...this.varint64()); + isAssignable(arg, depth = this.defaultCheckDepth) { + return this.refTypeCheck.is(arg, depth, true); } /** - * Read a `uint64` field, an unsigned 64-bit varint. + * Copy partial data into the target message.
*/ - uint64() { - return new pb_long_1.PbULong(...this.varint64()); + mergePartial(target, source) { + reflection_merge_partial_1.reflectionMergePartial(this, target, source); } /** - * Read a `sint64` field, a signed, zig-zag-encoded 64-bit varint. + * Create a new message from binary format. */ - sint64() { - let [lo, hi] = this.varint64(); - let s = -(lo & 1); - lo = (lo >>> 1 | (hi & 1) << 31) ^ s; - hi = hi >>> 1 ^ s; - return new pb_long_1.PbLong(lo, hi); + fromBinary(data, options) { + let opt = binary_reader_1.binaryReadOptions(options); + return this.internalBinaryRead(opt.readerFactory(data), data.byteLength, opt); } /** - * Read a `bool` field, a variant. + * Read a new message from a JSON value. */ - bool() { - let [lo, hi] = this.varint64(); - return lo !== 0 || hi !== 0; + fromJson(json, options) { + return this.internalJsonRead(json, json_format_contract_1.jsonReadOptions(options)); } /** - * Read a `fixed32` field, an unsigned, fixed-length 32-bit integer. + * Read a new message from a JSON string. + * This is equivalent to `T.fromJson(JSON.parse(json))`. */ - fixed32() { - return this.view.getUint32((this.pos += 4) - 4, true); + fromJsonString(json, options) { + let value = JSON.parse(json); + return this.fromJson(value, options); } /** - * Read a `sfixed32` field, a signed, fixed-length 32-bit integer. + * Write the message to canonical JSON value. */ - sfixed32() { - return this.view.getInt32((this.pos += 4) - 4, true); + toJson(message, options) { + return this.internalJsonWrite(message, json_format_contract_1.jsonWriteOptions(options)); } /** - * Read a `fixed64` field, an unsigned, fixed-length 64 bit integer. + * Convert the message to canonical JSON string. + * This is equivalent to `JSON.stringify(T.toJson(t))` */ - fixed64() { - return new pb_long_1.PbULong(this.sfixed32(), this.sfixed32()); + toJsonString(message, options) { + var _a; + let value = this.toJson(message, options); + return JSON.stringify(value, null, (_a = options === null || options === void 0 ? void 0 : options.prettySpaces) !== null && _a !== void 0 ? _a : 0); } /** - * Read a `fixed64` field, a signed, fixed-length 64-bit integer. + * Write the message to binary format. */ - sfixed64() { - return new pb_long_1.PbLong(this.sfixed32(), this.sfixed32()); + toBinary(message, options) { + let opt = binary_writer_1.binaryWriteOptions(options); + return this.internalBinaryWrite(message, opt.writerFactory(), opt).finish(); } /** - * Read a `float` field, 32-bit floating point number. + * This is an internal method. If you just want to read a message from + * JSON, use `fromJson()` or `fromJsonString()`. + * + * Reads JSON value and merges the fields into the target + * according to protobuf rules. If the target is omitted, + * a new instance is created first. */ - float() { - return this.view.getFloat32((this.pos += 4) - 4, true); + internalJsonRead(json, options, target) { + if (json !== null && typeof json == "object" && !Array.isArray(json)) { + let message = target !== null && target !== void 0 ? target : this.create(); + this.refJsonReader.read(json, message, options); + return message; + } + throw new Error(`Unable to parse message ${this.typeName} from JSON ${json_typings_1.typeofJsonValue(json)}.`); } /** - * Read a `double` field, a 64-bit floating point number. + * This is an internal method. If you just want to write a message + * to JSON, use `toJson()` or `toJsonString()`. + * + * Writes JSON value and returns it.
*/ - double() { - return this.view.getFloat64((this.pos += 8) - 8, true); + internalJsonWrite(message, options) { + return this.refJsonWriter.write(message, options); } /** - * Read a `bytes` field, length-delimited arbitrary data. + * This is an internal method. If you just want to write a message + * in binary format, use `toBinary()`. + * + * Serializes the message in binary format and appends it to the given + * writer. Returns passed writer. */ - bytes() { - let len = this.uint32(); - let start = this.pos; - this.pos += len; - this.assertBounds(); - return this.buf.subarray(start, start + len); + internalBinaryWrite(message, writer, options) { + this.refBinWriter.write(message, writer, options); + return writer; } /** - * Read a `string` field, length-delimited data converted to UTF-8 text. + * This is an internal method. If you just want to read a message from + * binary data, use `fromBinary()`. + * + * Reads data from binary format and merges the fields into + * the target according to protobuf rules. If the target is + * omitted, a new instance is created first. */ - string() { - return this.textDecoder.decode(this.bytes()); + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? target : this.create(); + this.refBinReader.read(reader, message, options, length); + return message; } }; - exports2.BinaryReader = BinaryReader; + exports2.MessageType = MessageType; } }); -// ../node_modules/@protobuf-ts/runtime/build/commonjs/assert.js -var require_assert = __commonJS({ - "../node_modules/@protobuf-ts/runtime/build/commonjs/assert.js"(exports2) { +// ../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-contains-message-type.js +var require_reflection_contains_message_type = __commonJS({ + "../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-contains-message-type.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.assertFloat32 = exports2.assertUInt32 = exports2.assertInt32 = exports2.assertNever = exports2.assert = void 0; - function assert(condition, msg) { - if (!condition) { - throw new Error(msg); - } + exports2.containsMessageType = void 0; + var message_type_contract_1 = require_message_type_contract(); + function containsMessageType(msg) { + return msg[message_type_contract_1.MESSAGE_TYPE] != null; } - __name(assert, "assert"); - exports2.assert = assert; - function assertNever(value, msg) { - throw new Error(msg !== null && msg !== void 0 ? 
msg : "Unexpected object: " + value); + __name(containsMessageType, "containsMessageType"); + exports2.containsMessageType = containsMessageType; + } +}); + +// ../node_modules/@protobuf-ts/runtime/build/commonjs/enum-object.js +var require_enum_object = __commonJS({ + "../node_modules/@protobuf-ts/runtime/build/commonjs/enum-object.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.listEnumNumbers = exports2.listEnumNames = exports2.listEnumValues = exports2.isEnumObject = void 0; + function isEnumObject(arg) { + if (typeof arg != "object" || arg === null) { + return false; + } + if (!arg.hasOwnProperty(0)) { + return false; + } + for (let k of Object.keys(arg)) { + let num = parseInt(k); + if (!Number.isNaN(num)) { + let nam = arg[num]; + if (nam === void 0) + return false; + if (arg[nam] !== num) + return false; + } else { + let num2 = arg[k]; + if (num2 === void 0) + return false; + if (typeof num2 !== "number") + return false; + if (arg[num2] === void 0) + return false; + } + } + return true; } - __name(assertNever, "assertNever"); - exports2.assertNever = assertNever; - var FLOAT32_MAX = 34028234663852886e22; - var FLOAT32_MIN = -34028234663852886e22; - var UINT32_MAX = 4294967295; - var INT32_MAX = 2147483647; - var INT32_MIN = -2147483648; - function assertInt32(arg) { - if (typeof arg !== "number") - throw new Error("invalid int 32: " + typeof arg); - if (!Number.isInteger(arg) || arg > INT32_MAX || arg < INT32_MIN) - throw new Error("invalid int 32: " + arg); + __name(isEnumObject, "isEnumObject"); + exports2.isEnumObject = isEnumObject; + function listEnumValues(enumObject) { + if (!isEnumObject(enumObject)) + throw new Error("not a typescript enum object"); + let values = []; + for (let [name, number] of Object.entries(enumObject)) + if (typeof number == "number") + values.push({ name, number }); + return values; } - __name(assertInt32, "assertInt32"); - exports2.assertInt32 = assertInt32; - function assertUInt32(arg) { - if (typeof arg !== "number") - throw new Error("invalid uint 32: " + typeof arg); - if (!Number.isInteger(arg) || arg > UINT32_MAX || arg < 0) - throw new Error("invalid uint 32: " + arg); + __name(listEnumValues, "listEnumValues"); + exports2.listEnumValues = listEnumValues; + function listEnumNames(enumObject) { + return listEnumValues(enumObject).map((val) => val.name); } - __name(assertUInt32, "assertUInt32"); - exports2.assertUInt32 = assertUInt32; - function assertFloat32(arg) { - if (typeof arg !== "number") - throw new Error("invalid float 32: " + typeof arg); - if (!Number.isFinite(arg)) - return; - if (arg > FLOAT32_MAX || arg < FLOAT32_MIN) - throw new Error("invalid float 32: " + arg); + __name(listEnumNames, "listEnumNames"); + exports2.listEnumNames = listEnumNames; + function listEnumNumbers(enumObject) { + return listEnumValues(enumObject).map((val) => val.number).filter((num, index, arr) => arr.indexOf(num) == index); } - __name(assertFloat32, "assertFloat32"); - exports2.assertFloat32 = assertFloat32; + __name(listEnumNumbers, "listEnumNumbers"); + exports2.listEnumNumbers = listEnumNumbers; } }); -// ../node_modules/@protobuf-ts/runtime/build/commonjs/binary-writer.js -var require_binary_writer = __commonJS({ - "../node_modules/@protobuf-ts/runtime/build/commonjs/binary-writer.js"(exports2) { +// ../node_modules/@protobuf-ts/runtime/build/commonjs/index.js +var require_commonjs7 = __commonJS({ + "../node_modules/@protobuf-ts/runtime/build/commonjs/index.js"(exports2) { "use strict"; 
Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.BinaryWriter = exports2.binaryWriteOptions = void 0; + var json_typings_1 = require_json_typings(); + Object.defineProperty(exports2, "typeofJsonValue", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return json_typings_1.typeofJsonValue; + }, "get") }); + Object.defineProperty(exports2, "isJsonObject", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return json_typings_1.isJsonObject; + }, "get") }); + var base64_1 = require_base642(); + Object.defineProperty(exports2, "base64decode", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return base64_1.base64decode; + }, "get") }); + Object.defineProperty(exports2, "base64encode", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return base64_1.base64encode; + }, "get") }); + var protobufjs_utf8_1 = require_protobufjs_utf8(); + Object.defineProperty(exports2, "utf8read", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return protobufjs_utf8_1.utf8read; + }, "get") }); + var binary_format_contract_1 = require_binary_format_contract(); + Object.defineProperty(exports2, "WireType", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return binary_format_contract_1.WireType; + }, "get") }); + Object.defineProperty(exports2, "mergeBinaryOptions", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return binary_format_contract_1.mergeBinaryOptions; + }, "get") }); + Object.defineProperty(exports2, "UnknownFieldHandler", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return binary_format_contract_1.UnknownFieldHandler; + }, "get") }); + var binary_reader_1 = require_binary_reader(); + Object.defineProperty(exports2, "BinaryReader", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return binary_reader_1.BinaryReader; + }, "get") }); + Object.defineProperty(exports2, "binaryReadOptions", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return binary_reader_1.binaryReadOptions; + }, "get") }); + var binary_writer_1 = require_binary_writer(); + Object.defineProperty(exports2, "BinaryWriter", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return binary_writer_1.BinaryWriter; + }, "get") }); + Object.defineProperty(exports2, "binaryWriteOptions", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return binary_writer_1.binaryWriteOptions; + }, "get") }); var pb_long_1 = require_pb_long(); - var goog_varint_1 = require_goog_varint(); + Object.defineProperty(exports2, "PbLong", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return pb_long_1.PbLong; + }, "get") }); + Object.defineProperty(exports2, "PbULong", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return pb_long_1.PbULong; + }, "get") }); + var json_format_contract_1 = require_json_format_contract(); + Object.defineProperty(exports2, "jsonReadOptions", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return json_format_contract_1.jsonReadOptions; + }, "get") }); + Object.defineProperty(exports2, "jsonWriteOptions", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return json_format_contract_1.jsonWriteOptions; + }, "get") }); + Object.defineProperty(exports2, "mergeJsonOptions", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return json_format_contract_1.mergeJsonOptions; + }, "get") }); + var message_type_contract_1 = require_message_type_contract(); + Object.defineProperty(exports2, 
"MESSAGE_TYPE", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return message_type_contract_1.MESSAGE_TYPE; + }, "get") }); + var message_type_1 = require_message_type(); + Object.defineProperty(exports2, "MessageType", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return message_type_1.MessageType; + }, "get") }); + var reflection_info_1 = require_reflection_info(); + Object.defineProperty(exports2, "ScalarType", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return reflection_info_1.ScalarType; + }, "get") }); + Object.defineProperty(exports2, "LongType", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return reflection_info_1.LongType; + }, "get") }); + Object.defineProperty(exports2, "RepeatType", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return reflection_info_1.RepeatType; + }, "get") }); + Object.defineProperty(exports2, "normalizeFieldInfo", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return reflection_info_1.normalizeFieldInfo; + }, "get") }); + Object.defineProperty(exports2, "readFieldOptions", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return reflection_info_1.readFieldOptions; + }, "get") }); + Object.defineProperty(exports2, "readFieldOption", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return reflection_info_1.readFieldOption; + }, "get") }); + Object.defineProperty(exports2, "readMessageOption", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return reflection_info_1.readMessageOption; + }, "get") }); + var reflection_type_check_1 = require_reflection_type_check(); + Object.defineProperty(exports2, "ReflectionTypeCheck", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return reflection_type_check_1.ReflectionTypeCheck; + }, "get") }); + var reflection_create_1 = require_reflection_create(); + Object.defineProperty(exports2, "reflectionCreate", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return reflection_create_1.reflectionCreate; + }, "get") }); + var reflection_scalar_default_1 = require_reflection_scalar_default(); + Object.defineProperty(exports2, "reflectionScalarDefault", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return reflection_scalar_default_1.reflectionScalarDefault; + }, "get") }); + var reflection_merge_partial_1 = require_reflection_merge_partial(); + Object.defineProperty(exports2, "reflectionMergePartial", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return reflection_merge_partial_1.reflectionMergePartial; + }, "get") }); + var reflection_equals_1 = require_reflection_equals(); + Object.defineProperty(exports2, "reflectionEquals", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return reflection_equals_1.reflectionEquals; + }, "get") }); + var reflection_binary_reader_1 = require_reflection_binary_reader(); + Object.defineProperty(exports2, "ReflectionBinaryReader", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return reflection_binary_reader_1.ReflectionBinaryReader; + }, "get") }); + var reflection_binary_writer_1 = require_reflection_binary_writer(); + Object.defineProperty(exports2, "ReflectionBinaryWriter", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return reflection_binary_writer_1.ReflectionBinaryWriter; + }, "get") }); + var reflection_json_reader_1 = require_reflection_json_reader(); + Object.defineProperty(exports2, "ReflectionJsonReader", { enumerable: true, get: /* 
@__PURE__ */ __name(function() { + return reflection_json_reader_1.ReflectionJsonReader; + }, "get") }); + var reflection_json_writer_1 = require_reflection_json_writer(); + Object.defineProperty(exports2, "ReflectionJsonWriter", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return reflection_json_writer_1.ReflectionJsonWriter; + }, "get") }); + var reflection_contains_message_type_1 = require_reflection_contains_message_type(); + Object.defineProperty(exports2, "containsMessageType", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return reflection_contains_message_type_1.containsMessageType; + }, "get") }); + var oneof_1 = require_oneof(); + Object.defineProperty(exports2, "isOneofGroup", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return oneof_1.isOneofGroup; + }, "get") }); + Object.defineProperty(exports2, "setOneofValue", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return oneof_1.setOneofValue; + }, "get") }); + Object.defineProperty(exports2, "getOneofValue", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return oneof_1.getOneofValue; + }, "get") }); + Object.defineProperty(exports2, "clearOneofValue", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return oneof_1.clearOneofValue; + }, "get") }); + Object.defineProperty(exports2, "getSelectedOneofValue", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return oneof_1.getSelectedOneofValue; + }, "get") }); + var enum_object_1 = require_enum_object(); + Object.defineProperty(exports2, "listEnumValues", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return enum_object_1.listEnumValues; + }, "get") }); + Object.defineProperty(exports2, "listEnumNames", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return enum_object_1.listEnumNames; + }, "get") }); + Object.defineProperty(exports2, "listEnumNumbers", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return enum_object_1.listEnumNumbers; + }, "get") }); + Object.defineProperty(exports2, "isEnumObject", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return enum_object_1.isEnumObject; + }, "get") }); + var lower_camel_case_1 = require_lower_camel_case(); + Object.defineProperty(exports2, "lowerCamelCase", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return lower_camel_case_1.lowerCamelCase; + }, "get") }); var assert_1 = require_assert(); - var defaultsWrite = { - writeUnknownFields: true, - writerFactory: /* @__PURE__ */ __name(() => new BinaryWriter(), "writerFactory") - }; - function binaryWriteOptions(options) { - return options ? 
Object.assign(Object.assign({}, defaultsWrite), options) : defaultsWrite; + Object.defineProperty(exports2, "assert", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return assert_1.assert; + }, "get") }); + Object.defineProperty(exports2, "assertNever", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return assert_1.assertNever; + }, "get") }); + Object.defineProperty(exports2, "assertInt32", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return assert_1.assertInt32; + }, "get") }); + Object.defineProperty(exports2, "assertUInt32", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return assert_1.assertUInt32; + }, "get") }); + Object.defineProperty(exports2, "assertFloat32", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return assert_1.assertFloat32; + }, "get") }); + } +}); + +// ../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/reflection-info.js +var require_reflection_info2 = __commonJS({ + "../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/reflection-info.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.readServiceOption = exports2.readMethodOption = exports2.readMethodOptions = exports2.normalizeMethodInfo = void 0; + var runtime_1 = require_commonjs7(); + function normalizeMethodInfo(method, service) { + var _a, _b, _c; + let m = method; + m.service = service; + m.localName = (_a = m.localName) !== null && _a !== void 0 ? _a : runtime_1.lowerCamelCase(m.name); + m.serverStreaming = !!m.serverStreaming; + m.clientStreaming = !!m.clientStreaming; + m.options = (_b = m.options) !== null && _b !== void 0 ? _b : {}; + m.idempotency = (_c = m.idempotency) !== null && _c !== void 0 ? _c : void 0; + return m; } - __name(binaryWriteOptions, "binaryWriteOptions"); - exports2.binaryWriteOptions = binaryWriteOptions; - var BinaryWriter = class { - static { - __name(this, "BinaryWriter"); + __name(normalizeMethodInfo, "normalizeMethodInfo"); + exports2.normalizeMethodInfo = normalizeMethodInfo; + function readMethodOptions(service, methodName, extensionName, extensionType) { + var _a; + const options = (_a = service.methods.find((m, i) => m.localName === methodName || i === methodName)) === null || _a === void 0 ? void 0 : _a.options; + return options && options[extensionName] ? extensionType.fromJson(options[extensionName]) : void 0; + } + __name(readMethodOptions, "readMethodOptions"); + exports2.readMethodOptions = readMethodOptions; + function readMethodOption(service, methodName, extensionName, extensionType) { + var _a; + const options = (_a = service.methods.find((m, i) => m.localName === methodName || i === methodName)) === null || _a === void 0 ? void 0 : _a.options; + if (!options) { + return void 0; } - constructor(textEncoder) { - this.stack = []; - this.textEncoder = textEncoder !== null && textEncoder !== void 0 ? textEncoder : new TextEncoder(); - this.chunks = []; - this.buf = []; + const optionVal = options[extensionName]; + if (optionVal === void 0) { + return optionVal; } - /** - * Return all bytes written and reset this writer. - */ - finish() { - this.chunks.push(new Uint8Array(this.buf)); - let len = 0; - for (let i = 0; i < this.chunks.length; i++) - len += this.chunks[i].length; - let bytes = new Uint8Array(len); - let offset = 0; - for (let i = 0; i < this.chunks.length; i++) { - bytes.set(this.chunks[i], offset); - offset += this.chunks[i].length; - } - this.chunks = []; - return bytes; + return extensionType ? 
extensionType.fromJson(optionVal) : optionVal; + } + __name(readMethodOption, "readMethodOption"); + exports2.readMethodOption = readMethodOption; + function readServiceOption(service, extensionName, extensionType) { + const options = service.options; + if (!options) { + return void 0; } - /** - * Start a new fork for length-delimited data like a message - * or a packed repeated field. - * - * Must be joined later with `join()`. - */ - fork() { - this.stack.push({ chunks: this.chunks, buf: this.buf }); - this.chunks = []; - this.buf = []; - return this; + const optionVal = options[extensionName]; + if (optionVal === void 0) { + return optionVal; } - /** - * Join the last fork. Write its length and bytes, then - * return to the previous state. - */ - join() { - let chunk = this.finish(); - let prev = this.stack.pop(); - if (!prev) - throw new Error("invalid state, fork stack empty"); - this.chunks = prev.chunks; - this.buf = prev.buf; - this.uint32(chunk.byteLength); - return this.raw(chunk); + return extensionType ? extensionType.fromJson(optionVal) : optionVal; + } + __name(readServiceOption, "readServiceOption"); + exports2.readServiceOption = readServiceOption; + } +}); + +// ../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/service-type.js +var require_service_type = __commonJS({ + "../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/service-type.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.ServiceType = void 0; + var reflection_info_1 = require_reflection_info2(); + var ServiceType = class { + static { + __name(this, "ServiceType"); } - /** - * Writes a tag (field number and wire type). - * - * Equivalent to `uint32( (fieldNo << 3 | type) >>> 0 )`. - * - * Generated code should compute the tag ahead of time and call `uint32()`. - */ - tag(fieldNo, type) { - return this.uint32((fieldNo << 3 | type) >>> 0); + constructor(typeName, methods, options) { + this.typeName = typeName; + this.methods = methods.map((i) => reflection_info_1.normalizeMethodInfo(i, this)); + this.options = options !== null && options !== void 0 ? options : {}; } - /** - * Write a chunk of raw bytes. - */ - raw(chunk) { - if (this.buf.length) { - this.chunks.push(new Uint8Array(this.buf)); - this.buf = []; + }; + exports2.ServiceType = ServiceType; + } +}); + +// ../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-error.js +var require_rpc_error = __commonJS({ + "../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-error.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.RpcError = void 0; + var RpcError = class extends Error { + static { + __name(this, "RpcError"); + } + constructor(message, code = "UNKNOWN", meta) { + super(message); + this.name = "RpcError"; + Object.setPrototypeOf(this, new.target.prototype); + this.code = code; + this.meta = meta !== null && meta !== void 0 ? meta : {}; + } + toString() { + const l = [this.name + ": " + this.message]; + if (this.code) { + l.push(""); + l.push("Code: " + this.code); } - this.chunks.push(chunk); - return this; + if (this.serviceName && this.methodName) { + l.push("Method: " + this.serviceName + "/" + this.methodName); + } + let m = Object.entries(this.meta); + if (m.length) { + l.push(""); + l.push("Meta:"); + for (let [k, v] of m) { + l.push(` ${k}: ${v}`); + } + } + return l.join("\n"); } - /** - * Write a `uint32` value, an unsigned 32 bit varint. 
- */ - uint32(value) { - assert_1.assertUInt32(value); - while (value > 127) { - this.buf.push(value & 127 | 128); - value = value >>> 7; + }; + exports2.RpcError = RpcError; + } +}); + +// ../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-options.js +var require_rpc_options = __commonJS({ + "../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-options.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.mergeRpcOptions = void 0; + var runtime_1 = require_commonjs7(); + function mergeRpcOptions(defaults, options) { + if (!options) + return defaults; + let o = {}; + copy(defaults, o); + copy(options, o); + for (let key of Object.keys(options)) { + let val = options[key]; + switch (key) { + case "jsonOptions": + o.jsonOptions = runtime_1.mergeJsonOptions(defaults.jsonOptions, o.jsonOptions); + break; + case "binaryOptions": + o.binaryOptions = runtime_1.mergeBinaryOptions(defaults.binaryOptions, o.binaryOptions); + break; + case "meta": + o.meta = {}; + copy(defaults.meta, o.meta); + copy(options.meta, o.meta); + break; + case "interceptors": + o.interceptors = defaults.interceptors ? defaults.interceptors.concat(val) : val.concat(); + break; } - this.buf.push(value); - return this; } - /** - * Write a `int32` value, a signed 32 bit varint. - */ - int32(value) { - assert_1.assertInt32(value); - goog_varint_1.varint32write(value, this.buf); - return this; + return o; + } + __name(mergeRpcOptions, "mergeRpcOptions"); + exports2.mergeRpcOptions = mergeRpcOptions; + function copy(a, into) { + if (!a) + return; + let c = into; + for (let [k, v] of Object.entries(a)) { + if (v instanceof Date) + c[k] = new Date(v.getTime()); + else if (Array.isArray(v)) + c[k] = v.concat(); + else + c[k] = v; + } + } + __name(copy, "copy"); + } +}); + +// ../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/deferred.js +var require_deferred = __commonJS({ + "../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/deferred.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.Deferred = exports2.DeferredState = void 0; + var DeferredState; + (function(DeferredState2) { + DeferredState2[DeferredState2["PENDING"] = 0] = "PENDING"; + DeferredState2[DeferredState2["REJECTED"] = 1] = "REJECTED"; + DeferredState2[DeferredState2["RESOLVED"] = 2] = "RESOLVED"; + })(DeferredState = exports2.DeferredState || (exports2.DeferredState = {})); + var Deferred = class { + static { + __name(this, "Deferred"); } /** - * Write a `bool` value, a variant. + * @param preventUnhandledRejectionWarning - prevents the warning + * "Unhandled Promise rejection" by adding a noop rejection handler. + * Working with calls returned from the runtime-rpc package in an + * async function usually means awaiting one call property after + * the other. This means that the "status" is not being awaited when + * an earlier await for the "headers" is rejected. This causes the + * "unhandled promise reject" warning. A more correct behaviour for + * calls might be to become aware whether at least one of the + * promises is handled and swallow the rejection warning for the + * others. */ - bool(value) { - this.buf.push(value ? 
1 : 0); - return this; + constructor(preventUnhandledRejectionWarning = true) { + this._state = DeferredState.PENDING; + this._promise = new Promise((resolve, reject) => { + this._resolve = resolve; + this._reject = reject; + }); + if (preventUnhandledRejectionWarning) { + this._promise.catch((_2) => { + }); + } } /** - * Write a `bytes` value, length-delimited arbitrary data. + * Get the current state of the promise. */ - bytes(value) { - this.uint32(value.byteLength); - return this.raw(value); + get state() { + return this._state; } /** - * Write a `string` value, length-delimited data converted to UTF-8 text. + * Get the deferred promise. */ - string(value) { - let chunk = this.textEncoder.encode(value); - this.uint32(chunk.byteLength); - return this.raw(chunk); + get promise() { + return this._promise; } /** - * Write a `float` value, 32-bit floating point number. + * Resolve the promise. Throws if the promise is already resolved or rejected. */ - float(value) { - assert_1.assertFloat32(value); - let chunk = new Uint8Array(4); - new DataView(chunk.buffer).setFloat32(0, value, true); - return this.raw(chunk); + resolve(value) { + if (this.state !== DeferredState.PENDING) + throw new Error(`cannot resolve ${DeferredState[this.state].toLowerCase()}`); + this._resolve(value); + this._state = DeferredState.RESOLVED; } /** - * Write a `double` value, a 64-bit floating point number. + * Reject the promise. Throws if the promise is already resolved or rejected. */ - double(value) { - let chunk = new Uint8Array(8); - new DataView(chunk.buffer).setFloat64(0, value, true); - return this.raw(chunk); + reject(reason) { + if (this.state !== DeferredState.PENDING) + throw new Error(`cannot reject ${DeferredState[this.state].toLowerCase()}`); + this._reject(reason); + this._state = DeferredState.REJECTED; } /** - * Write a `fixed32` value, an unsigned, fixed-length 32-bit integer. + * Resolve the promise. Ignore if not pending. */ - fixed32(value) { - assert_1.assertUInt32(value); - let chunk = new Uint8Array(4); - new DataView(chunk.buffer).setUint32(0, value, true); - return this.raw(chunk); + resolvePending(val) { + if (this._state === DeferredState.PENDING) + this.resolve(val); } /** - * Write a `sfixed32` value, a signed, fixed-length 32-bit integer. + * Reject the promise. Ignore if not pending. 
*/ - sfixed32(value) { - assert_1.assertInt32(value); - let chunk = new Uint8Array(4); - new DataView(chunk.buffer).setInt32(0, value, true); - return this.raw(chunk); + rejectPending(reason) { + if (this._state === DeferredState.PENDING) + this.reject(reason); + } + }; + exports2.Deferred = Deferred; + } +}); + +// ../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-output-stream.js +var require_rpc_output_stream = __commonJS({ + "../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-output-stream.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.RpcOutputStreamController = void 0; + var deferred_1 = require_deferred(); + var runtime_1 = require_commonjs7(); + var RpcOutputStreamController = class { + static { + __name(this, "RpcOutputStreamController"); + } + constructor() { + this._lis = { + nxt: [], + msg: [], + err: [], + cmp: [] + }; + this._closed = false; + } + // --- RpcOutputStream callback API + onNext(callback) { + return this.addLis(callback, this._lis.nxt); + } + onMessage(callback) { + return this.addLis(callback, this._lis.msg); + } + onError(callback) { + return this.addLis(callback, this._lis.err); + } + onComplete(callback) { + return this.addLis(callback, this._lis.cmp); + } + addLis(callback, list) { + list.push(callback); + return () => { + let i = list.indexOf(callback); + if (i >= 0) + list.splice(i, 1); + }; + } + // remove all listeners + clearLis() { + for (let l of Object.values(this._lis)) + l.splice(0, l.length); } + // --- Controller API /** - * Write a `sint32` value, a signed, zigzag-encoded 32-bit varint. + * Is this stream already closed by a completion or error? */ - sint32(value) { - assert_1.assertInt32(value); - value = (value << 1 ^ value >> 31) >>> 0; - goog_varint_1.varint32write(value, this.buf); - return this; + get closed() { + return this._closed !== false; } /** - * Write a `fixed64` value, a signed, fixed-length 64-bit integer. + * Emit message, close with error, or close successfully, but only one + * at a time. + * Can be used to wrap a stream by using the other stream's `onNext`. */ - sfixed64(value) { - let chunk = new Uint8Array(8); - let view = new DataView(chunk.buffer); - let long = pb_long_1.PbLong.from(value); - view.setInt32(0, long.lo, true); - view.setInt32(4, long.hi, true); - return this.raw(chunk); + notifyNext(message, error, complete) { + runtime_1.assert((message ? 1 : 0) + (error ? 1 : 0) + (complete ? 1 : 0) <= 1, "only one emission at a time"); + if (message) + this.notifyMessage(message); + if (error) + this.notifyError(error); + if (complete) + this.notifyComplete(); } /** - * Write a `fixed64` value, an unsigned, fixed-length 64 bit integer. + * Emits a new message. Throws if stream is closed. + * + * Triggers onNext and onMessage callbacks. */ - fixed64(value) { - let chunk = new Uint8Array(8); - let view = new DataView(chunk.buffer); - let long = pb_long_1.PbULong.from(value); - view.setInt32(0, long.lo, true); - view.setInt32(4, long.hi, true); - return this.raw(chunk); + notifyMessage(message) { + runtime_1.assert(!this.closed, "stream is closed"); + this.pushIt({ value: message, done: false }); + this._lis.msg.forEach((l) => l(message)); + this._lis.nxt.forEach((l) => l(message, void 0, false)); } /** - * Write a `int64` value, a signed 64-bit varint. + * Closes the stream with an error. Throws if stream is closed. + * + * Triggers onNext and onError callbacks. 
*/ - int64(value) { - let long = pb_long_1.PbLong.from(value); - goog_varint_1.varint64write(long.lo, long.hi, this.buf); - return this; + notifyError(error) { + runtime_1.assert(!this.closed, "stream is closed"); + this._closed = error; + this.pushIt(error); + this._lis.err.forEach((l) => l(error)); + this._lis.nxt.forEach((l) => l(void 0, error, false)); + this.clearLis(); } /** - * Write a `sint64` value, a signed, zig-zag-encoded 64-bit varint. + * Closes the stream successfully. Throws if stream is closed. + * + * Triggers onNext and onComplete callbacks. */ - sint64(value) { - let long = pb_long_1.PbLong.from(value), sign = long.hi >> 31, lo = long.lo << 1 ^ sign, hi = (long.hi << 1 | long.lo >>> 31) ^ sign; - goog_varint_1.varint64write(lo, hi, this.buf); - return this; + notifyComplete() { + runtime_1.assert(!this.closed, "stream is closed"); + this._closed = true; + this.pushIt({ value: null, done: true }); + this._lis.cmp.forEach((l) => l()); + this._lis.nxt.forEach((l) => l(void 0, void 0, true)); + this.clearLis(); } /** - * Write a `uint64` value, an unsigned 64-bit varint. + * Creates an async iterator (that can be used with `for await {...}`) + * to consume the stream. + * + * Some things to note: + * - If an error occurs, the `for await` will throw it. + * - If an error occurred before the `for await` was started, `for await` + * will re-throw it. + * - If the stream is already complete, the `for await` will be empty. + * - If your `for await` consumes slower than the stream produces, + * for example because you are relaying messages in a slow operation, + * messages are queued. */ - uint64(value) { - let long = pb_long_1.PbULong.from(value); - goog_varint_1.varint64write(long.lo, long.hi, this.buf); - return this; + [Symbol.asyncIterator]() { + if (!this._itState) { + this._itState = { q: [] }; + } + if (this._closed === true) + this.pushIt({ value: null, done: true }); + else if (this._closed !== false) + this.pushIt(this._closed); + return { + next: /* @__PURE__ */ __name(() => { + let state = this._itState; + runtime_1.assert(state, "bad state"); + runtime_1.assert(!state.p, "iterator contract broken"); + let first = state.q.shift(); + if (first) + return "value" in first ? Promise.resolve(first) : Promise.reject(first); + state.p = new deferred_1.Deferred(); + return state.p.promise; + }, "next") + }; + } + // "push" a new iterator result. + // this either resolves a pending promise, or enqueues the result. + pushIt(result) { + let state = this._itState; + if (!state) + return; + if (state.p) { + const p = state.p; + runtime_1.assert(p.state == deferred_1.DeferredState.PENDING, "iterator contract broken"); + "value" in result ? 
p.resolve(result) : p.reject(result); + delete state.p; + } else { + state.q.push(result); + } } }; - exports2.BinaryWriter = BinaryWriter; + exports2.RpcOutputStreamController = RpcOutputStreamController; } }); -// ../node_modules/@protobuf-ts/runtime/build/commonjs/json-format-contract.js -var require_json_format_contract = __commonJS({ - "../node_modules/@protobuf-ts/runtime/build/commonjs/json-format-contract.js"(exports2) { +// ../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/unary-call.js +var require_unary_call = __commonJS({ + "../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/unary-call.js"(exports2) { "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.mergeJsonOptions = exports2.jsonWriteOptions = exports2.jsonReadOptions = void 0; - var defaultsWrite = { - emitDefaultValues: false, - enumAsInteger: false, - useProtoFieldName: false, - prettySpaces: 0 - }; - var defaultsRead = { - ignoreUnknownFields: false + var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? value : new P(function(resolve) { + resolve(value); + }); + } + __name(adopt, "adopt"); + return new (P || (P = Promise))(function(resolve, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } + } + __name(fulfilled, "fulfilled"); + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); + } + } + __name(rejected, "rejected"); + function step(result) { + result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); + } + __name(step, "step"); + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); }; - function jsonReadOptions(options) { - return options ? Object.assign(Object.assign({}, defaultsRead), options) : defaultsRead; - } - __name(jsonReadOptions, "jsonReadOptions"); - exports2.jsonReadOptions = jsonReadOptions; - function jsonWriteOptions(options) { - return options ? Object.assign(Object.assign({}, defaultsWrite), options) : defaultsWrite; - } - __name(jsonWriteOptions, "jsonWriteOptions"); - exports2.jsonWriteOptions = jsonWriteOptions; - function mergeJsonOptions(a, b) { - var _a, _b; - let c = Object.assign(Object.assign({}, a), b); - c.typeRegistry = [...(_a = a === null || a === void 0 ? void 0 : a.typeRegistry) !== null && _a !== void 0 ? _a : [], ...(_b = b === null || b === void 0 ? void 0 : b.typeRegistry) !== null && _b !== void 0 ? 
_b : []]; - return c; - } - __name(mergeJsonOptions, "mergeJsonOptions"); - exports2.mergeJsonOptions = mergeJsonOptions; - } -}); - -// ../node_modules/@protobuf-ts/runtime/build/commonjs/message-type-contract.js -var require_message_type_contract = __commonJS({ - "../node_modules/@protobuf-ts/runtime/build/commonjs/message-type-contract.js"(exports2) { - "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.MESSAGE_TYPE = void 0; - exports2.MESSAGE_TYPE = Symbol.for("protobuf-ts/message-type"); + exports2.UnaryCall = void 0; + var UnaryCall = class { + static { + __name(this, "UnaryCall"); + } + constructor(method, requestHeaders, request, headers, response, status, trailers) { + this.method = method; + this.requestHeaders = requestHeaders; + this.request = request; + this.headers = headers; + this.response = response; + this.status = status; + this.trailers = trailers; + } + /** + * If you are only interested in the final outcome of this call, + * you can await it to receive a `FinishedUnaryCall`. + */ + then(onfulfilled, onrejected) { + return this.promiseFinished().then((value) => onfulfilled ? Promise.resolve(onfulfilled(value)) : value, (reason) => onrejected ? Promise.resolve(onrejected(reason)) : Promise.reject(reason)); + } + promiseFinished() { + return __awaiter3(this, void 0, void 0, function* () { + let [headers, response, status, trailers] = yield Promise.all([this.headers, this.response, this.status, this.trailers]); + return { + method: this.method, + requestHeaders: this.requestHeaders, + request: this.request, + headers, + response, + status, + trailers + }; + }); + } + }; + exports2.UnaryCall = UnaryCall; } }); -// ../node_modules/@protobuf-ts/runtime/build/commonjs/lower-camel-case.js -var require_lower_camel_case = __commonJS({ - "../node_modules/@protobuf-ts/runtime/build/commonjs/lower-camel-case.js"(exports2) { +// ../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/server-streaming-call.js +var require_server_streaming_call = __commonJS({ + "../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/server-streaming-call.js"(exports2) { "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.lowerCamelCase = void 0; - function lowerCamelCase(snakeCase) { - let capNext = false; - const sb = []; - for (let i = 0; i < snakeCase.length; i++) { - let next = snakeCase.charAt(i); - if (next == "_") { - capNext = true; - } else if (/\d/.test(next)) { - sb.push(next); - capNext = true; - } else if (capNext) { - sb.push(next.toUpperCase()); - capNext = false; - } else if (i == 0) { - sb.push(next.toLowerCase()); - } else { - sb.push(next); + var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? value : new P(function(resolve) { + resolve(value); + }); + } + __name(adopt, "adopt"); + return new (P || (P = Promise))(function(resolve, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } + } + __name(fulfilled, "fulfilled"); + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); + } + } + __name(rejected, "rejected"); + function step(result) { + result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + __name(step, "step"); + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.ServerStreamingCall = void 0; + var ServerStreamingCall = class { + static { + __name(this, "ServerStreamingCall"); } - return sb.join(""); - } - __name(lowerCamelCase, "lowerCamelCase"); - exports2.lowerCamelCase = lowerCamelCase; + constructor(method, requestHeaders, request, headers, response, status, trailers) { + this.method = method; + this.requestHeaders = requestHeaders; + this.request = request; + this.headers = headers; + this.responses = response; + this.status = status; + this.trailers = trailers; + } + /** + * Instead of awaiting the response status and trailers, you can + * just as well await this call itself to receive the server outcome. + * You should first setup some listeners to the `request` to + * see the actual messages the server replied with. + */ + then(onfulfilled, onrejected) { + return this.promiseFinished().then((value) => onfulfilled ? Promise.resolve(onfulfilled(value)) : value, (reason) => onrejected ? Promise.resolve(onrejected(reason)) : Promise.reject(reason)); + } + promiseFinished() { + return __awaiter3(this, void 0, void 0, function* () { + let [headers, status, trailers] = yield Promise.all([this.headers, this.status, this.trailers]); + return { + method: this.method, + requestHeaders: this.requestHeaders, + request: this.request, + headers, + status, + trailers + }; + }); + } + }; + exports2.ServerStreamingCall = ServerStreamingCall; } }); -// ../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-info.js -var require_reflection_info = __commonJS({ - "../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-info.js"(exports2) { +// ../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/client-streaming-call.js +var require_client_streaming_call = __commonJS({ + "../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/client-streaming-call.js"(exports2) { "use strict"; + var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? value : new P(function(resolve) { + resolve(value); + }); + } + __name(adopt, "adopt"); + return new (P || (P = Promise))(function(resolve, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } + } + __name(fulfilled, "fulfilled"); + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); + } + } + __name(rejected, "rejected"); + function step(result) { + result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); + } + __name(step, "step"); + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.readMessageOption = exports2.readFieldOption = exports2.readFieldOptions = exports2.normalizeFieldInfo = exports2.RepeatType = exports2.LongType = exports2.ScalarType = void 0; - var lower_camel_case_1 = require_lower_camel_case(); - var ScalarType; - (function(ScalarType2) { - ScalarType2[ScalarType2["DOUBLE"] = 1] = "DOUBLE"; - ScalarType2[ScalarType2["FLOAT"] = 2] = "FLOAT"; - ScalarType2[ScalarType2["INT64"] = 3] = "INT64"; - ScalarType2[ScalarType2["UINT64"] = 4] = "UINT64"; - ScalarType2[ScalarType2["INT32"] = 5] = "INT32"; - ScalarType2[ScalarType2["FIXED64"] = 6] = "FIXED64"; - ScalarType2[ScalarType2["FIXED32"] = 7] = "FIXED32"; - ScalarType2[ScalarType2["BOOL"] = 8] = "BOOL"; - ScalarType2[ScalarType2["STRING"] = 9] = "STRING"; - ScalarType2[ScalarType2["BYTES"] = 12] = "BYTES"; - ScalarType2[ScalarType2["UINT32"] = 13] = "UINT32"; - ScalarType2[ScalarType2["SFIXED32"] = 15] = "SFIXED32"; - ScalarType2[ScalarType2["SFIXED64"] = 16] = "SFIXED64"; - ScalarType2[ScalarType2["SINT32"] = 17] = "SINT32"; - ScalarType2[ScalarType2["SINT64"] = 18] = "SINT64"; - })(ScalarType = exports2.ScalarType || (exports2.ScalarType = {})); - var LongType; - (function(LongType2) { - LongType2[LongType2["BIGINT"] = 0] = "BIGINT"; - LongType2[LongType2["STRING"] = 1] = "STRING"; - LongType2[LongType2["NUMBER"] = 2] = "NUMBER"; - })(LongType = exports2.LongType || (exports2.LongType = {})); - var RepeatType; - (function(RepeatType2) { - RepeatType2[RepeatType2["NO"] = 0] = "NO"; - RepeatType2[RepeatType2["PACKED"] = 1] = "PACKED"; - RepeatType2[RepeatType2["UNPACKED"] = 2] = "UNPACKED"; - })(RepeatType = exports2.RepeatType || (exports2.RepeatType = {})); - function normalizeFieldInfo(field) { - var _a, _b, _c, _d; - field.localName = (_a = field.localName) !== null && _a !== void 0 ? _a : lower_camel_case_1.lowerCamelCase(field.name); - field.jsonName = (_b = field.jsonName) !== null && _b !== void 0 ? _b : lower_camel_case_1.lowerCamelCase(field.name); - field.repeat = (_c = field.repeat) !== null && _c !== void 0 ? _c : RepeatType.NO; - field.opt = (_d = field.opt) !== null && _d !== void 0 ? _d : field.repeat ? false : field.oneof ? false : field.kind == "message"; - return field; - } - __name(normalizeFieldInfo, "normalizeFieldInfo"); - exports2.normalizeFieldInfo = normalizeFieldInfo; - function readFieldOptions(messageType, fieldName, extensionName, extensionType) { - var _a; - const options = (_a = messageType.fields.find((m, i) => m.localName == fieldName || i == fieldName)) === null || _a === void 0 ? void 0 : _a.options; - return options && options[extensionName] ? extensionType.fromJson(options[extensionName]) : void 0; - } - __name(readFieldOptions, "readFieldOptions"); - exports2.readFieldOptions = readFieldOptions; - function readFieldOption(messageType, fieldName, extensionName, extensionType) { - var _a; - const options = (_a = messageType.fields.find((m, i) => m.localName == fieldName || i == fieldName)) === null || _a === void 0 ? 
void 0 : _a.options; - if (!options) { - return void 0; + exports2.ClientStreamingCall = void 0; + var ClientStreamingCall = class { + static { + __name(this, "ClientStreamingCall"); } - const optionVal = options[extensionName]; - if (optionVal === void 0) { - return optionVal; + constructor(method, requestHeaders, request, headers, response, status, trailers) { + this.method = method; + this.requestHeaders = requestHeaders; + this.requests = request; + this.headers = headers; + this.response = response; + this.status = status; + this.trailers = trailers; } - return extensionType ? extensionType.fromJson(optionVal) : optionVal; - } - __name(readFieldOption, "readFieldOption"); - exports2.readFieldOption = readFieldOption; - function readMessageOption(messageType, extensionName, extensionType) { - const options = messageType.options; - const optionVal = options[extensionName]; - if (optionVal === void 0) { - return optionVal; + /** + * Instead of awaiting the response status and trailers, you can + * just as well await this call itself to receive the server outcome. + * Note that it may still be valid to send more request messages. + */ + then(onfulfilled, onrejected) { + return this.promiseFinished().then((value) => onfulfilled ? Promise.resolve(onfulfilled(value)) : value, (reason) => onrejected ? Promise.resolve(onrejected(reason)) : Promise.reject(reason)); } - return extensionType ? extensionType.fromJson(optionVal) : optionVal; - } - __name(readMessageOption, "readMessageOption"); - exports2.readMessageOption = readMessageOption; + promiseFinished() { + return __awaiter3(this, void 0, void 0, function* () { + let [headers, response, status, trailers] = yield Promise.all([this.headers, this.response, this.status, this.trailers]); + return { + method: this.method, + requestHeaders: this.requestHeaders, + headers, + response, + status, + trailers + }; + }); + } + }; + exports2.ClientStreamingCall = ClientStreamingCall; } }); -// ../node_modules/@protobuf-ts/runtime/build/commonjs/oneof.js -var require_oneof = __commonJS({ - "../node_modules/@protobuf-ts/runtime/build/commonjs/oneof.js"(exports2) { +// ../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/duplex-streaming-call.js +var require_duplex_streaming_call = __commonJS({ + "../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/duplex-streaming-call.js"(exports2) { "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.getSelectedOneofValue = exports2.clearOneofValue = exports2.setUnknownOneofValue = exports2.setOneofValue = exports2.getOneofValue = exports2.isOneofGroup = void 0; - function isOneofGroup(any) { - if (typeof any != "object" || any === null || !any.hasOwnProperty("oneofKind")) { - return false; - } - switch (typeof any.oneofKind) { - case "string": - if (any[any.oneofKind] === void 0) - return false; - return Object.keys(any).length == 2; - case "undefined": - return Object.keys(any).length == 1; - default: - return false; - } - } - __name(isOneofGroup, "isOneofGroup"); - exports2.isOneofGroup = isOneofGroup; - function getOneofValue(oneof, kind) { - return oneof[kind]; - } - __name(getOneofValue, "getOneofValue"); - exports2.getOneofValue = getOneofValue; - function setOneofValue(oneof, kind, value) { - if (oneof.oneofKind !== void 0) { - delete oneof[oneof.oneofKind]; - } - oneof.oneofKind = kind; - if (value !== void 0) { - oneof[kind] = value; + var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + 
return value instanceof P ? value : new P(function(resolve) { + resolve(value); + }); } - } - __name(setOneofValue, "setOneofValue"); - exports2.setOneofValue = setOneofValue; - function setUnknownOneofValue(oneof, kind, value) { - if (oneof.oneofKind !== void 0) { - delete oneof[oneof.oneofKind]; + __name(adopt, "adopt"); + return new (P || (P = Promise))(function(resolve, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } + } + __name(fulfilled, "fulfilled"); + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); + } + } + __name(rejected, "rejected"); + function step(result) { + result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); + } + __name(step, "step"); + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.DuplexStreamingCall = void 0; + var DuplexStreamingCall = class { + static { + __name(this, "DuplexStreamingCall"); } - oneof.oneofKind = kind; - if (value !== void 0 && kind !== void 0) { - oneof[kind] = value; + constructor(method, requestHeaders, request, headers, response, status, trailers) { + this.method = method; + this.requestHeaders = requestHeaders; + this.requests = request; + this.headers = headers; + this.responses = response; + this.status = status; + this.trailers = trailers; } - } - __name(setUnknownOneofValue, "setUnknownOneofValue"); - exports2.setUnknownOneofValue = setUnknownOneofValue; - function clearOneofValue(oneof) { - if (oneof.oneofKind !== void 0) { - delete oneof[oneof.oneofKind]; + /** + * Instead of awaiting the response status and trailers, you can + * just as well await this call itself to receive the server outcome. + * Note that it may still be valid to send more request messages. + */ + then(onfulfilled, onrejected) { + return this.promiseFinished().then((value) => onfulfilled ? Promise.resolve(onfulfilled(value)) : value, (reason) => onrejected ? Promise.resolve(onrejected(reason)) : Promise.reject(reason)); } - oneof.oneofKind = void 0; - } - __name(clearOneofValue, "clearOneofValue"); - exports2.clearOneofValue = clearOneofValue; - function getSelectedOneofValue(oneof) { - if (oneof.oneofKind === void 0) { - return void 0; + promiseFinished() { + return __awaiter3(this, void 0, void 0, function* () { + let [headers, status, trailers] = yield Promise.all([this.headers, this.status, this.trailers]); + return { + method: this.method, + requestHeaders: this.requestHeaders, + headers, + status, + trailers + }; + }); } - return oneof[oneof.oneofKind]; - } - __name(getSelectedOneofValue, "getSelectedOneofValue"); - exports2.getSelectedOneofValue = getSelectedOneofValue; + }; + exports2.DuplexStreamingCall = DuplexStreamingCall; } }); -// ../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-type-check.js -var require_reflection_type_check = __commonJS({ - "../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-type-check.js"(exports2) { +// ../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/test-transport.js +var require_test_transport = __commonJS({ + "../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/test-transport.js"(exports2) { "use strict"; + var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? 
value : new P(function(resolve) { + resolve(value); + }); + } + __name(adopt, "adopt"); + return new (P || (P = Promise))(function(resolve, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } + } + __name(fulfilled, "fulfilled"); + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); + } + } + __name(rejected, "rejected"); + function step(result) { + result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); + } + __name(step, "step"); + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.ReflectionTypeCheck = void 0; - var reflection_info_1 = require_reflection_info(); - var oneof_1 = require_oneof(); - var ReflectionTypeCheck = class { + exports2.TestTransport = void 0; + var rpc_error_1 = require_rpc_error(); + var runtime_1 = require_commonjs7(); + var rpc_output_stream_1 = require_rpc_output_stream(); + var rpc_options_1 = require_rpc_options(); + var unary_call_1 = require_unary_call(); + var server_streaming_call_1 = require_server_streaming_call(); + var client_streaming_call_1 = require_client_streaming_call(); + var duplex_streaming_call_1 = require_duplex_streaming_call(); + var TestTransport = class _TestTransport { static { - __name(this, "ReflectionTypeCheck"); + __name(this, "TestTransport"); } - constructor(info) { - var _a; - this.fields = (_a = info.fields) !== null && _a !== void 0 ? _a : []; + /** + * Initialize with mock data. Omitted fields have default value. + */ + constructor(data) { + this.suppressUncaughtRejections = true; + this.headerDelay = 10; + this.responseDelay = 50; + this.betweenResponseDelay = 10; + this.afterResponseDelay = 10; + this.data = data !== null && data !== void 0 ? data : {}; } - prepare() { - if (this.data) - return; - const req = [], known = [], oneofs = []; - for (let field of this.fields) { - if (field.oneof) { - if (!oneofs.includes(field.oneof)) { - oneofs.push(field.oneof); - req.push(field.oneof); - known.push(field.oneof); - } - } else { - known.push(field.localName); - switch (field.kind) { - case "scalar": - case "enum": - if (!field.opt || field.repeat) - req.push(field.localName); - break; - case "message": - if (field.repeat) - req.push(field.localName); - break; - case "map": - req.push(field.localName); - break; - } - } + /** + * Sent message(s) during the last operation. + */ + get sentMessages() { + if (this.lastInput instanceof TestInputStream) { + return this.lastInput.sent; + } else if (typeof this.lastInput == "object") { + return [this.lastInput.single]; } - this.data = { req, known, oneofs: Object.values(oneofs) }; + return []; } /** - * Is the argument a valid message as specified by the - * reflection information? - * - * Checks all field types recursively. The `depth` - * specifies how deep into the structure the check will be. - * - * With a depth of 0, only the presence of fields - * is checked. - * - * With a depth of 1 or more, the field types are checked. - * - * With a depth of 2 or more, the members of map, repeated - * and message fields are checked. - * - * Message fields will be checked recursively with depth - 1. - * - * The number of map entries / repeated values being checked - * is < depth. + * Sending message(s) completed? 
*/ - is(message, depth, allowExcessProperties = false) { - if (depth < 0) - return true; - if (message === null || message === void 0 || typeof message != "object") - return false; - this.prepare(); - let keys = Object.keys(message), data = this.data; - if (keys.length < data.req.length || data.req.some((n) => !keys.includes(n))) - return false; - if (!allowExcessProperties) { - if (keys.some((k) => !data.known.includes(k))) - return false; - } - if (depth < 1) { + get sendComplete() { + if (this.lastInput instanceof TestInputStream) { + return this.lastInput.completed; + } else if (typeof this.lastInput == "object") { return true; } - for (const name of data.oneofs) { - const group = message[name]; - if (!oneof_1.isOneofGroup(group)) - return false; - if (group.oneofKind === void 0) - continue; - const field = this.fields.find((f) => f.localName === group.oneofKind); - if (!field) - return false; - if (!this.field(group[group.oneofKind], field, allowExcessProperties, depth)) - return false; + return false; + } + // Creates a promise for response headers from the mock data. + promiseHeaders() { + var _a; + const headers = (_a = this.data.headers) !== null && _a !== void 0 ? _a : _TestTransport.defaultHeaders; + return headers instanceof rpc_error_1.RpcError ? Promise.reject(headers) : Promise.resolve(headers); + } + // Creates a promise for a single, valid, message from the mock data. + promiseSingleResponse(method) { + if (this.data.response instanceof rpc_error_1.RpcError) { + return Promise.reject(this.data.response); } - for (const field of this.fields) { - if (field.oneof !== void 0) - continue; - if (!this.field(message[field.localName], field, allowExcessProperties, depth)) - return false; + let r; + if (Array.isArray(this.data.response)) { + runtime_1.assert(this.data.response.length > 0); + r = this.data.response[0]; + } else if (this.data.response !== void 0) { + r = this.data.response; + } else { + r = method.O.create(); } - return true; + runtime_1.assert(method.O.is(r)); + return Promise.resolve(r); } - field(arg, field, allowExcessProperties, depth) { - let repeated = field.repeat; - switch (field.kind) { - case "scalar": - if (arg === void 0) - return field.opt; - if (repeated) - return this.scalars(arg, field.T, depth, field.L); - return this.scalar(arg, field.T, field.L); - case "enum": - if (arg === void 0) - return field.opt; - if (repeated) - return this.scalars(arg, reflection_info_1.ScalarType.INT32, depth); - return this.scalar(arg, reflection_info_1.ScalarType.INT32); - case "message": - if (arg === void 0) - return true; - if (repeated) - return this.messages(arg, field.T(), allowExcessProperties, depth); - return this.message(arg, field.T(), allowExcessProperties, depth); - case "map": - if (typeof arg != "object" || arg === null) - return false; - if (depth < 2) - return true; - if (!this.mapKeys(arg, field.K, depth)) - return false; - switch (field.V.kind) { - case "scalar": - return this.scalars(Object.values(arg), field.V.T, depth, field.V.L); - case "enum": - return this.scalars(Object.values(arg), reflection_info_1.ScalarType.INT32, depth); - case "message": - return this.messages(Object.values(arg), field.V.T(), allowExcessProperties, depth); + /** + * Pushes response messages from the mock data to the output stream. + * If an error response, status or trailers are mocked, the stream is + * closed with the respective error. + * Otherwise, stream is completed successfully. + * + * The returned promise resolves when the stream is closed. 
It should + * not reject. If it does, code is broken. + */ + streamResponses(method, stream, abort) { + return __awaiter3(this, void 0, void 0, function* () { + const messages = []; + if (this.data.response === void 0) { + messages.push(method.O.create()); + } else if (Array.isArray(this.data.response)) { + for (let msg of this.data.response) { + runtime_1.assert(method.O.is(msg)); + messages.push(msg); } - break; - } - return true; + } else if (!(this.data.response instanceof rpc_error_1.RpcError)) { + runtime_1.assert(method.O.is(this.data.response)); + messages.push(this.data.response); + } + try { + yield delay(this.responseDelay, abort)(void 0); + } catch (error) { + stream.notifyError(error); + return; + } + if (this.data.response instanceof rpc_error_1.RpcError) { + stream.notifyError(this.data.response); + return; + } + for (let msg of messages) { + stream.notifyMessage(msg); + try { + yield delay(this.betweenResponseDelay, abort)(void 0); + } catch (error) { + stream.notifyError(error); + return; + } + } + if (this.data.status instanceof rpc_error_1.RpcError) { + stream.notifyError(this.data.status); + return; + } + if (this.data.trailers instanceof rpc_error_1.RpcError) { + stream.notifyError(this.data.trailers); + return; + } + stream.notifyComplete(); + }); } - message(arg, type, allowExcessProperties, depth) { - if (allowExcessProperties) { - return type.isAssignable(arg, depth); + // Creates a promise for response status from the mock data. + promiseStatus() { + var _a; + const status = (_a = this.data.status) !== null && _a !== void 0 ? _a : _TestTransport.defaultStatus; + return status instanceof rpc_error_1.RpcError ? Promise.reject(status) : Promise.resolve(status); + } + // Creates a promise for response trailers from the mock data. + promiseTrailers() { + var _a; + const trailers = (_a = this.data.trailers) !== null && _a !== void 0 ? _a : _TestTransport.defaultTrailers; + return trailers instanceof rpc_error_1.RpcError ? Promise.reject(trailers) : Promise.resolve(trailers); + } + maybeSuppressUncaught(...promise) { + if (this.suppressUncaughtRejections) { + for (let p of promise) { + p.catch(() => { + }); + } } - return type.is(arg, depth); } - messages(arg, type, allowExcessProperties, depth) { - if (!Array.isArray(arg)) - return false; - if (depth < 2) - return true; - if (allowExcessProperties) { - for (let i = 0; i < arg.length && i < depth; i++) - if (!type.isAssignable(arg[i], depth - 1)) - return false; + mergeOptions(options) { + return rpc_options_1.mergeRpcOptions({}, options); + } + unary(method, input, options) { + var _a; + const requestHeaders = (_a = options.meta) !== null && _a !== void 0 ? 
_a : {}, headersPromise = this.promiseHeaders().then(delay(this.headerDelay, options.abort)), responsePromise = headersPromise.catch((_2) => { + }).then(delay(this.responseDelay, options.abort)).then((_2) => this.promiseSingleResponse(method)), statusPromise = responsePromise.catch((_2) => { + }).then(delay(this.afterResponseDelay, options.abort)).then((_2) => this.promiseStatus()), trailersPromise = responsePromise.catch((_2) => { + }).then(delay(this.afterResponseDelay, options.abort)).then((_2) => this.promiseTrailers()); + this.maybeSuppressUncaught(statusPromise, trailersPromise); + this.lastInput = { single: input }; + return new unary_call_1.UnaryCall(method, requestHeaders, input, headersPromise, responsePromise, statusPromise, trailersPromise); + } + serverStreaming(method, input, options) { + var _a; + const requestHeaders = (_a = options.meta) !== null && _a !== void 0 ? _a : {}, headersPromise = this.promiseHeaders().then(delay(this.headerDelay, options.abort)), outputStream = new rpc_output_stream_1.RpcOutputStreamController(), responseStreamClosedPromise = headersPromise.then(delay(this.responseDelay, options.abort)).catch(() => { + }).then(() => this.streamResponses(method, outputStream, options.abort)).then(delay(this.afterResponseDelay, options.abort)), statusPromise = responseStreamClosedPromise.then(() => this.promiseStatus()), trailersPromise = responseStreamClosedPromise.then(() => this.promiseTrailers()); + this.maybeSuppressUncaught(statusPromise, trailersPromise); + this.lastInput = { single: input }; + return new server_streaming_call_1.ServerStreamingCall(method, requestHeaders, input, headersPromise, outputStream, statusPromise, trailersPromise); + } + clientStreaming(method, options) { + var _a; + const requestHeaders = (_a = options.meta) !== null && _a !== void 0 ? _a : {}, headersPromise = this.promiseHeaders().then(delay(this.headerDelay, options.abort)), responsePromise = headersPromise.catch((_2) => { + }).then(delay(this.responseDelay, options.abort)).then((_2) => this.promiseSingleResponse(method)), statusPromise = responsePromise.catch((_2) => { + }).then(delay(this.afterResponseDelay, options.abort)).then((_2) => this.promiseStatus()), trailersPromise = responsePromise.catch((_2) => { + }).then(delay(this.afterResponseDelay, options.abort)).then((_2) => this.promiseTrailers()); + this.maybeSuppressUncaught(statusPromise, trailersPromise); + this.lastInput = new TestInputStream(this.data, options.abort); + return new client_streaming_call_1.ClientStreamingCall(method, requestHeaders, this.lastInput, headersPromise, responsePromise, statusPromise, trailersPromise); + } + duplex(method, options) { + var _a; + const requestHeaders = (_a = options.meta) !== null && _a !== void 0 ? 
_a : {}, headersPromise = this.promiseHeaders().then(delay(this.headerDelay, options.abort)), outputStream = new rpc_output_stream_1.RpcOutputStreamController(), responseStreamClosedPromise = headersPromise.then(delay(this.responseDelay, options.abort)).catch(() => { + }).then(() => this.streamResponses(method, outputStream, options.abort)).then(delay(this.afterResponseDelay, options.abort)), statusPromise = responseStreamClosedPromise.then(() => this.promiseStatus()), trailersPromise = responseStreamClosedPromise.then(() => this.promiseTrailers()); + this.maybeSuppressUncaught(statusPromise, trailersPromise); + this.lastInput = new TestInputStream(this.data, options.abort); + return new duplex_streaming_call_1.DuplexStreamingCall(method, requestHeaders, this.lastInput, headersPromise, outputStream, statusPromise, trailersPromise); + } + }; + exports2.TestTransport = TestTransport; + TestTransport.defaultHeaders = { + responseHeader: "test" + }; + TestTransport.defaultStatus = { + code: "OK", + detail: "all good" + }; + TestTransport.defaultTrailers = { + responseTrailer: "test" + }; + function delay(ms, abort) { + return (v) => new Promise((resolve, reject) => { + if (abort === null || abort === void 0 ? void 0 : abort.aborted) { + reject(new rpc_error_1.RpcError("user cancel", "CANCELLED")); } else { - for (let i = 0; i < arg.length && i < depth; i++) - if (!type.is(arg[i], depth - 1)) - return false; + const id = setTimeout(() => resolve(v), ms); + if (abort) { + abort.addEventListener("abort", (ev) => { + clearTimeout(id); + reject(new rpc_error_1.RpcError("user cancel", "CANCELLED")); + }); + } } - return true; + }); + } + __name(delay, "delay"); + var TestInputStream = class { + static { + __name(this, "TestInputStream"); } - scalar(arg, type, longType) { - let argType = typeof arg; - switch (type) { - case reflection_info_1.ScalarType.UINT64: - case reflection_info_1.ScalarType.FIXED64: - case reflection_info_1.ScalarType.INT64: - case reflection_info_1.ScalarType.SFIXED64: - case reflection_info_1.ScalarType.SINT64: - switch (longType) { - case reflection_info_1.LongType.BIGINT: - return argType == "bigint"; - case reflection_info_1.LongType.NUMBER: - return argType == "number" && !isNaN(arg); - default: - return argType == "string"; - } - case reflection_info_1.ScalarType.BOOL: - return argType == "boolean"; - case reflection_info_1.ScalarType.STRING: - return argType == "string"; - case reflection_info_1.ScalarType.BYTES: - return arg instanceof Uint8Array; - case reflection_info_1.ScalarType.DOUBLE: - case reflection_info_1.ScalarType.FLOAT: - return argType == "number" && !isNaN(arg); - default: - return argType == "number" && Number.isInteger(arg); - } + constructor(data, abort) { + this._completed = false; + this._sent = []; + this.data = data; + this.abort = abort; } - scalars(arg, type, depth, longType) { - if (!Array.isArray(arg)) - return false; - if (depth < 2) - return true; - if (Array.isArray(arg)) { - for (let i = 0; i < arg.length && i < depth; i++) - if (!this.scalar(arg[i], type, longType)) - return false; + get sent() { + return this._sent; + } + get completed() { + return this._completed; + } + send(message) { + if (this.data.inputMessage instanceof rpc_error_1.RpcError) { + return Promise.reject(this.data.inputMessage); } - return true; + const delayMs = this.data.inputMessage === void 0 ? 
10 : this.data.inputMessage; + return Promise.resolve(void 0).then(() => { + this._sent.push(message); + }).then(delay(delayMs, this.abort)); } - mapKeys(map, type, depth) { - let keys = Object.keys(map); - switch (type) { - case reflection_info_1.ScalarType.INT32: - case reflection_info_1.ScalarType.FIXED32: - case reflection_info_1.ScalarType.SFIXED32: - case reflection_info_1.ScalarType.SINT32: - case reflection_info_1.ScalarType.UINT32: - return this.scalars(keys.slice(0, depth).map((k) => parseInt(k)), type, depth); - case reflection_info_1.ScalarType.BOOL: - return this.scalars(keys.slice(0, depth).map((k) => k == "true" ? true : k == "false" ? false : k), type, depth); - default: - return this.scalars(keys, type, depth, reflection_info_1.LongType.STRING); + complete() { + if (this.data.inputComplete instanceof rpc_error_1.RpcError) { + return Promise.reject(this.data.inputComplete); } + const delayMs = this.data.inputComplete === void 0 ? 10 : this.data.inputComplete; + return Promise.resolve(void 0).then(() => { + this._completed = true; + }).then(delay(delayMs, this.abort)); } }; - exports2.ReflectionTypeCheck = ReflectionTypeCheck; } }); -// ../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-long-convert.js -var require_reflection_long_convert = __commonJS({ - "../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-long-convert.js"(exports2) { +// ../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-interceptor.js +var require_rpc_interceptor = __commonJS({ + "../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-interceptor.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.reflectionLongConvert = void 0; - var reflection_info_1 = require_reflection_info(); - function reflectionLongConvert(long, type) { - switch (type) { - case reflection_info_1.LongType.BIGINT: - return long.toBigInt(); - case reflection_info_1.LongType.NUMBER: - return long.toNumber(); - default: - return long.toString(); + exports2.stackDuplexStreamingInterceptors = exports2.stackClientStreamingInterceptors = exports2.stackServerStreamingInterceptors = exports2.stackUnaryInterceptors = exports2.stackIntercept = void 0; + var runtime_1 = require_commonjs7(); + function stackIntercept(kind, transport, method, options, input) { + var _a, _b, _c, _d; + if (kind == "unary") { + let tail = /* @__PURE__ */ __name((mtd, inp, opt) => transport.unary(mtd, inp, opt), "tail"); + for (const curr of ((_a = options.interceptors) !== null && _a !== void 0 ? _a : []).filter((i) => i.interceptUnary).reverse()) { + const next = tail; + tail = /* @__PURE__ */ __name((mtd, inp, opt) => curr.interceptUnary(next, mtd, inp, opt), "tail"); + } + return tail(method, input, options); + } + if (kind == "serverStreaming") { + let tail = /* @__PURE__ */ __name((mtd, inp, opt) => transport.serverStreaming(mtd, inp, opt), "tail"); + for (const curr of ((_b = options.interceptors) !== null && _b !== void 0 ? _b : []).filter((i) => i.interceptServerStreaming).reverse()) { + const next = tail; + tail = /* @__PURE__ */ __name((mtd, inp, opt) => curr.interceptServerStreaming(next, mtd, inp, opt), "tail"); + } + return tail(method, input, options); + } + if (kind == "clientStreaming") { + let tail = /* @__PURE__ */ __name((mtd, opt) => transport.clientStreaming(mtd, opt), "tail"); + for (const curr of ((_c = options.interceptors) !== null && _c !== void 0 ? 
_c : []).filter((i) => i.interceptClientStreaming).reverse()) { + const next = tail; + tail = /* @__PURE__ */ __name((mtd, opt) => curr.interceptClientStreaming(next, mtd, opt), "tail"); + } + return tail(method, options); + } + if (kind == "duplex") { + let tail = /* @__PURE__ */ __name((mtd, opt) => transport.duplex(mtd, opt), "tail"); + for (const curr of ((_d = options.interceptors) !== null && _d !== void 0 ? _d : []).filter((i) => i.interceptDuplex).reverse()) { + const next = tail; + tail = /* @__PURE__ */ __name((mtd, opt) => curr.interceptDuplex(next, mtd, opt), "tail"); + } + return tail(method, options); } + runtime_1.assertNever(kind); } - __name(reflectionLongConvert, "reflectionLongConvert"); - exports2.reflectionLongConvert = reflectionLongConvert; + __name(stackIntercept, "stackIntercept"); + exports2.stackIntercept = stackIntercept; + function stackUnaryInterceptors(transport, method, input, options) { + return stackIntercept("unary", transport, method, options, input); + } + __name(stackUnaryInterceptors, "stackUnaryInterceptors"); + exports2.stackUnaryInterceptors = stackUnaryInterceptors; + function stackServerStreamingInterceptors(transport, method, input, options) { + return stackIntercept("serverStreaming", transport, method, options, input); + } + __name(stackServerStreamingInterceptors, "stackServerStreamingInterceptors"); + exports2.stackServerStreamingInterceptors = stackServerStreamingInterceptors; + function stackClientStreamingInterceptors(transport, method, options) { + return stackIntercept("clientStreaming", transport, method, options); + } + __name(stackClientStreamingInterceptors, "stackClientStreamingInterceptors"); + exports2.stackClientStreamingInterceptors = stackClientStreamingInterceptors; + function stackDuplexStreamingInterceptors(transport, method, options) { + return stackIntercept("duplex", transport, method, options); + } + __name(stackDuplexStreamingInterceptors, "stackDuplexStreamingInterceptors"); + exports2.stackDuplexStreamingInterceptors = stackDuplexStreamingInterceptors; } }); -// ../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-json-reader.js -var require_reflection_json_reader = __commonJS({ - "../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-json-reader.js"(exports2) { +// ../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/server-call-context.js +var require_server_call_context = __commonJS({ + "../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/server-call-context.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.ReflectionJsonReader = void 0; - var json_typings_1 = require_json_typings(); - var base64_1 = require_base642(); - var reflection_info_1 = require_reflection_info(); - var pb_long_1 = require_pb_long(); - var assert_1 = require_assert(); - var reflection_long_convert_1 = require_reflection_long_convert(); - var ReflectionJsonReader = class { + exports2.ServerCallContextController = void 0; + var ServerCallContextController = class { static { - __name(this, "ReflectionJsonReader"); + __name(this, "ServerCallContextController"); } - constructor(info) { - this.info = info; + constructor(method, headers, deadline, sendResponseHeadersFn, defaultStatus = { code: "OK", detail: "" }) { + this._cancelled = false; + this._listeners = []; + this.method = method; + this.headers = headers; + this.deadline = deadline; + this.trailers = {}; + this._sendRH = sendResponseHeadersFn; + this.status = defaultStatus; } - prepare() { - var _a; - if 
(this.fMap === void 0) { - this.fMap = {}; - const fieldsInput = (_a = this.info.fields) !== null && _a !== void 0 ? _a : []; - for (const field of fieldsInput) { - this.fMap[field.name] = field; - this.fMap[field.jsonName] = field; - this.fMap[field.localName] = field; + /** + * Set the call cancelled. + * + * Invokes all callbacks registered with onCancel() and + * sets `cancelled = true`. + */ + notifyCancelled() { + if (!this._cancelled) { + this._cancelled = true; + for (let l of this._listeners) { + l(); } } } - // Cannot parse JSON for #. - assert(condition, fieldName, jsonValue) { - if (!condition) { - let what = json_typings_1.typeofJsonValue(jsonValue); - if (what == "number" || what == "boolean") - what = jsonValue.toString(); - throw new Error(`Cannot parse JSON ${what} for ${this.info.typeName}#${fieldName}`); - } + /** + * Send response headers. + */ + sendResponseHeaders(data) { + this._sendRH(data); } /** - * Reads a message from canonical JSON format into the target message. + * Is the call cancelled? * - * Repeated fields are appended. Map entries are added, overwriting - * existing keys. + * When the client closes the connection before the server + * is done, the call is cancelled. * - * If a message field is already present, it will be merged with the - * new data. + * If you want to cancel a request on the server, throw a + * RpcError with the CANCELLED status code. */ - read(input, message, options) { - this.prepare(); - const oneofsHandled = []; - for (const [jsonKey, jsonValue] of Object.entries(input)) { - const field = this.fMap[jsonKey]; - if (!field) { - if (!options.ignoreUnknownFields) - throw new Error(`Found unknown field while reading ${this.info.typeName} from JSON format. JSON key: ${jsonKey}`); - continue; - } - const localName = field.localName; - let target; - if (field.oneof) { - if (jsonValue === null && (field.kind !== "enum" || field.T()[0] !== "google.protobuf.NullValue")) { - continue; - } - if (oneofsHandled.includes(field.oneof)) - throw new Error(`Multiple members of the oneof group "${field.oneof}" of ${this.info.typeName} are present in JSON.`); - oneofsHandled.push(field.oneof); - target = message[field.oneof] = { - oneofKind: localName - }; - } else { - target = message; - } - if (field.kind == "map") { - if (jsonValue === null) { - continue; - } - this.assert(json_typings_1.isJsonObject(jsonValue), field.name, jsonValue); - const fieldObj = target[localName]; - for (const [jsonObjKey, jsonObjValue] of Object.entries(jsonValue)) { - this.assert(jsonObjValue !== null, field.name + " map value", null); - let val; - switch (field.V.kind) { - case "message": - val = field.V.T().internalJsonRead(jsonObjValue, options); - break; - case "enum": - val = this.enum(field.V.T(), jsonObjValue, field.name, options.ignoreUnknownFields); - if (val === false) - continue; - break; - case "scalar": - val = this.scalar(jsonObjValue, field.V.T, field.V.L, field.name); - break; - } - this.assert(val !== void 0, field.name + " map value", jsonObjValue); - let key = jsonObjKey; - if (field.K == reflection_info_1.ScalarType.BOOL) - key = key == "true" ? true : key == "false" ? 
false : key; - key = this.scalar(key, field.K, reflection_info_1.LongType.STRING, field.name).toString(); - fieldObj[key] = val; - } - } else if (field.repeat) { - if (jsonValue === null) - continue; - this.assert(Array.isArray(jsonValue), field.name, jsonValue); - const fieldArr = target[localName]; - for (const jsonItem of jsonValue) { - this.assert(jsonItem !== null, field.name, null); - let val; - switch (field.kind) { - case "message": - val = field.T().internalJsonRead(jsonItem, options); - break; - case "enum": - val = this.enum(field.T(), jsonItem, field.name, options.ignoreUnknownFields); - if (val === false) - continue; - break; - case "scalar": - val = this.scalar(jsonItem, field.T, field.L, field.name); - break; - } - this.assert(val !== void 0, field.name, jsonValue); - fieldArr.push(val); - } - } else { - switch (field.kind) { - case "message": - if (jsonValue === null && field.T().typeName != "google.protobuf.Value") { - this.assert(field.oneof === void 0, field.name + " (oneof member)", null); - continue; - } - target[localName] = field.T().internalJsonRead(jsonValue, options, target[localName]); - break; - case "enum": - let val = this.enum(field.T(), jsonValue, field.name, options.ignoreUnknownFields); - if (val === false) - continue; - target[localName] = val; - break; - case "scalar": - target[localName] = this.scalar(jsonValue, field.T, field.L, field.name); - break; - } + get cancelled() { + return this._cancelled; + } + /** + * Add a callback for cancellation. + */ + onCancel(callback) { + const l = this._listeners; + l.push(callback); + return () => { + let i = l.indexOf(callback); + if (i >= 0) + l.splice(i, 1); + }; + } + }; + exports2.ServerCallContextController = ServerCallContextController; + } +}); + +// ../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/index.js +var require_commonjs8 = __commonJS({ + "../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/index.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + var service_type_1 = require_service_type(); + Object.defineProperty(exports2, "ServiceType", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return service_type_1.ServiceType; + }, "get") }); + var reflection_info_1 = require_reflection_info2(); + Object.defineProperty(exports2, "readMethodOptions", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return reflection_info_1.readMethodOptions; + }, "get") }); + Object.defineProperty(exports2, "readMethodOption", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return reflection_info_1.readMethodOption; + }, "get") }); + Object.defineProperty(exports2, "readServiceOption", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return reflection_info_1.readServiceOption; + }, "get") }); + var rpc_error_1 = require_rpc_error(); + Object.defineProperty(exports2, "RpcError", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return rpc_error_1.RpcError; + }, "get") }); + var rpc_options_1 = require_rpc_options(); + Object.defineProperty(exports2, "mergeRpcOptions", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return rpc_options_1.mergeRpcOptions; + }, "get") }); + var rpc_output_stream_1 = require_rpc_output_stream(); + Object.defineProperty(exports2, "RpcOutputStreamController", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return rpc_output_stream_1.RpcOutputStreamController; + }, "get") }); + var test_transport_1 = require_test_transport(); + 
Object.defineProperty(exports2, "TestTransport", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return test_transport_1.TestTransport; + }, "get") }); + var deferred_1 = require_deferred(); + Object.defineProperty(exports2, "Deferred", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return deferred_1.Deferred; + }, "get") }); + Object.defineProperty(exports2, "DeferredState", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return deferred_1.DeferredState; + }, "get") }); + var duplex_streaming_call_1 = require_duplex_streaming_call(); + Object.defineProperty(exports2, "DuplexStreamingCall", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return duplex_streaming_call_1.DuplexStreamingCall; + }, "get") }); + var client_streaming_call_1 = require_client_streaming_call(); + Object.defineProperty(exports2, "ClientStreamingCall", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return client_streaming_call_1.ClientStreamingCall; + }, "get") }); + var server_streaming_call_1 = require_server_streaming_call(); + Object.defineProperty(exports2, "ServerStreamingCall", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return server_streaming_call_1.ServerStreamingCall; + }, "get") }); + var unary_call_1 = require_unary_call(); + Object.defineProperty(exports2, "UnaryCall", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return unary_call_1.UnaryCall; + }, "get") }); + var rpc_interceptor_1 = require_rpc_interceptor(); + Object.defineProperty(exports2, "stackIntercept", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return rpc_interceptor_1.stackIntercept; + }, "get") }); + Object.defineProperty(exports2, "stackDuplexStreamingInterceptors", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return rpc_interceptor_1.stackDuplexStreamingInterceptors; + }, "get") }); + Object.defineProperty(exports2, "stackClientStreamingInterceptors", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return rpc_interceptor_1.stackClientStreamingInterceptors; + }, "get") }); + Object.defineProperty(exports2, "stackServerStreamingInterceptors", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return rpc_interceptor_1.stackServerStreamingInterceptors; + }, "get") }); + Object.defineProperty(exports2, "stackUnaryInterceptors", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return rpc_interceptor_1.stackUnaryInterceptors; + }, "get") }); + var server_call_context_1 = require_server_call_context(); + Object.defineProperty(exports2, "ServerCallContextController", { enumerable: true, get: /* @__PURE__ */ __name(function() { + return server_call_context_1.ServerCallContextController; + }, "get") }); + } +}); + +// ../node_modules/@actions/cache/lib/generated/google/protobuf/timestamp.js +var require_timestamp = __commonJS({ + "../node_modules/@actions/cache/lib/generated/google/protobuf/timestamp.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.Timestamp = void 0; + var runtime_1 = require_commonjs7(); + var runtime_2 = require_commonjs7(); + var runtime_3 = require_commonjs7(); + var runtime_4 = require_commonjs7(); + var runtime_5 = require_commonjs7(); + var runtime_6 = require_commonjs7(); + var runtime_7 = require_commonjs7(); + var Timestamp$Type = class extends runtime_7.MessageType { + static { + __name(this, "Timestamp$Type"); + } + constructor() { + super("google.protobuf.Timestamp", [ + { 
+ no: 1, + name: "seconds", + kind: "scalar", + T: 3 + /*ScalarType.INT64*/ + }, + { + no: 2, + name: "nanos", + kind: "scalar", + T: 5 + /*ScalarType.INT32*/ } - } + ]); } /** - * Returns `false` for unrecognized string representations. - * - * google.protobuf.NullValue accepts only JSON `null` (or the old `"NULL_VALUE"`). + * Creates a new `Timestamp` for the current time. */ - enum(type, json, fieldName, ignoreUnknownFields) { - if (type[0] == "google.protobuf.NullValue") - assert_1.assert(json === null || json === "NULL_VALUE", `Unable to parse field ${this.info.typeName}#${fieldName}, enum ${type[0]} only accepts null.`); - if (json === null) - return 0; - switch (typeof json) { - case "number": - assert_1.assert(Number.isInteger(json), `Unable to parse field ${this.info.typeName}#${fieldName}, enum can only be integral number, got ${json}.`); - return json; - case "string": - let localEnumName = json; - if (type[2] && json.substring(0, type[2].length) === type[2]) - localEnumName = json.substring(type[2].length); - let enumNumber = type[1][localEnumName]; - if (typeof enumNumber === "undefined" && ignoreUnknownFields) { - return false; - } - assert_1.assert(typeof enumNumber == "number", `Unable to parse field ${this.info.typeName}#${fieldName}, enum ${type[0]} has no value for "${json}".`); - return enumNumber; + now() { + const msg = this.create(); + const ms = Date.now(); + msg.seconds = runtime_6.PbLong.from(Math.floor(ms / 1e3)).toString(); + msg.nanos = ms % 1e3 * 1e6; + return msg; + } + /** + * Converts a `Timestamp` to a JavaScript Date. + */ + toDate(message) { + return new Date(runtime_6.PbLong.from(message.seconds).toNumber() * 1e3 + Math.ceil(message.nanos / 1e6)); + } + /** + * Converts a JavaScript Date to a `Timestamp`. + */ + fromDate(date) { + const msg = this.create(); + const ms = date.getTime(); + msg.seconds = runtime_6.PbLong.from(Math.floor(ms / 1e3)).toString(); + msg.nanos = ms % 1e3 * 1e6; + return msg; + } + /** + * In JSON format, the `Timestamp` type is encoded as a string + * in the RFC 3339 format. + */ + internalJsonWrite(message, options) { + let ms = runtime_6.PbLong.from(message.seconds).toNumber() * 1e3; + if (ms < Date.parse("0001-01-01T00:00:00Z") || ms > Date.parse("9999-12-31T23:59:59Z")) + throw new Error("Unable to encode Timestamp to JSON. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive."); + if (message.nanos < 0) + throw new Error("Unable to encode invalid Timestamp to JSON. Nanos must not be negative."); + let z = "Z"; + if (message.nanos > 0) { + let nanosStr = (message.nanos + 1e9).toString().substring(1); + if (nanosStr.substring(3) === "000000") + z = "." + nanosStr.substring(0, 3) + "Z"; + else if (nanosStr.substring(6) === "000") + z = "." + nanosStr.substring(0, 6) + "Z"; + else + z = "." + nanosStr + "Z"; } - assert_1.assert(false, `Unable to parse field ${this.info.typeName}#${fieldName}, cannot parse enum value from ${typeof json}".`); + return new Date(ms).toISOString().replace(".000Z", z); } - scalar(json, type, longType, fieldName) { - let e; - try { - switch (type) { - // float, double: JSON value will be a number or one of the special string values "NaN", "Infinity", and "-Infinity". - // Either numbers or strings are accepted. Exponent notation is also accepted. 
- case reflection_info_1.ScalarType.DOUBLE: - case reflection_info_1.ScalarType.FLOAT: - if (json === null) - return 0; - if (json === "NaN") - return Number.NaN; - if (json === "Infinity") - return Number.POSITIVE_INFINITY; - if (json === "-Infinity") - return Number.NEGATIVE_INFINITY; - if (json === "") { - e = "empty string"; - break; - } - if (typeof json == "string" && json.trim().length !== json.length) { - e = "extra whitespace"; - break; - } - if (typeof json != "string" && typeof json != "number") { - break; - } - let float = Number(json); - if (Number.isNaN(float)) { - e = "not a number"; - break; - } - if (!Number.isFinite(float)) { - e = "too large or small"; - break; - } - if (type == reflection_info_1.ScalarType.FLOAT) - assert_1.assertFloat32(float); - return float; - // int32, fixed32, uint32: JSON value will be a decimal number. Either numbers or strings are accepted. - case reflection_info_1.ScalarType.INT32: - case reflection_info_1.ScalarType.FIXED32: - case reflection_info_1.ScalarType.SFIXED32: - case reflection_info_1.ScalarType.SINT32: - case reflection_info_1.ScalarType.UINT32: - if (json === null) - return 0; - let int32; - if (typeof json == "number") - int32 = json; - else if (json === "") - e = "empty string"; - else if (typeof json == "string") { - if (json.trim().length !== json.length) - e = "extra whitespace"; - else - int32 = Number(json); - } - if (int32 === void 0) - break; - if (type == reflection_info_1.ScalarType.UINT32) - assert_1.assertUInt32(int32); - else - assert_1.assertInt32(int32); - return int32; - // int64, fixed64, uint64: JSON value will be a decimal string. Either numbers or strings are accepted. - case reflection_info_1.ScalarType.INT64: - case reflection_info_1.ScalarType.SFIXED64: - case reflection_info_1.ScalarType.SINT64: - if (json === null) - return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbLong.ZERO, longType); - if (typeof json != "number" && typeof json != "string") - break; - return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbLong.from(json), longType); - case reflection_info_1.ScalarType.FIXED64: - case reflection_info_1.ScalarType.UINT64: - if (json === null) - return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbULong.ZERO, longType); - if (typeof json != "number" && typeof json != "string") - break; - return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbULong.from(json), longType); - // bool: - case reflection_info_1.ScalarType.BOOL: - if (json === null) - return false; - if (typeof json !== "boolean") - break; - return json; - // string: - case reflection_info_1.ScalarType.STRING: - if (json === null) - return ""; - if (typeof json !== "string") { - e = "extra whitespace"; - break; - } - try { - encodeURIComponent(json); - } catch (e2) { - e2 = "invalid UTF8"; - break; - } - return json; - // bytes: JSON value will be the data encoded as a string using standard base64 encoding with paddings. - // Either standard or URL-safe base64 encoding with/without paddings are accepted. - case reflection_info_1.ScalarType.BYTES: - if (json === null || json === "") - return new Uint8Array(0); - if (typeof json !== "string") - break; - return base64_1.base64decode(json); + /** + * In JSON format, the `Timestamp` type is encoded as a string + * in the RFC 3339 format. 
+ */ + internalJsonRead(json, options, target) { + if (typeof json !== "string") + throw new Error("Unable to parse Timestamp from JSON " + (0, runtime_5.typeofJsonValue)(json) + "."); + let matches = json.match(/^([0-9]{4})-([0-9]{2})-([0-9]{2})T([0-9]{2}):([0-9]{2}):([0-9]{2})(?:Z|\.([0-9]{3,9})Z|([+-][0-9][0-9]:[0-9][0-9]))$/); + if (!matches) + throw new Error("Unable to parse Timestamp from JSON. Invalid format."); + let ms = Date.parse(matches[1] + "-" + matches[2] + "-" + matches[3] + "T" + matches[4] + ":" + matches[5] + ":" + matches[6] + (matches[8] ? matches[8] : "Z")); + if (Number.isNaN(ms)) + throw new Error("Unable to parse Timestamp from JSON. Invalid value."); + if (ms < Date.parse("0001-01-01T00:00:00Z") || ms > Date.parse("9999-12-31T23:59:59Z")) + throw new globalThis.Error("Unable to parse Timestamp from JSON. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive."); + if (!target) + target = this.create(); + target.seconds = runtime_6.PbLong.from(ms / 1e3).toString(); + target.nanos = 0; + if (matches[7]) + target.nanos = parseInt("1" + matches[7] + "0".repeat(9 - matches[7].length)) - 1e9; + return target; + } + create(value) { + const message = { seconds: "0", nanos: 0 }; + globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== void 0) + (0, runtime_3.reflectionMergePartial)(this, message, value); + return message; + } + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* int64 seconds */ + 1: + message.seconds = reader.int64().toString(); + break; + case /* int32 nanos */ + 2: + message.nanos = reader.int32(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); } - } catch (error) { - e = error.message; } - this.assert(false, fieldName + (e ? " - " + e : ""), json); + return message; + } + internalBinaryWrite(message, writer, options) { + if (message.seconds !== "0") + writer.tag(1, runtime_1.WireType.Varint).int64(message.seconds); + if (message.nanos !== 0) + writer.tag(2, runtime_1.WireType.Varint).int32(message.nanos); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
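+ // Illustrative round-trip through the Timestamp helpers defined above (kept
+ // as a comment so the bundle's behavior is unchanged):
+ //   const ts = Timestamp.fromDate(new Date("2023-11-14T22:13:20.500Z"));
+ //   // ts.seconds === "1700000000", ts.nanos === 500000000
+ //   Timestamp.toDate(ts).toISOString(); // "2023-11-14T22:13:20.500Z"
+ //   Timestamp.internalJsonWrite(ts, {}); // likewise "2023-11-14T22:13:20.500Z"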
runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; } }; - exports2.ReflectionJsonReader = ReflectionJsonReader; + exports2.Timestamp = new Timestamp$Type(); } }); -// ../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-json-writer.js -var require_reflection_json_writer = __commonJS({ - "../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-json-writer.js"(exports2) { +// ../node_modules/@actions/cache/lib/generated/results/entities/v1/cacheentry.js +var require_cacheentry = __commonJS({ + "../node_modules/@actions/cache/lib/generated/results/entities/v1/cacheentry.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.ReflectionJsonWriter = void 0; - var base64_1 = require_base642(); - var pb_long_1 = require_pb_long(); - var reflection_info_1 = require_reflection_info(); - var assert_1 = require_assert(); - var ReflectionJsonWriter = class { + exports2.CacheEntry = void 0; + var runtime_1 = require_commonjs7(); + var runtime_2 = require_commonjs7(); + var runtime_3 = require_commonjs7(); + var runtime_4 = require_commonjs7(); + var runtime_5 = require_commonjs7(); + var timestamp_1 = require_timestamp(); + var CacheEntry$Type = class extends runtime_5.MessageType { static { - __name(this, "ReflectionJsonWriter"); + __name(this, "CacheEntry$Type"); } - constructor(info) { - var _a; - this.fields = (_a = info.fields) !== null && _a !== void 0 ? _a : []; + constructor() { + super("github.actions.results.entities.v1.CacheEntry", [ + { + no: 1, + name: "key", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + }, + { + no: 2, + name: "hash", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + }, + { + no: 3, + name: "size_bytes", + kind: "scalar", + T: 3 + /*ScalarType.INT64*/ + }, + { + no: 4, + name: "scope", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + }, + { + no: 5, + name: "version", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + }, + { no: 6, name: "created_at", kind: "message", T: /* @__PURE__ */ __name(() => timestamp_1.Timestamp, "T") }, + { no: 7, name: "last_accessed_at", kind: "message", T: /* @__PURE__ */ __name(() => timestamp_1.Timestamp, "T") }, + { no: 8, name: "expires_at", kind: "message", T: /* @__PURE__ */ __name(() => timestamp_1.Timestamp, "T") } + ]); } - /** - * Converts the message to a JSON object, based on the field descriptors. - */ - write(message, options) { - const json = {}, source = message; - for (const field of this.fields) { - if (!field.oneof) { - let jsonValue2 = this.field(field, source[field.localName], options); - if (jsonValue2 !== void 0) - json[options.useProtoFieldName ? field.name : field.jsonName] = jsonValue2; - continue; - } - const group = source[field.oneof]; - if (group.oneofKind !== field.localName) - continue; - const opt = field.kind == "scalar" || field.kind == "enum" ? Object.assign(Object.assign({}, options), { emitDefaultValues: true }) : options; - let jsonValue = this.field(field, group[field.localName], opt); - assert_1.assert(jsonValue !== void 0); - json[options.useProtoFieldName ? 
field.name : field.jsonName] = jsonValue; - } - return json; + create(value) { + const message = { key: "", hash: "", sizeBytes: "0", scope: "", version: "" }; + globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== void 0) + (0, runtime_3.reflectionMergePartial)(this, message, value); + return message; } - field(field, value, options) { - let jsonValue = void 0; - if (field.kind == "map") { - assert_1.assert(typeof value == "object" && value !== null); - const jsonObj = {}; - switch (field.V.kind) { - case "scalar": - for (const [entryKey, entryValue] of Object.entries(value)) { - const val = this.scalar(field.V.T, entryValue, field.name, false, true); - assert_1.assert(val !== void 0); - jsonObj[entryKey.toString()] = val; - } - break; - case "message": - const messageType = field.V.T(); - for (const [entryKey, entryValue] of Object.entries(value)) { - const val = this.message(messageType, entryValue, field.name, options); - assert_1.assert(val !== void 0); - jsonObj[entryKey.toString()] = val; - } + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string key */ + 1: + message.key = reader.string(); break; - case "enum": - const enumInfo = field.V.T(); - for (const [entryKey, entryValue] of Object.entries(value)) { - assert_1.assert(entryValue === void 0 || typeof entryValue == "number"); - const val = this.enum(enumInfo, entryValue, field.name, false, true, options.enumAsInteger); - assert_1.assert(val !== void 0); - jsonObj[entryKey.toString()] = val; - } + case /* string hash */ + 2: + message.hash = reader.string(); break; - } - if (options.emitDefaultValues || Object.keys(jsonObj).length > 0) - jsonValue = jsonObj; - } else if (field.repeat) { - assert_1.assert(Array.isArray(value)); - const jsonArr = []; - switch (field.kind) { - case "scalar": - for (let i = 0; i < value.length; i++) { - const val = this.scalar(field.T, value[i], field.name, field.opt, true); - assert_1.assert(val !== void 0); - jsonArr.push(val); - } + case /* int64 size_bytes */ + 3: + message.sizeBytes = reader.int64().toString(); break; - case "enum": - const enumInfo = field.T(); - for (let i = 0; i < value.length; i++) { - assert_1.assert(value[i] === void 0 || typeof value[i] == "number"); - const val = this.enum(enumInfo, value[i], field.name, field.opt, true, options.enumAsInteger); - assert_1.assert(val !== void 0); - jsonArr.push(val); - } + case /* string scope */ + 4: + message.scope = reader.string(); break; - case "message": - const messageType = field.T(); - for (let i = 0; i < value.length; i++) { - const val = this.message(messageType, value[i], field.name, options); - assert_1.assert(val !== void 0); - jsonArr.push(val); - } + case /* string version */ + 5: + message.version = reader.string(); break; - } - if (options.emitDefaultValues || jsonArr.length > 0 || options.emitDefaultValues) - jsonValue = jsonArr; - } else { - switch (field.kind) { - case "scalar": - jsonValue = this.scalar(field.T, value, field.name, field.opt, options.emitDefaultValues); + case /* google.protobuf.Timestamp created_at */ + 6: + message.createdAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.createdAt); break; - case "enum": - jsonValue = this.enum(field.T(), value, field.name, field.opt, 
options.emitDefaultValues, options.enumAsInteger); + case /* google.protobuf.Timestamp last_accessed_at */ + 7: + message.lastAccessedAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.lastAccessedAt); break; - case "message": - jsonValue = this.message(field.T(), value, field.name, options); + case /* google.protobuf.Timestamp expires_at */ + 8: + message.expiresAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.expiresAt); break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); } } - return jsonValue; - } - /** - * Returns `null` as the default for google.protobuf.NullValue. - */ - enum(type, value, fieldName, optional, emitDefaultValues, enumAsInteger) { - if (type[0] == "google.protobuf.NullValue") - return !emitDefaultValues && !optional ? void 0 : null; - if (value === void 0) { - assert_1.assert(optional); - return void 0; - } - if (value === 0 && !emitDefaultValues && !optional) - return void 0; - assert_1.assert(typeof value == "number"); - assert_1.assert(Number.isInteger(value)); - if (enumAsInteger || !type[1].hasOwnProperty(value)) - return value; - if (type[2]) - return type[2] + type[1][value]; - return type[1][value]; + return message; } - message(type, value, fieldName, options) { - if (value === void 0) - return options.emitDefaultValues ? null : void 0; - return type.internalJsonWrite(value, options); + internalBinaryWrite(message, writer, options) { + if (message.key !== "") + writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.key); + if (message.hash !== "") + writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.hash); + if (message.sizeBytes !== "0") + writer.tag(3, runtime_1.WireType.Varint).int64(message.sizeBytes); + if (message.scope !== "") + writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.scope); + if (message.version !== "") + writer.tag(5, runtime_1.WireType.LengthDelimited).string(message.version); + if (message.createdAt) + timestamp_1.Timestamp.internalBinaryWrite(message.createdAt, writer.tag(6, runtime_1.WireType.LengthDelimited).fork(), options).join(); + if (message.lastAccessedAt) + timestamp_1.Timestamp.internalBinaryWrite(message.lastAccessedAt, writer.tag(7, runtime_1.WireType.LengthDelimited).fork(), options).join(); + if (message.expiresAt) + timestamp_1.Timestamp.internalBinaryWrite(message.expiresAt, writer.tag(8, runtime_1.WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; } - scalar(type, value, fieldName, optional, emitDefaultValues) { - if (value === void 0) { - assert_1.assert(optional); - return void 0; - } - const ed = emitDefaultValues || optional; - switch (type) { - // int32, fixed32, uint32: JSON value will be a decimal number. Either numbers or strings are accepted. - case reflection_info_1.ScalarType.INT32: - case reflection_info_1.ScalarType.SFIXED32: - case reflection_info_1.ScalarType.SINT32: - if (value === 0) - return ed ? 
0 : void 0; - assert_1.assertInt32(value); - return value; - case reflection_info_1.ScalarType.FIXED32: - case reflection_info_1.ScalarType.UINT32: - if (value === 0) - return ed ? 0 : void 0; - assert_1.assertUInt32(value); - return value; - // float, double: JSON value will be a number or one of the special string values "NaN", "Infinity", and "-Infinity". - // Either numbers or strings are accepted. Exponent notation is also accepted. - case reflection_info_1.ScalarType.FLOAT: - assert_1.assertFloat32(value); - case reflection_info_1.ScalarType.DOUBLE: - if (value === 0) - return ed ? 0 : void 0; - assert_1.assert(typeof value == "number"); - if (Number.isNaN(value)) - return "NaN"; - if (value === Number.POSITIVE_INFINITY) - return "Infinity"; - if (value === Number.NEGATIVE_INFINITY) - return "-Infinity"; - return value; - // string: - case reflection_info_1.ScalarType.STRING: - if (value === "") - return ed ? "" : void 0; - assert_1.assert(typeof value == "string"); - return value; - // bool: - case reflection_info_1.ScalarType.BOOL: - if (value === false) - return ed ? false : void 0; - assert_1.assert(typeof value == "boolean"); - return value; - // JSON value will be a decimal string. Either numbers or strings are accepted. - case reflection_info_1.ScalarType.UINT64: - case reflection_info_1.ScalarType.FIXED64: - assert_1.assert(typeof value == "number" || typeof value == "string" || typeof value == "bigint"); - let ulong = pb_long_1.PbULong.from(value); - if (ulong.isZero() && !ed) - return void 0; - return ulong.toString(); - // JSON value will be a decimal string. Either numbers or strings are accepted. - case reflection_info_1.ScalarType.INT64: - case reflection_info_1.ScalarType.SFIXED64: - case reflection_info_1.ScalarType.SINT64: - assert_1.assert(typeof value == "number" || typeof value == "string" || typeof value == "bigint"); - let long = pb_long_1.PbLong.from(value); - if (long.isZero() && !ed) - return void 0; - return long.toString(); - // bytes: JSON value will be the data encoded as a string using standard base64 encoding with paddings. - // Either standard or URL-safe base64 encoding with/without paddings are accepted. - case reflection_info_1.ScalarType.BYTES: - assert_1.assert(value instanceof Uint8Array); - if (!value.byteLength) - return ed ? 
"" : void 0; - return base64_1.base64encode(value); + }; + exports2.CacheEntry = new CacheEntry$Type(); + } +}); + +// ../node_modules/@actions/cache/lib/generated/results/entities/v1/cachescope.js +var require_cachescope = __commonJS({ + "../node_modules/@actions/cache/lib/generated/results/entities/v1/cachescope.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.CacheScope = void 0; + var runtime_1 = require_commonjs7(); + var runtime_2 = require_commonjs7(); + var runtime_3 = require_commonjs7(); + var runtime_4 = require_commonjs7(); + var runtime_5 = require_commonjs7(); + var CacheScope$Type = class extends runtime_5.MessageType { + static { + __name(this, "CacheScope$Type"); + } + constructor() { + super("github.actions.results.entities.v1.CacheScope", [ + { + no: 1, + name: "scope", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + }, + { + no: 2, + name: "permission", + kind: "scalar", + T: 3 + /*ScalarType.INT64*/ + } + ]); + } + create(value) { + const message = { scope: "", permission: "0" }; + globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== void 0) + (0, runtime_3.reflectionMergePartial)(this, message, value); + return message; + } + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string scope */ + 1: + message.scope = reader.string(); + break; + case /* int64 permission */ + 2: + message.permission = reader.int64().toString(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } } + return message; + } + internalBinaryWrite(message, writer, options) { + if (message.scope !== "") + writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.scope); + if (message.permission !== "0") + writer.tag(2, runtime_1.WireType.Varint).int64(message.permission); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; } }; - exports2.ReflectionJsonWriter = ReflectionJsonWriter; + exports2.CacheScope = new CacheScope$Type(); } }); -// ../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-scalar-default.js -var require_reflection_scalar_default = __commonJS({ - "../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-scalar-default.js"(exports2) { +// ../node_modules/@actions/cache/lib/generated/results/entities/v1/cachemetadata.js +var require_cachemetadata = __commonJS({ + "../node_modules/@actions/cache/lib/generated/results/entities/v1/cachemetadata.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.reflectionScalarDefault = void 0; - var reflection_info_1 = require_reflection_info(); - var reflection_long_convert_1 = require_reflection_long_convert(); - var pb_long_1 = require_pb_long(); - function reflectionScalarDefault(type, longType = reflection_info_1.LongType.STRING) { - switch (type) { - case reflection_info_1.ScalarType.BOOL: - return false; - case reflection_info_1.ScalarType.UINT64: - case reflection_info_1.ScalarType.FIXED64: - return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbULong.ZERO, longType); - case reflection_info_1.ScalarType.INT64: - case reflection_info_1.ScalarType.SFIXED64: - case reflection_info_1.ScalarType.SINT64: - return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbLong.ZERO, longType); - case reflection_info_1.ScalarType.DOUBLE: - case reflection_info_1.ScalarType.FLOAT: - return 0; - case reflection_info_1.ScalarType.BYTES: - return new Uint8Array(0); - case reflection_info_1.ScalarType.STRING: - return ""; - default: - return 0; + exports2.CacheMetadata = void 0; + var runtime_1 = require_commonjs7(); + var runtime_2 = require_commonjs7(); + var runtime_3 = require_commonjs7(); + var runtime_4 = require_commonjs7(); + var runtime_5 = require_commonjs7(); + var cachescope_1 = require_cachescope(); + var CacheMetadata$Type = class extends runtime_5.MessageType { + static { + __name(this, "CacheMetadata$Type"); } - } - __name(reflectionScalarDefault, "reflectionScalarDefault"); - exports2.reflectionScalarDefault = reflectionScalarDefault; + constructor() { + super("github.actions.results.entities.v1.CacheMetadata", [ + { + no: 1, + name: "repository_id", + kind: "scalar", + T: 3 + /*ScalarType.INT64*/ + }, + { no: 2, name: "scope", kind: "message", repeat: 1, T: /* @__PURE__ */ __name(() => cachescope_1.CacheScope, "T") } + ]); + } + create(value) { + const message = { repositoryId: "0", scope: [] }; + globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== void 0) + (0, runtime_3.reflectionMergePartial)(this, message, value); + return message; + } + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? 
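+ // Sketch of the shape this type carries (values here are hypothetical): a
+ // request's metadata ties a numeric repository id to one or more scopes, e.g.
+ //   CacheMetadata.create({ repositoryId: "123",
+ //     scope: [{ scope: "refs/heads/main", permission: "3" }] });
+ // Int64 fields (repository_id, permission) stay decimal strings, matching
+ // the "0" defaults in create() above.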
target : this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* int64 repository_id */ + 1: + message.repositoryId = reader.int64().toString(); + break; + case /* repeated github.actions.results.entities.v1.CacheScope scope */ + 2: + message.scope.push(cachescope_1.CacheScope.internalBinaryRead(reader, reader.uint32(), options)); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message, writer, options) { + if (message.repositoryId !== "0") + writer.tag(1, runtime_1.WireType.Varint).int64(message.repositoryId); + for (let i = 0; i < message.scope.length; i++) + cachescope_1.CacheScope.internalBinaryWrite(message.scope[i], writer.tag(2, runtime_1.WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } + }; + exports2.CacheMetadata = new CacheMetadata$Type(); } }); -// ../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-binary-reader.js -var require_reflection_binary_reader = __commonJS({ - "../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-binary-reader.js"(exports2) { +// ../node_modules/@actions/cache/lib/generated/results/api/v1/cache.js +var require_cache2 = __commonJS({ + "../node_modules/@actions/cache/lib/generated/results/api/v1/cache.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.ReflectionBinaryReader = void 0; - var binary_format_contract_1 = require_binary_format_contract(); - var reflection_info_1 = require_reflection_info(); - var reflection_long_convert_1 = require_reflection_long_convert(); - var reflection_scalar_default_1 = require_reflection_scalar_default(); - var ReflectionBinaryReader = class { + exports2.CacheService = exports2.LookupCacheEntryResponse = exports2.LookupCacheEntryRequest = exports2.ListCacheEntriesResponse = exports2.ListCacheEntriesRequest = exports2.DeleteCacheEntryResponse = exports2.DeleteCacheEntryRequest = exports2.GetCacheEntryDownloadURLResponse = exports2.GetCacheEntryDownloadURLRequest = exports2.FinalizeCacheEntryUploadResponse = exports2.FinalizeCacheEntryUploadRequest = exports2.CreateCacheEntryResponse = exports2.CreateCacheEntryRequest = void 0; + var runtime_rpc_1 = require_commonjs8(); + var runtime_1 = require_commonjs7(); + var runtime_2 = require_commonjs7(); + var runtime_3 = require_commonjs7(); + var runtime_4 = require_commonjs7(); + var runtime_5 = require_commonjs7(); + var cacheentry_1 = require_cacheentry(); + var cachemetadata_1 = require_cachemetadata(); + var CreateCacheEntryRequest$Type = class extends runtime_5.MessageType { static { - __name(this, "ReflectionBinaryReader"); + __name(this, "CreateCacheEntryRequest$Type"); } - constructor(info) { - this.info = info; + constructor() { + super("github.actions.results.api.v1.CreateCacheEntryRequest", [ + { no: 1, name: "metadata", kind: "message", T: /* @__PURE__ */ __name(() => cachemetadata_1.CacheMetadata, "T") }, + { + no: 2, + name: "key", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + }, + { + no: 3, + 
name: "version", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + } + ]); } - prepare() { - var _a; - if (!this.fieldNoToField) { - const fieldsInput = (_a = this.info.fields) !== null && _a !== void 0 ? _a : []; - this.fieldNoToField = new Map(fieldsInput.map((field) => [field.no, field])); + create(value) { + const message = { key: "", version: "" }; + globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== void 0) + (0, runtime_3.reflectionMergePartial)(this, message, value); + return message; + } + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* github.actions.results.entities.v1.CacheMetadata metadata */ + 1: + message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata); + break; + case /* string key */ + 2: + message.key = reader.string(); + break; + case /* string version */ + 3: + message.version = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } } + return message; } - /** - * Reads a message from binary format into the target message. - * - * Repeated fields are appended. Map entries are added, overwriting - * existing keys. - * - * If a message field is already present, it will be merged with the - * new data. - */ - read(reader, message, options, length) { - this.prepare(); - const end = length === void 0 ? reader.len : reader.pos + length; + internalBinaryWrite(message, writer, options) { + if (message.metadata) + cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join(); + if (message.key !== "") + writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key); + if (message.version !== "") + writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.version); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } + }; + exports2.CreateCacheEntryRequest = new CreateCacheEntryRequest$Type(); + var CreateCacheEntryResponse$Type = class extends runtime_5.MessageType { + static { + __name(this, "CreateCacheEntryResponse$Type"); + } + constructor() { + super("github.actions.results.api.v1.CreateCacheEntryResponse", [ + { + no: 1, + name: "ok", + kind: "scalar", + T: 8 + /*ScalarType.BOOL*/ + }, + { + no: 2, + name: "signed_upload_url", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + } + ]); + } + create(value) { + const message = { ok: false, signedUploadUrl: "" }; + globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== void 0) + (0, runtime_3.reflectionMergePartial)(this, message, value); + return message; + } + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? 
target : this.create(), end = reader.pos + length; while (reader.pos < end) { - const [fieldNo, wireType] = reader.tag(), field = this.fieldNoToField.get(fieldNo); - if (!field) { - let u = options.readUnknownField; - if (u == "throw") - throw new Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.info.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? binary_format_contract_1.UnknownFieldHandler.onRead : u)(this.info.typeName, message, fieldNo, wireType, d); - continue; + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* bool ok */ + 1: + message.ok = reader.bool(); + break; + case /* string signed_upload_url */ + 2: + message.signedUploadUrl = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); } - let target = message, repeated = field.repeat, localName = field.localName; - if (field.oneof) { - target = target[field.oneof]; - if (target.oneofKind !== localName) - target = message[field.oneof] = { - oneofKind: localName - }; + } + return message; + } + internalBinaryWrite(message, writer, options) { + if (message.ok !== false) + writer.tag(1, runtime_1.WireType.Varint).bool(message.ok); + if (message.signedUploadUrl !== "") + writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.signedUploadUrl); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } + }; + exports2.CreateCacheEntryResponse = new CreateCacheEntryResponse$Type(); + var FinalizeCacheEntryUploadRequest$Type = class extends runtime_5.MessageType { + static { + __name(this, "FinalizeCacheEntryUploadRequest$Type"); + } + constructor() { + super("github.actions.results.api.v1.FinalizeCacheEntryUploadRequest", [ + { no: 1, name: "metadata", kind: "message", T: /* @__PURE__ */ __name(() => cachemetadata_1.CacheMetadata, "T") }, + { + no: 2, + name: "key", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + }, + { + no: 3, + name: "size_bytes", + kind: "scalar", + T: 3 + /*ScalarType.INT64*/ + }, + { + no: 4, + name: "version", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ } - switch (field.kind) { - case "scalar": - case "enum": - let T = field.kind == "enum" ? reflection_info_1.ScalarType.INT32 : field.T; - let L = field.kind == "scalar" ? field.L : void 0; - if (repeated) { - let arr = target[localName]; - if (wireType == binary_format_contract_1.WireType.LengthDelimited && T != reflection_info_1.ScalarType.STRING && T != reflection_info_1.ScalarType.BYTES) { - let e = reader.uint32() + reader.pos; - while (reader.pos < e) - arr.push(this.scalar(reader, T, L)); - } else - arr.push(this.scalar(reader, T, L)); - } else - target[localName] = this.scalar(reader, T, L); + ]); + } + create(value) { + const message = { key: "", sizeBytes: "0", version: "" }; + globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== void 0) + (0, runtime_3.reflectionMergePartial)(this, message, value); + return message; + } + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? 
target : this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* github.actions.results.entities.v1.CacheMetadata metadata */ + 1: + message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata); break; - case "message": - if (repeated) { - let arr = target[localName]; - let msg = field.T().internalBinaryRead(reader, reader.uint32(), options); - arr.push(msg); - } else - target[localName] = field.T().internalBinaryRead(reader, reader.uint32(), options, target[localName]); + case /* string key */ + 2: + message.key = reader.string(); break; - case "map": - let [mapKey, mapVal] = this.mapEntry(field, reader, options); - target[localName][mapKey] = mapVal; + case /* int64 size_bytes */ + 3: + message.sizeBytes = reader.int64().toString(); + break; + case /* string version */ + 4: + message.version = reader.string(); break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); } } + return message; } - /** - * Read a map field, expecting key field = 1, value field = 2 - */ - mapEntry(field, reader, options) { - let length = reader.uint32(); - let end = reader.pos + length; - let key = void 0; - let val = void 0; + internalBinaryWrite(message, writer, options) { + if (message.metadata) + cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join(); + if (message.key !== "") + writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key); + if (message.sizeBytes !== "0") + writer.tag(3, runtime_1.WireType.Varint).int64(message.sizeBytes); + if (message.version !== "") + writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.version); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } + }; + exports2.FinalizeCacheEntryUploadRequest = new FinalizeCacheEntryUploadRequest$Type(); + var FinalizeCacheEntryUploadResponse$Type = class extends runtime_5.MessageType { + static { + __name(this, "FinalizeCacheEntryUploadResponse$Type"); + } + constructor() { + super("github.actions.results.api.v1.FinalizeCacheEntryUploadResponse", [ + { + no: 1, + name: "ok", + kind: "scalar", + T: 8 + /*ScalarType.BOOL*/ + }, + { + no: 2, + name: "entry_id", + kind: "scalar", + T: 3 + /*ScalarType.INT64*/ + } + ]); + } + create(value) { + const message = { ok: false, entryId: "0" }; + globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== void 0) + (0, runtime_3.reflectionMergePartial)(this, message, value); + return message; + } + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? 
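+ // Continuing the sketch above: once the archive is uploaded,
+ // FinalizeCacheEntryUploadRequest{ metadata, key, sizeBytes, version }
+ // reports the byte count, and the response's entryId identifies the
+ // committed cache entry (field semantics inferred from the names).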
target : this.create(), end = reader.pos + length; while (reader.pos < end) { let [fieldNo, wireType] = reader.tag(); switch (fieldNo) { - case 1: - if (field.K == reflection_info_1.ScalarType.BOOL) - key = reader.bool().toString(); - else - key = this.scalar(reader, field.K, reflection_info_1.LongType.STRING); + case /* bool ok */ + 1: + message.ok = reader.bool(); break; - case 2: - switch (field.V.kind) { - case "scalar": - val = this.scalar(reader, field.V.T, field.V.L); - break; - case "enum": - val = reader.int32(); - break; - case "message": - val = field.V.T().internalBinaryRead(reader, reader.uint32(), options); - break; - } + case /* int64 entry_id */ + 2: + message.entryId = reader.int64().toString(); break; default: - throw new Error(`Unknown field ${fieldNo} (wire type ${wireType}) in map entry for ${this.info.typeName}#${field.name}`); + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); } } - if (key === void 0) { - let keyRaw = reflection_scalar_default_1.reflectionScalarDefault(field.K); - key = field.K == reflection_info_1.ScalarType.BOOL ? keyRaw.toString() : keyRaw; + return message; + } + internalBinaryWrite(message, writer, options) { + if (message.ok !== false) + writer.tag(1, runtime_1.WireType.Varint).bool(message.ok); + if (message.entryId !== "0") + writer.tag(2, runtime_1.WireType.Varint).int64(message.entryId); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } + }; + exports2.FinalizeCacheEntryUploadResponse = new FinalizeCacheEntryUploadResponse$Type(); + var GetCacheEntryDownloadURLRequest$Type = class extends runtime_5.MessageType { + static { + __name(this, "GetCacheEntryDownloadURLRequest$Type"); + } + constructor() { + super("github.actions.results.api.v1.GetCacheEntryDownloadURLRequest", [ + { no: 1, name: "metadata", kind: "message", T: /* @__PURE__ */ __name(() => cachemetadata_1.CacheMetadata, "T") }, + { + no: 2, + name: "key", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + }, + { + no: 3, + name: "restore_keys", + kind: "scalar", + repeat: 2, + T: 9 + /*ScalarType.STRING*/ + }, + { + no: 4, + name: "version", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + } + ]); + } + create(value) { + const message = { key: "", restoreKeys: [], version: "" }; + globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== void 0) + (0, runtime_3.reflectionMergePartial)(this, message, value); + return message; + } + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? 
target : this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* github.actions.results.entities.v1.CacheMetadata metadata */ + 1: + message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata); + break; + case /* string key */ + 2: + message.key = reader.string(); + break; + case /* repeated string restore_keys */ + 3: + message.restoreKeys.push(reader.string()); + break; + case /* string version */ + 4: + message.version = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } } - if (val === void 0) - switch (field.V.kind) { - case "scalar": - val = reflection_scalar_default_1.reflectionScalarDefault(field.V.T, field.V.L); + return message; + } + internalBinaryWrite(message, writer, options) { + if (message.metadata) + cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join(); + if (message.key !== "") + writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key); + for (let i = 0; i < message.restoreKeys.length; i++) + writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.restoreKeys[i]); + if (message.version !== "") + writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.version); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } + }; + exports2.GetCacheEntryDownloadURLRequest = new GetCacheEntryDownloadURLRequest$Type(); + var GetCacheEntryDownloadURLResponse$Type = class extends runtime_5.MessageType { + static { + __name(this, "GetCacheEntryDownloadURLResponse$Type"); + } + constructor() { + super("github.actions.results.api.v1.GetCacheEntryDownloadURLResponse", [ + { + no: 1, + name: "ok", + kind: "scalar", + T: 8 + /*ScalarType.BOOL*/ + }, + { + no: 2, + name: "signed_download_url", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + }, + { + no: 3, + name: "matched_key", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + } + ]); + } + create(value) { + const message = { ok: false, signedDownloadUrl: "", matchedKey: "" }; + globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== void 0) + (0, runtime_3.reflectionMergePartial)(this, message, value); + return message; + } + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? 
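+ // Restore-side counterpart (same caveat — inferred from the shapes):
+ // GetCacheEntryDownloadURLRequest carries the exact key plus ordered
+ // restore_keys fallbacks; on a hit, the response's matchedKey says which of
+ // those actually matched, and signedDownloadUrl is where to fetch the
+ // archive from.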
target : this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* bool ok */ + 1: + message.ok = reader.bool(); break; - case "enum": - val = 0; + case /* string signed_download_url */ + 2: + message.signedDownloadUrl = reader.string(); break; - case "message": - val = field.V.T().create(); + case /* string matched_key */ + 3: + message.matchedKey = reader.string(); break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); } - return [key, val]; + } + return message; } - scalar(reader, type, longType) { - switch (type) { - case reflection_info_1.ScalarType.INT32: - return reader.int32(); - case reflection_info_1.ScalarType.STRING: - return reader.string(); - case reflection_info_1.ScalarType.BOOL: - return reader.bool(); - case reflection_info_1.ScalarType.DOUBLE: - return reader.double(); - case reflection_info_1.ScalarType.FLOAT: - return reader.float(); - case reflection_info_1.ScalarType.INT64: - return reflection_long_convert_1.reflectionLongConvert(reader.int64(), longType); - case reflection_info_1.ScalarType.UINT64: - return reflection_long_convert_1.reflectionLongConvert(reader.uint64(), longType); - case reflection_info_1.ScalarType.FIXED64: - return reflection_long_convert_1.reflectionLongConvert(reader.fixed64(), longType); - case reflection_info_1.ScalarType.FIXED32: - return reader.fixed32(); - case reflection_info_1.ScalarType.BYTES: - return reader.bytes(); - case reflection_info_1.ScalarType.UINT32: - return reader.uint32(); - case reflection_info_1.ScalarType.SFIXED32: - return reader.sfixed32(); - case reflection_info_1.ScalarType.SFIXED64: - return reflection_long_convert_1.reflectionLongConvert(reader.sfixed64(), longType); - case reflection_info_1.ScalarType.SINT32: - return reader.sint32(); - case reflection_info_1.ScalarType.SINT64: - return reflection_long_convert_1.reflectionLongConvert(reader.sint64(), longType); + internalBinaryWrite(message, writer, options) { + if (message.ok !== false) + writer.tag(1, runtime_1.WireType.Varint).bool(message.ok); + if (message.signedDownloadUrl !== "") + writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.signedDownloadUrl); + if (message.matchedKey !== "") + writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.matchedKey); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } + }; + exports2.GetCacheEntryDownloadURLResponse = new GetCacheEntryDownloadURLResponse$Type(); + var DeleteCacheEntryRequest$Type = class extends runtime_5.MessageType { + static { + __name(this, "DeleteCacheEntryRequest$Type"); + } + constructor() { + super("github.actions.results.api.v1.DeleteCacheEntryRequest", [ + { no: 1, name: "metadata", kind: "message", T: /* @__PURE__ */ __name(() => cachemetadata_1.CacheMetadata, "T") }, + { + no: 2, + name: "key", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + } + ]); + } + create(value) { + const message = { key: "" }; + globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== void 0) + (0, runtime_3.reflectionMergePartial)(this, message, value); + return message; + } + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* github.actions.results.entities.v1.CacheMetadata metadata */ + 1: + message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata); + break; + case /* string key */ + 2: + message.key = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } } + return message; + } + internalBinaryWrite(message, writer, options) { + if (message.metadata) + cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join(); + if (message.key !== "") + writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; } }; - exports2.ReflectionBinaryReader = ReflectionBinaryReader; - } -}); - -// ../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-binary-writer.js -var require_reflection_binary_writer = __commonJS({ - "../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-binary-writer.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.ReflectionBinaryWriter = void 0; - var binary_format_contract_1 = require_binary_format_contract(); - var reflection_info_1 = require_reflection_info(); - var assert_1 = require_assert(); - var pb_long_1 = require_pb_long(); - var ReflectionBinaryWriter = class { + exports2.DeleteCacheEntryRequest = new DeleteCacheEntryRequest$Type(); + var DeleteCacheEntryResponse$Type = class extends runtime_5.MessageType { static { - __name(this, "ReflectionBinaryWriter"); + __name(this, "DeleteCacheEntryResponse$Type"); } - constructor(info) { - this.info = info; + constructor() { + super("github.actions.results.api.v1.DeleteCacheEntryResponse", [ + { + no: 1, + name: "ok", + kind: "scalar", + T: 8 + /*ScalarType.BOOL*/ + }, + { + no: 2, + name: "entry_id", + kind: "scalar", + T: 3 + /*ScalarType.INT64*/ + } + ]); } - prepare() { - if (!this.fields) { - const fieldsInput = this.info.fields ? 
this.info.fields.concat() : []; - this.fields = fieldsInput.sort((a, b) => a.no - b.no); + create(value) { + const message = { ok: false, entryId: "0" }; + globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== void 0) + (0, runtime_3.reflectionMergePartial)(this, message, value); + return message; + } + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* bool ok */ + 1: + message.ok = reader.bool(); + break; + case /* int64 entry_id */ + 2: + message.entryId = reader.int64().toString(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } } + return message; } - /** - * Writes the message to binary format. - */ - write(message, writer, options) { - this.prepare(); - for (const field of this.fields) { - let value, emitDefault, repeated = field.repeat, localName = field.localName; - if (field.oneof) { - const group = message[field.oneof]; - if (group.oneofKind !== localName) - continue; - value = group[localName]; - emitDefault = true; - } else { - value = message[localName]; - emitDefault = false; + internalBinaryWrite(message, writer, options) { + if (message.ok !== false) + writer.tag(1, runtime_1.WireType.Varint).bool(message.ok); + if (message.entryId !== "0") + writer.tag(2, runtime_1.WireType.Varint).int64(message.entryId); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } + }; + exports2.DeleteCacheEntryResponse = new DeleteCacheEntryResponse$Type(); + var ListCacheEntriesRequest$Type = class extends runtime_5.MessageType { + static { + __name(this, "ListCacheEntriesRequest$Type"); + } + constructor() { + super("github.actions.results.api.v1.ListCacheEntriesRequest", [ + { no: 1, name: "metadata", kind: "message", T: /* @__PURE__ */ __name(() => cachemetadata_1.CacheMetadata, "T") }, + { + no: 2, + name: "key", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + }, + { + no: 3, + name: "restore_keys", + kind: "scalar", + repeat: 2, + T: 9 + /*ScalarType.STRING*/ } - switch (field.kind) { - case "scalar": - case "enum": - let T = field.kind == "enum" ? reflection_info_1.ScalarType.INT32 : field.T; - if (repeated) { - assert_1.assert(Array.isArray(value)); - if (repeated == reflection_info_1.RepeatType.PACKED) - this.packed(writer, T, field.no, value); - else - for (const item of value) - this.scalar(writer, T, field.no, item, true); - } else if (value === void 0) - assert_1.assert(field.opt); - else - this.scalar(writer, T, field.no, value, emitDefault || field.opt); + ]); + } + create(value) { + const message = { key: "", restoreKeys: [] }; + globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== void 0) + (0, runtime_3.reflectionMergePartial)(this, message, value); + return message; + } + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? 
target : this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* github.actions.results.entities.v1.CacheMetadata metadata */ + 1: + message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata); break; - case "message": - if (repeated) { - assert_1.assert(Array.isArray(value)); - for (const item of value) - this.message(writer, options, field.T(), field.no, item); - } else { - this.message(writer, options, field.T(), field.no, value); - } + case /* string key */ + 2: + message.key = reader.string(); break; - case "map": - assert_1.assert(typeof value == "object" && value !== null); - for (const [key, val] of Object.entries(value)) - this.mapEntry(writer, options, field, key, val); + case /* repeated string restore_keys */ + 3: + message.restoreKeys.push(reader.string()); break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); } } + return message; + } + internalBinaryWrite(message, writer, options) { + if (message.metadata) + cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join(); + if (message.key !== "") + writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key); + for (let i = 0; i < message.restoreKeys.length; i++) + writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.restoreKeys[i]); let u = options.writeUnknownFields; if (u !== false) - (u === true ? binary_format_contract_1.UnknownFieldHandler.onWrite : u)(this.info.typeName, message, writer); + (u == true ? 
runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; } - mapEntry(writer, options, field, key, value) { - writer.tag(field.no, binary_format_contract_1.WireType.LengthDelimited); - writer.fork(); - let keyValue = key; - switch (field.K) { - case reflection_info_1.ScalarType.INT32: - case reflection_info_1.ScalarType.FIXED32: - case reflection_info_1.ScalarType.UINT32: - case reflection_info_1.ScalarType.SFIXED32: - case reflection_info_1.ScalarType.SINT32: - keyValue = Number.parseInt(key); - break; - case reflection_info_1.ScalarType.BOOL: - assert_1.assert(key == "true" || key == "false"); - keyValue = key == "true"; - break; - } - this.scalar(writer, field.K, 1, keyValue, true); - switch (field.V.kind) { - case "scalar": - this.scalar(writer, field.V.T, 2, value, true); - break; - case "enum": - this.scalar(writer, reflection_info_1.ScalarType.INT32, 2, value, true); - break; - case "message": - this.message(writer, options, field.V.T(), 2, value); - break; - } - writer.join(); + }; + exports2.ListCacheEntriesRequest = new ListCacheEntriesRequest$Type(); + var ListCacheEntriesResponse$Type = class extends runtime_5.MessageType { + static { + __name(this, "ListCacheEntriesResponse$Type"); } - message(writer, options, handler, fieldNo, value) { - if (value === void 0) - return; - handler.internalBinaryWrite(value, writer.tag(fieldNo, binary_format_contract_1.WireType.LengthDelimited).fork(), options); - writer.join(); + constructor() { + super("github.actions.results.api.v1.ListCacheEntriesResponse", [ + { no: 1, name: "entries", kind: "message", repeat: 1, T: /* @__PURE__ */ __name(() => cacheentry_1.CacheEntry, "T") } + ]); } - /** - * Write a single scalar value. - */ - scalar(writer, type, fieldNo, value, emitDefault) { - let [wireType, method, isDefault] = this.scalarInfo(type, value); - if (!isDefault || emitDefault) { - writer.tag(fieldNo, wireType); - writer[method](value); + create(value) { + const message = { entries: [] }; + globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== void 0) + (0, runtime_3.reflectionMergePartial)(this, message, value); + return message; + } + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* repeated github.actions.results.entities.v1.CacheEntry entries */ + 1: + message.entries.push(cacheentry_1.CacheEntry.internalBinaryRead(reader, reader.uint32(), options)); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } } + return message; } - /** - * Write an array of scalar values in packed format. 
- */ - packed(writer, type, fieldNo, value) { - if (!value.length) - return; - assert_1.assert(type !== reflection_info_1.ScalarType.BYTES && type !== reflection_info_1.ScalarType.STRING); - writer.tag(fieldNo, binary_format_contract_1.WireType.LengthDelimited); - writer.fork(); - let [, method] = this.scalarInfo(type); - for (let i = 0; i < value.length; i++) - writer[method](value[i]); - writer.join(); + internalBinaryWrite(message, writer, options) { + for (let i = 0; i < message.entries.length; i++) + cacheentry_1.CacheEntry.internalBinaryWrite(message.entries[i], writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; } - /** - * Get information for writing a scalar value. - * - * Returns tuple: - * [0]: appropriate WireType - * [1]: name of the appropriate method of IBinaryWriter - * [2]: whether the given value is a default value - * - * If argument `value` is omitted, [2] is always false. - */ - scalarInfo(type, value) { - let t = binary_format_contract_1.WireType.Varint; - let m; - let i = value === void 0; - let d = value === 0; - switch (type) { - case reflection_info_1.ScalarType.INT32: - m = "int32"; - break; - case reflection_info_1.ScalarType.STRING: - d = i || !value.length; - t = binary_format_contract_1.WireType.LengthDelimited; - m = "string"; - break; - case reflection_info_1.ScalarType.BOOL: - d = value === false; - m = "bool"; - break; - case reflection_info_1.ScalarType.UINT32: - m = "uint32"; - break; - case reflection_info_1.ScalarType.DOUBLE: - t = binary_format_contract_1.WireType.Bit64; - m = "double"; - break; - case reflection_info_1.ScalarType.FLOAT: - t = binary_format_contract_1.WireType.Bit32; - m = "float"; - break; - case reflection_info_1.ScalarType.INT64: - d = i || pb_long_1.PbLong.from(value).isZero(); - m = "int64"; - break; - case reflection_info_1.ScalarType.UINT64: - d = i || pb_long_1.PbULong.from(value).isZero(); - m = "uint64"; - break; - case reflection_info_1.ScalarType.FIXED64: - d = i || pb_long_1.PbULong.from(value).isZero(); - t = binary_format_contract_1.WireType.Bit64; - m = "fixed64"; - break; - case reflection_info_1.ScalarType.BYTES: - d = i || !value.byteLength; - t = binary_format_contract_1.WireType.LengthDelimited; - m = "bytes"; - break; - case reflection_info_1.ScalarType.FIXED32: - t = binary_format_contract_1.WireType.Bit32; - m = "fixed32"; - break; - case reflection_info_1.ScalarType.SFIXED32: - t = binary_format_contract_1.WireType.Bit32; - m = "sfixed32"; - break; - case reflection_info_1.ScalarType.SFIXED64: - d = i || pb_long_1.PbLong.from(value).isZero(); - t = binary_format_contract_1.WireType.Bit64; - m = "sfixed64"; - break; - case reflection_info_1.ScalarType.SINT32: - m = "sint32"; - break; - case reflection_info_1.ScalarType.SINT64: - d = i || pb_long_1.PbLong.from(value).isZero(); - m = "sint64"; - break; + }; + exports2.ListCacheEntriesResponse = new ListCacheEntriesResponse$Type(); + var LookupCacheEntryRequest$Type = class extends runtime_5.MessageType { + static { + __name(this, "LookupCacheEntryRequest$Type"); + } + constructor() { + super("github.actions.results.api.v1.LookupCacheEntryRequest", [ + { no: 1, name: "metadata", kind: "message", T: /* @__PURE__ */ __name(() => cachemetadata_1.CacheMetadata, "T") }, + { + no: 2, + name: "key", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + }, + { + no: 3, + name: 
"restore_keys", + kind: "scalar", + repeat: 2, + T: 9 + /*ScalarType.STRING*/ + }, + { + no: 4, + name: "version", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + } + ]); + } + create(value) { + const message = { key: "", restoreKeys: [], version: "" }; + globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== void 0) + (0, runtime_3.reflectionMergePartial)(this, message, value); + return message; + } + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* github.actions.results.entities.v1.CacheMetadata metadata */ + 1: + message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata); + break; + case /* string key */ + 2: + message.key = reader.string(); + break; + case /* repeated string restore_keys */ + 3: + message.restoreKeys.push(reader.string()); + break; + case /* string version */ + 4: + message.version = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } } - return [t, m, i || d]; + return message; + } + internalBinaryWrite(message, writer, options) { + if (message.metadata) + cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join(); + if (message.key !== "") + writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key); + for (let i = 0; i < message.restoreKeys.length; i++) + writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.restoreKeys[i]); + if (message.version !== "") + writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.version); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; } }; - exports2.ReflectionBinaryWriter = ReflectionBinaryWriter; - } -}); - -// ../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-create.js -var require_reflection_create = __commonJS({ - "../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-create.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.reflectionCreate = void 0; - var reflection_scalar_default_1 = require_reflection_scalar_default(); - var message_type_contract_1 = require_message_type_contract(); - function reflectionCreate(type) { - const msg = type.messagePrototype ? 
Object.create(type.messagePrototype) : Object.defineProperty({}, message_type_contract_1.MESSAGE_TYPE, { value: type }); - for (let field of type.fields) { - let name = field.localName; - if (field.opt) - continue; - if (field.oneof) - msg[field.oneof] = { oneofKind: void 0 }; - else if (field.repeat) - msg[name] = []; - else - switch (field.kind) { - case "scalar": - msg[name] = reflection_scalar_default_1.reflectionScalarDefault(field.T, field.L); - break; - case "enum": - msg[name] = 0; + exports2.LookupCacheEntryRequest = new LookupCacheEntryRequest$Type(); + var LookupCacheEntryResponse$Type = class extends runtime_5.MessageType { + static { + __name(this, "LookupCacheEntryResponse$Type"); + } + constructor() { + super("github.actions.results.api.v1.LookupCacheEntryResponse", [ + { + no: 1, + name: "exists", + kind: "scalar", + T: 8 + /*ScalarType.BOOL*/ + }, + { no: 2, name: "entry", kind: "message", T: /* @__PURE__ */ __name(() => cacheentry_1.CacheEntry, "T") } + ]); + } + create(value) { + const message = { exists: false }; + globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== void 0) + (0, runtime_3.reflectionMergePartial)(this, message, value); + return message; + } + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* bool exists */ + 1: + message.exists = reader.bool(); break; - case "map": - msg[name] = {}; + case /* github.actions.results.entities.v1.CacheEntry entry */ + 2: + message.entry = cacheentry_1.CacheEntry.internalBinaryRead(reader, reader.uint32(), options, message.entry); break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); } + } + return message; } - return msg; - } - __name(reflectionCreate, "reflectionCreate"); - exports2.reflectionCreate = reflectionCreate; + internalBinaryWrite(message, writer, options) { + if (message.exists !== false) + writer.tag(1, runtime_1.WireType.Varint).bool(message.exists); + if (message.entry) + cacheentry_1.CacheEntry.internalBinaryWrite(message.entry, writer.tag(2, runtime_1.WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } + }; + exports2.LookupCacheEntryResponse = new LookupCacheEntryResponse$Type(); + exports2.CacheService = new runtime_rpc_1.ServiceType("github.actions.results.api.v1.CacheService", [ + { name: "CreateCacheEntry", options: {}, I: exports2.CreateCacheEntryRequest, O: exports2.CreateCacheEntryResponse }, + { name: "FinalizeCacheEntryUpload", options: {}, I: exports2.FinalizeCacheEntryUploadRequest, O: exports2.FinalizeCacheEntryUploadResponse }, + { name: "GetCacheEntryDownloadURL", options: {}, I: exports2.GetCacheEntryDownloadURLRequest, O: exports2.GetCacheEntryDownloadURLResponse }, + { name: "DeleteCacheEntry", options: {}, I: exports2.DeleteCacheEntryRequest, O: exports2.DeleteCacheEntryResponse }, + { name: "ListCacheEntries", options: {}, I: exports2.ListCacheEntriesRequest, O: exports2.ListCacheEntriesResponse }, + { name: "LookupCacheEntry", options: {}, I: exports2.LookupCacheEntryRequest, O: exports2.LookupCacheEntryResponse } + ]); } }); -// ../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-merge-partial.js -var require_reflection_merge_partial = __commonJS({ - "../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-merge-partial.js"(exports2) { +// ../node_modules/@actions/cache/lib/generated/results/api/v1/cache.twirp.js +var require_cache_twirp = __commonJS({ + "../node_modules/@actions/cache/lib/generated/results/api/v1/cache.twirp.js"(exports2) { "use strict"; + var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? value : new P(function(resolve) { + resolve(value); + }); + } + __name(adopt, "adopt"); + return new (P || (P = Promise))(function(resolve, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } + } + __name(fulfilled, "fulfilled"); + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); + } + } + __name(rejected, "rejected"); + function step(result) { + result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); + } + __name(step, "step"); + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.reflectionMergePartial = void 0; - function reflectionMergePartial(info, target, source) { - let fieldValue, input = source, output; - for (let field of info.fields) { - let name = field.localName; - if (field.oneof) { - const group = input[field.oneof]; - if ((group === null || group === void 0 ? 
void 0 : group.oneofKind) == void 0) { - continue; + exports2.createCacheServiceServer = exports2.CacheServiceMethodList = exports2.CacheServiceMethod = exports2.CacheServiceClientProtobuf = exports2.CacheServiceClientJSON = void 0; + var twirp_ts_1 = require_twirp(); + var cache_1 = require_cache2(); + var CacheServiceClientJSON = class { + static { + __name(this, "CacheServiceClientJSON"); + } + constructor(rpc) { + this.rpc = rpc; + this.CreateCacheEntry.bind(this); + this.FinalizeCacheEntryUpload.bind(this); + this.GetCacheEntryDownloadURL.bind(this); + this.DeleteCacheEntry.bind(this); + this.ListCacheEntries.bind(this); + this.LookupCacheEntry.bind(this); + } + CreateCacheEntry(request) { + const data = cache_1.CreateCacheEntryRequest.toJson(request, { + useProtoFieldName: true, + emitDefaultValues: false + }); + const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "CreateCacheEntry", "application/json", data); + return promise.then((data2) => cache_1.CreateCacheEntryResponse.fromJson(data2, { + ignoreUnknownFields: true + })); + } + FinalizeCacheEntryUpload(request) { + const data = cache_1.FinalizeCacheEntryUploadRequest.toJson(request, { + useProtoFieldName: true, + emitDefaultValues: false + }); + const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "FinalizeCacheEntryUpload", "application/json", data); + return promise.then((data2) => cache_1.FinalizeCacheEntryUploadResponse.fromJson(data2, { + ignoreUnknownFields: true + })); + } + GetCacheEntryDownloadURL(request) { + const data = cache_1.GetCacheEntryDownloadURLRequest.toJson(request, { + useProtoFieldName: true, + emitDefaultValues: false + }); + const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "GetCacheEntryDownloadURL", "application/json", data); + return promise.then((data2) => cache_1.GetCacheEntryDownloadURLResponse.fromJson(data2, { + ignoreUnknownFields: true + })); + } + DeleteCacheEntry(request) { + const data = cache_1.DeleteCacheEntryRequest.toJson(request, { + useProtoFieldName: true, + emitDefaultValues: false + }); + const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "DeleteCacheEntry", "application/json", data); + return promise.then((data2) => cache_1.DeleteCacheEntryResponse.fromJson(data2, { + ignoreUnknownFields: true + })); + } + ListCacheEntries(request) { + const data = cache_1.ListCacheEntriesRequest.toJson(request, { + useProtoFieldName: true, + emitDefaultValues: false + }); + const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "ListCacheEntries", "application/json", data); + return promise.then((data2) => cache_1.ListCacheEntriesResponse.fromJson(data2, { + ignoreUnknownFields: true + })); + } + LookupCacheEntry(request) { + const data = cache_1.LookupCacheEntryRequest.toJson(request, { + useProtoFieldName: true, + emitDefaultValues: false + }); + const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "LookupCacheEntry", "application/json", data); + return promise.then((data2) => cache_1.LookupCacheEntryResponse.fromJson(data2, { + ignoreUnknownFields: true + })); + } + }; + exports2.CacheServiceClientJSON = CacheServiceClientJSON; + var CacheServiceClientProtobuf = class { + static { + __name(this, "CacheServiceClientProtobuf"); + } + constructor(rpc) { + this.rpc = rpc; + this.CreateCacheEntry.bind(this); + this.FinalizeCacheEntryUpload.bind(this); + this.GetCacheEntryDownloadURL.bind(this); + this.DeleteCacheEntry.bind(this); + 
this.ListCacheEntries.bind(this); + this.LookupCacheEntry.bind(this); + } + CreateCacheEntry(request) { + const data = cache_1.CreateCacheEntryRequest.toBinary(request); + const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "CreateCacheEntry", "application/protobuf", data); + return promise.then((data2) => cache_1.CreateCacheEntryResponse.fromBinary(data2)); + } + FinalizeCacheEntryUpload(request) { + const data = cache_1.FinalizeCacheEntryUploadRequest.toBinary(request); + const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "FinalizeCacheEntryUpload", "application/protobuf", data); + return promise.then((data2) => cache_1.FinalizeCacheEntryUploadResponse.fromBinary(data2)); + } + GetCacheEntryDownloadURL(request) { + const data = cache_1.GetCacheEntryDownloadURLRequest.toBinary(request); + const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "GetCacheEntryDownloadURL", "application/protobuf", data); + return promise.then((data2) => cache_1.GetCacheEntryDownloadURLResponse.fromBinary(data2)); + } + DeleteCacheEntry(request) { + const data = cache_1.DeleteCacheEntryRequest.toBinary(request); + const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "DeleteCacheEntry", "application/protobuf", data); + return promise.then((data2) => cache_1.DeleteCacheEntryResponse.fromBinary(data2)); + } + ListCacheEntries(request) { + const data = cache_1.ListCacheEntriesRequest.toBinary(request); + const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "ListCacheEntries", "application/protobuf", data); + return promise.then((data2) => cache_1.ListCacheEntriesResponse.fromBinary(data2)); + } + LookupCacheEntry(request) { + const data = cache_1.LookupCacheEntryRequest.toBinary(request); + const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "LookupCacheEntry", "application/protobuf", data); + return promise.then((data2) => cache_1.LookupCacheEntryResponse.fromBinary(data2)); + } + }; + exports2.CacheServiceClientProtobuf = CacheServiceClientProtobuf; + var CacheServiceMethod; + (function(CacheServiceMethod2) { + CacheServiceMethod2["CreateCacheEntry"] = "CreateCacheEntry"; + CacheServiceMethod2["FinalizeCacheEntryUpload"] = "FinalizeCacheEntryUpload"; + CacheServiceMethod2["GetCacheEntryDownloadURL"] = "GetCacheEntryDownloadURL"; + CacheServiceMethod2["DeleteCacheEntry"] = "DeleteCacheEntry"; + CacheServiceMethod2["ListCacheEntries"] = "ListCacheEntries"; + CacheServiceMethod2["LookupCacheEntry"] = "LookupCacheEntry"; + })(CacheServiceMethod || (exports2.CacheServiceMethod = CacheServiceMethod = {})); + exports2.CacheServiceMethodList = [ + CacheServiceMethod.CreateCacheEntry, + CacheServiceMethod.FinalizeCacheEntryUpload, + CacheServiceMethod.GetCacheEntryDownloadURL, + CacheServiceMethod.DeleteCacheEntry, + CacheServiceMethod.ListCacheEntries, + CacheServiceMethod.LookupCacheEntry + ]; + function createCacheServiceServer(service) { + return new twirp_ts_1.TwirpServer({ + service, + packageName: "github.actions.results.api.v1", + serviceName: "CacheService", + methodList: exports2.CacheServiceMethodList, + matchRoute: matchCacheServiceRoute + }); + } + __name(createCacheServiceServer, "createCacheServiceServer"); + exports2.createCacheServiceServer = createCacheServiceServer; + function matchCacheServiceRoute(method, events) { + switch (method) { + case "CreateCacheEntry": + return (ctx, service, data, interceptors) => __awaiter3(this, void 0, void 0, 
function* () { + ctx = Object.assign(Object.assign({}, ctx), { methodName: "CreateCacheEntry" }); + yield events.onMatch(ctx); + return handleCacheServiceCreateCacheEntryRequest(ctx, service, data, interceptors); + }); + case "FinalizeCacheEntryUpload": + return (ctx, service, data, interceptors) => __awaiter3(this, void 0, void 0, function* () { + ctx = Object.assign(Object.assign({}, ctx), { methodName: "FinalizeCacheEntryUpload" }); + yield events.onMatch(ctx); + return handleCacheServiceFinalizeCacheEntryUploadRequest(ctx, service, data, interceptors); + }); + case "GetCacheEntryDownloadURL": + return (ctx, service, data, interceptors) => __awaiter3(this, void 0, void 0, function* () { + ctx = Object.assign(Object.assign({}, ctx), { methodName: "GetCacheEntryDownloadURL" }); + yield events.onMatch(ctx); + return handleCacheServiceGetCacheEntryDownloadURLRequest(ctx, service, data, interceptors); + }); + case "DeleteCacheEntry": + return (ctx, service, data, interceptors) => __awaiter3(this, void 0, void 0, function* () { + ctx = Object.assign(Object.assign({}, ctx), { methodName: "DeleteCacheEntry" }); + yield events.onMatch(ctx); + return handleCacheServiceDeleteCacheEntryRequest(ctx, service, data, interceptors); + }); + case "ListCacheEntries": + return (ctx, service, data, interceptors) => __awaiter3(this, void 0, void 0, function* () { + ctx = Object.assign(Object.assign({}, ctx), { methodName: "ListCacheEntries" }); + yield events.onMatch(ctx); + return handleCacheServiceListCacheEntriesRequest(ctx, service, data, interceptors); + }); + case "LookupCacheEntry": + return (ctx, service, data, interceptors) => __awaiter3(this, void 0, void 0, function* () { + ctx = Object.assign(Object.assign({}, ctx), { methodName: "LookupCacheEntry" }); + yield events.onMatch(ctx); + return handleCacheServiceLookupCacheEntryRequest(ctx, service, data, interceptors); + }); + default: + events.onNotFound(); + const msg = `no handler found`; + throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg); + } + } + __name(matchCacheServiceRoute, "matchCacheServiceRoute"); + function handleCacheServiceCreateCacheEntryRequest(ctx, service, data, interceptors) { + switch (ctx.contentType) { + case twirp_ts_1.TwirpContentType.JSON: + return handleCacheServiceCreateCacheEntryJSON(ctx, service, data, interceptors); + case twirp_ts_1.TwirpContentType.Protobuf: + return handleCacheServiceCreateCacheEntryProtobuf(ctx, service, data, interceptors); + default: + const msg = "unexpected Content-Type"; + throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg); + } + } + __name(handleCacheServiceCreateCacheEntryRequest, "handleCacheServiceCreateCacheEntryRequest"); + function handleCacheServiceFinalizeCacheEntryUploadRequest(ctx, service, data, interceptors) { + switch (ctx.contentType) { + case twirp_ts_1.TwirpContentType.JSON: + return handleCacheServiceFinalizeCacheEntryUploadJSON(ctx, service, data, interceptors); + case twirp_ts_1.TwirpContentType.Protobuf: + return handleCacheServiceFinalizeCacheEntryUploadProtobuf(ctx, service, data, interceptors); + default: + const msg = "unexpected Content-Type"; + throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg); + } + } + __name(handleCacheServiceFinalizeCacheEntryUploadRequest, "handleCacheServiceFinalizeCacheEntryUploadRequest"); + function handleCacheServiceGetCacheEntryDownloadURLRequest(ctx, service, data, interceptors) { + switch (ctx.contentType) { + case twirp_ts_1.TwirpContentType.JSON: + return 
handleCacheServiceGetCacheEntryDownloadURLJSON(ctx, service, data, interceptors); + case twirp_ts_1.TwirpContentType.Protobuf: + return handleCacheServiceGetCacheEntryDownloadURLProtobuf(ctx, service, data, interceptors); + default: + const msg = "unexpected Content-Type"; + throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg); + } + } + __name(handleCacheServiceGetCacheEntryDownloadURLRequest, "handleCacheServiceGetCacheEntryDownloadURLRequest"); + function handleCacheServiceDeleteCacheEntryRequest(ctx, service, data, interceptors) { + switch (ctx.contentType) { + case twirp_ts_1.TwirpContentType.JSON: + return handleCacheServiceDeleteCacheEntryJSON(ctx, service, data, interceptors); + case twirp_ts_1.TwirpContentType.Protobuf: + return handleCacheServiceDeleteCacheEntryProtobuf(ctx, service, data, interceptors); + default: + const msg = "unexpected Content-Type"; + throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg); + } + } + __name(handleCacheServiceDeleteCacheEntryRequest, "handleCacheServiceDeleteCacheEntryRequest"); + function handleCacheServiceListCacheEntriesRequest(ctx, service, data, interceptors) { + switch (ctx.contentType) { + case twirp_ts_1.TwirpContentType.JSON: + return handleCacheServiceListCacheEntriesJSON(ctx, service, data, interceptors); + case twirp_ts_1.TwirpContentType.Protobuf: + return handleCacheServiceListCacheEntriesProtobuf(ctx, service, data, interceptors); + default: + const msg = "unexpected Content-Type"; + throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg); + } + } + __name(handleCacheServiceListCacheEntriesRequest, "handleCacheServiceListCacheEntriesRequest"); + function handleCacheServiceLookupCacheEntryRequest(ctx, service, data, interceptors) { + switch (ctx.contentType) { + case twirp_ts_1.TwirpContentType.JSON: + return handleCacheServiceLookupCacheEntryJSON(ctx, service, data, interceptors); + case twirp_ts_1.TwirpContentType.Protobuf: + return handleCacheServiceLookupCacheEntryProtobuf(ctx, service, data, interceptors); + default: + const msg = "unexpected Content-Type"; + throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg); + } + } + __name(handleCacheServiceLookupCacheEntryRequest, "handleCacheServiceLookupCacheEntryRequest"); + function handleCacheServiceCreateCacheEntryJSON(ctx, service, data, interceptors) { + return __awaiter3(this, void 0, void 0, function* () { + let request; + let response; + try { + const body = JSON.parse(data.toString() || "{}"); + request = cache_1.CreateCacheEntryRequest.fromJson(body, { + ignoreUnknownFields: true + }); + } catch (e) { + if (e instanceof Error) { + const msg = "the json request could not be decoded"; + throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true); } - fieldValue = group[name]; - output = target[field.oneof]; - output.oneofKind = group.oneofKind; - if (fieldValue == void 0) { - delete output[name]; - continue; + } + if (interceptors && interceptors.length > 0) { + const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors); + response = yield interceptor(ctx, request, (ctx2, inputReq) => { + return service.CreateCacheEntry(ctx2, inputReq); + }); + } else { + response = yield service.CreateCacheEntry(ctx, request); + } + return JSON.stringify(cache_1.CreateCacheEntryResponse.toJson(response, { + useProtoFieldName: true, + emitDefaultValues: false + })); + }); + } + __name(handleCacheServiceCreateCacheEntryJSON, "handleCacheServiceCreateCacheEntryJSON"); + 
function handleCacheServiceFinalizeCacheEntryUploadJSON(ctx, service, data, interceptors) { + return __awaiter3(this, void 0, void 0, function* () { + let request; + let response; + try { + const body = JSON.parse(data.toString() || "{}"); + request = cache_1.FinalizeCacheEntryUploadRequest.fromJson(body, { + ignoreUnknownFields: true + }); + } catch (e) { + if (e instanceof Error) { + const msg = "the json request could not be decoded"; + throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true); } + } + if (interceptors && interceptors.length > 0) { + const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors); + response = yield interceptor(ctx, request, (ctx2, inputReq) => { + return service.FinalizeCacheEntryUpload(ctx2, inputReq); + }); } else { - fieldValue = input[name]; - output = target; - if (fieldValue == void 0) { - continue; + response = yield service.FinalizeCacheEntryUpload(ctx, request); + } + return JSON.stringify(cache_1.FinalizeCacheEntryUploadResponse.toJson(response, { + useProtoFieldName: true, + emitDefaultValues: false + })); + }); + } + __name(handleCacheServiceFinalizeCacheEntryUploadJSON, "handleCacheServiceFinalizeCacheEntryUploadJSON"); + function handleCacheServiceGetCacheEntryDownloadURLJSON(ctx, service, data, interceptors) { + return __awaiter3(this, void 0, void 0, function* () { + let request; + let response; + try { + const body = JSON.parse(data.toString() || "{}"); + request = cache_1.GetCacheEntryDownloadURLRequest.fromJson(body, { + ignoreUnknownFields: true + }); + } catch (e) { + if (e instanceof Error) { + const msg = "the json request could not be decoded"; + throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true); } } - if (field.repeat) - output[name].length = fieldValue.length; - switch (field.kind) { - case "scalar": - case "enum": - if (field.repeat) - for (let i = 0; i < fieldValue.length; i++) - output[name][i] = fieldValue[i]; - else - output[name] = fieldValue; - break; - case "message": - let T = field.T(); - if (field.repeat) - for (let i = 0; i < fieldValue.length; i++) - output[name][i] = T.create(fieldValue[i]); - else if (output[name] === void 0) - output[name] = T.create(fieldValue); - else - T.mergePartial(output[name], fieldValue); - break; - case "map": - switch (field.V.kind) { - case "scalar": - case "enum": - Object.assign(output[name], fieldValue); - break; - case "message": - let T2 = field.V.T(); - for (let k of Object.keys(fieldValue)) - output[name][k] = T2.create(fieldValue[k]); - break; - } - break; + if (interceptors && interceptors.length > 0) { + const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors); + response = yield interceptor(ctx, request, (ctx2, inputReq) => { + return service.GetCacheEntryDownloadURL(ctx2, inputReq); + }); + } else { + response = yield service.GetCacheEntryDownloadURL(ctx, request); } - } + return JSON.stringify(cache_1.GetCacheEntryDownloadURLResponse.toJson(response, { + useProtoFieldName: true, + emitDefaultValues: false + })); + }); } - __name(reflectionMergePartial, "reflectionMergePartial"); - exports2.reflectionMergePartial = reflectionMergePartial; - } -}); - -// ../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-equals.js -var require_reflection_equals = __commonJS({ - "../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-equals.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.reflectionEquals = void 0; 
- var reflection_info_1 = require_reflection_info(); - function reflectionEquals(info, a, b) { - if (a === b) - return true; - if (!a || !b) - return false; - for (let field of info.fields) { - let localName = field.localName; - let val_a = field.oneof ? a[field.oneof][localName] : a[localName]; - let val_b = field.oneof ? b[field.oneof][localName] : b[localName]; - switch (field.kind) { - case "enum": - case "scalar": - let t = field.kind == "enum" ? reflection_info_1.ScalarType.INT32 : field.T; - if (!(field.repeat ? repeatedPrimitiveEq(t, val_a, val_b) : primitiveEq(t, val_a, val_b))) - return false; - break; - case "map": - if (!(field.V.kind == "message" ? repeatedMsgEq(field.V.T(), objectValues(val_a), objectValues(val_b)) : repeatedPrimitiveEq(field.V.kind == "enum" ? reflection_info_1.ScalarType.INT32 : field.V.T, objectValues(val_a), objectValues(val_b)))) - return false; - break; - case "message": - let T = field.T(); - if (!(field.repeat ? repeatedMsgEq(T, val_a, val_b) : T.equals(val_a, val_b))) - return false; - break; + __name(handleCacheServiceGetCacheEntryDownloadURLJSON, "handleCacheServiceGetCacheEntryDownloadURLJSON"); + function handleCacheServiceDeleteCacheEntryJSON(ctx, service, data, interceptors) { + return __awaiter3(this, void 0, void 0, function* () { + let request; + let response; + try { + const body = JSON.parse(data.toString() || "{}"); + request = cache_1.DeleteCacheEntryRequest.fromJson(body, { + ignoreUnknownFields: true + }); + } catch (e) { + if (e instanceof Error) { + const msg = "the json request could not be decoded"; + throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true); + } } - } - return true; + if (interceptors && interceptors.length > 0) { + const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors); + response = yield interceptor(ctx, request, (ctx2, inputReq) => { + return service.DeleteCacheEntry(ctx2, inputReq); + }); + } else { + response = yield service.DeleteCacheEntry(ctx, request); + } + return JSON.stringify(cache_1.DeleteCacheEntryResponse.toJson(response, { + useProtoFieldName: true, + emitDefaultValues: false + })); + }); } - __name(reflectionEquals, "reflectionEquals"); - exports2.reflectionEquals = reflectionEquals; - var objectValues = Object.values; - function primitiveEq(type, a, b) { - if (a === b) - return true; - if (type !== reflection_info_1.ScalarType.BYTES) - return false; - let ba = a; - let bb = b; - if (ba.length !== bb.length) - return false; - for (let i = 0; i < ba.length; i++) - if (ba[i] != bb[i]) - return false; - return true; + __name(handleCacheServiceDeleteCacheEntryJSON, "handleCacheServiceDeleteCacheEntryJSON"); + function handleCacheServiceListCacheEntriesJSON(ctx, service, data, interceptors) { + return __awaiter3(this, void 0, void 0, function* () { + let request; + let response; + try { + const body = JSON.parse(data.toString() || "{}"); + request = cache_1.ListCacheEntriesRequest.fromJson(body, { + ignoreUnknownFields: true + }); + } catch (e) { + if (e instanceof Error) { + const msg = "the json request could not be decoded"; + throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true); + } + } + if (interceptors && interceptors.length > 0) { + const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors); + response = yield interceptor(ctx, request, (ctx2, inputReq) => { + return service.ListCacheEntries(ctx2, inputReq); + }); + } else { + response = yield service.ListCacheEntries(ctx, request); + } + 
return JSON.stringify(cache_1.ListCacheEntriesResponse.toJson(response, { + useProtoFieldName: true, + emitDefaultValues: false + })); + }); } - __name(primitiveEq, "primitiveEq"); - function repeatedPrimitiveEq(type, a, b) { - if (a.length !== b.length) - return false; - for (let i = 0; i < a.length; i++) - if (!primitiveEq(type, a[i], b[i])) - return false; - return true; + __name(handleCacheServiceListCacheEntriesJSON, "handleCacheServiceListCacheEntriesJSON"); + function handleCacheServiceLookupCacheEntryJSON(ctx, service, data, interceptors) { + return __awaiter3(this, void 0, void 0, function* () { + let request; + let response; + try { + const body = JSON.parse(data.toString() || "{}"); + request = cache_1.LookupCacheEntryRequest.fromJson(body, { + ignoreUnknownFields: true + }); + } catch (e) { + if (e instanceof Error) { + const msg = "the json request could not be decoded"; + throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true); + } + } + if (interceptors && interceptors.length > 0) { + const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors); + response = yield interceptor(ctx, request, (ctx2, inputReq) => { + return service.LookupCacheEntry(ctx2, inputReq); + }); + } else { + response = yield service.LookupCacheEntry(ctx, request); + } + return JSON.stringify(cache_1.LookupCacheEntryResponse.toJson(response, { + useProtoFieldName: true, + emitDefaultValues: false + })); + }); } - __name(repeatedPrimitiveEq, "repeatedPrimitiveEq"); - function repeatedMsgEq(type, a, b) { - if (a.length !== b.length) - return false; - for (let i = 0; i < a.length; i++) - if (!type.equals(a[i], b[i])) - return false; - return true; + __name(handleCacheServiceLookupCacheEntryJSON, "handleCacheServiceLookupCacheEntryJSON"); + function handleCacheServiceCreateCacheEntryProtobuf(ctx, service, data, interceptors) { + return __awaiter3(this, void 0, void 0, function* () { + let request; + let response; + try { + request = cache_1.CreateCacheEntryRequest.fromBinary(data); + } catch (e) { + if (e instanceof Error) { + const msg = "the protobuf request could not be decoded"; + throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true); + } + } + if (interceptors && interceptors.length > 0) { + const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors); + response = yield interceptor(ctx, request, (ctx2, inputReq) => { + return service.CreateCacheEntry(ctx2, inputReq); + }); + } else { + response = yield service.CreateCacheEntry(ctx, request); + } + return Buffer.from(cache_1.CreateCacheEntryResponse.toBinary(response)); + }); } - __name(repeatedMsgEq, "repeatedMsgEq"); + __name(handleCacheServiceCreateCacheEntryProtobuf, "handleCacheServiceCreateCacheEntryProtobuf"); + function handleCacheServiceFinalizeCacheEntryUploadProtobuf(ctx, service, data, interceptors) { + return __awaiter3(this, void 0, void 0, function* () { + let request; + let response; + try { + request = cache_1.FinalizeCacheEntryUploadRequest.fromBinary(data); + } catch (e) { + if (e instanceof Error) { + const msg = "the protobuf request could not be decoded"; + throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true); + } + } + if (interceptors && interceptors.length > 0) { + const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors); + response = yield interceptor(ctx, request, (ctx2, inputReq) => { + return service.FinalizeCacheEntryUpload(ctx2, inputReq); + }); + } else { + response = yield 
service.FinalizeCacheEntryUpload(ctx, request); + } + return Buffer.from(cache_1.FinalizeCacheEntryUploadResponse.toBinary(response)); + }); + } + __name(handleCacheServiceFinalizeCacheEntryUploadProtobuf, "handleCacheServiceFinalizeCacheEntryUploadProtobuf"); + function handleCacheServiceGetCacheEntryDownloadURLProtobuf(ctx, service, data, interceptors) { + return __awaiter3(this, void 0, void 0, function* () { + let request; + let response; + try { + request = cache_1.GetCacheEntryDownloadURLRequest.fromBinary(data); + } catch (e) { + if (e instanceof Error) { + const msg = "the protobuf request could not be decoded"; + throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true); + } + } + if (interceptors && interceptors.length > 0) { + const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors); + response = yield interceptor(ctx, request, (ctx2, inputReq) => { + return service.GetCacheEntryDownloadURL(ctx2, inputReq); + }); + } else { + response = yield service.GetCacheEntryDownloadURL(ctx, request); + } + return Buffer.from(cache_1.GetCacheEntryDownloadURLResponse.toBinary(response)); + }); + } + __name(handleCacheServiceGetCacheEntryDownloadURLProtobuf, "handleCacheServiceGetCacheEntryDownloadURLProtobuf"); + function handleCacheServiceDeleteCacheEntryProtobuf(ctx, service, data, interceptors) { + return __awaiter3(this, void 0, void 0, function* () { + let request; + let response; + try { + request = cache_1.DeleteCacheEntryRequest.fromBinary(data); + } catch (e) { + if (e instanceof Error) { + const msg = "the protobuf request could not be decoded"; + throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true); + } + } + if (interceptors && interceptors.length > 0) { + const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors); + response = yield interceptor(ctx, request, (ctx2, inputReq) => { + return service.DeleteCacheEntry(ctx2, inputReq); + }); + } else { + response = yield service.DeleteCacheEntry(ctx, request); + } + return Buffer.from(cache_1.DeleteCacheEntryResponse.toBinary(response)); + }); + } + __name(handleCacheServiceDeleteCacheEntryProtobuf, "handleCacheServiceDeleteCacheEntryProtobuf"); + function handleCacheServiceListCacheEntriesProtobuf(ctx, service, data, interceptors) { + return __awaiter3(this, void 0, void 0, function* () { + let request; + let response; + try { + request = cache_1.ListCacheEntriesRequest.fromBinary(data); + } catch (e) { + if (e instanceof Error) { + const msg = "the protobuf request could not be decoded"; + throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true); + } + } + if (interceptors && interceptors.length > 0) { + const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors); + response = yield interceptor(ctx, request, (ctx2, inputReq) => { + return service.ListCacheEntries(ctx2, inputReq); + }); + } else { + response = yield service.ListCacheEntries(ctx, request); + } + return Buffer.from(cache_1.ListCacheEntriesResponse.toBinary(response)); + }); + } + __name(handleCacheServiceListCacheEntriesProtobuf, "handleCacheServiceListCacheEntriesProtobuf"); + function handleCacheServiceLookupCacheEntryProtobuf(ctx, service, data, interceptors) { + return __awaiter3(this, void 0, void 0, function* () { + let request; + let response; + try { + request = cache_1.LookupCacheEntryRequest.fromBinary(data); + } catch (e) { + if (e instanceof Error) { + const msg = "the protobuf request could not be decoded"; + 
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true); + } + } + if (interceptors && interceptors.length > 0) { + const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors); + response = yield interceptor(ctx, request, (ctx2, inputReq) => { + return service.LookupCacheEntry(ctx2, inputReq); + }); + } else { + response = yield service.LookupCacheEntry(ctx, request); + } + return Buffer.from(cache_1.LookupCacheEntryResponse.toBinary(response)); + }); + } + __name(handleCacheServiceLookupCacheEntryProtobuf, "handleCacheServiceLookupCacheEntryProtobuf"); } }); -// ../node_modules/@protobuf-ts/runtime/build/commonjs/message-type.js -var require_message_type = __commonJS({ - "../node_modules/@protobuf-ts/runtime/build/commonjs/message-type.js"(exports2) { +// ../node_modules/@actions/cache/lib/internal/shared/cacheTwirpClient.js +var require_cacheTwirpClient = __commonJS({ + "../node_modules/@actions/cache/lib/internal/shared/cacheTwirpClient.js"(exports2) { "use strict"; + var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? value : new P(function(resolve) { + resolve(value); + }); + } + __name(adopt, "adopt"); + return new (P || (P = Promise))(function(resolve, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } + } + __name(fulfilled, "fulfilled"); + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); + } + } + __name(rejected, "rejected"); + function step(result) { + result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); + } + __name(step, "step"); + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.MessageType = void 0; - var message_type_contract_1 = require_message_type_contract(); - var reflection_info_1 = require_reflection_info(); - var reflection_type_check_1 = require_reflection_type_check(); - var reflection_json_reader_1 = require_reflection_json_reader(); - var reflection_json_writer_1 = require_reflection_json_writer(); - var reflection_binary_reader_1 = require_reflection_binary_reader(); - var reflection_binary_writer_1 = require_reflection_binary_writer(); - var reflection_create_1 = require_reflection_create(); - var reflection_merge_partial_1 = require_reflection_merge_partial(); - var json_typings_1 = require_json_typings(); - var json_format_contract_1 = require_json_format_contract(); - var reflection_equals_1 = require_reflection_equals(); - var binary_writer_1 = require_binary_writer(); - var binary_reader_1 = require_binary_reader(); - var baseDescriptors = Object.getOwnPropertyDescriptors(Object.getPrototypeOf({})); - var MessageType = class { + exports2.internalCacheTwirpClient = void 0; + var core_1 = require_core(); + var user_agent_1 = require_user_agent(); + var errors_1 = require_errors2(); + var config_1 = require_config(); + var cacheUtils_1 = require_cacheUtils(); + var auth_1 = require_auth(); + var http_client_1 = require_lib(); + var cache_twirp_1 = require_cache_twirp(); + var CacheServiceClient = class { static { - __name(this, "MessageType"); - } - constructor(name, fields, options) { - this.defaultCheckDepth = 16; - this.typeName = name; - this.fields = fields.map(reflection_info_1.normalizeFieldInfo); - this.options = options !== null && options !== void 0 ? 
options : {}; - this.messagePrototype = Object.create(null, Object.assign(Object.assign({}, baseDescriptors), { [message_type_contract_1.MESSAGE_TYPE]: { value: this } })); - this.refTypeCheck = new reflection_type_check_1.ReflectionTypeCheck(this); - this.refJsonReader = new reflection_json_reader_1.ReflectionJsonReader(this); - this.refJsonWriter = new reflection_json_writer_1.ReflectionJsonWriter(this); - this.refBinReader = new reflection_binary_reader_1.ReflectionBinaryReader(this); - this.refBinWriter = new reflection_binary_writer_1.ReflectionBinaryWriter(this); + __name(this, "CacheServiceClient"); } - create(value) { - let message = reflection_create_1.reflectionCreate(this); - if (value !== void 0) { - reflection_merge_partial_1.reflectionMergePartial(this, message, value); + constructor(userAgent, maxAttempts, baseRetryIntervalMilliseconds, retryMultiplier) { + this.maxAttempts = 5; + this.baseRetryIntervalMilliseconds = 3e3; + this.retryMultiplier = 1.5; + const token = (0, cacheUtils_1.getRuntimeToken)(); + this.baseUrl = (0, config_1.getCacheServiceURL)(); + if (maxAttempts) { + this.maxAttempts = maxAttempts; } - return message; - } - /** - * Clone the message. - * - * Unknown fields are discarded. - */ - clone(message) { - let copy = this.create(); - reflection_merge_partial_1.reflectionMergePartial(this, copy, message); - return copy; - } - /** - * Determines whether two message of the same type have the same field values. - * Checks for deep equality, traversing repeated fields, oneof groups, maps - * and messages recursively. - * Will also return true if both messages are `undefined`. - */ - equals(a, b) { - return reflection_equals_1.reflectionEquals(this, a, b); - } - /** - * Is the given value assignable to our message type - * and contains no [excess properties](https://www.typescriptlang.org/docs/handbook/interfaces.html#excess-property-checks)? - */ - is(arg, depth = this.defaultCheckDepth) { - return this.refTypeCheck.is(arg, depth, false); - } - /** - * Is the given value assignable to our message type, - * regardless of [excess properties](https://www.typescriptlang.org/docs/handbook/interfaces.html#excess-property-checks)? - */ - isAssignable(arg, depth = this.defaultCheckDepth) { - return this.refTypeCheck.is(arg, depth, true); - } - /** - * Copy partial data into the target message. - */ - mergePartial(target, source) { - reflection_merge_partial_1.reflectionMergePartial(this, target, source); - } - /** - * Create a new message from binary format. - */ - fromBinary(data, options) { - let opt = binary_reader_1.binaryReadOptions(options); - return this.internalBinaryRead(opt.readerFactory(data), data.byteLength, opt); + if (baseRetryIntervalMilliseconds) { + this.baseRetryIntervalMilliseconds = baseRetryIntervalMilliseconds; + } + if (retryMultiplier) { + this.retryMultiplier = retryMultiplier; + } + this.httpClient = new http_client_1.HttpClient(userAgent, [ + new auth_1.BearerCredentialHandler(token) + ]); } - /** - * Read a new message from a JSON value. - */ - fromJson(json, options) { - return this.internalJsonRead(json, json_format_contract_1.jsonReadOptions(options)); + // This function satisfies the Rpc interface. It is compatible with the JSON + // JSON generated client. 
+ request(service, method, contentType, data) { + return __awaiter3(this, void 0, void 0, function* () { + const url = new URL(`/twirp/${service}/${method}`, this.baseUrl).href; + (0, core_1.debug)(`[Request] ${method} ${url}`); + const headers = { + "Content-Type": contentType + }; + try { + const { body } = yield this.retryableRequest(() => __awaiter3(this, void 0, void 0, function* () { + return this.httpClient.post(url, JSON.stringify(data), headers); + })); + return body; + } catch (error) { + throw new Error(`Failed to ${method}: ${error.message}`); + } + }); } - /** - * Read a new message from a JSON string. - * This is equivalent to `T.fromJson(JSON.parse(json))`. - */ - fromJsonString(json, options) { - let value = JSON.parse(json); - return this.fromJson(value, options); + retryableRequest(operation) { + return __awaiter3(this, void 0, void 0, function* () { + let attempt = 0; + let errorMessage = ""; + let rawBody = ""; + while (attempt < this.maxAttempts) { + let isRetryable = false; + try { + const response = yield operation(); + const statusCode = response.message.statusCode; + rawBody = yield response.readBody(); + (0, core_1.debug)(`[Response] - ${response.message.statusCode}`); + (0, core_1.debug)(`Headers: ${JSON.stringify(response.message.headers, null, 2)}`); + const body = JSON.parse(rawBody); + (0, core_1.debug)(`Body: ${JSON.stringify(body, null, 2)}`); + if (this.isSuccessStatusCode(statusCode)) { + return { response, body }; + } + isRetryable = this.isRetryableHttpStatusCode(statusCode); + errorMessage = `Failed request: (${statusCode}) ${response.message.statusMessage}`; + if (body.msg) { + if (errors_1.UsageError.isUsageErrorMessage(body.msg)) { + throw new errors_1.UsageError(); + } + errorMessage = `${errorMessage}: ${body.msg}`; + } + } catch (error) { + if (error instanceof SyntaxError) { + (0, core_1.debug)(`Raw Body: ${rawBody}`); + } + if (error instanceof errors_1.UsageError) { + throw error; + } + if (errors_1.NetworkError.isNetworkErrorCode(error === null || error === void 0 ? void 0 : error.code)) { + throw new errors_1.NetworkError(error === null || error === void 0 ? void 0 : error.code); + } + isRetryable = true; + errorMessage = error.message; + } + if (!isRetryable) { + throw new Error(`Received non-retryable error: ${errorMessage}`); + } + if (attempt + 1 === this.maxAttempts) { + throw new Error(`Failed to make request after ${this.maxAttempts} attempts: ${errorMessage}`); + } + const retryTimeMilliseconds = this.getExponentialRetryTimeMilliseconds(attempt); + (0, core_1.info)(`Attempt ${attempt + 1} of ${this.maxAttempts} failed with error: ${errorMessage}. Retrying request in ${retryTimeMilliseconds} ms...`); + yield this.sleep(retryTimeMilliseconds); + attempt++; + } + throw new Error(`Request failed`); + }); } - /** - * Write the message to canonical JSON value. - */ - toJson(message, options) { - return this.internalJsonWrite(message, json_format_contract_1.jsonWriteOptions(options)); + isSuccessStatusCode(statusCode) { + if (!statusCode) + return false; + return statusCode >= 200 && statusCode < 300; } - /** - * Convert the message to canonical JSON string. - * This is equivalent to `JSON.stringify(T.toJson(t))` - */ - toJsonString(message, options) { - var _a; - let value = this.toJson(message, options); - return JSON.stringify(value, null, (_a = options === null || options === void 0 ? void 0 : options.prettySpaces) !== null && _a !== void 0 ? 
_a : 0); + isRetryableHttpStatusCode(statusCode) { + if (!statusCode) + return false; + const retryableStatusCodes = [ + http_client_1.HttpCodes.BadGateway, + http_client_1.HttpCodes.GatewayTimeout, + http_client_1.HttpCodes.InternalServerError, + http_client_1.HttpCodes.ServiceUnavailable, + http_client_1.HttpCodes.TooManyRequests + ]; + return retryableStatusCodes.includes(statusCode); } - /** - * Write the message to binary format. - */ - toBinary(message, options) { - let opt = binary_writer_1.binaryWriteOptions(options); - return this.internalBinaryWrite(message, opt.writerFactory(), opt).finish(); + sleep(milliseconds) { + return __awaiter3(this, void 0, void 0, function* () { + return new Promise((resolve) => setTimeout(resolve, milliseconds)); + }); } - /** - * This is an internal method. If you just want to read a message from - * JSON, use `fromJson()` or `fromJsonString()`. - * - * Reads JSON value and merges the fields into the target - * according to protobuf rules. If the target is omitted, - * a new instance is created first. - */ - internalJsonRead(json, options, target) { - if (json !== null && typeof json == "object" && !Array.isArray(json)) { - let message = target !== null && target !== void 0 ? target : this.create(); - this.refJsonReader.read(json, message, options); - return message; + getExponentialRetryTimeMilliseconds(attempt) { + if (attempt < 0) { + throw new Error("attempt should be a positive integer"); } - throw new Error(`Unable to parse message ${this.typeName} from JSON ${json_typings_1.typeofJsonValue(json)}.`); - } - /** - * This is an internal method. If you just want to write a message - * to JSON, use `toJson()` or `toJsonString(). - * - * Writes JSON value and returns it. - */ - internalJsonWrite(message, options) { - return this.refJsonWriter.write(message, options); - } - /** - * This is an internal method. If you just want to write a message - * in binary format, use `toBinary()`. - * - * Serializes the message in binary format and appends it to the given - * writer. Returns passed writer. - */ - internalBinaryWrite(message, writer, options) { - this.refBinWriter.write(message, writer, options); - return writer; - } - /** - * This is an internal method. If you just want to read a message from - * binary data, use `fromBinary()`. - * - * Reads data from binary format and merges the fields into - * the target according to protobuf rules. If the target is - * omitted, a new instance is created first. - */ - internalBinaryRead(reader, length, options, target) { - let message = target !== null && target !== void 0 ? target : this.create(); - this.refBinReader.read(reader, message, options, length); - return message; + if (attempt === 0) { + return this.baseRetryIntervalMilliseconds; + } + const minTime = this.baseRetryIntervalMilliseconds * Math.pow(this.retryMultiplier, attempt); + const maxTime = minTime * this.retryMultiplier; + return Math.trunc(Math.random() * (maxTime - minTime) + minTime); } }; - exports2.MessageType = MessageType; + function internalCacheTwirpClient(options) { + const client = new CacheServiceClient((0, user_agent_1.getUserAgentString)(), options === null || options === void 0 ? void 0 : options.maxAttempts, options === null || options === void 0 ? void 0 : options.retryIntervalMs, options === null || options === void 0 ? 
void 0 : options.retryMultiplier); + return new cache_twirp_1.CacheServiceClientJSON(client); + } + __name(internalCacheTwirpClient, "internalCacheTwirpClient"); + exports2.internalCacheTwirpClient = internalCacheTwirpClient; } }); -// ../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-contains-message-type.js -var require_reflection_contains_message_type = __commonJS({ - "../node_modules/@protobuf-ts/runtime/build/commonjs/reflection-contains-message-type.js"(exports2) { +// ../node_modules/@actions/cache/lib/internal/tar.js +var require_tar = __commonJS({ + "../node_modules/@actions/cache/lib/internal/tar.js"(exports2) { "use strict"; + var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: /* @__PURE__ */ __name(function() { + return m[k]; + }, "get") }; + } + Object.defineProperty(o, k2, desc); + } : function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; + }); + var __setModuleDefault3 = exports2 && exports2.__setModuleDefault || (Object.create ? function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + } : function(o, v) { + o["default"] = v; + }); + var __importStar3 = exports2 && exports2.__importStar || function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding3(result, mod, k); + } + __setModuleDefault3(result, mod); + return result; + }; + var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? value : new P(function(resolve) { + resolve(value); + }); + } + __name(adopt, "adopt"); + return new (P || (P = Promise))(function(resolve, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); + } + } + __name(fulfilled, "fulfilled"); + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); + } + } + __name(rejected, "rejected"); + function step(result) { + result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); + } + __name(step, "step"); + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.containsMessageType = void 0; - var message_type_contract_1 = require_message_type_contract(); - function containsMessageType(msg) { - return msg[message_type_contract_1.MESSAGE_TYPE] != null; + exports2.createTar = exports2.extractTar = exports2.listTar = void 0; + var exec_1 = require_exec(); + var io2 = __importStar3(require_io()); + var fs_1 = require("fs"); + var path2 = __importStar3(require("path")); + var utils = __importStar3(require_cacheUtils()); + var constants_1 = require_constants7(); + var IS_WINDOWS = process.platform === "win32"; + function getTarPath() { + return __awaiter3(this, void 0, void 0, function* () { + switch (process.platform) { + case "win32": { + const gnuTar = yield utils.getGnuTarPathOnWindows(); + const systemTar = constants_1.SystemTarPathOnWindows; + if (gnuTar) { + return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; + } else if ((0, fs_1.existsSync)(systemTar)) { + return { path: systemTar, type: constants_1.ArchiveToolType.BSD }; + } + break; + } + case "darwin": { + const gnuTar = yield io2.which("gtar", false); + if (gnuTar) { + return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; + } else { + return { + path: yield io2.which("tar", true), + type: constants_1.ArchiveToolType.BSD + }; + } + } + default: + break; + } + return { + path: yield io2.which("tar", true), + type: constants_1.ArchiveToolType.GNU + }; + }); } - __name(containsMessageType, "containsMessageType"); - exports2.containsMessageType = containsMessageType; - } -}); - -// ../node_modules/@protobuf-ts/runtime/build/commonjs/enum-object.js -var require_enum_object = __commonJS({ - "../node_modules/@protobuf-ts/runtime/build/commonjs/enum-object.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.listEnumNumbers = exports2.listEnumNames = exports2.listEnumValues = exports2.isEnumObject = void 0; - function isEnumObject(arg) { - if (typeof arg != "object" || arg === null) { - return false; - } - if (!arg.hasOwnProperty(0)) { - return false; - } - for (let k of Object.keys(arg)) { - let num = parseInt(k); - if (!Number.isNaN(num)) { - let nam = arg[num]; - if (nam === void 0) - return false; - if (arg[nam] !== num) - return false; + __name(getTarPath, "getTarPath"); + function getTarArgs(tarPath, compressionMethod, type, archivePath = "") { + return __awaiter3(this, void 0, void 0, function* () { + const args = [`"${tarPath.path}"`]; + const cacheFileName = utils.getCacheFileName(compressionMethod); + const tarFile = "cache.tar"; + const workingDirectory = getWorkingDirectory(); + const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; + switch (type) { + case "create": + args.push("--posix", "-cf", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path2.sep}`, "g"), "/"), "--exclude", BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path2.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path2.sep}`, "g"), "/"), "--files-from", constants_1.ManifestFilename); + break; + case "extract": + args.push("-xf", BSD_TAR_ZSTD ? 
tarFile : archivePath.replace(new RegExp(`\\${path2.sep}`, "g"), "/"), "-P", "-C", workingDirectory.replace(new RegExp(`\\${path2.sep}`, "g"), "/")); + break; + case "list": + args.push("-tf", BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path2.sep}`, "g"), "/"), "-P"); + break; + } + if (tarPath.type === constants_1.ArchiveToolType.GNU) { + switch (process.platform) { + case "win32": + args.push("--force-local"); + break; + case "darwin": + args.push("--delay-directory-restore"); + break; + } + } + return args; + }); + } + __name(getTarArgs, "getTarArgs"); + function getCommands(compressionMethod, type, archivePath = "") { + return __awaiter3(this, void 0, void 0, function* () { + let args; + const tarPath = yield getTarPath(); + const tarArgs = yield getTarArgs(tarPath, compressionMethod, type, archivePath); + const compressionArgs = type !== "create" ? yield getDecompressionProgram(tarPath, compressionMethod, archivePath) : yield getCompressionProgram(tarPath, compressionMethod); + const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; + if (BSD_TAR_ZSTD && type !== "create") { + args = [[...compressionArgs].join(" "), [...tarArgs].join(" ")]; } else { - let num2 = arg[k]; - if (num2 === void 0) - return false; - if (typeof num2 !== "number") - return false; - if (arg[num2] === void 0) - return false; + args = [[...tarArgs].join(" "), [...compressionArgs].join(" ")]; } - } - return true; + if (BSD_TAR_ZSTD) { + return args; + } + return [args.join(" ")]; + }); } - __name(isEnumObject, "isEnumObject"); - exports2.isEnumObject = isEnumObject; - function listEnumValues(enumObject) { - if (!isEnumObject(enumObject)) - throw new Error("not a typescript enum object"); - let values = []; - for (let [name, number] of Object.entries(enumObject)) - if (typeof number == "number") - values.push({ name, number }); - return values; + __name(getCommands, "getCommands"); + function getWorkingDirectory() { + var _a; + return (_a = process.env["GITHUB_WORKSPACE"]) !== null && _a !== void 0 ? _a : process.cwd(); } - __name(listEnumValues, "listEnumValues"); - exports2.listEnumValues = listEnumValues; - function listEnumNames(enumObject) { - return listEnumValues(enumObject).map((val) => val.name); + __name(getWorkingDirectory, "getWorkingDirectory"); + function getDecompressionProgram(tarPath, compressionMethod, archivePath) { + return __awaiter3(this, void 0, void 0, function* () { + const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; + switch (compressionMethod) { + case constants_1.CompressionMethod.Zstd: + return BSD_TAR_ZSTD ? [ + "zstd -d --long=30 --force -o", + constants_1.TarFilename, + archivePath.replace(new RegExp(`\\${path2.sep}`, "g"), "/") + ] : [ + "--use-compress-program", + IS_WINDOWS ? '"zstd -d --long=30"' : "unzstd --long=30" + ]; + case constants_1.CompressionMethod.ZstdWithoutLong: + return BSD_TAR_ZSTD ? [ + "zstd -d --force -o", + constants_1.TarFilename, + archivePath.replace(new RegExp(`\\${path2.sep}`, "g"), "/") + ] : ["--use-compress-program", IS_WINDOWS ? 
'"zstd -d"' : "unzstd"]; + default: + return ["-z"]; + } + }); } - __name(listEnumNames, "listEnumNames"); - exports2.listEnumNames = listEnumNames; - function listEnumNumbers(enumObject) { - return listEnumValues(enumObject).map((val) => val.number).filter((num, index, arr) => arr.indexOf(num) == index); + __name(getDecompressionProgram, "getDecompressionProgram"); + function getCompressionProgram(tarPath, compressionMethod) { + return __awaiter3(this, void 0, void 0, function* () { + const cacheFileName = utils.getCacheFileName(compressionMethod); + const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; + switch (compressionMethod) { + case constants_1.CompressionMethod.Zstd: + return BSD_TAR_ZSTD ? [ + "zstd -T0 --long=30 --force -o", + cacheFileName.replace(new RegExp(`\\${path2.sep}`, "g"), "/"), + constants_1.TarFilename + ] : [ + "--use-compress-program", + IS_WINDOWS ? '"zstd -T0 --long=30"' : "zstdmt --long=30" + ]; + case constants_1.CompressionMethod.ZstdWithoutLong: + return BSD_TAR_ZSTD ? [ + "zstd -T0 --force -o", + cacheFileName.replace(new RegExp(`\\${path2.sep}`, "g"), "/"), + constants_1.TarFilename + ] : ["--use-compress-program", IS_WINDOWS ? '"zstd -T0"' : "zstdmt"]; + default: + return ["-z"]; + } + }); } - __name(listEnumNumbers, "listEnumNumbers"); - exports2.listEnumNumbers = listEnumNumbers; - } -}); - -// ../node_modules/@protobuf-ts/runtime/build/commonjs/index.js -var require_commonjs7 = __commonJS({ - "../node_modules/@protobuf-ts/runtime/build/commonjs/index.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - var json_typings_1 = require_json_typings(); - Object.defineProperty(exports2, "typeofJsonValue", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return json_typings_1.typeofJsonValue; - }, "get") }); - Object.defineProperty(exports2, "isJsonObject", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return json_typings_1.isJsonObject; - }, "get") }); - var base64_1 = require_base642(); - Object.defineProperty(exports2, "base64decode", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return base64_1.base64decode; - }, "get") }); - Object.defineProperty(exports2, "base64encode", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return base64_1.base64encode; - }, "get") }); - var protobufjs_utf8_1 = require_protobufjs_utf8(); - Object.defineProperty(exports2, "utf8read", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return protobufjs_utf8_1.utf8read; - }, "get") }); - var binary_format_contract_1 = require_binary_format_contract(); - Object.defineProperty(exports2, "WireType", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return binary_format_contract_1.WireType; - }, "get") }); - Object.defineProperty(exports2, "mergeBinaryOptions", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return binary_format_contract_1.mergeBinaryOptions; - }, "get") }); - Object.defineProperty(exports2, "UnknownFieldHandler", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return binary_format_contract_1.UnknownFieldHandler; - }, "get") }); - var binary_reader_1 = require_binary_reader(); - Object.defineProperty(exports2, "BinaryReader", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return binary_reader_1.BinaryReader; - }, "get") }); - Object.defineProperty(exports2, "binaryReadOptions", { enumerable: true, 
get: /* @__PURE__ */ __name(function() { - return binary_reader_1.binaryReadOptions; - }, "get") }); - var binary_writer_1 = require_binary_writer(); - Object.defineProperty(exports2, "BinaryWriter", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return binary_writer_1.BinaryWriter; - }, "get") }); - Object.defineProperty(exports2, "binaryWriteOptions", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return binary_writer_1.binaryWriteOptions; - }, "get") }); - var pb_long_1 = require_pb_long(); - Object.defineProperty(exports2, "PbLong", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return pb_long_1.PbLong; - }, "get") }); - Object.defineProperty(exports2, "PbULong", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return pb_long_1.PbULong; - }, "get") }); - var json_format_contract_1 = require_json_format_contract(); - Object.defineProperty(exports2, "jsonReadOptions", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return json_format_contract_1.jsonReadOptions; - }, "get") }); - Object.defineProperty(exports2, "jsonWriteOptions", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return json_format_contract_1.jsonWriteOptions; - }, "get") }); - Object.defineProperty(exports2, "mergeJsonOptions", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return json_format_contract_1.mergeJsonOptions; - }, "get") }); - var message_type_contract_1 = require_message_type_contract(); - Object.defineProperty(exports2, "MESSAGE_TYPE", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return message_type_contract_1.MESSAGE_TYPE; - }, "get") }); - var message_type_1 = require_message_type(); - Object.defineProperty(exports2, "MessageType", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return message_type_1.MessageType; - }, "get") }); - var reflection_info_1 = require_reflection_info(); - Object.defineProperty(exports2, "ScalarType", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return reflection_info_1.ScalarType; - }, "get") }); - Object.defineProperty(exports2, "LongType", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return reflection_info_1.LongType; - }, "get") }); - Object.defineProperty(exports2, "RepeatType", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return reflection_info_1.RepeatType; - }, "get") }); - Object.defineProperty(exports2, "normalizeFieldInfo", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return reflection_info_1.normalizeFieldInfo; - }, "get") }); - Object.defineProperty(exports2, "readFieldOptions", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return reflection_info_1.readFieldOptions; - }, "get") }); - Object.defineProperty(exports2, "readFieldOption", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return reflection_info_1.readFieldOption; - }, "get") }); - Object.defineProperty(exports2, "readMessageOption", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return reflection_info_1.readMessageOption; - }, "get") }); - var reflection_type_check_1 = require_reflection_type_check(); - Object.defineProperty(exports2, "ReflectionTypeCheck", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return reflection_type_check_1.ReflectionTypeCheck; - }, "get") }); - var reflection_create_1 = require_reflection_create(); - Object.defineProperty(exports2, "reflectionCreate", { enumerable: true, get: /* @__PURE__ */ __name(function() 
{ - return reflection_create_1.reflectionCreate; - }, "get") }); - var reflection_scalar_default_1 = require_reflection_scalar_default(); - Object.defineProperty(exports2, "reflectionScalarDefault", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return reflection_scalar_default_1.reflectionScalarDefault; - }, "get") }); - var reflection_merge_partial_1 = require_reflection_merge_partial(); - Object.defineProperty(exports2, "reflectionMergePartial", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return reflection_merge_partial_1.reflectionMergePartial; - }, "get") }); - var reflection_equals_1 = require_reflection_equals(); - Object.defineProperty(exports2, "reflectionEquals", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return reflection_equals_1.reflectionEquals; - }, "get") }); - var reflection_binary_reader_1 = require_reflection_binary_reader(); - Object.defineProperty(exports2, "ReflectionBinaryReader", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return reflection_binary_reader_1.ReflectionBinaryReader; - }, "get") }); - var reflection_binary_writer_1 = require_reflection_binary_writer(); - Object.defineProperty(exports2, "ReflectionBinaryWriter", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return reflection_binary_writer_1.ReflectionBinaryWriter; - }, "get") }); - var reflection_json_reader_1 = require_reflection_json_reader(); - Object.defineProperty(exports2, "ReflectionJsonReader", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return reflection_json_reader_1.ReflectionJsonReader; - }, "get") }); - var reflection_json_writer_1 = require_reflection_json_writer(); - Object.defineProperty(exports2, "ReflectionJsonWriter", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return reflection_json_writer_1.ReflectionJsonWriter; - }, "get") }); - var reflection_contains_message_type_1 = require_reflection_contains_message_type(); - Object.defineProperty(exports2, "containsMessageType", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return reflection_contains_message_type_1.containsMessageType; - }, "get") }); - var oneof_1 = require_oneof(); - Object.defineProperty(exports2, "isOneofGroup", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return oneof_1.isOneofGroup; - }, "get") }); - Object.defineProperty(exports2, "setOneofValue", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return oneof_1.setOneofValue; - }, "get") }); - Object.defineProperty(exports2, "getOneofValue", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return oneof_1.getOneofValue; - }, "get") }); - Object.defineProperty(exports2, "clearOneofValue", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return oneof_1.clearOneofValue; - }, "get") }); - Object.defineProperty(exports2, "getSelectedOneofValue", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return oneof_1.getSelectedOneofValue; - }, "get") }); - var enum_object_1 = require_enum_object(); - Object.defineProperty(exports2, "listEnumValues", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return enum_object_1.listEnumValues; - }, "get") }); - Object.defineProperty(exports2, "listEnumNames", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return enum_object_1.listEnumNames; - }, "get") }); - Object.defineProperty(exports2, "listEnumNumbers", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return 
enum_object_1.listEnumNumbers; - }, "get") }); - Object.defineProperty(exports2, "isEnumObject", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return enum_object_1.isEnumObject; - }, "get") }); - var lower_camel_case_1 = require_lower_camel_case(); - Object.defineProperty(exports2, "lowerCamelCase", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return lower_camel_case_1.lowerCamelCase; - }, "get") }); - var assert_1 = require_assert(); - Object.defineProperty(exports2, "assert", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return assert_1.assert; - }, "get") }); - Object.defineProperty(exports2, "assertNever", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return assert_1.assertNever; - }, "get") }); - Object.defineProperty(exports2, "assertInt32", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return assert_1.assertInt32; - }, "get") }); - Object.defineProperty(exports2, "assertUInt32", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return assert_1.assertUInt32; - }, "get") }); - Object.defineProperty(exports2, "assertFloat32", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return assert_1.assertFloat32; - }, "get") }); + __name(getCompressionProgram, "getCompressionProgram"); + function execCommands(commands, cwd) { + return __awaiter3(this, void 0, void 0, function* () { + for (const command of commands) { + try { + yield (0, exec_1.exec)(command, void 0, { + cwd, + env: Object.assign(Object.assign({}, process.env), { MSYS: "winsymlinks:nativestrict" }) + }); + } catch (error) { + throw new Error(`${command.split(" ")[0]} failed with error: ${error === null || error === void 0 ? void 0 : error.message}`); + } + } + }); + } + __name(execCommands, "execCommands"); + function listTar(archivePath, compressionMethod) { + return __awaiter3(this, void 0, void 0, function* () { + const commands = yield getCommands(compressionMethod, "list", archivePath); + yield execCommands(commands); + }); + } + __name(listTar, "listTar"); + exports2.listTar = listTar; + function extractTar(archivePath, compressionMethod) { + return __awaiter3(this, void 0, void 0, function* () { + const workingDirectory = getWorkingDirectory(); + yield io2.mkdirP(workingDirectory); + const commands = yield getCommands(compressionMethod, "extract", archivePath); + yield execCommands(commands); + }); + } + __name(extractTar, "extractTar"); + exports2.extractTar = extractTar; + function createTar(archiveFolder, sourceDirectories, compressionMethod) { + return __awaiter3(this, void 0, void 0, function* () { + (0, fs_1.writeFileSync)(path2.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join("\n")); + const commands = yield getCommands(compressionMethod, "create"); + yield execCommands(commands, archiveFolder); + }); + } + __name(createTar, "createTar"); + exports2.createTar = createTar; } }); -// ../node_modules/@actions/artifact/lib/generated/google/protobuf/timestamp.js -var require_timestamp = __commonJS({ - "../node_modules/@actions/artifact/lib/generated/google/protobuf/timestamp.js"(exports2) { +// ../node_modules/@actions/cache/lib/cache.js +var require_cache3 = __commonJS({ + "../node_modules/@actions/cache/lib/cache.js"(exports2) { "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.Timestamp = void 0; - var runtime_1 = require_commonjs7(); - var runtime_2 = require_commonjs7(); - var runtime_3 = require_commonjs7(); - var runtime_4 = 
require_commonjs7(); - var runtime_5 = require_commonjs7(); - var runtime_6 = require_commonjs7(); - var runtime_7 = require_commonjs7(); - var Timestamp$Type = class extends runtime_7.MessageType { - static { - __name(this, "Timestamp$Type"); + var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: /* @__PURE__ */ __name(function() { + return m[k]; + }, "get") }; } - constructor() { - super("google.protobuf.Timestamp", [ - { - no: 1, - name: "seconds", - kind: "scalar", - T: 3 - /*ScalarType.INT64*/ - }, - { - no: 2, - name: "nanos", - kind: "scalar", - T: 5 - /*ScalarType.INT32*/ + Object.defineProperty(o, k2, desc); + } : function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; + }); + var __setModuleDefault3 = exports2 && exports2.__setModuleDefault || (Object.create ? function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + } : function(o, v) { + o["default"] = v; + }); + var __importStar3 = exports2 && exports2.__importStar || function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding3(result, mod, k); + } + __setModuleDefault3(result, mod); + return result; + }; + var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { + function adopt(value) { + return value instanceof P ? value : new P(function(resolve) { + resolve(value); + }); + } + __name(adopt, "adopt"); + return new (P || (P = Promise))(function(resolve, reject) { + function fulfilled(value) { + try { + step(generator.next(value)); + } catch (e) { + reject(e); } - ]); + } + __name(fulfilled, "fulfilled"); + function rejected(value) { + try { + step(generator["throw"](value)); + } catch (e) { + reject(e); + } + } + __name(rejected, "rejected"); + function step(result) { + result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); + } + __name(step, "step"); + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.saveCache = exports2.restoreCache = exports2.isFeatureAvailable = exports2.ReserveCacheError = exports2.ValidationError = void 0; + var core2 = __importStar3(require_core()); + var path2 = __importStar3(require("path")); + var utils = __importStar3(require_cacheUtils()); + var cacheHttpClient = __importStar3(require_cacheHttpClient()); + var cacheTwirpClient = __importStar3(require_cacheTwirpClient()); + var config_1 = require_config(); + var tar_1 = require_tar(); + var constants_1 = require_constants7(); + var ValidationError = class _ValidationError extends Error { + static { + __name(this, "ValidationError"); } - /** - * Creates a new `Timestamp` for the current time. - */ - now() { - const msg = this.create(); - const ms = Date.now(); - msg.seconds = runtime_6.PbLong.from(Math.floor(ms / 1e3)).toString(); - msg.nanos = ms % 1e3 * 1e6; - return msg; + constructor(message) { + super(message); + this.name = "ValidationError"; + Object.setPrototypeOf(this, _ValidationError.prototype); } - /** - * Converts a `Timestamp` to a JavaScript Date. 
- */ - toDate(message) { - return new Date(runtime_6.PbLong.from(message.seconds).toNumber() * 1e3 + Math.ceil(message.nanos / 1e6)); + }; + exports2.ValidationError = ValidationError; + var ReserveCacheError = class _ReserveCacheError extends Error { + static { + __name(this, "ReserveCacheError"); } - /** - * Converts a JavaScript Date to a `Timestamp`. - */ - fromDate(date) { - const msg = this.create(); - const ms = date.getTime(); - msg.seconds = runtime_6.PbLong.from(Math.floor(ms / 1e3)).toString(); - msg.nanos = ms % 1e3 * 1e6; - return msg; + constructor(message) { + super(message); + this.name = "ReserveCacheError"; + Object.setPrototypeOf(this, _ReserveCacheError.prototype); } - /** - * In JSON format, the `Timestamp` type is encoded as a string - * in the RFC 3339 format. - */ - internalJsonWrite(message, options) { - let ms = runtime_6.PbLong.from(message.seconds).toNumber() * 1e3; - if (ms < Date.parse("0001-01-01T00:00:00Z") || ms > Date.parse("9999-12-31T23:59:59Z")) - throw new Error("Unable to encode Timestamp to JSON. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive."); - if (message.nanos < 0) - throw new Error("Unable to encode invalid Timestamp to JSON. Nanos must not be negative."); - let z = "Z"; - if (message.nanos > 0) { - let nanosStr = (message.nanos + 1e9).toString().substring(1); - if (nanosStr.substring(3) === "000000") - z = "." + nanosStr.substring(0, 3) + "Z"; - else if (nanosStr.substring(6) === "000") - z = "." + nanosStr.substring(0, 6) + "Z"; - else - z = "." + nanosStr + "Z"; - } - return new Date(ms).toISOString().replace(".000Z", z); + }; + exports2.ReserveCacheError = ReserveCacheError; + function checkPaths(paths) { + if (!paths || paths.length === 0) { + throw new ValidationError(`Path Validation Error: At least one directory or file path is required`); } - /** - * In JSON format, the `Timestamp` type is encoded as a string - * in the RFC 3339 format. - */ - internalJsonRead(json, options, target) { - if (typeof json !== "string") - throw new Error("Unable to parse Timestamp from JSON " + (0, runtime_5.typeofJsonValue)(json) + "."); - let matches = json.match(/^([0-9]{4})-([0-9]{2})-([0-9]{2})T([0-9]{2}):([0-9]{2}):([0-9]{2})(?:Z|\.([0-9]{3,9})Z|([+-][0-9][0-9]:[0-9][0-9]))$/); - if (!matches) - throw new Error("Unable to parse Timestamp from JSON. Invalid format."); - let ms = Date.parse(matches[1] + "-" + matches[2] + "-" + matches[3] + "T" + matches[4] + ":" + matches[5] + ":" + matches[6] + (matches[8] ? matches[8] : "Z")); - if (Number.isNaN(ms)) - throw new Error("Unable to parse Timestamp from JSON. Invalid value."); - if (ms < Date.parse("0001-01-01T00:00:00Z") || ms > Date.parse("9999-12-31T23:59:59Z")) - throw new globalThis.Error("Unable to parse Timestamp from JSON. 
Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive."); - if (!target) - target = this.create(); - target.seconds = runtime_6.PbLong.from(ms / 1e3).toString(); - target.nanos = 0; - if (matches[7]) - target.nanos = parseInt("1" + matches[7] + "0".repeat(9 - matches[7].length)) - 1e9; - return target; + } + __name(checkPaths, "checkPaths"); + function checkKey(key) { + if (key.length > 512) { + throw new ValidationError(`Key Validation Error: ${key} cannot be larger than 512 characters.`); } - create(value) { - const message = { seconds: "0", nanos: 0 }; - globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== void 0) - (0, runtime_3.reflectionMergePartial)(this, message, value); - return message; + const regex = /^[^,]*$/; + if (!regex.test(key)) { + throw new ValidationError(`Key Validation Error: ${key} cannot contain commas.`); } - internalBinaryRead(reader, length, options, target) { - let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* int64 seconds */ - 1: - message.seconds = reader.int64().toString(); - break; - case /* int32 nanos */ - 2: - message.nanos = reader.int32(); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + __name(checkKey, "checkKey"); + function isFeatureAvailable() { + return !!process.env["ACTIONS_CACHE_URL"]; + } + __name(isFeatureAvailable, "isFeatureAvailable"); + exports2.isFeatureAvailable = isFeatureAvailable; + function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { + return __awaiter3(this, void 0, void 0, function* () { + const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); + core2.debug(`Cache service version: ${cacheServiceVersion}`); + checkPaths(paths); + switch (cacheServiceVersion) { + case "v2": + return yield restoreCacheV2(paths, primaryKey, restoreKeys, options, enableCrossOsArchive); + case "v1": + default: + return yield restoreCacheV1(paths, primaryKey, restoreKeys, options, enableCrossOsArchive); + } + }); + } + __name(restoreCache, "restoreCache"); + exports2.restoreCache = restoreCache; + function restoreCacheV1(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { + return __awaiter3(this, void 0, void 0, function* () { + restoreKeys = restoreKeys || []; + const keys = [primaryKey, ...restoreKeys]; + core2.debug("Resolved Keys:"); + core2.debug(JSON.stringify(keys)); + if (keys.length > 10) { + throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); + } + for (const key of keys) { + checkKey(key); + } + const compressionMethod = yield utils.getCompressionMethod(); + let archivePath = ""; + try { + const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { + compressionMethod, + enableCrossOsArchive + }); + if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { + return void 0; + } + if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { + core2.info("Lookup only - skipping download"); + return cacheEntry.cacheKey; + } + archivePath = path2.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); + core2.debug(`Archive Path: ${archivePath}`); + yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); + if (core2.isDebug()) { + yield (0, tar_1.listTar)(archivePath, compressionMethod); + } + const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); + core2.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + yield (0, tar_1.extractTar)(archivePath, compressionMethod); + core2.info("Cache restored successfully"); + return cacheEntry.cacheKey; + } catch (error) { + const typedError = error; + if (typedError.name === ValidationError.name) { + throw error; + } else { + core2.warning(`Failed to restore: ${error.message}`); + } + } finally { + try { + yield utils.unlinkFile(archivePath); + } catch (error) { + core2.debug(`Failed to delete archive: ${error}`); } } - return message; - } - internalBinaryWrite(message, writer, options) { - if (message.seconds !== "0") - writer.tag(1, runtime_1.WireType.Varint).int64(message.seconds); - if (message.nanos !== 0) - writer.tag(2, runtime_1.WireType.Varint).int32(message.nanos); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; - } - }; - exports2.Timestamp = new Timestamp$Type(); + return void 0; + }); + } + __name(restoreCacheV1, "restoreCacheV1"); + function restoreCacheV2(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { + return __awaiter3(this, void 0, void 0, function* () { + options = Object.assign(Object.assign({}, options), { useAzureSdk: true }); + restoreKeys = restoreKeys || []; + const keys = [primaryKey, ...restoreKeys]; + core2.debug("Resolved Keys:"); + core2.debug(JSON.stringify(keys)); + if (keys.length > 10) { + throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); + } + for (const key of keys) { + checkKey(key); + } + let archivePath = ""; + try { + const twirpClient = cacheTwirpClient.internalCacheTwirpClient(); + const compressionMethod = yield utils.getCompressionMethod(); + const request = { + key: primaryKey, + restoreKeys, + version: utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive) + }; + const response = yield twirpClient.GetCacheEntryDownloadURL(request); + if (!response.ok) { + core2.warning(`Cache not found for keys: ${keys.join(", ")}`); + return void 0; + } + core2.info(`Cache hit for: ${request.key}`); + if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { + core2.info("Lookup only - skipping download"); + return response.matchedKey; + } + archivePath = path2.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); + core2.debug(`Archive path: ${archivePath}`); + core2.debug(`Starting download of archive to: ${archivePath}`); + yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options); + const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); + core2.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + if (core2.isDebug()) { + yield (0, tar_1.listTar)(archivePath, compressionMethod); + } + yield (0, tar_1.extractTar)(archivePath, compressionMethod); + core2.info("Cache restored successfully"); + return response.matchedKey; + } catch (error) { + const typedError = error; + if (typedError.name === ValidationError.name) { + throw error; + } else { + core2.warning(`Failed to restore: ${error.message}`); + } + } finally { + try { + if (archivePath) { + yield utils.unlinkFile(archivePath); + } + } catch (error) { + core2.debug(`Failed to delete archive: ${error}`); + } + } + return void 0; + }); + } + __name(restoreCacheV2, "restoreCacheV2"); + function saveCache(paths, key, options, enableCrossOsArchive = false) { + return __awaiter3(this, void 0, void 0, function* () { + const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); + core2.debug(`Cache service version: ${cacheServiceVersion}`); + checkPaths(paths); + checkKey(key); + switch (cacheServiceVersion) { + case "v2": + return yield saveCacheV2(paths, key, options, enableCrossOsArchive); + case "v1": + default: + return yield saveCacheV1(paths, key, options, enableCrossOsArchive); + } + }); + } + __name(saveCache, "saveCache"); + exports2.saveCache = saveCache; + function saveCacheV1(paths, key, options, enableCrossOsArchive = false) { + var _a, _b, _c, _d, _e; + return __awaiter3(this, void 0, void 0, function* () { + const compressionMethod = yield utils.getCompressionMethod(); + let cacheId = -1; + const cachePaths = yield utils.resolvePaths(paths); + core2.debug("Cache Paths:"); + core2.debug(`${JSON.stringify(cachePaths)}`); + if (cachePaths.length === 0) { + throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); + } + const archiveFolder = yield utils.createTempDirectory(); + const archivePath = path2.join(archiveFolder, utils.getCacheFileName(compressionMethod)); + core2.debug(`Archive Path: ${archivePath}`); + try { + yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); + if (core2.isDebug()) { + yield (0, tar_1.listTar)(archivePath, compressionMethod); + } + const fileSizeLimit = 10 * 1024 * 1024 * 1024; + const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); + core2.debug(`File Size: ${archiveFileSize}`); + if (archiveFileSize > fileSizeLimit && !(0, config_1.isGhes)()) { + throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); + } + core2.debug("Reserving Cache"); + const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, { + compressionMethod, + enableCrossOsArchive, + cacheSize: archiveFileSize + }); + if ((_a = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _a === void 0 ? 
void 0 : _a.cacheId) { + cacheId = (_b = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _b === void 0 ? void 0 : _b.cacheId; + } else if ((reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.statusCode) === 400) { + throw new Error((_d = (_c = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _c === void 0 ? void 0 : _c.message) !== null && _d !== void 0 ? _d : `Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.`); + } else { + throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? void 0 : _e.message}`); + } + core2.debug(`Saving Cache (ID: ${cacheId})`); + yield cacheHttpClient.saveCache(cacheId, archivePath, "", options); + } catch (error) { + const typedError = error; + if (typedError.name === ValidationError.name) { + throw error; + } else if (typedError.name === ReserveCacheError.name) { + core2.info(`Failed to save: ${typedError.message}`); + } else { + core2.warning(`Failed to save: ${typedError.message}`); + } + } finally { + try { + yield utils.unlinkFile(archivePath); + } catch (error) { + core2.debug(`Failed to delete archive: ${error}`); + } + } + return cacheId; + }); + } + __name(saveCacheV1, "saveCacheV1"); + function saveCacheV2(paths, key, options, enableCrossOsArchive = false) { + return __awaiter3(this, void 0, void 0, function* () { + options = Object.assign(Object.assign({}, options), { uploadChunkSize: 64 * 1024 * 1024, uploadConcurrency: 8, useAzureSdk: true }); + const compressionMethod = yield utils.getCompressionMethod(); + const twirpClient = cacheTwirpClient.internalCacheTwirpClient(); + let cacheId = -1; + const cachePaths = yield utils.resolvePaths(paths); + core2.debug("Cache Paths:"); + core2.debug(`${JSON.stringify(cachePaths)}`); + if (cachePaths.length === 0) { + throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); + } + const archiveFolder = yield utils.createTempDirectory(); + const archivePath = path2.join(archiveFolder, utils.getCacheFileName(compressionMethod)); + core2.debug(`Archive Path: ${archivePath}`); + try { + yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); + if (core2.isDebug()) { + yield (0, tar_1.listTar)(archivePath, compressionMethod); + } + const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); + core2.debug(`File Size: ${archiveFileSize}`); + if (archiveFileSize > constants_1.CacheFileSizeLimit && !(0, config_1.isGhes)()) { + throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); + } + options.archiveSizeBytes = archiveFileSize; + core2.debug("Reserving Cache"); + const version3 = utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive); + const request = { + key, + version: version3 + }; + const response = yield twirpClient.CreateCacheEntry(request); + if (!response.ok) { + throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache.`); + } + core2.debug(`Attempting to upload cache located at: 
${archivePath}`); + yield cacheHttpClient.saveCache(cacheId, archivePath, response.signedUploadUrl, options); + const finalizeRequest = { + key, + version: version3, + sizeBytes: `${archiveFileSize}` + }; + const finalizeResponse = yield twirpClient.FinalizeCacheEntryUpload(finalizeRequest); + core2.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); + if (!finalizeResponse.ok) { + throw new Error(`Unable to finalize cache with key ${key}, another job may be finalizing this cache.`); + } + cacheId = parseInt(finalizeResponse.entryId); + } catch (error) { + const typedError = error; + if (typedError.name === ValidationError.name) { + throw error; + } else if (typedError.name === ReserveCacheError.name) { + core2.info(`Failed to save: ${typedError.message}`); + } else { + core2.warning(`Failed to save: ${typedError.message}`); + } + } finally { + try { + yield utils.unlinkFile(archivePath); + } catch (error) { + core2.debug(`Failed to delete archive: ${error}`); + } + } + return cacheId; + }); + } + __name(saveCacheV2, "saveCacheV2"); } }); -// ../node_modules/@actions/artifact/lib/generated/google/protobuf/wrappers.js -var require_wrappers = __commonJS({ - "../node_modules/@actions/artifact/lib/generated/google/protobuf/wrappers.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.BytesValue = exports2.StringValue = exports2.BoolValue = exports2.UInt32Value = exports2.Int32Value = exports2.UInt64Value = exports2.Int64Value = exports2.FloatValue = exports2.DoubleValue = void 0; - var runtime_1 = require_commonjs7(); - var runtime_2 = require_commonjs7(); - var runtime_3 = require_commonjs7(); - var runtime_4 = require_commonjs7(); - var runtime_5 = require_commonjs7(); - var runtime_6 = require_commonjs7(); - var runtime_7 = require_commonjs7(); - var DoubleValue$Type = class extends runtime_7.MessageType { - static { - __name(this, "DoubleValue$Type"); +// ../node_modules/@actions/tool-cache/node_modules/semver/semver.js +var require_semver2 = __commonJS({ + "../node_modules/@actions/tool-cache/node_modules/semver/semver.js"(exports2, module2) { + exports2 = module2.exports = SemVer; + var debug; + if (typeof process === "object" && process.env && process.env.NODE_DEBUG && /\bsemver\b/i.test(process.env.NODE_DEBUG)) { + debug = /* @__PURE__ */ __name(function() { + var args = Array.prototype.slice.call(arguments, 0); + args.unshift("SEMVER"); + console.log.apply(console, args); + }, "debug"); + } else { + debug = /* @__PURE__ */ __name(function() { + }, "debug"); + } + exports2.SEMVER_SPEC_VERSION = "2.0.0"; + var MAX_LENGTH = 256; + var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || /* istanbul ignore next */ + 9007199254740991; + var MAX_SAFE_COMPONENT_LENGTH = 16; + var MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6; + var re2 = exports2.re = []; + var safeRe = exports2.safeRe = []; + var src = exports2.src = []; + var t = exports2.tokens = {}; + var R = 0; + function tok(n) { + t[n] = R++; + } + __name(tok, "tok"); + var LETTERDASHNUMBER = "[a-zA-Z0-9-]"; + var safeRegexReplacements = [ + ["\\s", 1], + ["\\d", MAX_LENGTH], + [LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH] + ]; + function makeSafeRe(value) { + for (var i2 = 0; i2 < safeRegexReplacements.length; i2++) { + var token = safeRegexReplacements[i2][0]; + var max = safeRegexReplacements[i2][1]; + value = value.split(token + "*").join(token + "{0," + max + "}").split(token + "+").join(token + "{1," + max + "}"); } - constructor() { - super("google.protobuf.DoubleValue", [ - 
{ - no: 1, - name: "value", - kind: "scalar", - T: 1 - /*ScalarType.DOUBLE*/ - } - ]); + return value; + } + __name(makeSafeRe, "makeSafeRe"); + tok("NUMERICIDENTIFIER"); + src[t.NUMERICIDENTIFIER] = "0|[1-9]\\d*"; + tok("NUMERICIDENTIFIERLOOSE"); + src[t.NUMERICIDENTIFIERLOOSE] = "\\d+"; + tok("NONNUMERICIDENTIFIER"); + src[t.NONNUMERICIDENTIFIER] = "\\d*[a-zA-Z-]" + LETTERDASHNUMBER + "*"; + tok("MAINVERSION"); + src[t.MAINVERSION] = "(" + src[t.NUMERICIDENTIFIER] + ")\\.(" + src[t.NUMERICIDENTIFIER] + ")\\.(" + src[t.NUMERICIDENTIFIER] + ")"; + tok("MAINVERSIONLOOSE"); + src[t.MAINVERSIONLOOSE] = "(" + src[t.NUMERICIDENTIFIERLOOSE] + ")\\.(" + src[t.NUMERICIDENTIFIERLOOSE] + ")\\.(" + src[t.NUMERICIDENTIFIERLOOSE] + ")"; + tok("PRERELEASEIDENTIFIER"); + src[t.PRERELEASEIDENTIFIER] = "(?:" + src[t.NUMERICIDENTIFIER] + "|" + src[t.NONNUMERICIDENTIFIER] + ")"; + tok("PRERELEASEIDENTIFIERLOOSE"); + src[t.PRERELEASEIDENTIFIERLOOSE] = "(?:" + src[t.NUMERICIDENTIFIERLOOSE] + "|" + src[t.NONNUMERICIDENTIFIER] + ")"; + tok("PRERELEASE"); + src[t.PRERELEASE] = "(?:-(" + src[t.PRERELEASEIDENTIFIER] + "(?:\\." + src[t.PRERELEASEIDENTIFIER] + ")*))"; + tok("PRERELEASELOOSE"); + src[t.PRERELEASELOOSE] = "(?:-?(" + src[t.PRERELEASEIDENTIFIERLOOSE] + "(?:\\." + src[t.PRERELEASEIDENTIFIERLOOSE] + ")*))"; + tok("BUILDIDENTIFIER"); + src[t.BUILDIDENTIFIER] = LETTERDASHNUMBER + "+"; + tok("BUILD"); + src[t.BUILD] = "(?:\\+(" + src[t.BUILDIDENTIFIER] + "(?:\\." + src[t.BUILDIDENTIFIER] + ")*))"; + tok("FULL"); + tok("FULLPLAIN"); + src[t.FULLPLAIN] = "v?" + src[t.MAINVERSION] + src[t.PRERELEASE] + "?" + src[t.BUILD] + "?"; + src[t.FULL] = "^" + src[t.FULLPLAIN] + "$"; + tok("LOOSEPLAIN"); + src[t.LOOSEPLAIN] = "[v=\\s]*" + src[t.MAINVERSIONLOOSE] + src[t.PRERELEASELOOSE] + "?" + src[t.BUILD] + "?"; + tok("LOOSE"); + src[t.LOOSE] = "^" + src[t.LOOSEPLAIN] + "$"; + tok("GTLT"); + src[t.GTLT] = "((?:<|>)?=?)"; + tok("XRANGEIDENTIFIERLOOSE"); + src[t.XRANGEIDENTIFIERLOOSE] = src[t.NUMERICIDENTIFIERLOOSE] + "|x|X|\\*"; + tok("XRANGEIDENTIFIER"); + src[t.XRANGEIDENTIFIER] = src[t.NUMERICIDENTIFIER] + "|x|X|\\*"; + tok("XRANGEPLAIN"); + src[t.XRANGEPLAIN] = "[v=\\s]*(" + src[t.XRANGEIDENTIFIER] + ")(?:\\.(" + src[t.XRANGEIDENTIFIER] + ")(?:\\.(" + src[t.XRANGEIDENTIFIER] + ")(?:" + src[t.PRERELEASE] + ")?" + src[t.BUILD] + "?)?)?"; + tok("XRANGEPLAINLOOSE"); + src[t.XRANGEPLAINLOOSE] = "[v=\\s]*(" + src[t.XRANGEIDENTIFIERLOOSE] + ")(?:\\.(" + src[t.XRANGEIDENTIFIERLOOSE] + ")(?:\\.(" + src[t.XRANGEIDENTIFIERLOOSE] + ")(?:" + src[t.PRERELEASELOOSE] + ")?" 
+ src[t.BUILD] + "?)?)?"; + tok("XRANGE"); + src[t.XRANGE] = "^" + src[t.GTLT] + "\\s*" + src[t.XRANGEPLAIN] + "$"; + tok("XRANGELOOSE"); + src[t.XRANGELOOSE] = "^" + src[t.GTLT] + "\\s*" + src[t.XRANGEPLAINLOOSE] + "$"; + tok("COERCE"); + src[t.COERCE] = "(^|[^\\d])(\\d{1," + MAX_SAFE_COMPONENT_LENGTH + "})(?:\\.(\\d{1," + MAX_SAFE_COMPONENT_LENGTH + "}))?(?:\\.(\\d{1," + MAX_SAFE_COMPONENT_LENGTH + "}))?(?:$|[^\\d])"; + tok("COERCERTL"); + re2[t.COERCERTL] = new RegExp(src[t.COERCE], "g"); + safeRe[t.COERCERTL] = new RegExp(makeSafeRe(src[t.COERCE]), "g"); + tok("LONETILDE"); + src[t.LONETILDE] = "(?:~>?)"; + tok("TILDETRIM"); + src[t.TILDETRIM] = "(\\s*)" + src[t.LONETILDE] + "\\s+"; + re2[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], "g"); + safeRe[t.TILDETRIM] = new RegExp(makeSafeRe(src[t.TILDETRIM]), "g"); + var tildeTrimReplace = "$1~"; + tok("TILDE"); + src[t.TILDE] = "^" + src[t.LONETILDE] + src[t.XRANGEPLAIN] + "$"; + tok("TILDELOOSE"); + src[t.TILDELOOSE] = "^" + src[t.LONETILDE] + src[t.XRANGEPLAINLOOSE] + "$"; + tok("LONECARET"); + src[t.LONECARET] = "(?:\\^)"; + tok("CARETTRIM"); + src[t.CARETTRIM] = "(\\s*)" + src[t.LONECARET] + "\\s+"; + re2[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], "g"); + safeRe[t.CARETTRIM] = new RegExp(makeSafeRe(src[t.CARETTRIM]), "g"); + var caretTrimReplace = "$1^"; + tok("CARET"); + src[t.CARET] = "^" + src[t.LONECARET] + src[t.XRANGEPLAIN] + "$"; + tok("CARETLOOSE"); + src[t.CARETLOOSE] = "^" + src[t.LONECARET] + src[t.XRANGEPLAINLOOSE] + "$"; + tok("COMPARATORLOOSE"); + src[t.COMPARATORLOOSE] = "^" + src[t.GTLT] + "\\s*(" + src[t.LOOSEPLAIN] + ")$|^$"; + tok("COMPARATOR"); + src[t.COMPARATOR] = "^" + src[t.GTLT] + "\\s*(" + src[t.FULLPLAIN] + ")$|^$"; + tok("COMPARATORTRIM"); + src[t.COMPARATORTRIM] = "(\\s*)" + src[t.GTLT] + "\\s*(" + src[t.LOOSEPLAIN] + "|" + src[t.XRANGEPLAIN] + ")"; + re2[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], "g"); + safeRe[t.COMPARATORTRIM] = new RegExp(makeSafeRe(src[t.COMPARATORTRIM]), "g"); + var comparatorTrimReplace = "$1$2$3"; + tok("HYPHENRANGE"); + src[t.HYPHENRANGE] = "^\\s*(" + src[t.XRANGEPLAIN] + ")\\s+-\\s+(" + src[t.XRANGEPLAIN] + ")\\s*$"; + tok("HYPHENRANGELOOSE"); + src[t.HYPHENRANGELOOSE] = "^\\s*(" + src[t.XRANGEPLAINLOOSE] + ")\\s+-\\s+(" + src[t.XRANGEPLAINLOOSE] + ")\\s*$"; + tok("STAR"); + src[t.STAR] = "(<|>)?=?\\s*\\*"; + for (i = 0; i < R; i++) { + debug(i, src[i]); + if (!re2[i]) { + re2[i] = new RegExp(src[i]); + safeRe[i] = new RegExp(makeSafeRe(src[i])); } - /** - * Encode `DoubleValue` to JSON number. - */ - internalJsonWrite(message, options) { - return this.refJsonWriter.scalar(2, message.value, "value", false, true); + } + var i; + exports2.parse = parse2; + function parse2(version3, options) { + if (!options || typeof options !== "object") { + options = { + loose: !!options, + includePrerelease: false + }; + } + if (version3 instanceof SemVer) { + return version3; + } + if (typeof version3 !== "string") { + return null; + } + if (version3.length > MAX_LENGTH) { + return null; + } + var r = options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL]; + if (!r.test(version3)) { + return null; } - /** - * Decode `DoubleValue` from JSON number. 
- */ - internalJsonRead(json, options, target) { - if (!target) - target = this.create(); - target.value = this.refJsonReader.scalar(json, 1, void 0, "value"); - return target; + try { + return new SemVer(version3, options); + } catch (er) { + return null; } - create(value) { - const message = { value: 0 }; - globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== void 0) - (0, runtime_5.reflectionMergePartial)(this, message, value); - return message; + } + __name(parse2, "parse"); + exports2.valid = valid; + function valid(version3, options) { + var v = parse2(version3, options); + return v ? v.version : null; + } + __name(valid, "valid"); + exports2.clean = clean; + function clean(version3, options) { + var s = parse2(version3.trim().replace(/^[=v]+/, ""), options); + return s ? s.version : null; + } + __name(clean, "clean"); + exports2.SemVer = SemVer; + function SemVer(version3, options) { + if (!options || typeof options !== "object") { + options = { + loose: !!options, + includePrerelease: false + }; } - internalBinaryRead(reader, length, options, target) { - let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* double value */ - 1: - message.value = reader.double(); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); - } + if (version3 instanceof SemVer) { + if (version3.loose === options.loose) { + return version3; + } else { + version3 = version3.version; } - return message; + } else if (typeof version3 !== "string") { + throw new TypeError("Invalid Version: " + version3); } - internalBinaryWrite(message, writer, options) { - if (message.value !== 0) - writer.tag(1, runtime_3.WireType.Bit64).double(message.value); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; + if (version3.length > MAX_LENGTH) { + throw new TypeError("version is longer than " + MAX_LENGTH + " characters"); } - }; - exports2.DoubleValue = new DoubleValue$Type(); - var FloatValue$Type = class extends runtime_7.MessageType { - static { - __name(this, "FloatValue$Type"); + if (!(this instanceof SemVer)) { + return new SemVer(version3, options); } - constructor() { - super("google.protobuf.FloatValue", [ - { - no: 1, - name: "value", - kind: "scalar", - T: 2 - /*ScalarType.FLOAT*/ - } - ]); + debug("SemVer", version3, options); + this.options = options; + this.loose = !!options.loose; + var m = version3.trim().match(options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL]); + if (!m) { + throw new TypeError("Invalid Version: " + version3); } - /** - * Encode `FloatValue` to JSON number. - */ - internalJsonWrite(message, options) { - return this.refJsonWriter.scalar(1, message.value, "value", false, true); + this.raw = version3; + this.major = +m[1]; + this.minor = +m[2]; + this.patch = +m[3]; + if (this.major > MAX_SAFE_INTEGER || this.major < 0) { + throw new TypeError("Invalid major version"); } - /** - * Decode `FloatValue` from JSON number. 
- */ - internalJsonRead(json, options, target) { - if (!target) - target = this.create(); - target.value = this.refJsonReader.scalar(json, 1, void 0, "value"); - return target; + if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) { + throw new TypeError("Invalid minor version"); } - create(value) { - const message = { value: 0 }; - globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== void 0) - (0, runtime_5.reflectionMergePartial)(this, message, value); - return message; + if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) { + throw new TypeError("Invalid patch version"); } - internalBinaryRead(reader, length, options, target) { - let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* float value */ - 1: - message.value = reader.float(); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + if (!m[4]) { + this.prerelease = []; + } else { + this.prerelease = m[4].split(".").map(function(id) { + if (/^[0-9]+$/.test(id)) { + var num = +id; + if (num >= 0 && num < MAX_SAFE_INTEGER) { + return num; + } } - } - return message; + return id; + }); } - internalBinaryWrite(message, writer, options) { - if (message.value !== 0) - writer.tag(1, runtime_3.WireType.Bit32).float(message.value); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; + this.build = m[5] ? m[5].split(".") : []; + this.format(); + } + __name(SemVer, "SemVer"); + SemVer.prototype.format = function() { + this.version = this.major + "." + this.minor + "." + this.patch; + if (this.prerelease.length) { + this.version += "-" + this.prerelease.join("."); } + return this.version; }; - exports2.FloatValue = new FloatValue$Type(); - var Int64Value$Type = class extends runtime_7.MessageType { - static { - __name(this, "Int64Value$Type"); - } - constructor() { - super("google.protobuf.Int64Value", [ - { - no: 1, - name: "value", - kind: "scalar", - T: 3 - /*ScalarType.INT64*/ - } - ]); + SemVer.prototype.toString = function() { + return this.version; + }; + SemVer.prototype.compare = function(other) { + debug("SemVer.compare", this.version, this.options, other); + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options); } - /** - * Encode `Int64Value` to JSON string. - */ - internalJsonWrite(message, options) { - return this.refJsonWriter.scalar(runtime_1.ScalarType.INT64, message.value, "value", false, true); + return this.compareMain(other) || this.comparePre(other); + }; + SemVer.prototype.compareMain = function(other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options); } - /** - * Decode `Int64Value` from JSON string. 
- */ - internalJsonRead(json, options, target) { - if (!target) - target = this.create(); - target.value = this.refJsonReader.scalar(json, runtime_1.ScalarType.INT64, runtime_2.LongType.STRING, "value"); - return target; + return compareIdentifiers(this.major, other.major) || compareIdentifiers(this.minor, other.minor) || compareIdentifiers(this.patch, other.patch); + }; + SemVer.prototype.comparePre = function(other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options); } - create(value) { - const message = { value: "0" }; - globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== void 0) - (0, runtime_5.reflectionMergePartial)(this, message, value); - return message; + if (this.prerelease.length && !other.prerelease.length) { + return -1; + } else if (!this.prerelease.length && other.prerelease.length) { + return 1; + } else if (!this.prerelease.length && !other.prerelease.length) { + return 0; } - internalBinaryRead(reader, length, options, target) { - let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* int64 value */ - 1: - message.value = reader.int64().toString(); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); - } + var i2 = 0; + do { + var a = this.prerelease[i2]; + var b = other.prerelease[i2]; + debug("prerelease compare", i2, a, b); + if (a === void 0 && b === void 0) { + return 0; + } else if (b === void 0) { + return 1; + } else if (a === void 0) { + return -1; + } else if (a === b) { + continue; + } else { + return compareIdentifiers(a, b); } - return message; + } while (++i2); + }; + SemVer.prototype.compareBuild = function(other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options); } - internalBinaryWrite(message, writer, options) { - if (message.value !== "0") - writer.tag(1, runtime_3.WireType.Varint).int64(message.value); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; + var i2 = 0; + do { + var a = this.build[i2]; + var b = other.build[i2]; + debug("prerelease compare", i2, a, b); + if (a === void 0 && b === void 0) { + return 0; + } else if (b === void 0) { + return 1; + } else if (a === void 0) { + return -1; + } else if (a === b) { + continue; + } else { + return compareIdentifiers(a, b); + } + } while (++i2); + }; + SemVer.prototype.inc = function(release, identifier) { + switch (release) { + case "premajor": + this.prerelease.length = 0; + this.patch = 0; + this.minor = 0; + this.major++; + this.inc("pre", identifier); + break; + case "preminor": + this.prerelease.length = 0; + this.patch = 0; + this.minor++; + this.inc("pre", identifier); + break; + case "prepatch": + this.prerelease.length = 0; + this.inc("patch", identifier); + this.inc("pre", identifier); + break; + // If the input is a non-prerelease version, this acts the same as + // prepatch. 
+ case "prerelease": + if (this.prerelease.length === 0) { + this.inc("patch", identifier); + } + this.inc("pre", identifier); + break; + case "major": + if (this.minor !== 0 || this.patch !== 0 || this.prerelease.length === 0) { + this.major++; + } + this.minor = 0; + this.patch = 0; + this.prerelease = []; + break; + case "minor": + if (this.patch !== 0 || this.prerelease.length === 0) { + this.minor++; + } + this.patch = 0; + this.prerelease = []; + break; + case "patch": + if (this.prerelease.length === 0) { + this.patch++; + } + this.prerelease = []; + break; + // This probably shouldn't be used publicly. + // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction. + case "pre": + if (this.prerelease.length === 0) { + this.prerelease = [0]; + } else { + var i2 = this.prerelease.length; + while (--i2 >= 0) { + if (typeof this.prerelease[i2] === "number") { + this.prerelease[i2]++; + i2 = -2; + } + } + if (i2 === -1) { + this.prerelease.push(0); + } + } + if (identifier) { + if (this.prerelease[0] === identifier) { + if (isNaN(this.prerelease[1])) { + this.prerelease = [identifier, 0]; + } + } else { + this.prerelease = [identifier, 0]; + } + } + break; + default: + throw new Error("invalid increment argument: " + release); } + this.format(); + this.raw = this.version; + return this; }; - exports2.Int64Value = new Int64Value$Type(); - var UInt64Value$Type = class extends runtime_7.MessageType { - static { - __name(this, "UInt64Value$Type"); + exports2.inc = inc; + function inc(version3, release, loose, identifier) { + if (typeof loose === "string") { + identifier = loose; + loose = void 0; } - constructor() { - super("google.protobuf.UInt64Value", [ - { - no: 1, - name: "value", - kind: "scalar", - T: 4 - /*ScalarType.UINT64*/ + try { + return new SemVer(version3, loose).inc(release, identifier).version; + } catch (er) { + return null; + } + } + __name(inc, "inc"); + exports2.diff = diff; + function diff(version1, version22) { + if (eq(version1, version22)) { + return null; + } else { + var v12 = parse2(version1); + var v2 = parse2(version22); + var prefix = ""; + if (v12.prerelease.length || v2.prerelease.length) { + prefix = "pre"; + var defaultResult = "prerelease"; + } + for (var key in v12) { + if (key === "major" || key === "minor" || key === "patch") { + if (v12[key] !== v2[key]) { + return prefix + key; + } } - ]); + } + return defaultResult; } - /** - * Encode `UInt64Value` to JSON string. - */ - internalJsonWrite(message, options) { - return this.refJsonWriter.scalar(runtime_1.ScalarType.UINT64, message.value, "value", false, true); + } + __name(diff, "diff"); + exports2.compareIdentifiers = compareIdentifiers; + var numeric = /^[0-9]+$/; + function compareIdentifiers(a, b) { + var anum = numeric.test(a); + var bnum = numeric.test(b); + if (anum && bnum) { + a = +a; + b = +b; } - /** - * Decode `UInt64Value` from JSON string. - */ - internalJsonRead(json, options, target) { - if (!target) - target = this.create(); - target.value = this.refJsonReader.scalar(json, runtime_1.ScalarType.UINT64, runtime_2.LongType.STRING, "value"); - return target; + return a === b ? 0 : anum && !bnum ? -1 : bnum && !anum ? 1 : a < b ? 
-1 : 1; + } + __name(compareIdentifiers, "compareIdentifiers"); + exports2.rcompareIdentifiers = rcompareIdentifiers; + function rcompareIdentifiers(a, b) { + return compareIdentifiers(b, a); + } + __name(rcompareIdentifiers, "rcompareIdentifiers"); + exports2.major = major2; + function major2(a, loose) { + return new SemVer(a, loose).major; + } + __name(major2, "major"); + exports2.minor = minor; + function minor(a, loose) { + return new SemVer(a, loose).minor; + } + __name(minor, "minor"); + exports2.patch = patch; + function patch(a, loose) { + return new SemVer(a, loose).patch; + } + __name(patch, "patch"); + exports2.compare = compare; + function compare(a, b, loose) { + return new SemVer(a, loose).compare(new SemVer(b, loose)); + } + __name(compare, "compare"); + exports2.compareLoose = compareLoose; + function compareLoose(a, b) { + return compare(a, b, true); + } + __name(compareLoose, "compareLoose"); + exports2.compareBuild = compareBuild; + function compareBuild(a, b, loose) { + var versionA = new SemVer(a, loose); + var versionB = new SemVer(b, loose); + return versionA.compare(versionB) || versionA.compareBuild(versionB); + } + __name(compareBuild, "compareBuild"); + exports2.rcompare = rcompare; + function rcompare(a, b, loose) { + return compare(b, a, loose); + } + __name(rcompare, "rcompare"); + exports2.sort = sort; + function sort(list, loose) { + return list.sort(function(a, b) { + return exports2.compareBuild(a, b, loose); + }); + } + __name(sort, "sort"); + exports2.rsort = rsort; + function rsort(list, loose) { + return list.sort(function(a, b) { + return exports2.compareBuild(b, a, loose); + }); + } + __name(rsort, "rsort"); + exports2.gt = gt; + function gt(a, b, loose) { + return compare(a, b, loose) > 0; + } + __name(gt, "gt"); + exports2.lt = lt; + function lt(a, b, loose) { + return compare(a, b, loose) < 0; + } + __name(lt, "lt"); + exports2.eq = eq; + function eq(a, b, loose) { + return compare(a, b, loose) === 0; + } + __name(eq, "eq"); + exports2.neq = neq; + function neq(a, b, loose) { + return compare(a, b, loose) !== 0; + } + __name(neq, "neq"); + exports2.gte = gte; + function gte(a, b, loose) { + return compare(a, b, loose) >= 0; + } + __name(gte, "gte"); + exports2.lte = lte; + function lte(a, b, loose) { + return compare(a, b, loose) <= 0; + } + __name(lte, "lte"); + exports2.cmp = cmp; + function cmp(a, op, b, loose) { + switch (op) { + case "===": + if (typeof a === "object") + a = a.version; + if (typeof b === "object") + b = b.version; + return a === b; + case "!==": + if (typeof a === "object") + a = a.version; + if (typeof b === "object") + b = b.version; + return a !== b; + case "": + case "=": + case "==": + return eq(a, b, loose); + case "!=": + return neq(a, b, loose); + case ">": + return gt(a, b, loose); + case ">=": + return gte(a, b, loose); + case "<": + return lt(a, b, loose); + case "<=": + return lte(a, b, loose); + default: + throw new TypeError("Invalid operator: " + op); } - create(value) { - const message = { value: "0" }; - globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== void 0) - (0, runtime_5.reflectionMergePartial)(this, message, value); - return message; + } + __name(cmp, "cmp"); + exports2.Comparator = Comparator; + function Comparator(comp, options) { + if (!options || typeof options !== "object") { + options = { + loose: !!options, + includePrerelease: false + }; } - internalBinaryRead(reader, length, options, target) { - let message = target !== null 
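Illustration (not part of the diff): the module above is the vendored `semver` package, so its behaviour can be sketched against the familiar public API. Assuming the semver 5/6-style entry points bundled here:

```js
const semver = require('semver');

// A prerelease sorts below the corresponding release (comparePre above).
semver.compare('1.2.3-alpha', '1.2.3'); // -1

// inc() walks the release-type switch shown above.
semver.inc('1.2.3', 'premajor', 'rc');     // '2.0.0-rc.0'
semver.inc('1.2.3', 'prerelease', 'beta'); // '1.2.4-beta.0'
semver.inc('1.2.4-beta.0', 'prerelease');  // '1.2.4-beta.1'

// cmp() dispatches on the operator string, as in the switch above.
semver.cmp('1.2.3', '>=', '1.0.0'); // true
```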
&& target !== void 0 ? target : this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* uint64 value */ - 1: - message.value = reader.uint64().toString(); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); - } + if (comp instanceof Comparator) { + if (comp.loose === !!options.loose) { + return comp; + } else { + comp = comp.value; } - return message; } - internalBinaryWrite(message, writer, options) { - if (message.value !== "0") - writer.tag(1, runtime_3.WireType.Varint).uint64(message.value); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; + if (!(this instanceof Comparator)) { + return new Comparator(comp, options); } - }; - exports2.UInt64Value = new UInt64Value$Type(); - var Int32Value$Type = class extends runtime_7.MessageType { - static { - __name(this, "Int32Value$Type"); + comp = comp.trim().split(/\s+/).join(" "); + debug("comparator", comp, options); + this.options = options; + this.loose = !!options.loose; + this.parse(comp); + if (this.semver === ANY) { + this.value = ""; + } else { + this.value = this.operator + this.semver.version; } - constructor() { - super("google.protobuf.Int32Value", [ - { - no: 1, - name: "value", - kind: "scalar", - T: 5 - /*ScalarType.INT32*/ - } - ]); + debug("comp", this); + } + __name(Comparator, "Comparator"); + var ANY = {}; + Comparator.prototype.parse = function(comp) { + var r = this.options.loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR]; + var m = comp.match(r); + if (!m) { + throw new TypeError("Invalid comparator: " + comp); } - /** - * Encode `Int32Value` to JSON string. - */ - internalJsonWrite(message, options) { - return this.refJsonWriter.scalar(5, message.value, "value", false, true); + this.operator = m[1] !== void 0 ? m[1] : ""; + if (this.operator === "=") { + this.operator = ""; } - /** - * Decode `Int32Value` from JSON string. - */ - internalJsonRead(json, options, target) { - if (!target) - target = this.create(); - target.value = this.refJsonReader.scalar(json, 5, void 0, "value"); - return target; + if (!m[2]) { + this.semver = ANY; + } else { + this.semver = new SemVer(m[2], this.options.loose); } - create(value) { - const message = { value: 0 }; - globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== void 0) - (0, runtime_5.reflectionMergePartial)(this, message, value); - return message; + }; + Comparator.prototype.toString = function() { + return this.value; + }; + Comparator.prototype.test = function(version3) { + debug("Comparator.test", version3, this.options.loose); + if (this.semver === ANY || version3 === ANY) { + return true; } - internalBinaryRead(reader, length, options, target) { - let message = target !== null && target !== void 0 ? 
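Illustration (not part of the diff): a `Comparator` pairs one operator with one version, and `test()` funnels into the `cmp` dispatch above; an empty comparator parses to the internal `ANY` sentinel and matches everything. A minimal sketch, assuming the bundled API:

```js
const { Comparator } = require('semver');

const c = new Comparator('>=1.2.7');
c.test('1.2.8'); // true
c.test('1.2.6'); // false

new Comparator('').test('0.0.1'); // true (ANY matches every valid version)
```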
target : this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* int32 value */ - 1: - message.value = reader.int32(); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); - } + if (typeof version3 === "string") { + try { + version3 = new SemVer(version3, this.options); + } catch (er) { + return false; } - return message; } - internalBinaryWrite(message, writer, options) { - if (message.value !== 0) - writer.tag(1, runtime_3.WireType.Varint).int32(message.value); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; + return cmp(version3, this.operator, this.semver, this.options); + }; + Comparator.prototype.intersects = function(comp, options) { + if (!(comp instanceof Comparator)) { + throw new TypeError("a Comparator is required"); + } + if (!options || typeof options !== "object") { + options = { + loose: !!options, + includePrerelease: false + }; + } + var rangeTmp; + if (this.operator === "") { + if (this.value === "") { + return true; + } + rangeTmp = new Range(comp.value, options); + return satisfies(this.value, rangeTmp, options); + } else if (comp.operator === "") { + if (comp.value === "") { + return true; + } + rangeTmp = new Range(this.value, options); + return satisfies(comp.semver, rangeTmp, options); } + var sameDirectionIncreasing = (this.operator === ">=" || this.operator === ">") && (comp.operator === ">=" || comp.operator === ">"); + var sameDirectionDecreasing = (this.operator === "<=" || this.operator === "<") && (comp.operator === "<=" || comp.operator === "<"); + var sameSemVer = this.semver.version === comp.semver.version; + var differentDirectionsInclusive = (this.operator === ">=" || this.operator === "<=") && (comp.operator === ">=" || comp.operator === "<="); + var oppositeDirectionsLessThan = cmp(this.semver, "<", comp.semver, options) && ((this.operator === ">=" || this.operator === ">") && (comp.operator === "<=" || comp.operator === "<")); + var oppositeDirectionsGreaterThan = cmp(this.semver, ">", comp.semver, options) && ((this.operator === "<=" || this.operator === "<") && (comp.operator === ">=" || comp.operator === ">")); + return sameDirectionIncreasing || sameDirectionDecreasing || sameSemVer && differentDirectionsInclusive || oppositeDirectionsLessThan || oppositeDirectionsGreaterThan; }; - exports2.Int32Value = new Int32Value$Type(); - var UInt32Value$Type = class extends runtime_7.MessageType { - static { - __name(this, "UInt32Value$Type"); + exports2.Range = Range; + function Range(range, options) { + if (!options || typeof options !== "object") { + options = { + loose: !!options, + includePrerelease: false + }; } - constructor() { - super("google.protobuf.UInt32Value", [ - { - no: 1, - name: "value", - kind: "scalar", - T: 13 - /*ScalarType.UINT32*/ - } - ]); + if (range instanceof Range) { + if (range.loose === !!options.loose && range.includePrerelease === !!options.includePrerelease) { + return range; + } else { + return new Range(range.raw, options); + } } - /** - * Encode `UInt32Value` to JSON string. 
- */ - internalJsonWrite(message, options) { - return this.refJsonWriter.scalar(13, message.value, "value", false, true); + if (range instanceof Comparator) { + return new Range(range.value, options); } - /** - * Decode `UInt32Value` from JSON string. - */ - internalJsonRead(json, options, target) { - if (!target) - target = this.create(); - target.value = this.refJsonReader.scalar(json, 13, void 0, "value"); - return target; + if (!(this instanceof Range)) { + return new Range(range, options); } - create(value) { - const message = { value: 0 }; - globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== void 0) - (0, runtime_5.reflectionMergePartial)(this, message, value); - return message; + this.options = options; + this.loose = !!options.loose; + this.includePrerelease = !!options.includePrerelease; + this.raw = range.trim().split(/\s+/).join(" "); + this.set = this.raw.split("||").map(function(range2) { + return this.parseRange(range2.trim()); + }, this).filter(function(c) { + return c.length; + }); + if (!this.set.length) { + throw new TypeError("Invalid SemVer Range: " + this.raw); } - internalBinaryRead(reader, length, options, target) { - let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* uint32 value */ - 1: - message.value = reader.uint32(); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); - } - } - return message; + this.format(); + } + __name(Range, "Range"); + Range.prototype.format = function() { + this.range = this.set.map(function(comps) { + return comps.join(" ").trim(); + }).join("||").trim(); + return this.range; + }; + Range.prototype.toString = function() { + return this.range; + }; + Range.prototype.parseRange = function(range) { + var loose = this.options.loose; + var hr = loose ? safeRe[t.HYPHENRANGELOOSE] : safeRe[t.HYPHENRANGE]; + range = range.replace(hr, hyphenReplace); + debug("hyphen replace", range); + range = range.replace(safeRe[t.COMPARATORTRIM], comparatorTrimReplace); + debug("comparator trim", range, safeRe[t.COMPARATORTRIM]); + range = range.replace(safeRe[t.TILDETRIM], tildeTrimReplace); + range = range.replace(safeRe[t.CARETTRIM], caretTrimReplace); + range = range.split(/\s+/).join(" "); + var compRe = loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR]; + var set = range.split(" ").map(function(comp) { + return parseComparator(comp, this.options); + }, this).join(" ").split(/\s+/); + if (this.options.loose) { + set = set.filter(function(comp) { + return !!comp.match(compRe); + }); } - internalBinaryWrite(message, writer, options) { - if (message.value !== 0) - writer.tag(1, runtime_3.WireType.Varint).uint32(message.value); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? 
runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; + set = set.map(function(comp) { + return new Comparator(comp, this.options); + }, this); + return set; + }; + Range.prototype.intersects = function(range, options) { + if (!(range instanceof Range)) { + throw new TypeError("a Range is required"); } + return this.set.some(function(thisComparators) { + return isSatisfiable(thisComparators, options) && range.set.some(function(rangeComparators) { + return isSatisfiable(rangeComparators, options) && thisComparators.every(function(thisComparator) { + return rangeComparators.every(function(rangeComparator) { + return thisComparator.intersects(rangeComparator, options); + }); + }); + }); + }); }; - exports2.UInt32Value = new UInt32Value$Type(); - var BoolValue$Type = class extends runtime_7.MessageType { - static { - __name(this, "BoolValue$Type"); + function isSatisfiable(comparators, options) { + var result = true; + var remainingComparators = comparators.slice(); + var testComparator = remainingComparators.pop(); + while (result && remainingComparators.length) { + result = remainingComparators.every(function(otherComparator) { + return testComparator.intersects(otherComparator, options); + }); + testComparator = remainingComparators.pop(); } - constructor() { - super("google.protobuf.BoolValue", [ - { - no: 1, - name: "value", - kind: "scalar", - T: 8 - /*ScalarType.BOOL*/ + return result; + } + __name(isSatisfiable, "isSatisfiable"); + exports2.toComparators = toComparators; + function toComparators(range, options) { + return new Range(range, options).set.map(function(comp) { + return comp.map(function(c) { + return c.value; + }).join(" ").trim().split(" "); + }); + } + __name(toComparators, "toComparators"); + function parseComparator(comp, options) { + debug("comp", comp, options); + comp = replaceCarets(comp, options); + debug("caret", comp); + comp = replaceTildes(comp, options); + debug("tildes", comp); + comp = replaceXRanges(comp, options); + debug("xrange", comp); + comp = replaceStars(comp, options); + debug("stars", comp); + return comp; + } + __name(parseComparator, "parseComparator"); + function isX(id) { + return !id || id.toLowerCase() === "x" || id === "*"; + } + __name(isX, "isX"); + function replaceTildes(comp, options) { + return comp.trim().split(/\s+/).map(function(comp2) { + return replaceTilde(comp2, options); + }).join(" "); + } + __name(replaceTildes, "replaceTildes"); + function replaceTilde(comp, options) { + var r = options.loose ? safeRe[t.TILDELOOSE] : safeRe[t.TILDE]; + return comp.replace(r, function(_2, M, m, p, pr) { + debug("tilde", comp, _2, M, m, p, pr); + var ret; + if (isX(M)) { + ret = ""; + } else if (isX(m)) { + ret = ">=" + M + ".0.0 <" + (+M + 1) + ".0.0"; + } else if (isX(p)) { + ret = ">=" + M + "." + m + ".0 <" + M + "." + (+m + 1) + ".0"; + } else if (pr) { + debug("replaceTilde pr", pr); + ret = ">=" + M + "." + m + "." + p + "-" + pr + " <" + M + "." + (+m + 1) + ".0"; + } else { + ret = ">=" + M + "." + m + "." + p + " <" + M + "." + (+m + 1) + ".0"; + } + debug("tilde return", ret); + return ret; + }); + } + __name(replaceTilde, "replaceTilde"); + function replaceCarets(comp, options) { + return comp.trim().split(/\s+/).map(function(comp2) { + return replaceCaret(comp2, options); + }).join(" "); + } + __name(replaceCarets, "replaceCarets"); + function replaceCaret(comp, options) { + debug("caret", comp, options); + var r = options.loose ? 
safeRe[t.CARETLOOSE] : safeRe[t.CARET]; + return comp.replace(r, function(_2, M, m, p, pr) { + debug("caret", comp, _2, M, m, p, pr); + var ret; + if (isX(M)) { + ret = ""; + } else if (isX(m)) { + ret = ">=" + M + ".0.0 <" + (+M + 1) + ".0.0"; + } else if (isX(p)) { + if (M === "0") { + ret = ">=" + M + "." + m + ".0 <" + M + "." + (+m + 1) + ".0"; + } else { + ret = ">=" + M + "." + m + ".0 <" + (+M + 1) + ".0.0"; } - ]); - } - /** - * Encode `BoolValue` to JSON bool. - */ - internalJsonWrite(message, options) { - return message.value; + } else if (pr) { + debug("replaceCaret pr", pr); + if (M === "0") { + if (m === "0") { + ret = ">=" + M + "." + m + "." + p + "-" + pr + " <" + M + "." + m + "." + (+p + 1); + } else { + ret = ">=" + M + "." + m + "." + p + "-" + pr + " <" + M + "." + (+m + 1) + ".0"; + } + } else { + ret = ">=" + M + "." + m + "." + p + "-" + pr + " <" + (+M + 1) + ".0.0"; + } + } else { + debug("no pr"); + if (M === "0") { + if (m === "0") { + ret = ">=" + M + "." + m + "." + p + " <" + M + "." + m + "." + (+p + 1); + } else { + ret = ">=" + M + "." + m + "." + p + " <" + M + "." + (+m + 1) + ".0"; + } + } else { + ret = ">=" + M + "." + m + "." + p + " <" + (+M + 1) + ".0.0"; + } + } + debug("caret return", ret); + return ret; + }); + } + __name(replaceCaret, "replaceCaret"); + function replaceXRanges(comp, options) { + debug("replaceXRanges", comp, options); + return comp.split(/\s+/).map(function(comp2) { + return replaceXRange(comp2, options); + }).join(" "); + } + __name(replaceXRanges, "replaceXRanges"); + function replaceXRange(comp, options) { + comp = comp.trim(); + var r = options.loose ? safeRe[t.XRANGELOOSE] : safeRe[t.XRANGE]; + return comp.replace(r, function(ret, gtlt, M, m, p, pr) { + debug("xRange", comp, ret, gtlt, M, m, p, pr); + var xM = isX(M); + var xm = xM || isX(m); + var xp = xm || isX(p); + var anyX = xp; + if (gtlt === "=" && anyX) { + gtlt = ""; + } + pr = options.includePrerelease ? "-0" : ""; + if (xM) { + if (gtlt === ">" || gtlt === "<") { + ret = "<0.0.0-0"; + } else { + ret = "*"; + } + } else if (gtlt && anyX) { + if (xm) { + m = 0; + } + p = 0; + if (gtlt === ">") { + gtlt = ">="; + if (xm) { + M = +M + 1; + m = 0; + p = 0; + } else { + m = +m + 1; + p = 0; + } + } else if (gtlt === "<=") { + gtlt = "<"; + if (xm) { + M = +M + 1; + } else { + m = +m + 1; + } + } + ret = gtlt + M + "." + m + "." + p + pr; + } else if (xm) { + ret = ">=" + M + ".0.0" + pr + " <" + (+M + 1) + ".0.0" + pr; + } else if (xp) { + ret = ">=" + M + "." + m + ".0" + pr + " <" + M + "." + (+m + 1) + ".0" + pr; + } + debug("xRange return", ret); + return ret; + }); + } + __name(replaceXRange, "replaceXRange"); + function replaceStars(comp, options) { + debug("replaceStars", comp, options); + return comp.trim().replace(safeRe[t.STAR], ""); + } + __name(replaceStars, "replaceStars"); + function hyphenReplace($0, from, fM, fm, fp, fpr, fb, to, tM, tm, tp, tpr, tb) { + if (isX(fM)) { + from = ""; + } else if (isX(fm)) { + from = ">=" + fM + ".0.0"; + } else if (isX(fp)) { + from = ">=" + fM + "." + fm + ".0"; + } else { + from = ">=" + from; } - /** - * Decode `BoolValue` from JSON bool. - */ - internalJsonRead(json, options, target) { - if (!target) - target = this.create(); - target.value = this.refJsonReader.scalar(json, 8, void 0, "value"); - return target; + if (isX(tM)) { + to = ""; + } else if (isX(tm)) { + to = "<" + (+tM + 1) + ".0.0"; + } else if (isX(tp)) { + to = "<" + tM + "." + (+tm + 1) + ".0"; + } else if (tpr) { + to = "<=" + tM + "." + tm + "." 
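Illustration (not part of the diff): the `replace*` helpers above desugar caret, tilde, x-range and hyphen shorthands into plain comparator pairs; the normalized form is visible on `Range#range`. Assuming the bundled API:

```js
const { Range } = require('semver');

new Range('^1.2.3').range;        // '>=1.2.3 <2.0.0'
new Range('^0.2.3').range;        // '>=0.2.3 <0.3.0' (caret narrows below 1.0.0)
new Range('~1.2.3').range;        // '>=1.2.3 <1.3.0'
new Range('1.2.x').range;         // '>=1.2.0 <1.3.0'
new Range('1.2.3 - 2.3.4').range; // '>=1.2.3 <=2.3.4'
```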
+ tp + "-" + tpr; + } else { + to = "<=" + to; } - create(value) { - const message = { value: false }; - globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== void 0) - (0, runtime_5.reflectionMergePartial)(this, message, value); - return message; + return (from + " " + to).trim(); + } + __name(hyphenReplace, "hyphenReplace"); + Range.prototype.test = function(version3) { + if (!version3) { + return false; } - internalBinaryRead(reader, length, options, target) { - let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* bool value */ - 1: - message.value = reader.bool(); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); - } + if (typeof version3 === "string") { + try { + version3 = new SemVer(version3, this.options); + } catch (er) { + return false; } - return message; } - internalBinaryWrite(message, writer, options) { - if (message.value !== false) - writer.tag(1, runtime_3.WireType.Varint).bool(message.value); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; + for (var i2 = 0; i2 < this.set.length; i2++) { + if (testSet(this.set[i2], version3, this.options)) { + return true; + } } + return false; }; - exports2.BoolValue = new BoolValue$Type(); - var StringValue$Type = class extends runtime_7.MessageType { - static { - __name(this, "StringValue$Type"); + function testSet(set, version3, options) { + for (var i2 = 0; i2 < set.length; i2++) { + if (!set[i2].test(version3)) { + return false; + } } - constructor() { - super("google.protobuf.StringValue", [ - { - no: 1, - name: "value", - kind: "scalar", - T: 9 - /*ScalarType.STRING*/ + if (version3.prerelease.length && !options.includePrerelease) { + for (i2 = 0; i2 < set.length; i2++) { + debug(set[i2].semver); + if (set[i2].semver === ANY) { + continue; } - ]); - } - /** - * Encode `StringValue` to JSON string. - */ - internalJsonWrite(message, options) { - return message.value; - } - /** - * Decode `StringValue` from JSON string. - */ - internalJsonRead(json, options, target) { - if (!target) - target = this.create(); - target.value = this.refJsonReader.scalar(json, 9, void 0, "value"); - return target; - } - create(value) { - const message = { value: "" }; - globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== void 0) - (0, runtime_5.reflectionMergePartial)(this, message, value); - return message; - } - internalBinaryRead(reader, length, options, target) { - let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* string value */ - 1: - message.value = reader.string(); - break; - default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? 
runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + if (set[i2].semver.prerelease.length > 0) { + var allowed = set[i2].semver; + if (allowed.major === version3.major && allowed.minor === version3.minor && allowed.patch === version3.patch) { + return true; + } } } - return message; + return false; } - internalBinaryWrite(message, writer, options) { - if (message.value !== "") - writer.tag(1, runtime_3.WireType.LengthDelimited).string(message.value); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; + return true; + } + __name(testSet, "testSet"); + exports2.satisfies = satisfies; + function satisfies(version3, range, options) { + try { + range = new Range(range, options); + } catch (er) { + return false; } - }; - exports2.StringValue = new StringValue$Type(); - var BytesValue$Type = class extends runtime_7.MessageType { - static { - __name(this, "BytesValue$Type"); + return range.test(version3); + } + __name(satisfies, "satisfies"); + exports2.maxSatisfying = maxSatisfying; + function maxSatisfying(versions, range, options) { + var max = null; + var maxSV = null; + try { + var rangeObj = new Range(range, options); + } catch (er) { + return null; } - constructor() { - super("google.protobuf.BytesValue", [ - { - no: 1, - name: "value", - kind: "scalar", - T: 12 - /*ScalarType.BYTES*/ + versions.forEach(function(v) { + if (rangeObj.test(v)) { + if (!max || maxSV.compare(v) === -1) { + max = v; + maxSV = new SemVer(max, options); } - ]); - } - /** - * Encode `BytesValue` to JSON string. - */ - internalJsonWrite(message, options) { - return this.refJsonWriter.scalar(12, message.value, "value", false, true); + } + }); + return max; + } + __name(maxSatisfying, "maxSatisfying"); + exports2.minSatisfying = minSatisfying; + function minSatisfying(versions, range, options) { + var min = null; + var minSV = null; + try { + var rangeObj = new Range(range, options); + } catch (er) { + return null; } - /** - * Decode `BytesValue` from JSON string. - */ - internalJsonRead(json, options, target) { - if (!target) - target = this.create(); - target.value = this.refJsonReader.scalar(json, 12, void 0, "value"); - return target; + versions.forEach(function(v) { + if (rangeObj.test(v)) { + if (!min || minSV.compare(v) === 1) { + min = v; + minSV = new SemVer(min, options); + } + } + }); + return min; + } + __name(minSatisfying, "minSatisfying"); + exports2.minVersion = minVersion; + function minVersion(range, loose) { + range = new Range(range, loose); + var minver = new SemVer("0.0.0"); + if (range.test(minver)) { + return minver; } - create(value) { - const message = { value: new Uint8Array(0) }; - globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this }); - if (value !== void 0) - (0, runtime_5.reflectionMergePartial)(this, message, value); - return message; + minver = new SemVer("0.0.0-0"); + if (range.test(minver)) { + return minver; } - internalBinaryRead(reader, length, options, target) { - let message = target !== null && target !== void 0 ? 
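Illustration (not part of the diff): `testSet` above is the reason a prerelease only satisfies a range when the range itself names a prerelease on the same major.minor.patch tuple, unless `includePrerelease` is passed. Assuming the bundled API:

```js
const semver = require('semver');

semver.satisfies('1.2.5', '^1.2.0');        // true
semver.satisfies('1.3.0-beta.1', '^1.2.0'); // false (prerelease excluded)
semver.satisfies('1.3.0-beta.1', '^1.2.0', { includePrerelease: true }); // true

semver.maxSatisfying(['1.1.0', '1.2.0', '2.0.0'], '^1.0.0'); // '1.2.0'
```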
target : this.create(), end = reader.pos + length; - while (reader.pos < end) { - let [fieldNo, wireType] = reader.tag(); - switch (fieldNo) { - case /* bytes value */ - 1: - message.value = reader.bytes(); + minver = null; + for (var i2 = 0; i2 < range.set.length; ++i2) { + var comparators = range.set[i2]; + comparators.forEach(function(comparator) { + var compver = new SemVer(comparator.semver.version); + switch (comparator.operator) { + case ">": + if (compver.prerelease.length === 0) { + compver.patch++; + } else { + compver.prerelease.push(0); + } + compver.raw = compver.format(); + /* fallthrough */ + case "": + case ">=": + if (!minver || gt(minver, compver)) { + minver = compver; + } + break; + case "<": + case "<=": break; + /* istanbul ignore next */ default: - let u = options.readUnknownField; - if (u === "throw") - throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); - let d = reader.skip(wireType); - if (u !== false) - (u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + throw new Error("Unexpected operation: " + comparator.operator); } - } - return message; + }); } - internalBinaryWrite(message, writer, options) { - if (message.value.length) - writer.tag(1, runtime_3.WireType.LengthDelimited).bytes(message.value); - let u = options.writeUnknownFields; - if (u !== false) - (u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); - return writer; + if (minver && range.test(minver)) { + return minver; } - }; - exports2.BytesValue = new BytesValue$Type(); - } -}); - -// ../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/reflection-info.js -var require_reflection_info2 = __commonJS({ - "../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/reflection-info.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.readServiceOption = exports2.readMethodOption = exports2.readMethodOptions = exports2.normalizeMethodInfo = void 0; - var runtime_1 = require_commonjs7(); - function normalizeMethodInfo(method, service) { - var _a, _b, _c; - let m = method; - m.service = service; - m.localName = (_a = m.localName) !== null && _a !== void 0 ? _a : runtime_1.lowerCamelCase(m.name); - m.serverStreaming = !!m.serverStreaming; - m.clientStreaming = !!m.clientStreaming; - m.options = (_b = m.options) !== null && _b !== void 0 ? _b : {}; - m.idempotency = (_c = m.idempotency) !== null && _c !== void 0 ? _c : void 0; - return m; - } - __name(normalizeMethodInfo, "normalizeMethodInfo"); - exports2.normalizeMethodInfo = normalizeMethodInfo; - function readMethodOptions(service, methodName, extensionName, extensionType) { - var _a; - const options = (_a = service.methods.find((m, i) => m.localName === methodName || i === methodName)) === null || _a === void 0 ? void 0 : _a.options; - return options && options[extensionName] ? extensionType.fromJson(options[extensionName]) : void 0; + return null; } - __name(readMethodOptions, "readMethodOptions"); - exports2.readMethodOptions = readMethodOptions; - function readMethodOption(service, methodName, extensionName, extensionType) { - var _a; - const options = (_a = service.methods.find((m, i) => m.localName === methodName || i === methodName)) === null || _a === void 0 ? 
void 0 : _a.options; - if (!options) { - return void 0; - } - const optionVal = options[extensionName]; - if (optionVal === void 0) { - return optionVal; + __name(minVersion, "minVersion"); + exports2.validRange = validRange; + function validRange(range, options) { + try { + return new Range(range, options).range || "*"; + } catch (er) { + return null; } - return extensionType ? extensionType.fromJson(optionVal) : optionVal; } - __name(readMethodOption, "readMethodOption"); - exports2.readMethodOption = readMethodOption; - function readServiceOption(service, extensionName, extensionType) { - const options = service.options; - if (!options) { - return void 0; - } - const optionVal = options[extensionName]; - if (optionVal === void 0) { - return optionVal; - } - return extensionType ? extensionType.fromJson(optionVal) : optionVal; + __name(validRange, "validRange"); + exports2.ltr = ltr; + function ltr(version3, range, options) { + return outside(version3, range, "<", options); } - __name(readServiceOption, "readServiceOption"); - exports2.readServiceOption = readServiceOption; - } -}); - -// ../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/service-type.js -var require_service_type = __commonJS({ - "../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/service-type.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.ServiceType = void 0; - var reflection_info_1 = require_reflection_info2(); - var ServiceType = class { - static { - __name(this, "ServiceType"); - } - constructor(typeName, methods, options) { - this.typeName = typeName; - this.methods = methods.map((i) => reflection_info_1.normalizeMethodInfo(i, this)); - this.options = options !== null && options !== void 0 ? options : {}; - } - }; - exports2.ServiceType = ServiceType; - } -}); - -// ../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-error.js -var require_rpc_error = __commonJS({ - "../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-error.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.RpcError = void 0; - var RpcError = class extends Error { - static { - __name(this, "RpcError"); + __name(ltr, "ltr"); + exports2.gtr = gtr; + function gtr(version3, range, options) { + return outside(version3, range, ">", options); + } + __name(gtr, "gtr"); + exports2.outside = outside; + function outside(version3, range, hilo, options) { + version3 = new SemVer(version3, options); + range = new Range(range, options); + var gtfn, ltefn, ltfn, comp, ecomp; + switch (hilo) { + case ">": + gtfn = gt; + ltefn = lte; + ltfn = lt; + comp = ">"; + ecomp = ">="; + break; + case "<": + gtfn = lt; + ltefn = gte; + ltfn = gt; + comp = "<"; + ecomp = "<="; + break; + default: + throw new TypeError('Must provide a hilo val of "<" or ">"'); } - constructor(message, code = "UNKNOWN", meta) { - super(message); - this.name = "RpcError"; - Object.setPrototypeOf(this, new.target.prototype); - this.code = code; - this.meta = meta !== null && meta !== void 0 ? 
meta : {}; + if (satisfies(version3, range, options)) { + return false; } - toString() { - const l = [this.name + ": " + this.message]; - if (this.code) { - l.push(""); - l.push("Code: " + this.code); - } - if (this.serviceName && this.methodName) { - l.push("Method: " + this.serviceName + "/" + this.methodName); - } - let m = Object.entries(this.meta); - if (m.length) { - l.push(""); - l.push("Meta:"); - for (let [k, v] of m) { - l.push(` ${k}: ${v}`); + for (var i2 = 0; i2 < range.set.length; ++i2) { + var comparators = range.set[i2]; + var high = null; + var low = null; + comparators.forEach(function(comparator) { + if (comparator.semver === ANY) { + comparator = new Comparator(">=0.0.0"); + } + high = high || comparator; + low = low || comparator; + if (gtfn(comparator.semver, high.semver, options)) { + high = comparator; + } else if (ltfn(comparator.semver, low.semver, options)) { + low = comparator; } + }); + if (high.operator === comp || high.operator === ecomp) { + return false; } - return l.join("\n"); - } - }; - exports2.RpcError = RpcError; - } -}); - -// ../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-options.js -var require_rpc_options = __commonJS({ - "../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-options.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.mergeRpcOptions = void 0; - var runtime_1 = require_commonjs7(); - function mergeRpcOptions(defaults, options) { - if (!options) - return defaults; - let o = {}; - copy(defaults, o); - copy(options, o); - for (let key of Object.keys(options)) { - let val = options[key]; - switch (key) { - case "jsonOptions": - o.jsonOptions = runtime_1.mergeJsonOptions(defaults.jsonOptions, o.jsonOptions); - break; - case "binaryOptions": - o.binaryOptions = runtime_1.mergeBinaryOptions(defaults.binaryOptions, o.binaryOptions); - break; - case "meta": - o.meta = {}; - copy(defaults.meta, o.meta); - copy(options.meta, o.meta); - break; - case "interceptors": - o.interceptors = defaults.interceptors ? defaults.interceptors.concat(val) : val.concat(); - break; + if ((!low.operator || low.operator === comp) && ltefn(version3, low.semver)) { + return false; + } else if (low.operator === ecomp && ltfn(version3, low.semver)) { + return false; } } - return o; + return true; } - __name(mergeRpcOptions, "mergeRpcOptions"); - exports2.mergeRpcOptions = mergeRpcOptions; - function copy(a, into) { - if (!a) - return; - let c = into; - for (let [k, v] of Object.entries(a)) { - if (v instanceof Date) - c[k] = new Date(v.getTime()); - else if (Array.isArray(v)) - c[k] = v.concat(); - else - c[k] = v; - } + __name(outside, "outside"); + exports2.prerelease = prerelease; + function prerelease(version3, options) { + var parsed = parse2(version3, options); + return parsed && parsed.prerelease.length ? 
parsed.prerelease : null; } - __name(copy, "copy"); - } -}); - -// ../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/deferred.js -var require_deferred = __commonJS({ - "../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/deferred.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.Deferred = exports2.DeferredState = void 0; - var DeferredState; - (function(DeferredState2) { - DeferredState2[DeferredState2["PENDING"] = 0] = "PENDING"; - DeferredState2[DeferredState2["REJECTED"] = 1] = "REJECTED"; - DeferredState2[DeferredState2["RESOLVED"] = 2] = "RESOLVED"; - })(DeferredState = exports2.DeferredState || (exports2.DeferredState = {})); - var Deferred = class { - static { - __name(this, "Deferred"); - } - /** - * @param preventUnhandledRejectionWarning - prevents the warning - * "Unhandled Promise rejection" by adding a noop rejection handler. - * Working with calls returned from the runtime-rpc package in an - * async function usually means awaiting one call property after - * the other. This means that the "status" is not being awaited when - * an earlier await for the "headers" is rejected. This causes the - * "unhandled promise reject" warning. A more correct behaviour for - * calls might be to become aware whether at least one of the - * promises is handled and swallow the rejection warning for the - * others. - */ - constructor(preventUnhandledRejectionWarning = true) { - this._state = DeferredState.PENDING; - this._promise = new Promise((resolve, reject) => { - this._resolve = resolve; - this._reject = reject; - }); - if (preventUnhandledRejectionWarning) { - this._promise.catch((_2) => { - }); - } - } - /** - * Get the current state of the promise. - */ - get state() { - return this._state; - } - /** - * Get the deferred promise. - */ - get promise() { - return this._promise; + __name(prerelease, "prerelease"); + exports2.intersects = intersects; + function intersects(r1, r2, options) { + r1 = new Range(r1, options); + r2 = new Range(r2, options); + return r1.intersects(r2); + } + __name(intersects, "intersects"); + exports2.coerce = coerce; + function coerce(version3, options) { + if (version3 instanceof SemVer) { + return version3; } - /** - * Resolve the promise. Throws if the promise is already resolved or rejected. - */ - resolve(value) { - if (this.state !== DeferredState.PENDING) - throw new Error(`cannot resolve ${DeferredState[this.state].toLowerCase()}`); - this._resolve(value); - this._state = DeferredState.RESOLVED; + if (typeof version3 === "number") { + version3 = String(version3); } - /** - * Reject the promise. Throws if the promise is already resolved or rejected. - */ - reject(reason) { - if (this.state !== DeferredState.PENDING) - throw new Error(`cannot reject ${DeferredState[this.state].toLowerCase()}`); - this._reject(reason); - this._state = DeferredState.REJECTED; + if (typeof version3 !== "string") { + return null; } - /** - * Resolve the promise. Ignore if not pending. 
- */ - resolvePending(val) { - if (this._state === DeferredState.PENDING) - this.resolve(val); + options = options || {}; + var match = null; + if (!options.rtl) { + match = version3.match(safeRe[t.COERCE]); + } else { + var next; + while ((next = safeRe[t.COERCERTL].exec(version3)) && (!match || match.index + match[0].length !== version3.length)) { + if (!match || next.index + next[0].length !== match.index + match[0].length) { + match = next; + } + safeRe[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length; + } + safeRe[t.COERCERTL].lastIndex = -1; } - /** - * Reject the promise. Ignore if not pending. - */ - rejectPending(reason) { - if (this._state === DeferredState.PENDING) - this.reject(reason); + if (match === null) { + return null; } - }; - exports2.Deferred = Deferred; + return parse2(match[2] + "." + (match[3] || "0") + "." + (match[4] || "0"), options); + } + __name(coerce, "coerce"); } }); -// ../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-output-stream.js -var require_rpc_output_stream = __commonJS({ - "../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-output-stream.js"(exports2) { +// ../node_modules/@actions/tool-cache/lib/manifest.js +var require_manifest = __commonJS({ + "../node_modules/@actions/tool-cache/lib/manifest.js"(exports2, module2) { "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.RpcOutputStreamController = void 0; - var deferred_1 = require_deferred(); - var runtime_1 = require_commonjs7(); - var RpcOutputStreamController = class { - static { - __name(this, "RpcOutputStreamController"); - } - constructor() { - this._lis = { - nxt: [], - msg: [], - err: [], - cmp: [] - }; - this._closed = false; - } - // --- RpcOutputStream callback API - onNext(callback) { - return this.addLis(callback, this._lis.nxt); - } - onMessage(callback) { - return this.addLis(callback, this._lis.msg); - } - onError(callback) { - return this.addLis(callback, this._lis.err); - } - onComplete(callback) { - return this.addLis(callback, this._lis.cmp); - } - addLis(callback, list) { - list.push(callback); - return () => { - let i = list.indexOf(callback); - if (i >= 0) - list.splice(i, 1); - }; - } - // remove all listeners - clearLis() { - for (let l of Object.values(this._lis)) - l.splice(0, l.length); - } - // --- Controller API - /** - * Is this stream already closed by a completion or error? - */ - get closed() { - return this._closed !== false; - } - /** - * Emit message, close with error, or close successfully, but only one - * at a time. - * Can be used to wrap a stream by using the other stream's `onNext`. - */ - notifyNext(message, error, complete) { - runtime_1.assert((message ? 1 : 0) + (error ? 1 : 0) + (complete ? 1 : 0) <= 1, "only one emission at a time"); - if (message) - this.notifyMessage(message); - if (error) - this.notifyError(error); - if (complete) - this.notifyComplete(); - } - /** - * Emits a new message. Throws if stream is closed. - * - * Triggers onNext and onMessage callbacks. - */ - notifyMessage(message) { - runtime_1.assert(!this.closed, "stream is closed"); - this.pushIt({ value: message, done: false }); - this._lis.msg.forEach((l) => l(message)); - this._lis.nxt.forEach((l) => l(message, void 0, false)); - } - /** - * Closes the stream with an error. Throws if stream is closed. - * - * Triggers onNext and onError callbacks. 
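Illustration (not part of the diff): `coerce` above extracts the first (or, with `rtl`, the right-most) version-looking substring from arbitrary text and pads missing parts with zeros. Assuming the bundled API:

```js
const semver = require('semver');

semver.coerce('v2').version;                     // '2.0.0'
semver.coerce('42.6.7.9.3-alpha').version;       // '42.6.7'
semver.coerce('1.2.3.4', { rtl: true }).version; // '2.3.4'
semver.coerce('no version here');                // null
```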
- */ - notifyError(error) { - runtime_1.assert(!this.closed, "stream is closed"); - this._closed = error; - this.pushIt(error); - this._lis.err.forEach((l) => l(error)); - this._lis.nxt.forEach((l) => l(void 0, error, false)); - this.clearLis(); - } - /** - * Closes the stream successfully. Throws if stream is closed. - * - * Triggers onNext and onComplete callbacks. - */ - notifyComplete() { - runtime_1.assert(!this.closed, "stream is closed"); - this._closed = true; - this.pushIt({ value: null, done: true }); - this._lis.cmp.forEach((l) => l()); - this._lis.nxt.forEach((l) => l(void 0, void 0, true)); - this.clearLis(); - } - /** - * Creates an async iterator (that can be used with `for await {...}`) - * to consume the stream. - * - * Some things to note: - * - If an error occurs, the `for await` will throw it. - * - If an error occurred before the `for await` was started, `for await` - * will re-throw it. - * - If the stream is already complete, the `for await` will be empty. - * - If your `for await` consumes slower than the stream produces, - * for example because you are relaying messages in a slow operation, - * messages are queued. - */ - [Symbol.asyncIterator]() { - if (!this._itState) { - this._itState = { q: [] }; - } - if (this._closed === true) - this.pushIt({ value: null, done: true }); - else if (this._closed !== false) - this.pushIt(this._closed); - return { - next: /* @__PURE__ */ __name(() => { - let state = this._itState; - runtime_1.assert(state, "bad state"); - runtime_1.assert(!state.p, "iterator contract broken"); - let first = state.q.shift(); - if (first) - return "value" in first ? Promise.resolve(first) : Promise.reject(first); - state.p = new deferred_1.Deferred(); - return state.p.promise; - }, "next") - }; - } - // "push" a new iterator result. - // this either resolves a pending promise, or enqueues the result. - pushIt(result) { - let state = this._itState; - if (!state) - return; - if (state.p) { - const p = state.p; - runtime_1.assert(p.state == deferred_1.DeferredState.PENDING, "iterator contract broken"); - "value" in result ? p.resolve(result) : p.reject(result); - delete state.p; - } else { - state.q.push(result); - } + var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: /* @__PURE__ */ __name(function() { + return m[k]; + }, "get") }); + } : function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; + }); + var __setModuleDefault3 = exports2 && exports2.__setModuleDefault || (Object.create ? function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + } : function(o, v) { + o["default"] = v; + }); + var __importStar3 = exports2 && exports2.__importStar || function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding3(result, mod, k); } + __setModuleDefault3(result, mod); + return result; }; - exports2.RpcOutputStreamController = RpcOutputStreamController; - } -}); - -// ../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/unary-call.js -var require_unary_call = __commonJS({ - "../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/unary-call.js"(exports2) { - "use strict"; var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? 
value : new P(function(resolve) { @@ -97807,203 +100836,192 @@ var require_unary_call = __commonJS({ }); }; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.UnaryCall = void 0; - var UnaryCall = class { - static { - __name(this, "UnaryCall"); - } - constructor(method, requestHeaders, request, headers, response, status, trailers) { - this.method = method; - this.requestHeaders = requestHeaders; - this.request = request; - this.headers = headers; - this.response = response; - this.status = status; - this.trailers = trailers; - } - /** - * If you are only interested in the final outcome of this call, - * you can await it to receive a `FinishedUnaryCall`. - */ - then(onfulfilled, onrejected) { - return this.promiseFinished().then((value) => onfulfilled ? Promise.resolve(onfulfilled(value)) : value, (reason) => onrejected ? Promise.resolve(onrejected(reason)) : Promise.reject(reason)); - } - promiseFinished() { - return __awaiter3(this, void 0, void 0, function* () { - let [headers, response, status, trailers] = yield Promise.all([this.headers, this.response, this.status, this.trailers]); - return { - method: this.method, - requestHeaders: this.requestHeaders, - request: this.request, - headers, - response, - status, - trailers - }; - }); - } - }; - exports2.UnaryCall = UnaryCall; - } -}); - -// ../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/server-streaming-call.js -var require_server_streaming_call = __commonJS({ - "../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/server-streaming-call.js"(exports2) { - "use strict"; - var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? value : new P(function(resolve) { - resolve(value); - }); - } - __name(adopt, "adopt"); - return new (P || (P = Promise))(function(resolve, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); - } - } - __name(fulfilled, "fulfilled"); - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); + exports2._readLinuxVersionFile = exports2._getOsVersion = exports2._findMatch = void 0; + var semver = __importStar3(require_semver2()); + var core_1 = require_core(); + var os = require("os"); + var cp = require("child_process"); + var fs2 = require("fs"); + function _findMatch(versionSpec, stable, candidates, archFilter) { + return __awaiter3(this, void 0, void 0, function* () { + const platFilter = os.platform(); + let result; + let match; + let file; + for (const candidate of candidates) { + const version3 = candidate.version; + core_1.debug(`check ${version3} satisfies ${versionSpec}`); + if (semver.satisfies(version3, versionSpec) && (!stable || candidate.stable === stable)) { + file = candidate.files.find((item) => { + core_1.debug(`${item.arch}===${archFilter} && ${item.platform}===${platFilter}`); + let chk = item.arch === archFilter && item.platform === platFilter; + if (chk && item.platform_version) { + const osVersion = module2.exports._getOsVersion(); + if (osVersion === item.platform_version) { + chk = true; + } else { + chk = semver.satisfies(osVersion, item.platform_version); + } + } + return chk; + }); + if (file) { + core_1.debug(`matched ${candidate.version}`); + match = candidate; + break; + } } } - __name(rejected, "rejected"); - function step(result) { - result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); + if (match && file) { + result = Object.assign({}, match); + result.files = [file]; } - __name(step, "step"); - step((generator = generator.apply(thisArg, _arguments || [])).next()); + return result; }); - }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.ServerStreamingCall = void 0; - var ServerStreamingCall = class { - static { - __name(this, "ServerStreamingCall"); - } - constructor(method, requestHeaders, request, headers, response, status, trailers) { - this.method = method; - this.requestHeaders = requestHeaders; - this.request = request; - this.headers = headers; - this.responses = response; - this.status = status; - this.trailers = trailers; - } - /** - * Instead of awaiting the response status and trailers, you can - * just as well await this call itself to receive the server outcome. - * You should first setup some listeners to the `request` to - * see the actual messages the server replied with. - */ - then(onfulfilled, onrejected) { - return this.promiseFinished().then((value) => onfulfilled ? Promise.resolve(onfulfilled(value)) : value, (reason) => onrejected ? Promise.resolve(onrejected(reason)) : Promise.reject(reason)); + } + __name(_findMatch, "_findMatch"); + exports2._findMatch = _findMatch; + function _getOsVersion() { + const plat = os.platform(); + let version3 = ""; + if (plat === "darwin") { + version3 = cp.execSync("sw_vers -productVersion").toString(); + } else if (plat === "linux") { + const lsbContents = module2.exports._readLinuxVersionFile(); + if (lsbContents) { + const lines = lsbContents.split("\n"); + for (const line of lines) { + const parts = line.split("="); + if (parts.length === 2 && (parts[0].trim() === "VERSION_ID" || parts[0].trim() === "DISTRIB_RELEASE")) { + version3 = parts[1].trim().replace(/^"/, "").replace(/"$/, ""); + break; + } + } + } } - promiseFinished() { - return __awaiter3(this, void 0, void 0, function* () { - let [headers, status, trailers] = yield Promise.all([this.headers, this.status, this.trailers]); - return { - method: this.method, - requestHeaders: this.requestHeaders, - request: this.request, - headers, - status, - trailers - }; - }); + return version3; + } + __name(_getOsVersion, "_getOsVersion"); + exports2._getOsVersion = _getOsVersion; + function _readLinuxVersionFile() { + const lsbReleaseFile = "/etc/lsb-release"; + const osReleaseFile = "/etc/os-release"; + let contents = ""; + if (fs2.existsSync(lsbReleaseFile)) { + contents = fs2.readFileSync(lsbReleaseFile).toString(); + } else if (fs2.existsSync(osReleaseFile)) { + contents = fs2.readFileSync(osReleaseFile).toString(); } - }; - exports2.ServerStreamingCall = ServerStreamingCall; + return contents; + } + __name(_readLinuxVersionFile, "_readLinuxVersionFile"); + exports2._readLinuxVersionFile = _readLinuxVersionFile; + } +}); + +// ../node_modules/uuid/lib/rng.js +var require_rng = __commonJS({ + "../node_modules/uuid/lib/rng.js"(exports2, module2) { + var crypto4 = require("crypto"); + module2.exports = /* @__PURE__ */ __name(function nodeRNG() { + return crypto4.randomBytes(16); + }, "nodeRNG"); + } +}); + +// ../node_modules/uuid/lib/bytesToUuid.js +var require_bytesToUuid = __commonJS({ + "../node_modules/uuid/lib/bytesToUuid.js"(exports2, module2) { + var byteToHex2 = []; + for (i = 0; i < 256; ++i) { + byteToHex2[i] = (i + 256).toString(16).substr(1); + } + var i; + function bytesToUuid(buf, offset) { + var i2 = offset || 0; + var bth = byteToHex2; + return [ 
+ bth[buf[i2++]], + bth[buf[i2++]], + bth[buf[i2++]], + bth[buf[i2++]], + "-", + bth[buf[i2++]], + bth[buf[i2++]], + "-", + bth[buf[i2++]], + bth[buf[i2++]], + "-", + bth[buf[i2++]], + bth[buf[i2++]], + "-", + bth[buf[i2++]], + bth[buf[i2++]], + bth[buf[i2++]], + bth[buf[i2++]], + bth[buf[i2++]], + bth[buf[i2++]] + ].join(""); + } + __name(bytesToUuid, "bytesToUuid"); + module2.exports = bytesToUuid; } }); -// ../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/client-streaming-call.js -var require_client_streaming_call = __commonJS({ - "../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/client-streaming-call.js"(exports2) { - "use strict"; - var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? value : new P(function(resolve) { - resolve(value); - }); +// ../node_modules/uuid/v4.js +var require_v4 = __commonJS({ + "../node_modules/uuid/v4.js"(exports2, module2) { + var rng2 = require_rng(); + var bytesToUuid = require_bytesToUuid(); + function v42(options, buf, offset) { + var i = buf && offset || 0; + if (typeof options == "string") { + buf = options === "binary" ? new Array(16) : null; + options = null; } - __name(adopt, "adopt"); - return new (P || (P = Promise))(function(resolve, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); - } - } - __name(fulfilled, "fulfilled"); - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); - } - } - __name(rejected, "rejected"); - function step(result) { - result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); + options = options || {}; + var rnds = options.random || (options.rng || rng2)(); + rnds[6] = rnds[6] & 15 | 64; + rnds[8] = rnds[8] & 63 | 128; + if (buf) { + for (var ii = 0; ii < 16; ++ii) { + buf[i + ii] = rnds[ii]; } - __name(step, "step"); - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); - }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.ClientStreamingCall = void 0; - var ClientStreamingCall = class { - static { - __name(this, "ClientStreamingCall"); - } - constructor(method, requestHeaders, request, headers, response, status, trailers) { - this.method = method; - this.requestHeaders = requestHeaders; - this.requests = request; - this.headers = headers; - this.response = response; - this.status = status; - this.trailers = trailers; - } - /** - * Instead of awaiting the response status and trailers, you can - * just as well await this call itself to receive the server outcome. - * Note that it may still be valid to send more request messages. - */ - then(onfulfilled, onrejected) { - return this.promiseFinished().then((value) => onfulfilled ? Promise.resolve(onfulfilled(value)) : value, (reason) => onrejected ? 
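Illustration (not part of the diff): the bundled `uuid` 3.x code above builds a version-4 UUID from 16 random bytes, forcing the version nibble into byte 6 (`& 15 | 64`) and the RFC 4122 variant into byte 8 (`& 63 | 128`) before hex-formatting. Assuming the uuid 3.x entry point:

```js
const uuidv4 = require('uuid/v4');

uuidv4(); // e.g. '110ec58a-a0f2-4ac4-8393-c866d813b8d1'

// Supplying the 16 "random" bytes makes the output deterministic for tests;
// only the version/variant bits are overwritten:
uuidv4({ random: Buffer.alloc(16) }); // '00000000-0000-4000-8000-000000000000'
```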
Promise.resolve(onrejected(reason)) : Promise.reject(reason)); - } - promiseFinished() { - return __awaiter3(this, void 0, void 0, function* () { - let [headers, response, status, trailers] = yield Promise.all([this.headers, this.response, this.status, this.trailers]); - return { - method: this.method, - requestHeaders: this.requestHeaders, - headers, - response, - status, - trailers - }; - }); } - }; - exports2.ClientStreamingCall = ClientStreamingCall; + return buf || bytesToUuid(rnds); + } + __name(v42, "v4"); + module2.exports = v42; } }); -// ../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/duplex-streaming-call.js -var require_duplex_streaming_call = __commonJS({ - "../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/duplex-streaming-call.js"(exports2) { +// ../node_modules/@actions/tool-cache/lib/retry-helper.js +var require_retry_helper = __commonJS({ + "../node_modules/@actions/tool-cache/lib/retry-helper.js"(exports2) { "use strict"; + var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: /* @__PURE__ */ __name(function() { + return m[k]; + }, "get") }); + } : function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; + }); + var __setModuleDefault3 = exports2 && exports2.__setModuleDefault || (Object.create ? function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + } : function(o, v) { + o["default"] = v; + }); + var __importStar3 = exports2 && exports2.__importStar || function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding3(result, mod, k); + } + __setModuleDefault3(result, mod); + return result; + }; var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function(resolve) { @@ -98036,49 +101054,83 @@ var require_duplex_streaming_call = __commonJS({ }); }; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.DuplexStreamingCall = void 0; - var DuplexStreamingCall = class { + exports2.RetryHelper = void 0; + var core2 = __importStar3(require_core()); + var RetryHelper = class { static { - __name(this, "DuplexStreamingCall"); + __name(this, "RetryHelper"); } - constructor(method, requestHeaders, request, headers, response, status, trailers) { - this.method = method; - this.requestHeaders = requestHeaders; - this.requests = request; - this.headers = headers; - this.responses = response; - this.status = status; - this.trailers = trailers; + constructor(maxAttempts, minSeconds, maxSeconds) { + if (maxAttempts < 1) { + throw new Error("max attempts should be greater than or equal to 1"); + } + this.maxAttempts = maxAttempts; + this.minSeconds = Math.floor(minSeconds); + this.maxSeconds = Math.floor(maxSeconds); + if (this.minSeconds > this.maxSeconds) { + throw new Error("min seconds should be less than or equal to max seconds"); + } } - /** - * Instead of awaiting the response status and trailers, you can - * just as well await this call itself to receive the server outcome. - * Note that it may still be valid to send more request messages. - */ - then(onfulfilled, onrejected) { - return this.promiseFinished().then((value) => onfulfilled ? Promise.resolve(onfulfilled(value)) : value, (reason) => onrejected ? 
Promise.resolve(onrejected(reason)) : Promise.reject(reason)); + execute(action, isRetryable) { + return __awaiter3(this, void 0, void 0, function* () { + let attempt = 1; + while (attempt < this.maxAttempts) { + try { + return yield action(); + } catch (err) { + if (isRetryable && !isRetryable(err)) { + throw err; + } + core2.info(err.message); + } + const seconds = this.getSleepAmount(); + core2.info(`Waiting ${seconds} seconds before trying again`); + yield this.sleep(seconds); + attempt++; + } + return yield action(); + }); } - promiseFinished() { + getSleepAmount() { + return Math.floor(Math.random() * (this.maxSeconds - this.minSeconds + 1)) + this.minSeconds; + } + sleep(seconds) { return __awaiter3(this, void 0, void 0, function* () { - let [headers, status, trailers] = yield Promise.all([this.headers, this.status, this.trailers]); - return { - method: this.method, - requestHeaders: this.requestHeaders, - headers, - status, - trailers - }; + return new Promise((resolve) => setTimeout(resolve, seconds * 1e3)); }); } }; - exports2.DuplexStreamingCall = DuplexStreamingCall; + exports2.RetryHelper = RetryHelper; } }); -// ../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/test-transport.js -var require_test_transport = __commonJS({ - "../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/test-transport.js"(exports2) { +// ../node_modules/@actions/tool-cache/lib/tool-cache.js +var require_tool_cache = __commonJS({ + "../node_modules/@actions/tool-cache/lib/tool-cache.js"(exports2) { "use strict"; + var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: /* @__PURE__ */ __name(function() { + return m[k]; + }, "get") }); + } : function(o, m, k, k2) { + if (k2 === void 0) k2 = k; + o[k2] = m[k]; + }); + var __setModuleDefault3 = exports2 && exports2.__setModuleDefault || (Object.create ? function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + } : function(o, v) { + o["default"] = v; + }); + var __importStar3 = exports2 && exports2.__importStar || function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding3(result, mod, k); + } + __setModuleDefault3(result, mod); + return result; + }; var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function(resolve) { @@ -98110,526 +101162,699 @@ var require_test_transport = __commonJS({ step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; + var __importDefault2 = exports2 && exports2.__importDefault || function(mod) { + return mod && mod.__esModule ? 
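+// A minimal usage sketch of the RetryHelper exported just above; `fetchOnce`
+// is a hypothetical caller name, and `retry_helper_1` is the binding this
+// module declares a few lines below:
+//
+//   const helper = new retry_helper_1.RetryHelper(3, 10, 20); // 3 attempts, 10-20s sleeps
+//   const result = await helper.execute(
+//     async () => fetchOnce(),              // the action retried on failure
+//     (err) => !(err instanceof TypeError)  // returning false rethrows immediately
+//   );
+//
+// Each failed attempt sleeps a uniformly random whole number of seconds in
+// [minSeconds, maxSeconds] (see getSleepAmount above) before retrying.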
mod : { "default": mod }; + }; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.TestTransport = void 0; - var rpc_error_1 = require_rpc_error(); - var runtime_1 = require_commonjs7(); - var rpc_output_stream_1 = require_rpc_output_stream(); - var rpc_options_1 = require_rpc_options(); - var unary_call_1 = require_unary_call(); - var server_streaming_call_1 = require_server_streaming_call(); - var client_streaming_call_1 = require_client_streaming_call(); - var duplex_streaming_call_1 = require_duplex_streaming_call(); - var TestTransport = class _TestTransport { + exports2.evaluateVersions = exports2.isExplicitVersion = exports2.findFromManifest = exports2.getManifestFromRepo = exports2.findAllVersions = exports2.find = exports2.cacheFile = exports2.cacheDir = exports2.extractZip = exports2.extractXar = exports2.extractTar = exports2.extract7z = exports2.downloadTool = exports2.HTTPError = void 0; + var core2 = __importStar3(require_core()); + var io2 = __importStar3(require_io()); + var fs2 = __importStar3(require("fs")); + var mm = __importStar3(require_manifest()); + var os = __importStar3(require("os")); + var path2 = __importStar3(require("path")); + var httpm = __importStar3(require_lib()); + var semver = __importStar3(require_semver2()); + var stream = __importStar3(require("stream")); + var util = __importStar3(require("util")); + var assert_1 = require("assert"); + var v4_1 = __importDefault2(require_v4()); + var exec_1 = require_exec(); + var retry_helper_1 = require_retry_helper(); + var HTTPError = class extends Error { static { - __name(this, "TestTransport"); - } - /** - * Initialize with mock data. Omitted fields have default value. - */ - constructor(data) { - this.suppressUncaughtRejections = true; - this.headerDelay = 10; - this.responseDelay = 50; - this.betweenResponseDelay = 10; - this.afterResponseDelay = 10; - this.data = data !== null && data !== void 0 ? data : {}; + __name(this, "HTTPError"); } - /** - * Sent message(s) during the last operation. - */ - get sentMessages() { - if (this.lastInput instanceof TestInputStream) { - return this.lastInput.sent; - } else if (typeof this.lastInput == "object") { - return [this.lastInput.single]; - } - return []; + constructor(httpStatusCode) { + super(`Unexpected HTTP response: ${httpStatusCode}`); + this.httpStatusCode = httpStatusCode; + Object.setPrototypeOf(this, new.target.prototype); } - /** - * Sending message(s) completed? 
- */ - get sendComplete() { - if (this.lastInput instanceof TestInputStream) { - return this.lastInput.completed; - } else if (typeof this.lastInput == "object") { + }; + exports2.HTTPError = HTTPError; + var IS_WINDOWS = process.platform === "win32"; + var IS_MAC = process.platform === "darwin"; + var userAgent = "actions/tool-cache"; + function downloadTool(url, dest, auth, headers) { + return __awaiter3(this, void 0, void 0, function* () { + dest = dest || path2.join(_getTempDirectory(), v4_1.default()); + yield io2.mkdirP(path2.dirname(dest)); + core2.debug(`Downloading ${url}`); + core2.debug(`Destination ${dest}`); + const maxAttempts = 3; + const minSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MIN_SECONDS", 10); + const maxSeconds = _getGlobal("TEST_DOWNLOAD_TOOL_RETRY_MAX_SECONDS", 20); + const retryHelper = new retry_helper_1.RetryHelper(maxAttempts, minSeconds, maxSeconds); + return yield retryHelper.execute(() => __awaiter3(this, void 0, void 0, function* () { + return yield downloadToolAttempt(url, dest || "", auth, headers); + }), (err) => { + if (err instanceof HTTPError && err.httpStatusCode) { + if (err.httpStatusCode < 500 && err.httpStatusCode !== 408 && err.httpStatusCode !== 429) { + return false; + } + } return true; + }); + }); + } + __name(downloadTool, "downloadTool"); + exports2.downloadTool = downloadTool; + function downloadToolAttempt(url, dest, auth, headers) { + return __awaiter3(this, void 0, void 0, function* () { + if (fs2.existsSync(dest)) { + throw new Error(`Destination file path ${dest} already exists`); } - return false; - } - // Creates a promise for response headers from the mock data. - promiseHeaders() { - var _a; - const headers = (_a = this.data.headers) !== null && _a !== void 0 ? _a : _TestTransport.defaultHeaders; - return headers instanceof rpc_error_1.RpcError ? Promise.reject(headers) : Promise.resolve(headers); - } - // Creates a promise for a single, valid, message from the mock data. - promiseSingleResponse(method) { - if (this.data.response instanceof rpc_error_1.RpcError) { - return Promise.reject(this.data.response); + const http = new httpm.HttpClient(userAgent, [], { + allowRetries: false + }); + if (auth) { + core2.debug("set auth"); + if (headers === void 0) { + headers = {}; + } + headers.authorization = auth; } - let r; - if (Array.isArray(this.data.response)) { - runtime_1.assert(this.data.response.length > 0); - r = this.data.response[0]; - } else if (this.data.response !== void 0) { - r = this.data.response; - } else { - r = method.O.create(); + const response = yield http.get(url, headers); + if (response.message.statusCode !== 200) { + const err = new HTTPError(response.message.statusCode); + core2.debug(`Failed to download from "${url}". Code(${response.message.statusCode}) Message(${response.message.statusMessage})`); + throw err; } - runtime_1.assert(method.O.is(r)); - return Promise.resolve(r); - } - /** - * Pushes response messages from the mock data to the output stream. - * If an error response, status or trailers are mocked, the stream is - * closed with the respective error. - * Otherwise, stream is completed successfully. - * - * The returned promise resolves when the stream is closed. It should - * not reject. If it does, code is broken. 
- */ - streamResponses(method, stream, abort) { - return __awaiter3(this, void 0, void 0, function* () { - const messages = []; - if (this.data.response === void 0) { - messages.push(method.O.create()); - } else if (Array.isArray(this.data.response)) { - for (let msg of this.data.response) { - runtime_1.assert(method.O.is(msg)); - messages.push(msg); + const pipeline = util.promisify(stream.pipeline); + const responseMessageFactory = _getGlobal("TEST_DOWNLOAD_TOOL_RESPONSE_MESSAGE_FACTORY", () => response.message); + const readStream = responseMessageFactory(); + let succeeded = false; + try { + yield pipeline(readStream, fs2.createWriteStream(dest)); + core2.debug("download complete"); + succeeded = true; + return dest; + } finally { + if (!succeeded) { + core2.debug("download failed"); + try { + yield io2.rmRF(dest); + } catch (err) { + core2.debug(`Failed to delete '${dest}'. ${err.message}`); } - } else if (!(this.data.response instanceof rpc_error_1.RpcError)) { - runtime_1.assert(method.O.is(this.data.response)); - messages.push(this.data.response); } + } + }); + } + __name(downloadToolAttempt, "downloadToolAttempt"); + function extract7z(file, dest, _7zPath) { + return __awaiter3(this, void 0, void 0, function* () { + assert_1.ok(IS_WINDOWS, "extract7z() not supported on current OS"); + assert_1.ok(file, 'parameter "file" is required'); + dest = yield _createExtractFolder(dest); + const originalCwd = process.cwd(); + process.chdir(dest); + if (_7zPath) { try { - yield delay(this.responseDelay, abort)(void 0); - } catch (error) { - stream.notifyError(error); - return; - } - if (this.data.response instanceof rpc_error_1.RpcError) { - stream.notifyError(this.data.response); - return; - } - for (let msg of messages) { - stream.notifyMessage(msg); - try { - yield delay(this.betweenResponseDelay, abort)(void 0); - } catch (error) { - stream.notifyError(error); - return; - } + const logLevel = core2.isDebug() ? 
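+// For illustration: the retry policy wired up in downloadTool above makes at
+// most 3 attempts, and an HTTPError is retried only for 5xx, 408 (Request
+// Timeout) or 429 (Too Many Requests); any other HTTP status is thrown
+// immediately, while non-HTTP errors are always retried. A hedged usage
+// sketch (the URL is a hypothetical placeholder):
+//
+//   const file = await downloadTool('https://example.com/tool.tar.gz');
+//   // `file` is a path under $RUNNER_TEMP with a uuid-v4 file name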
"-bb1" : "-bb0"; + const args = [ + "x", + logLevel, + "-bd", + "-sccUTF-8", + file + ]; + const options = { + silent: true + }; + yield exec_1.exec(`"${_7zPath}"`, args, options); + } finally { + process.chdir(originalCwd); } - if (this.data.status instanceof rpc_error_1.RpcError) { - stream.notifyError(this.data.status); - return; + } else { + const escapedScript = path2.join(__dirname, "..", "scripts", "Invoke-7zdec.ps1").replace(/'/g, "''").replace(/"|\n|\r/g, ""); + const escapedFile = file.replace(/'/g, "''").replace(/"|\n|\r/g, ""); + const escapedTarget = dest.replace(/'/g, "''").replace(/"|\n|\r/g, ""); + const command = `& '${escapedScript}' -Source '${escapedFile}' -Target '${escapedTarget}'`; + const args = [ + "-NoLogo", + "-Sta", + "-NoProfile", + "-NonInteractive", + "-ExecutionPolicy", + "Unrestricted", + "-Command", + command + ]; + const options = { + silent: true + }; + try { + const powershellPath = yield io2.which("powershell", true); + yield exec_1.exec(`"${powershellPath}"`, args, options); + } finally { + process.chdir(originalCwd); } - if (this.data.trailers instanceof rpc_error_1.RpcError) { - stream.notifyError(this.data.trailers); - return; + } + return dest; + }); + } + __name(extract7z, "extract7z"); + exports2.extract7z = extract7z; + function extractTar(file, dest, flags = "xz") { + return __awaiter3(this, void 0, void 0, function* () { + if (!file) { + throw new Error("parameter 'file' is required"); + } + dest = yield _createExtractFolder(dest); + core2.debug("Checking tar --version"); + let versionOutput = ""; + yield exec_1.exec("tar --version", [], { + ignoreReturnCode: true, + silent: true, + listeners: { + stdout: /* @__PURE__ */ __name((data) => versionOutput += data.toString(), "stdout"), + stderr: /* @__PURE__ */ __name((data) => versionOutput += data.toString(), "stderr") } - stream.notifyComplete(); }); - } - // Creates a promise for response status from the mock data. - promiseStatus() { - var _a; - const status = (_a = this.data.status) !== null && _a !== void 0 ? _a : _TestTransport.defaultStatus; - return status instanceof rpc_error_1.RpcError ? Promise.reject(status) : Promise.resolve(status); - } - // Creates a promise for response trailers from the mock data. - promiseTrailers() { - var _a; - const trailers = (_a = this.data.trailers) !== null && _a !== void 0 ? _a : _TestTransport.defaultTrailers; - return trailers instanceof rpc_error_1.RpcError ? Promise.reject(trailers) : Promise.resolve(trailers); - } - maybeSuppressUncaught(...promise) { - if (this.suppressUncaughtRejections) { - for (let p of promise) { - p.catch(() => { - }); - } + core2.debug(versionOutput.trim()); + const isGnuTar = versionOutput.toUpperCase().includes("GNU TAR"); + let args; + if (flags instanceof Array) { + args = flags; + } else { + args = [flags]; } - } - mergeOptions(options) { - return rpc_options_1.mergeRpcOptions({}, options); - } - unary(method, input, options) { - var _a; - const requestHeaders = (_a = options.meta) !== null && _a !== void 0 ? 
_a : {}, headersPromise = this.promiseHeaders().then(delay(this.headerDelay, options.abort)), responsePromise = headersPromise.catch((_2) => { - }).then(delay(this.responseDelay, options.abort)).then((_2) => this.promiseSingleResponse(method)), statusPromise = responsePromise.catch((_2) => { - }).then(delay(this.afterResponseDelay, options.abort)).then((_2) => this.promiseStatus()), trailersPromise = responsePromise.catch((_2) => { - }).then(delay(this.afterResponseDelay, options.abort)).then((_2) => this.promiseTrailers()); - this.maybeSuppressUncaught(statusPromise, trailersPromise); - this.lastInput = { single: input }; - return new unary_call_1.UnaryCall(method, requestHeaders, input, headersPromise, responsePromise, statusPromise, trailersPromise); - } - serverStreaming(method, input, options) { - var _a; - const requestHeaders = (_a = options.meta) !== null && _a !== void 0 ? _a : {}, headersPromise = this.promiseHeaders().then(delay(this.headerDelay, options.abort)), outputStream = new rpc_output_stream_1.RpcOutputStreamController(), responseStreamClosedPromise = headersPromise.then(delay(this.responseDelay, options.abort)).catch(() => { - }).then(() => this.streamResponses(method, outputStream, options.abort)).then(delay(this.afterResponseDelay, options.abort)), statusPromise = responseStreamClosedPromise.then(() => this.promiseStatus()), trailersPromise = responseStreamClosedPromise.then(() => this.promiseTrailers()); - this.maybeSuppressUncaught(statusPromise, trailersPromise); - this.lastInput = { single: input }; - return new server_streaming_call_1.ServerStreamingCall(method, requestHeaders, input, headersPromise, outputStream, statusPromise, trailersPromise); - } - clientStreaming(method, options) { - var _a; - const requestHeaders = (_a = options.meta) !== null && _a !== void 0 ? _a : {}, headersPromise = this.promiseHeaders().then(delay(this.headerDelay, options.abort)), responsePromise = headersPromise.catch((_2) => { - }).then(delay(this.responseDelay, options.abort)).then((_2) => this.promiseSingleResponse(method)), statusPromise = responsePromise.catch((_2) => { - }).then(delay(this.afterResponseDelay, options.abort)).then((_2) => this.promiseStatus()), trailersPromise = responsePromise.catch((_2) => { - }).then(delay(this.afterResponseDelay, options.abort)).then((_2) => this.promiseTrailers()); - this.maybeSuppressUncaught(statusPromise, trailersPromise); - this.lastInput = new TestInputStream(this.data, options.abort); - return new client_streaming_call_1.ClientStreamingCall(method, requestHeaders, this.lastInput, headersPromise, responsePromise, statusPromise, trailersPromise); - } - duplex(method, options) { - var _a; - const requestHeaders = (_a = options.meta) !== null && _a !== void 0 ? 
_a : {}, headersPromise = this.promiseHeaders().then(delay(this.headerDelay, options.abort)), outputStream = new rpc_output_stream_1.RpcOutputStreamController(), responseStreamClosedPromise = headersPromise.then(delay(this.responseDelay, options.abort)).catch(() => { - }).then(() => this.streamResponses(method, outputStream, options.abort)).then(delay(this.afterResponseDelay, options.abort)), statusPromise = responseStreamClosedPromise.then(() => this.promiseStatus()), trailersPromise = responseStreamClosedPromise.then(() => this.promiseTrailers()); - this.maybeSuppressUncaught(statusPromise, trailersPromise); - this.lastInput = new TestInputStream(this.data, options.abort); - return new duplex_streaming_call_1.DuplexStreamingCall(method, requestHeaders, this.lastInput, headersPromise, outputStream, statusPromise, trailersPromise); - } - }; - exports2.TestTransport = TestTransport; - TestTransport.defaultHeaders = { - responseHeader: "test" - }; - TestTransport.defaultStatus = { - code: "OK", - detail: "all good" - }; - TestTransport.defaultTrailers = { - responseTrailer: "test" - }; - function delay(ms, abort) { - return (v) => new Promise((resolve, reject) => { - if (abort === null || abort === void 0 ? void 0 : abort.aborted) { - reject(new rpc_error_1.RpcError("user cancel", "CANCELLED")); + if (core2.isDebug() && !flags.includes("v")) { + args.push("-v"); + } + let destArg = dest; + let fileArg = file; + if (IS_WINDOWS && isGnuTar) { + args.push("--force-local"); + destArg = dest.replace(/\\/g, "/"); + fileArg = file.replace(/\\/g, "/"); + } + if (isGnuTar) { + args.push("--warning=no-unknown-keyword"); + args.push("--overwrite"); + } + args.push("-C", destArg, "-f", fileArg); + yield exec_1.exec(`tar`, args); + return dest; + }); + } + __name(extractTar, "extractTar"); + exports2.extractTar = extractTar; + function extractXar(file, dest, flags = []) { + return __awaiter3(this, void 0, void 0, function* () { + assert_1.ok(IS_MAC, "extractXar() not supported on current OS"); + assert_1.ok(file, 'parameter "file" is required'); + dest = yield _createExtractFolder(dest); + let args; + if (flags instanceof Array) { + args = flags; } else { - const id = setTimeout(() => resolve(v), ms); - if (abort) { - abort.addEventListener("abort", (ev) => { - clearTimeout(id); - reject(new rpc_error_1.RpcError("user cancel", "CANCELLED")); - }); - } + args = [flags]; + } + args.push("-x", "-C", dest, "-f", file); + if (core2.isDebug()) { + args.push("-v"); } + const xarPath = yield io2.which("xar", true); + yield exec_1.exec(`"${xarPath}"`, _unique(args)); + return dest; }); } - __name(delay, "delay"); - var TestInputStream = class { - static { - __name(this, "TestInputStream"); - } - constructor(data, abort) { - this._completed = false; - this._sent = []; - this.data = data; - this.abort = abort; + __name(extractXar, "extractXar"); + exports2.extractXar = extractXar; + function extractZip(file, dest) { + return __awaiter3(this, void 0, void 0, function* () { + if (!file) { + throw new Error("parameter 'file' is required"); + } + dest = yield _createExtractFolder(dest); + if (IS_WINDOWS) { + yield extractZipWin(file, dest); + } else { + yield extractZipNix(file, dest); + } + return dest; + }); + } + __name(extractZip, "extractZip"); + exports2.extractZip = extractZip; + function extractZipWin(file, dest) { + return __awaiter3(this, void 0, void 0, function* () { + const escapedFile = file.replace(/'/g, "''").replace(/"|\n|\r/g, ""); + const escapedDest = dest.replace(/'/g, "''").replace(/"|\n|\r/g, 
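+// Hedged usage sketch for the extract helpers above (`tarball` and the paths
+// are hypothetical placeholders):
+//
+//   const dir = await extractTar(tarball);         // default flags 'xz'
+//   const zipDir = await extractZip('/tmp/a.zip'); // pwsh/powershell on Windows, unzip elsewhere
+//
+// extractTar probes `tar --version` first; with GNU tar on Windows it adds
+// --force-local so drive-letter paths such as C:\ are not parsed as remote
+// archive locations.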
""); + const pwshPath = yield io2.which("pwsh", false); + if (pwshPath) { + const pwshCommand = [ + `$ErrorActionPreference = 'Stop' ;`, + `try { Add-Type -AssemblyName System.IO.Compression.ZipFile } catch { } ;`, + `try { [System.IO.Compression.ZipFile]::ExtractToDirectory('${escapedFile}', '${escapedDest}', $true) }`, + `catch { if (($_.Exception.GetType().FullName -eq 'System.Management.Automation.MethodException') -or ($_.Exception.GetType().FullName -eq 'System.Management.Automation.RuntimeException') ){ Expand-Archive -LiteralPath '${escapedFile}' -DestinationPath '${escapedDest}' -Force } else { throw $_ } } ;` + ].join(" "); + const args = [ + "-NoLogo", + "-NoProfile", + "-NonInteractive", + "-ExecutionPolicy", + "Unrestricted", + "-Command", + pwshCommand + ]; + core2.debug(`Using pwsh at path: ${pwshPath}`); + yield exec_1.exec(`"${pwshPath}"`, args); + } else { + const powershellCommand = [ + `$ErrorActionPreference = 'Stop' ;`, + `try { Add-Type -AssemblyName System.IO.Compression.FileSystem } catch { } ;`, + `if ((Get-Command -Name Expand-Archive -Module Microsoft.PowerShell.Archive -ErrorAction Ignore)) { Expand-Archive -LiteralPath '${escapedFile}' -DestinationPath '${escapedDest}' -Force }`, + `else {[System.IO.Compression.ZipFile]::ExtractToDirectory('${escapedFile}', '${escapedDest}', $true) }` + ].join(" "); + const args = [ + "-NoLogo", + "-Sta", + "-NoProfile", + "-NonInteractive", + "-ExecutionPolicy", + "Unrestricted", + "-Command", + powershellCommand + ]; + const powershellPath = yield io2.which("powershell", true); + core2.debug(`Using powershell at path: ${powershellPath}`); + yield exec_1.exec(`"${powershellPath}"`, args); + } + }); + } + __name(extractZipWin, "extractZipWin"); + function extractZipNix(file, dest) { + return __awaiter3(this, void 0, void 0, function* () { + const unzipPath = yield io2.which("unzip", true); + const args = [file]; + if (!core2.isDebug()) { + args.unshift("-q"); + } + args.unshift("-o"); + yield exec_1.exec(`"${unzipPath}"`, args, { cwd: dest }); + }); + } + __name(extractZipNix, "extractZipNix"); + function cacheDir(sourceDir, tool, version3, arch) { + return __awaiter3(this, void 0, void 0, function* () { + version3 = semver.clean(version3) || version3; + arch = arch || os.arch(); + core2.debug(`Caching tool ${tool} ${version3} ${arch}`); + core2.debug(`source dir: ${sourceDir}`); + if (!fs2.statSync(sourceDir).isDirectory()) { + throw new Error("sourceDir is not a directory"); + } + const destPath = yield _createToolPath(tool, version3, arch); + for (const itemName of fs2.readdirSync(sourceDir)) { + const s = path2.join(sourceDir, itemName); + yield io2.cp(s, destPath, { recursive: true }); + } + _completeToolPath(tool, version3, arch); + return destPath; + }); + } + __name(cacheDir, "cacheDir"); + exports2.cacheDir = cacheDir; + function cacheFile(sourceFile, targetFile, tool, version3, arch) { + return __awaiter3(this, void 0, void 0, function* () { + version3 = semver.clean(version3) || version3; + arch = arch || os.arch(); + core2.debug(`Caching tool ${tool} ${version3} ${arch}`); + core2.debug(`source file: ${sourceFile}`); + if (!fs2.statSync(sourceFile).isFile()) { + throw new Error("sourceFile is not a file"); + } + const destFolder = yield _createToolPath(tool, version3, arch); + const destPath = path2.join(destFolder, targetFile); + core2.debug(`destination file ${destPath}`); + yield io2.cp(sourceFile, destPath); + _completeToolPath(tool, version3, arch); + return destFolder; + }); + } + __name(cacheFile, 
"cacheFile"); + exports2.cacheFile = cacheFile; + function find(toolName, versionSpec, arch) { + if (!toolName) { + throw new Error("toolName parameter is required"); } - get sent() { - return this._sent; + if (!versionSpec) { + throw new Error("versionSpec parameter is required"); } - get completed() { - return this._completed; + arch = arch || os.arch(); + if (!isExplicitVersion(versionSpec)) { + const localVersions = findAllVersions(toolName, arch); + const match = evaluateVersions(localVersions, versionSpec); + versionSpec = match; } - send(message) { - if (this.data.inputMessage instanceof rpc_error_1.RpcError) { - return Promise.reject(this.data.inputMessage); + let toolPath = ""; + if (versionSpec) { + versionSpec = semver.clean(versionSpec) || ""; + const cachePath = path2.join(_getCacheDirectory(), toolName, versionSpec, arch); + core2.debug(`checking cache: ${cachePath}`); + if (fs2.existsSync(cachePath) && fs2.existsSync(`${cachePath}.complete`)) { + core2.debug(`Found tool in cache ${toolName} ${versionSpec} ${arch}`); + toolPath = cachePath; + } else { + core2.debug("not found"); } - const delayMs = this.data.inputMessage === void 0 ? 10 : this.data.inputMessage; - return Promise.resolve(void 0).then(() => { - this._sent.push(message); - }).then(delay(delayMs, this.abort)); } - complete() { - if (this.data.inputComplete instanceof rpc_error_1.RpcError) { - return Promise.reject(this.data.inputComplete); + return toolPath; + } + __name(find, "find"); + exports2.find = find; + function findAllVersions(toolName, arch) { + const versions = []; + arch = arch || os.arch(); + const toolPath = path2.join(_getCacheDirectory(), toolName); + if (fs2.existsSync(toolPath)) { + const children2 = fs2.readdirSync(toolPath); + for (const child of children2) { + if (isExplicitVersion(child)) { + const fullPath = path2.join(toolPath, child, arch || ""); + if (fs2.existsSync(fullPath) && fs2.existsSync(`${fullPath}.complete`)) { + versions.push(child); + } + } } - const delayMs = this.data.inputComplete === void 0 ? 10 : this.data.inputComplete; - return Promise.resolve(void 0).then(() => { - this._completed = true; - }).then(delay(delayMs, this.abort)); } - }; - } -}); - -// ../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-interceptor.js -var require_rpc_interceptor = __commonJS({ - "../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/rpc-interceptor.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.stackDuplexStreamingInterceptors = exports2.stackClientStreamingInterceptors = exports2.stackServerStreamingInterceptors = exports2.stackUnaryInterceptors = exports2.stackIntercept = void 0; - var runtime_1 = require_commonjs7(); - function stackIntercept(kind, transport, method, options, input) { - var _a, _b, _c, _d; - if (kind == "unary") { - let tail = /* @__PURE__ */ __name((mtd, inp, opt) => transport.unary(mtd, inp, opt), "tail"); - for (const curr of ((_a = options.interceptors) !== null && _a !== void 0 ? 
_a : []).filter((i) => i.interceptUnary).reverse()) { - const next = tail; - tail = /* @__PURE__ */ __name((mtd, inp, opt) => curr.interceptUnary(next, mtd, inp, opt), "tail"); + return versions; + } + __name(findAllVersions, "findAllVersions"); + exports2.findAllVersions = findAllVersions; + function getManifestFromRepo(owner, repo, auth, branch = "master") { + return __awaiter3(this, void 0, void 0, function* () { + let releases = []; + const treeUrl = `https://api.github.com/repos/${owner}/${repo}/git/trees/${branch}`; + const http = new httpm.HttpClient("tool-cache"); + const headers = {}; + if (auth) { + core2.debug("set auth"); + headers.authorization = auth; } - return tail(method, input, options); - } - if (kind == "serverStreaming") { - let tail = /* @__PURE__ */ __name((mtd, inp, opt) => transport.serverStreaming(mtd, inp, opt), "tail"); - for (const curr of ((_b = options.interceptors) !== null && _b !== void 0 ? _b : []).filter((i) => i.interceptServerStreaming).reverse()) { - const next = tail; - tail = /* @__PURE__ */ __name((mtd, inp, opt) => curr.interceptServerStreaming(next, mtd, inp, opt), "tail"); + const response = yield http.getJson(treeUrl, headers); + if (!response.result) { + return releases; } - return tail(method, input, options); - } - if (kind == "clientStreaming") { - let tail = /* @__PURE__ */ __name((mtd, opt) => transport.clientStreaming(mtd, opt), "tail"); - for (const curr of ((_c = options.interceptors) !== null && _c !== void 0 ? _c : []).filter((i) => i.interceptClientStreaming).reverse()) { - const next = tail; - tail = /* @__PURE__ */ __name((mtd, opt) => curr.interceptClientStreaming(next, mtd, opt), "tail"); + let manifestUrl = ""; + for (const item of response.result.tree) { + if (item.path === "versions-manifest.json") { + manifestUrl = item.url; + break; + } } - return tail(method, options); - } - if (kind == "duplex") { - let tail = /* @__PURE__ */ __name((mtd, opt) => transport.duplex(mtd, opt), "tail"); - for (const curr of ((_d = options.interceptors) !== null && _d !== void 0 ? 
_d : []).filter((i) => i.interceptDuplex).reverse()) { - const next = tail; - tail = /* @__PURE__ */ __name((mtd, opt) => curr.interceptDuplex(next, mtd, opt), "tail"); + headers["accept"] = "application/vnd.github.VERSION.raw"; + let versionsRaw = yield (yield http.get(manifestUrl, headers)).readBody(); + if (versionsRaw) { + versionsRaw = versionsRaw.replace(/^\uFEFF/, ""); + try { + releases = JSON.parse(versionsRaw); + } catch (_a) { + core2.debug("Invalid json"); + } + } + return releases; + }); + } + __name(getManifestFromRepo, "getManifestFromRepo"); + exports2.getManifestFromRepo = getManifestFromRepo; + function findFromManifest(versionSpec, stable, manifest, archFilter = os.arch()) { + return __awaiter3(this, void 0, void 0, function* () { + const match = yield mm._findMatch(versionSpec, stable, manifest, archFilter); + return match; + }); + } + __name(findFromManifest, "findFromManifest"); + exports2.findFromManifest = findFromManifest; + function _createExtractFolder(dest) { + return __awaiter3(this, void 0, void 0, function* () { + if (!dest) { + dest = path2.join(_getTempDirectory(), v4_1.default()); + } + yield io2.mkdirP(dest); + return dest; + }); + } + __name(_createExtractFolder, "_createExtractFolder"); + function _createToolPath(tool, version3, arch) { + return __awaiter3(this, void 0, void 0, function* () { + const folderPath = path2.join(_getCacheDirectory(), tool, semver.clean(version3) || version3, arch || ""); + core2.debug(`destination ${folderPath}`); + const markerPath = `${folderPath}.complete`; + yield io2.rmRF(folderPath); + yield io2.rmRF(markerPath); + yield io2.mkdirP(folderPath); + return folderPath; + }); + } + __name(_createToolPath, "_createToolPath"); + function _completeToolPath(tool, version3, arch) { + const folderPath = path2.join(_getCacheDirectory(), tool, semver.clean(version3) || version3, arch || ""); + const markerPath = `${folderPath}.complete`; + fs2.writeFileSync(markerPath, ""); + core2.debug("finished caching tool"); + } + __name(_completeToolPath, "_completeToolPath"); + function isExplicitVersion(versionSpec) { + const c = semver.clean(versionSpec) || ""; + core2.debug(`isExplicit: ${c}`); + const valid = semver.valid(c) != null; + core2.debug(`explicit? 
${valid}`); + return valid; + } + __name(isExplicitVersion, "isExplicitVersion"); + exports2.isExplicitVersion = isExplicitVersion; + function evaluateVersions(versions, versionSpec) { + let version3 = ""; + core2.debug(`evaluating ${versions.length} versions`); + versions = versions.sort((a, b) => { + if (semver.gt(a, b)) { + return 1; + } + return -1; + }); + for (let i = versions.length - 1; i >= 0; i--) { + const potential = versions[i]; + const satisfied = semver.satisfies(potential, versionSpec); + if (satisfied) { + version3 = potential; + break; } - return tail(method, options); } - runtime_1.assertNever(kind); + if (version3) { + core2.debug(`matched: ${version3}`); + } else { + core2.debug("match not found"); + } + return version3; } - __name(stackIntercept, "stackIntercept"); - exports2.stackIntercept = stackIntercept; - function stackUnaryInterceptors(transport, method, input, options) { - return stackIntercept("unary", transport, method, options, input); + __name(evaluateVersions, "evaluateVersions"); + exports2.evaluateVersions = evaluateVersions; + function _getCacheDirectory() { + const cacheDirectory = process.env["RUNNER_TOOL_CACHE"] || ""; + assert_1.ok(cacheDirectory, "Expected RUNNER_TOOL_CACHE to be defined"); + return cacheDirectory; } - __name(stackUnaryInterceptors, "stackUnaryInterceptors"); - exports2.stackUnaryInterceptors = stackUnaryInterceptors; - function stackServerStreamingInterceptors(transport, method, input, options) { - return stackIntercept("serverStreaming", transport, method, options, input); + __name(_getCacheDirectory, "_getCacheDirectory"); + function _getTempDirectory() { + const tempDirectory = process.env["RUNNER_TEMP"] || ""; + assert_1.ok(tempDirectory, "Expected RUNNER_TEMP to be defined"); + return tempDirectory; } - __name(stackServerStreamingInterceptors, "stackServerStreamingInterceptors"); - exports2.stackServerStreamingInterceptors = stackServerStreamingInterceptors; - function stackClientStreamingInterceptors(transport, method, options) { - return stackIntercept("clientStreaming", transport, method, options); + __name(_getTempDirectory, "_getTempDirectory"); + function _getGlobal(key, defaultValue) { + const value = global[key]; + return value !== void 0 ? value : defaultValue; } - __name(stackClientStreamingInterceptors, "stackClientStreamingInterceptors"); - exports2.stackClientStreamingInterceptors = stackClientStreamingInterceptors; - function stackDuplexStreamingInterceptors(transport, method, options) { - return stackIntercept("duplex", transport, method, options); + __name(_getGlobal, "_getGlobal"); + function _unique(values) { + return Array.from(new Set(values)); } - __name(stackDuplexStreamingInterceptors, "stackDuplexStreamingInterceptors"); - exports2.stackDuplexStreamingInterceptors = stackDuplexStreamingInterceptors; + __name(_unique, "_unique"); } }); -// ../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/server-call-context.js -var require_server_call_context = __commonJS({ - "../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/server-call-context.js"(exports2) { +// ../node_modules/@actions/artifact/lib/internal/shared/config.js +var require_config2 = __commonJS({ + "../node_modules/@actions/artifact/lib/internal/shared/config.js"(exports2) { "use strict"; + var __importDefault2 = exports2 && exports2.__importDefault || function(mod) { + return mod && mod.__esModule ? 
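+// Illustrative call to evaluateVersions above (inputs hypothetical):
+//
+//   evaluateVersions(['1.0.0', '1.2.0', '2.0.0'], '1.x')  // -> '1.2.0'
+//
+// Versions are semver-sorted ascending and scanned from the highest down, so
+// the newest version satisfying the range wins; '' is returned when nothing
+// matches.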
mod : { "default": mod }; + }; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.ServerCallContextController = void 0; - var ServerCallContextController = class { - static { - __name(this, "ServerCallContextController"); - } - constructor(method, headers, deadline, sendResponseHeadersFn, defaultStatus = { code: "OK", detail: "" }) { - this._cancelled = false; - this._listeners = []; - this.method = method; - this.headers = headers; - this.deadline = deadline; - this.trailers = {}; - this._sendRH = sendResponseHeadersFn; - this.status = defaultStatus; - } - /** - * Set the call cancelled. - * - * Invokes all callbacks registered with onCancel() and - * sets `cancelled = true`. - */ - notifyCancelled() { - if (!this._cancelled) { - this._cancelled = true; - for (let l of this._listeners) { - l(); - } - } + exports2.getUploadChunkTimeout = exports2.getConcurrency = exports2.getGitHubWorkspaceDir = exports2.isGhes = exports2.getResultsServiceUrl = exports2.getRuntimeToken = exports2.getUploadChunkSize = void 0; + var os_1 = __importDefault2(require("os")); + function getUploadChunkSize() { + return 8 * 1024 * 1024; + } + __name(getUploadChunkSize, "getUploadChunkSize"); + exports2.getUploadChunkSize = getUploadChunkSize; + function getRuntimeToken() { + const token = process.env["ACTIONS_RUNTIME_TOKEN"]; + if (!token) { + throw new Error("Unable to get the ACTIONS_RUNTIME_TOKEN env variable"); } - /** - * Send response headers. - */ - sendResponseHeaders(data) { - this._sendRH(data); + return token; + } + __name(getRuntimeToken, "getRuntimeToken"); + exports2.getRuntimeToken = getRuntimeToken; + function getResultsServiceUrl() { + const resultsUrl = process.env["ACTIONS_RESULTS_URL"]; + if (!resultsUrl) { + throw new Error("Unable to get the ACTIONS_RESULTS_URL env variable"); } - /** - * Is the call cancelled? - * - * When the client closes the connection before the server - * is done, the call is cancelled. - * - * If you want to cancel a request on the server, throw a - * RpcError with the CANCELLED status code. - */ - get cancelled() { - return this._cancelled; + return new URL(resultsUrl).origin; + } + __name(getResultsServiceUrl, "getResultsServiceUrl"); + exports2.getResultsServiceUrl = getResultsServiceUrl; + function isGhes() { + const ghUrl = new URL(process.env["GITHUB_SERVER_URL"] || "https://github.com"); + const hostname = ghUrl.hostname.trimEnd().toUpperCase(); + const isGitHubHost = hostname === "GITHUB.COM"; + const isGheHost = hostname.endsWith(".GHE.COM"); + const isLocalHost = hostname.endsWith(".LOCALHOST"); + return !isGitHubHost && !isGheHost && !isLocalHost; + } + __name(isGhes, "isGhes"); + exports2.isGhes = isGhes; + function getGitHubWorkspaceDir() { + const ghWorkspaceDir = process.env["GITHUB_WORKSPACE"]; + if (!ghWorkspaceDir) { + throw new Error("Unable to get the GITHUB_WORKSPACE env variable"); } - /** - * Add a callback for cancellation. 
- */ - onCancel(callback) { - const l = this._listeners; - l.push(callback); - return () => { - let i = l.indexOf(callback); - if (i >= 0) - l.splice(i, 1); - }; + return ghWorkspaceDir; + } + __name(getGitHubWorkspaceDir, "getGitHubWorkspaceDir"); + exports2.getGitHubWorkspaceDir = getGitHubWorkspaceDir; + function getConcurrency() { + const numCPUs = os_1.default.cpus().length; + if (numCPUs <= 4) { + return 32; } - }; - exports2.ServerCallContextController = ServerCallContextController; - } -}); - -// ../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/index.js -var require_commonjs8 = __commonJS({ - "../node_modules/@protobuf-ts/runtime-rpc/build/commonjs/index.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - var service_type_1 = require_service_type(); - Object.defineProperty(exports2, "ServiceType", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return service_type_1.ServiceType; - }, "get") }); - var reflection_info_1 = require_reflection_info2(); - Object.defineProperty(exports2, "readMethodOptions", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return reflection_info_1.readMethodOptions; - }, "get") }); - Object.defineProperty(exports2, "readMethodOption", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return reflection_info_1.readMethodOption; - }, "get") }); - Object.defineProperty(exports2, "readServiceOption", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return reflection_info_1.readServiceOption; - }, "get") }); - var rpc_error_1 = require_rpc_error(); - Object.defineProperty(exports2, "RpcError", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return rpc_error_1.RpcError; - }, "get") }); - var rpc_options_1 = require_rpc_options(); - Object.defineProperty(exports2, "mergeRpcOptions", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return rpc_options_1.mergeRpcOptions; - }, "get") }); - var rpc_output_stream_1 = require_rpc_output_stream(); - Object.defineProperty(exports2, "RpcOutputStreamController", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return rpc_output_stream_1.RpcOutputStreamController; - }, "get") }); - var test_transport_1 = require_test_transport(); - Object.defineProperty(exports2, "TestTransport", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return test_transport_1.TestTransport; - }, "get") }); - var deferred_1 = require_deferred(); - Object.defineProperty(exports2, "Deferred", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return deferred_1.Deferred; - }, "get") }); - Object.defineProperty(exports2, "DeferredState", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return deferred_1.DeferredState; - }, "get") }); - var duplex_streaming_call_1 = require_duplex_streaming_call(); - Object.defineProperty(exports2, "DuplexStreamingCall", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return duplex_streaming_call_1.DuplexStreamingCall; - }, "get") }); - var client_streaming_call_1 = require_client_streaming_call(); - Object.defineProperty(exports2, "ClientStreamingCall", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return client_streaming_call_1.ClientStreamingCall; - }, "get") }); - var server_streaming_call_1 = require_server_streaming_call(); - Object.defineProperty(exports2, "ServerStreamingCall", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return server_streaming_call_1.ServerStreamingCall; - 
}, "get") }); - var unary_call_1 = require_unary_call(); - Object.defineProperty(exports2, "UnaryCall", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return unary_call_1.UnaryCall; - }, "get") }); - var rpc_interceptor_1 = require_rpc_interceptor(); - Object.defineProperty(exports2, "stackIntercept", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return rpc_interceptor_1.stackIntercept; - }, "get") }); - Object.defineProperty(exports2, "stackDuplexStreamingInterceptors", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return rpc_interceptor_1.stackDuplexStreamingInterceptors; - }, "get") }); - Object.defineProperty(exports2, "stackClientStreamingInterceptors", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return rpc_interceptor_1.stackClientStreamingInterceptors; - }, "get") }); - Object.defineProperty(exports2, "stackServerStreamingInterceptors", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return rpc_interceptor_1.stackServerStreamingInterceptors; - }, "get") }); - Object.defineProperty(exports2, "stackUnaryInterceptors", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return rpc_interceptor_1.stackUnaryInterceptors; - }, "get") }); - var server_call_context_1 = require_server_call_context(); - Object.defineProperty(exports2, "ServerCallContextController", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return server_call_context_1.ServerCallContextController; - }, "get") }); + const concurrency = 16 * numCPUs; + return concurrency > 300 ? 300 : concurrency; + } + __name(getConcurrency, "getConcurrency"); + exports2.getConcurrency = getConcurrency; + function getUploadChunkTimeout() { + return 3e5; + } + __name(getUploadChunkTimeout, "getUploadChunkTimeout"); + exports2.getUploadChunkTimeout = getUploadChunkTimeout; } }); -// ../node_modules/@actions/artifact/lib/generated/results/api/v1/artifact.js -var require_artifact = __commonJS({ - "../node_modules/@actions/artifact/lib/generated/results/api/v1/artifact.js"(exports2) { +// ../node_modules/@actions/artifact/lib/generated/google/protobuf/timestamp.js +var require_timestamp2 = __commonJS({ + "../node_modules/@actions/artifact/lib/generated/google/protobuf/timestamp.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.ArtifactService = exports2.DeleteArtifactResponse = exports2.DeleteArtifactRequest = exports2.GetSignedArtifactURLResponse = exports2.GetSignedArtifactURLRequest = exports2.ListArtifactsResponse_MonolithArtifact = exports2.ListArtifactsResponse = exports2.ListArtifactsRequest = exports2.FinalizeArtifactResponse = exports2.FinalizeArtifactRequest = exports2.CreateArtifactResponse = exports2.CreateArtifactRequest = void 0; - var runtime_rpc_1 = require_commonjs8(); + exports2.Timestamp = void 0; var runtime_1 = require_commonjs7(); var runtime_2 = require_commonjs7(); var runtime_3 = require_commonjs7(); var runtime_4 = require_commonjs7(); var runtime_5 = require_commonjs7(); - var wrappers_1 = require_wrappers(); - var wrappers_2 = require_wrappers(); - var timestamp_1 = require_timestamp(); - var CreateArtifactRequest$Type = class extends runtime_5.MessageType { + var runtime_6 = require_commonjs7(); + var runtime_7 = require_commonjs7(); + var Timestamp$Type = class extends runtime_7.MessageType { static { - __name(this, "CreateArtifactRequest$Type"); + __name(this, "Timestamp$Type"); } constructor() { - 
super("github.actions.results.api.v1.CreateArtifactRequest", [ + super("google.protobuf.Timestamp", [ { no: 1, - name: "workflow_run_backend_id", + name: "seconds", kind: "scalar", - T: 9 - /*ScalarType.STRING*/ + T: 3 + /*ScalarType.INT64*/ }, { no: 2, - name: "workflow_job_run_backend_id", - kind: "scalar", - T: 9 - /*ScalarType.STRING*/ - }, - { - no: 3, - name: "name", - kind: "scalar", - T: 9 - /*ScalarType.STRING*/ - }, - { no: 4, name: "expires_at", kind: "message", T: /* @__PURE__ */ __name(() => timestamp_1.Timestamp, "T") }, - { - no: 5, - name: "version", + name: "nanos", kind: "scalar", T: 5 /*ScalarType.INT32*/ } ]); } + /** + * Creates a new `Timestamp` for the current time. + */ + now() { + const msg = this.create(); + const ms = Date.now(); + msg.seconds = runtime_6.PbLong.from(Math.floor(ms / 1e3)).toString(); + msg.nanos = ms % 1e3 * 1e6; + return msg; + } + /** + * Converts a `Timestamp` to a JavaScript Date. + */ + toDate(message) { + return new Date(runtime_6.PbLong.from(message.seconds).toNumber() * 1e3 + Math.ceil(message.nanos / 1e6)); + } + /** + * Converts a JavaScript Date to a `Timestamp`. + */ + fromDate(date) { + const msg = this.create(); + const ms = date.getTime(); + msg.seconds = runtime_6.PbLong.from(Math.floor(ms / 1e3)).toString(); + msg.nanos = ms % 1e3 * 1e6; + return msg; + } + /** + * In JSON format, the `Timestamp` type is encoded as a string + * in the RFC 3339 format. + */ + internalJsonWrite(message, options) { + let ms = runtime_6.PbLong.from(message.seconds).toNumber() * 1e3; + if (ms < Date.parse("0001-01-01T00:00:00Z") || ms > Date.parse("9999-12-31T23:59:59Z")) + throw new Error("Unable to encode Timestamp to JSON. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive."); + if (message.nanos < 0) + throw new Error("Unable to encode invalid Timestamp to JSON. Nanos must not be negative."); + let z = "Z"; + if (message.nanos > 0) { + let nanosStr = (message.nanos + 1e9).toString().substring(1); + if (nanosStr.substring(3) === "000000") + z = "." + nanosStr.substring(0, 3) + "Z"; + else if (nanosStr.substring(6) === "000") + z = "." + nanosStr.substring(0, 6) + "Z"; + else + z = "." + nanosStr + "Z"; + } + return new Date(ms).toISOString().replace(".000Z", z); + } + /** + * In JSON format, the `Timestamp` type is encoded as a string + * in the RFC 3339 format. + */ + internalJsonRead(json, options, target) { + if (typeof json !== "string") + throw new Error("Unable to parse Timestamp from JSON " + (0, runtime_5.typeofJsonValue)(json) + "."); + let matches = json.match(/^([0-9]{4})-([0-9]{2})-([0-9]{2})T([0-9]{2}):([0-9]{2}):([0-9]{2})(?:Z|\.([0-9]{3,9})Z|([+-][0-9][0-9]:[0-9][0-9]))$/); + if (!matches) + throw new Error("Unable to parse Timestamp from JSON. Invalid format."); + let ms = Date.parse(matches[1] + "-" + matches[2] + "-" + matches[3] + "T" + matches[4] + ":" + matches[5] + ":" + matches[6] + (matches[8] ? matches[8] : "Z")); + if (Number.isNaN(ms)) + throw new Error("Unable to parse Timestamp from JSON. Invalid value."); + if (ms < Date.parse("0001-01-01T00:00:00Z") || ms > Date.parse("9999-12-31T23:59:59Z")) + throw new globalThis.Error("Unable to parse Timestamp from JSON. 
Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive."); + if (!target) + target = this.create(); + target.seconds = runtime_6.PbLong.from(ms / 1e3).toString(); + target.nanos = 0; + if (matches[7]) + target.nanos = parseInt("1" + matches[7] + "0".repeat(9 - matches[7].length)) - 1e9; + return target; + } create(value) { - const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", name: "", version: 0 }; + const message = { seconds: "0", nanos: 0 }; globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); if (value !== void 0) (0, runtime_3.reflectionMergePartial)(this, message, value); @@ -98640,25 +101865,13 @@ var require_artifact = __commonJS({ while (reader.pos < end) { let [fieldNo, wireType] = reader.tag(); switch (fieldNo) { - case /* string workflow_run_backend_id */ + case /* int64 seconds */ 1: - message.workflowRunBackendId = reader.string(); + message.seconds = reader.int64().toString(); break; - case /* string workflow_job_run_backend_id */ + case /* int32 nanos */ 2: - message.workflowJobRunBackendId = reader.string(); - break; - case /* string name */ - 3: - message.name = reader.string(); - break; - case /* google.protobuf.Timestamp expires_at */ - 4: - message.expiresAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.expiresAt); - break; - case /* int32 version */ - 5: - message.version = reader.int32(); + message.nanos = reader.int32(); break; default: let u = options.readUnknownField; @@ -98672,50 +101885,68 @@ var require_artifact = __commonJS({ return message; } internalBinaryWrite(message, writer, options) { - if (message.workflowRunBackendId !== "") - writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.workflowRunBackendId); - if (message.workflowJobRunBackendId !== "") - writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.workflowJobRunBackendId); - if (message.name !== "") - writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.name); - if (message.expiresAt) - timestamp_1.Timestamp.internalBinaryWrite(message.expiresAt, writer.tag(4, runtime_1.WireType.LengthDelimited).fork(), options).join(); - if (message.version !== 0) - writer.tag(5, runtime_1.WireType.Varint).int32(message.version); + if (message.seconds !== "0") + writer.tag(1, runtime_1.WireType.Varint).int64(message.seconds); + if (message.nanos !== 0) + writer.tag(2, runtime_1.WireType.Varint).int32(message.nanos); let u = options.writeUnknownFields; if (u !== false) (u == true ? 
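+// Hedged round-trip sketch for the Timestamp type defined above (exported
+// just below as exports2.Timestamp):
+//
+//   const ts = exports2.Timestamp.fromDate(new Date('2024-01-02T03:04:05Z'));
+//   // ts.seconds === '1704164645', ts.nanos === 0
+//   const back = exports2.Timestamp.toDate(ts); // same instant as the input
+//
+// seconds travels as a decimal string (PbLong) so the full int64 range
+// survives JavaScript's 53-bit number limit; the JSON form is an RFC 3339
+// string, as internalJsonWrite/internalJsonRead above enforce.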
runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); return writer; } }; - exports2.CreateArtifactRequest = new CreateArtifactRequest$Type(); - var CreateArtifactResponse$Type = class extends runtime_5.MessageType { + exports2.Timestamp = new Timestamp$Type(); + } +}); + +// ../node_modules/@actions/artifact/lib/generated/google/protobuf/wrappers.js +var require_wrappers = __commonJS({ + "../node_modules/@actions/artifact/lib/generated/google/protobuf/wrappers.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.BytesValue = exports2.StringValue = exports2.BoolValue = exports2.UInt32Value = exports2.Int32Value = exports2.UInt64Value = exports2.Int64Value = exports2.FloatValue = exports2.DoubleValue = void 0; + var runtime_1 = require_commonjs7(); + var runtime_2 = require_commonjs7(); + var runtime_3 = require_commonjs7(); + var runtime_4 = require_commonjs7(); + var runtime_5 = require_commonjs7(); + var runtime_6 = require_commonjs7(); + var runtime_7 = require_commonjs7(); + var DoubleValue$Type = class extends runtime_7.MessageType { static { - __name(this, "CreateArtifactResponse$Type"); + __name(this, "DoubleValue$Type"); } constructor() { - super("github.actions.results.api.v1.CreateArtifactResponse", [ + super("google.protobuf.DoubleValue", [ { no: 1, - name: "ok", - kind: "scalar", - T: 8 - /*ScalarType.BOOL*/ - }, - { - no: 2, - name: "signed_upload_url", + name: "value", kind: "scalar", - T: 9 - /*ScalarType.STRING*/ + T: 1 + /*ScalarType.DOUBLE*/ } ]); } + /** + * Encode `DoubleValue` to JSON number. + */ + internalJsonWrite(message, options) { + return this.refJsonWriter.scalar(2, message.value, "value", false, true); + } + /** + * Decode `DoubleValue` from JSON number. + */ + internalJsonRead(json, options, target) { + if (!target) + target = this.create(); + target.value = this.refJsonReader.scalar(json, 1, void 0, "value"); + return target; + } create(value) { - const message = { ok: false, signedUploadUrl: "" }; - globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + const message = { value: 0 }; + globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this }); if (value !== void 0) - (0, runtime_3.reflectionMergePartial)(this, message, value); + (0, runtime_5.reflectionMergePartial)(this, message, value); return message; } internalBinaryRead(reader, length, options, target) { @@ -98723,13 +101954,9 @@ var require_artifact = __commonJS({ while (reader.pos < end) { let [fieldNo, wireType] = reader.tag(); switch (fieldNo) { - case /* bool ok */ + case /* double value */ 1: - message.ok = reader.bool(); - break; - case /* string signed_upload_url */ - 2: - message.signedUploadUrl = reader.string(); + message.value = reader.double(); break; default: let u = options.readUnknownField; @@ -98737,65 +101964,56 @@ var require_artifact = __commonJS({ throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); let d = reader.skip(wireType); if (u !== false) - (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + (u === true ? 
runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); } } return message; } internalBinaryWrite(message, writer, options) { - if (message.ok !== false) - writer.tag(1, runtime_1.WireType.Varint).bool(message.ok); - if (message.signedUploadUrl !== "") - writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.signedUploadUrl); + if (message.value !== 0) + writer.tag(1, runtime_3.WireType.Bit64).double(message.value); let u = options.writeUnknownFields; if (u !== false) - (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + (u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); return writer; } }; - exports2.CreateArtifactResponse = new CreateArtifactResponse$Type(); - var FinalizeArtifactRequest$Type = class extends runtime_5.MessageType { + exports2.DoubleValue = new DoubleValue$Type(); + var FloatValue$Type = class extends runtime_7.MessageType { static { - __name(this, "FinalizeArtifactRequest$Type"); + __name(this, "FloatValue$Type"); } constructor() { - super("github.actions.results.api.v1.FinalizeArtifactRequest", [ + super("google.protobuf.FloatValue", [ { no: 1, - name: "workflow_run_backend_id", - kind: "scalar", - T: 9 - /*ScalarType.STRING*/ - }, - { - no: 2, - name: "workflow_job_run_backend_id", - kind: "scalar", - T: 9 - /*ScalarType.STRING*/ - }, - { - no: 3, - name: "name", - kind: "scalar", - T: 9 - /*ScalarType.STRING*/ - }, - { - no: 4, - name: "size", + name: "value", kind: "scalar", - T: 3 - /*ScalarType.INT64*/ - }, - { no: 5, name: "hash", kind: "message", T: /* @__PURE__ */ __name(() => wrappers_2.StringValue, "T") } + T: 2 + /*ScalarType.FLOAT*/ + } ]); } + /** + * Encode `FloatValue` to JSON number. + */ + internalJsonWrite(message, options) { + return this.refJsonWriter.scalar(1, message.value, "value", false, true); + } + /** + * Decode `FloatValue` from JSON number. 
+ */ + internalJsonRead(json, options, target) { + if (!target) + target = this.create(); + target.value = this.refJsonReader.scalar(json, 1, void 0, "value"); + return target; + } create(value) { - const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", name: "", size: "0" }; - globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + const message = { value: 0 }; + globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this }); if (value !== void 0) - (0, runtime_3.reflectionMergePartial)(this, message, value); + (0, runtime_5.reflectionMergePartial)(this, message, value); return message; } internalBinaryRead(reader, length, options, target) { @@ -98803,25 +102021,9 @@ var require_artifact = __commonJS({ while (reader.pos < end) { let [fieldNo, wireType] = reader.tag(); switch (fieldNo) { - case /* string workflow_run_backend_id */ + case /* float value */ 1: - message.workflowRunBackendId = reader.string(); - break; - case /* string workflow_job_run_backend_id */ - 2: - message.workflowJobRunBackendId = reader.string(); - break; - case /* string name */ - 3: - message.name = reader.string(); - break; - case /* int64 size */ - 4: - message.size = reader.int64().toString(); - break; - case /* google.protobuf.StringValue hash */ - 5: - message.hash = wrappers_2.StringValue.internalBinaryRead(reader, reader.uint32(), options, message.hash); + message.value = reader.float(); break; default: let u = options.readUnknownField; @@ -98829,56 +102031,56 @@ var require_artifact = __commonJS({ throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); let d = reader.skip(wireType); if (u !== false) - (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + (u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); } } return message; } internalBinaryWrite(message, writer, options) { - if (message.workflowRunBackendId !== "") - writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.workflowRunBackendId); - if (message.workflowJobRunBackendId !== "") - writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.workflowJobRunBackendId); - if (message.name !== "") - writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.name); - if (message.size !== "0") - writer.tag(4, runtime_1.WireType.Varint).int64(message.size); - if (message.hash) - wrappers_2.StringValue.internalBinaryWrite(message.hash, writer.tag(5, runtime_1.WireType.LengthDelimited).fork(), options).join(); + if (message.value !== 0) + writer.tag(1, runtime_3.WireType.Bit32).float(message.value); let u = options.writeUnknownFields; if (u !== false) - (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + (u == true ? 
runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); return writer; } }; - exports2.FinalizeArtifactRequest = new FinalizeArtifactRequest$Type(); - var FinalizeArtifactResponse$Type = class extends runtime_5.MessageType { + exports2.FloatValue = new FloatValue$Type(); + var Int64Value$Type = class extends runtime_7.MessageType { static { - __name(this, "FinalizeArtifactResponse$Type"); + __name(this, "Int64Value$Type"); } constructor() { - super("github.actions.results.api.v1.FinalizeArtifactResponse", [ + super("google.protobuf.Int64Value", [ { no: 1, - name: "ok", - kind: "scalar", - T: 8 - /*ScalarType.BOOL*/ - }, - { - no: 2, - name: "artifact_id", + name: "value", kind: "scalar", T: 3 /*ScalarType.INT64*/ } ]); } + /** + * Encode `Int64Value` to JSON string. + */ + internalJsonWrite(message, options) { + return this.refJsonWriter.scalar(runtime_1.ScalarType.INT64, message.value, "value", false, true); + } + /** + * Decode `Int64Value` from JSON string. + */ + internalJsonRead(json, options, target) { + if (!target) + target = this.create(); + target.value = this.refJsonReader.scalar(json, runtime_1.ScalarType.INT64, runtime_2.LongType.STRING, "value"); + return target; + } create(value) { - const message = { ok: false, artifactId: "0" }; - globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + const message = { value: "0" }; + globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this }); if (value !== void 0) - (0, runtime_3.reflectionMergePartial)(this, message, value); + (0, runtime_5.reflectionMergePartial)(this, message, value); return message; } internalBinaryRead(reader, length, options, target) { @@ -98886,13 +102088,9 @@ var require_artifact = __commonJS({ while (reader.pos < end) { let [fieldNo, wireType] = reader.tag(); switch (fieldNo) { - case /* bool ok */ + case /* int64 value */ 1: - message.ok = reader.bool(); - break; - case /* int64 artifact_id */ - 2: - message.artifactId = reader.int64().toString(); + message.value = reader.int64().toString(); break; default: let u = options.readUnknownField; @@ -98900,52 +102098,56 @@ var require_artifact = __commonJS({ throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); let d = reader.skip(wireType); if (u !== false) - (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + (u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); } } return message; } internalBinaryWrite(message, writer, options) { - if (message.ok !== false) - writer.tag(1, runtime_1.WireType.Varint).bool(message.ok); - if (message.artifactId !== "0") - writer.tag(2, runtime_1.WireType.Varint).int64(message.artifactId); + if (message.value !== "0") + writer.tag(1, runtime_3.WireType.Varint).int64(message.value); let u = options.writeUnknownFields; if (u !== false) - (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + (u == true ? 
runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); return writer; } }; - exports2.FinalizeArtifactResponse = new FinalizeArtifactResponse$Type(); - var ListArtifactsRequest$Type = class extends runtime_5.MessageType { + exports2.Int64Value = new Int64Value$Type(); + var UInt64Value$Type = class extends runtime_7.MessageType { static { - __name(this, "ListArtifactsRequest$Type"); + __name(this, "UInt64Value$Type"); } constructor() { - super("github.actions.results.api.v1.ListArtifactsRequest", [ + super("google.protobuf.UInt64Value", [ { no: 1, - name: "workflow_run_backend_id", - kind: "scalar", - T: 9 - /*ScalarType.STRING*/ - }, - { - no: 2, - name: "workflow_job_run_backend_id", + name: "value", kind: "scalar", - T: 9 - /*ScalarType.STRING*/ - }, - { no: 3, name: "name_filter", kind: "message", T: /* @__PURE__ */ __name(() => wrappers_2.StringValue, "T") }, - { no: 4, name: "id_filter", kind: "message", T: /* @__PURE__ */ __name(() => wrappers_1.Int64Value, "T") } + T: 4 + /*ScalarType.UINT64*/ + } ]); } + /** + * Encode `UInt64Value` to JSON string. + */ + internalJsonWrite(message, options) { + return this.refJsonWriter.scalar(runtime_1.ScalarType.UINT64, message.value, "value", false, true); + } + /** + * Decode `UInt64Value` from JSON string. + */ + internalJsonRead(json, options, target) { + if (!target) + target = this.create(); + target.value = this.refJsonReader.scalar(json, runtime_1.ScalarType.UINT64, runtime_2.LongType.STRING, "value"); + return target; + } create(value) { - const message = { workflowRunBackendId: "", workflowJobRunBackendId: "" }; - globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + const message = { value: "0" }; + globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this }); if (value !== void 0) - (0, runtime_3.reflectionMergePartial)(this, message, value); + (0, runtime_5.reflectionMergePartial)(this, message, value); return message; } internalBinaryRead(reader, length, options, target) { @@ -98953,21 +102155,9 @@ var require_artifact = __commonJS({ while (reader.pos < end) { let [fieldNo, wireType] = reader.tag(); switch (fieldNo) { - case /* string workflow_run_backend_id */ + case /* uint64 value */ 1: - message.workflowRunBackendId = reader.string(); - break; - case /* string workflow_job_run_backend_id */ - 2: - message.workflowJobRunBackendId = reader.string(); - break; - case /* google.protobuf.StringValue name_filter */ - 3: - message.nameFilter = wrappers_2.StringValue.internalBinaryRead(reader, reader.uint32(), options, message.nameFilter); - break; - case /* google.protobuf.Int64Value id_filter */ - 4: - message.idFilter = wrappers_1.Int64Value.internalBinaryRead(reader, reader.uint32(), options, message.idFilter); + message.value = reader.uint64().toString(); break; default: let u = options.readUnknownField; @@ -98975,41 +102165,56 @@ var require_artifact = __commonJS({ throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); let d = reader.skip(wireType); if (u !== false) - (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + (u === true ? 
runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); } } return message; } internalBinaryWrite(message, writer, options) { - if (message.workflowRunBackendId !== "") - writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.workflowRunBackendId); - if (message.workflowJobRunBackendId !== "") - writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.workflowJobRunBackendId); - if (message.nameFilter) - wrappers_2.StringValue.internalBinaryWrite(message.nameFilter, writer.tag(3, runtime_1.WireType.LengthDelimited).fork(), options).join(); - if (message.idFilter) - wrappers_1.Int64Value.internalBinaryWrite(message.idFilter, writer.tag(4, runtime_1.WireType.LengthDelimited).fork(), options).join(); + if (message.value !== "0") + writer.tag(1, runtime_3.WireType.Varint).uint64(message.value); let u = options.writeUnknownFields; if (u !== false) - (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + (u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); return writer; } }; - exports2.ListArtifactsRequest = new ListArtifactsRequest$Type(); - var ListArtifactsResponse$Type = class extends runtime_5.MessageType { + exports2.UInt64Value = new UInt64Value$Type(); + var Int32Value$Type = class extends runtime_7.MessageType { static { - __name(this, "ListArtifactsResponse$Type"); + __name(this, "Int32Value$Type"); } constructor() { - super("github.actions.results.api.v1.ListArtifactsResponse", [ - { no: 1, name: "artifacts", kind: "message", repeat: 1, T: /* @__PURE__ */ __name(() => exports2.ListArtifactsResponse_MonolithArtifact, "T") } + super("google.protobuf.Int32Value", [ + { + no: 1, + name: "value", + kind: "scalar", + T: 5 + /*ScalarType.INT32*/ + } ]); } + /** + * Encode `Int32Value` to JSON string. + */ + internalJsonWrite(message, options) { + return this.refJsonWriter.scalar(5, message.value, "value", false, true); + } + /** + * Decode `Int32Value` from JSON string. + */ + internalJsonRead(json, options, target) { + if (!target) + target = this.create(); + target.value = this.refJsonReader.scalar(json, 5, void 0, "value"); + return target; + } create(value) { - const message = { artifacts: [] }; - globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + const message = { value: 0 }; + globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this }); if (value !== void 0) - (0, runtime_3.reflectionMergePartial)(this, message, value); + (0, runtime_5.reflectionMergePartial)(this, message, value); return message; } internalBinaryRead(reader, length, options, target) { @@ -99017,9 +102222,9 @@ var require_artifact = __commonJS({ while (reader.pos < end) { let [fieldNo, wireType] = reader.tag(); switch (fieldNo) { - case /* repeated github.actions.results.api.v1.ListArtifactsResponse.MonolithArtifact artifacts */ + case /* int32 value */ 1: - message.artifacts.push(exports2.ListArtifactsResponse_MonolithArtifact.internalBinaryRead(reader, reader.uint32(), options)); + message.value = reader.int32(); break; default: let u = options.readUnknownField; @@ -99027,70 +102232,257 @@ var require_artifact = __commonJS({ throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); let d = reader.skip(wireType); if (u !== false) - (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + (u === true ? 
runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); } } return message; } internalBinaryWrite(message, writer, options) { - for (let i = 0; i < message.artifacts.length; i++) - exports2.ListArtifactsResponse_MonolithArtifact.internalBinaryWrite(message.artifacts[i], writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join(); + if (message.value !== 0) + writer.tag(1, runtime_3.WireType.Varint).int32(message.value); let u = options.writeUnknownFields; if (u !== false) - (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + (u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); return writer; } }; - exports2.ListArtifactsResponse = new ListArtifactsResponse$Type(); - var ListArtifactsResponse_MonolithArtifact$Type = class extends runtime_5.MessageType { + exports2.Int32Value = new Int32Value$Type(); + var UInt32Value$Type = class extends runtime_7.MessageType { static { - __name(this, "ListArtifactsResponse_MonolithArtifact$Type"); + __name(this, "UInt32Value$Type"); } constructor() { - super("github.actions.results.api.v1.ListArtifactsResponse.MonolithArtifact", [ + super("google.protobuf.UInt32Value", [ { no: 1, - name: "workflow_run_backend_id", - kind: "scalar", - T: 9 - /*ScalarType.STRING*/ - }, - { - no: 2, - name: "workflow_job_run_backend_id", + name: "value", kind: "scalar", - T: 9 - /*ScalarType.STRING*/ - }, + T: 13 + /*ScalarType.UINT32*/ + } + ]); + } + /** + * Encode `UInt32Value` to JSON string. + */ + internalJsonWrite(message, options) { + return this.refJsonWriter.scalar(13, message.value, "value", false, true); + } + /** + * Decode `UInt32Value` from JSON string. + */ + internalJsonRead(json, options, target) { + if (!target) + target = this.create(); + target.value = this.refJsonReader.scalar(json, 13, void 0, "value"); + return target; + } + create(value) { + const message = { value: 0 }; + globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== void 0) + (0, runtime_5.reflectionMergePartial)(this, message, value); + return message; + } + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* uint32 value */ + 1: + message.value = reader.uint32(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message, writer, options) { + if (message.value !== 0) + writer.tag(1, runtime_3.WireType.Varint).uint32(message.value); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } + }; + exports2.UInt32Value = new UInt32Value$Type(); + var BoolValue$Type = class extends runtime_7.MessageType { + static { + __name(this, "BoolValue$Type"); + } + constructor() { + super("google.protobuf.BoolValue", [ { - no: 3, - name: "database_id", + no: 1, + name: "value", kind: "scalar", - T: 3 - /*ScalarType.INT64*/ - }, + T: 8 + /*ScalarType.BOOL*/ + } + ]); + } + /** + * Encode `BoolValue` to JSON bool. + */ + internalJsonWrite(message, options) { + return message.value; + } + /** + * Decode `BoolValue` from JSON bool. + */ + internalJsonRead(json, options, target) { + if (!target) + target = this.create(); + target.value = this.refJsonReader.scalar(json, 8, void 0, "value"); + return target; + } + create(value) { + const message = { value: false }; + globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== void 0) + (0, runtime_5.reflectionMergePartial)(this, message, value); + return message; + } + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* bool value */ + 1: + message.value = reader.bool(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message, writer, options) { + if (message.value !== false) + writer.tag(1, runtime_3.WireType.Varint).bool(message.value); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } + }; + exports2.BoolValue = new BoolValue$Type(); + var StringValue$Type = class extends runtime_7.MessageType { + static { + __name(this, "StringValue$Type"); + } + constructor() { + super("google.protobuf.StringValue", [ { - no: 4, - name: "name", + no: 1, + name: "value", kind: "scalar", T: 9 /*ScalarType.STRING*/ - }, + } + ]); + } + /** + * Encode `StringValue` to JSON string. + */ + internalJsonWrite(message, options) { + return message.value; + } + /** + * Decode `StringValue` from JSON string. + */ + internalJsonRead(json, options, target) { + if (!target) + target = this.create(); + target.value = this.refJsonReader.scalar(json, 9, void 0, "value"); + return target; + } + create(value) { + const message = { value: "" }; + globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== void 0) + (0, runtime_5.reflectionMergePartial)(this, message, value); + return message; + } + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? 
target : this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string value */ + 1: + message.value = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message, writer, options) { + if (message.value !== "") + writer.tag(1, runtime_3.WireType.LengthDelimited).string(message.value); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } + }; + exports2.StringValue = new StringValue$Type(); + var BytesValue$Type = class extends runtime_7.MessageType { + static { + __name(this, "BytesValue$Type"); + } + constructor() { + super("google.protobuf.BytesValue", [ { - no: 5, - name: "size", + no: 1, + name: "value", kind: "scalar", - T: 3 - /*ScalarType.INT64*/ - }, - { no: 6, name: "created_at", kind: "message", T: /* @__PURE__ */ __name(() => timestamp_1.Timestamp, "T") } + T: 12 + /*ScalarType.BYTES*/ + } ]); } + /** + * Encode `BytesValue` to JSON string. + */ + internalJsonWrite(message, options) { + return this.refJsonWriter.scalar(12, message.value, "value", false, true); + } + /** + * Decode `BytesValue` from JSON string. + */ + internalJsonRead(json, options, target) { + if (!target) + target = this.create(); + target.value = this.refJsonReader.scalar(json, 12, void 0, "value"); + return target; + } create(value) { - const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", databaseId: "0", name: "", size: "0" }; - globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + const message = { value: new Uint8Array(0) }; + globalThis.Object.defineProperty(message, runtime_6.MESSAGE_TYPE, { enumerable: false, value: this }); if (value !== void 0) - (0, runtime_3.reflectionMergePartial)(this, message, value); + (0, runtime_5.reflectionMergePartial)(this, message, value); return message; } internalBinaryRead(reader, length, options, target) { @@ -99098,29 +102490,9 @@ var require_artifact = __commonJS({ while (reader.pos < end) { let [fieldNo, wireType] = reader.tag(); switch (fieldNo) { - case /* string workflow_run_backend_id */ + case /* bytes value */ 1: - message.workflowRunBackendId = reader.string(); - break; - case /* string workflow_job_run_backend_id */ - 2: - message.workflowJobRunBackendId = reader.string(); - break; - case /* int64 database_id */ - 3: - message.databaseId = reader.int64().toString(); - break; - case /* string name */ - 4: - message.name = reader.string(); - break; - case /* int64 size */ - 5: - message.size = reader.int64().toString(); - break; - case /* google.protobuf.Timestamp created_at */ - 6: - message.createdAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.createdAt); + message.value = reader.bytes(); break; default: let u = options.readUnknownField; @@ -99128,37 +102500,45 @@ var require_artifact = __commonJS({ throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); let d = reader.skip(wireType); if (u !== false) - (u === true ? 
runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + (u === true ? runtime_4.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); } } return message; } internalBinaryWrite(message, writer, options) { - if (message.workflowRunBackendId !== "") - writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.workflowRunBackendId); - if (message.workflowJobRunBackendId !== "") - writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.workflowJobRunBackendId); - if (message.databaseId !== "0") - writer.tag(3, runtime_1.WireType.Varint).int64(message.databaseId); - if (message.name !== "") - writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.name); - if (message.size !== "0") - writer.tag(5, runtime_1.WireType.Varint).int64(message.size); - if (message.createdAt) - timestamp_1.Timestamp.internalBinaryWrite(message.createdAt, writer.tag(6, runtime_1.WireType.LengthDelimited).fork(), options).join(); + if (message.value.length) + writer.tag(1, runtime_3.WireType.LengthDelimited).bytes(message.value); let u = options.writeUnknownFields; if (u !== false) - (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + (u == true ? runtime_4.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); return writer; } }; - exports2.ListArtifactsResponse_MonolithArtifact = new ListArtifactsResponse_MonolithArtifact$Type(); - var GetSignedArtifactURLRequest$Type = class extends runtime_5.MessageType { + exports2.BytesValue = new BytesValue$Type(); + } +}); + +// ../node_modules/@actions/artifact/lib/generated/results/api/v1/artifact.js +var require_artifact = __commonJS({ + "../node_modules/@actions/artifact/lib/generated/results/api/v1/artifact.js"(exports2) { + "use strict"; + Object.defineProperty(exports2, "__esModule", { value: true }); + exports2.ArtifactService = exports2.DeleteArtifactResponse = exports2.DeleteArtifactRequest = exports2.GetSignedArtifactURLResponse = exports2.GetSignedArtifactURLRequest = exports2.ListArtifactsResponse_MonolithArtifact = exports2.ListArtifactsResponse = exports2.ListArtifactsRequest = exports2.FinalizeArtifactResponse = exports2.FinalizeArtifactRequest = exports2.CreateArtifactResponse = exports2.CreateArtifactRequest = void 0; + var runtime_rpc_1 = require_commonjs8(); + var runtime_1 = require_commonjs7(); + var runtime_2 = require_commonjs7(); + var runtime_3 = require_commonjs7(); + var runtime_4 = require_commonjs7(); + var runtime_5 = require_commonjs7(); + var wrappers_1 = require_wrappers(); + var wrappers_2 = require_wrappers(); + var timestamp_1 = require_timestamp2(); + var CreateArtifactRequest$Type = class extends runtime_5.MessageType { static { - __name(this, "GetSignedArtifactURLRequest$Type"); + __name(this, "CreateArtifactRequest$Type"); } constructor() { - super("github.actions.results.api.v1.GetSignedArtifactURLRequest", [ + super("github.actions.results.api.v1.CreateArtifactRequest", [ { no: 1, name: "workflow_run_backend_id", @@ -99179,11 +102559,19 @@ var require_artifact = __commonJS({ kind: "scalar", T: 9 /*ScalarType.STRING*/ + }, + { no: 4, name: "expires_at", kind: "message", T: /* @__PURE__ */ __name(() => timestamp_1.Timestamp, "T") }, + { + no: 5, + name: "version", + kind: "scalar", + T: 5 + /*ScalarType.INT32*/ } ]); } create(value) { - const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", name: "" }; + const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", name: "", 
version: 0 }; globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); if (value !== void 0) (0, runtime_3.reflectionMergePartial)(this, message, value); @@ -99206,6 +102594,14 @@ var require_artifact = __commonJS({ 3: message.name = reader.string(); break; + case /* google.protobuf.Timestamp expires_at */ + 4: + message.expiresAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.expiresAt); + break; + case /* int32 version */ + 5: + message.version = reader.int32(); + break; default: let u = options.readUnknownField; if (u === "throw") @@ -99224,22 +102620,33 @@ var require_artifact = __commonJS({ writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.workflowJobRunBackendId); if (message.name !== "") writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.name); + if (message.expiresAt) + timestamp_1.Timestamp.internalBinaryWrite(message.expiresAt, writer.tag(4, runtime_1.WireType.LengthDelimited).fork(), options).join(); + if (message.version !== 0) + writer.tag(5, runtime_1.WireType.Varint).int32(message.version); let u = options.writeUnknownFields; if (u !== false) (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); return writer; } }; - exports2.GetSignedArtifactURLRequest = new GetSignedArtifactURLRequest$Type(); - var GetSignedArtifactURLResponse$Type = class extends runtime_5.MessageType { + exports2.CreateArtifactRequest = new CreateArtifactRequest$Type(); + var CreateArtifactResponse$Type = class extends runtime_5.MessageType { static { - __name(this, "GetSignedArtifactURLResponse$Type"); + __name(this, "CreateArtifactResponse$Type"); } constructor() { - super("github.actions.results.api.v1.GetSignedArtifactURLResponse", [ + super("github.actions.results.api.v1.CreateArtifactResponse", [ { no: 1, - name: "signed_url", + name: "ok", + kind: "scalar", + T: 8 + /*ScalarType.BOOL*/ + }, + { + no: 2, + name: "signed_upload_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ @@ -99247,7 +102654,7 @@ var require_artifact = __commonJS({ ]); } create(value) { - const message = { signedUrl: "" }; + const message = { ok: false, signedUploadUrl: "" }; globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); if (value !== void 0) (0, runtime_3.reflectionMergePartial)(this, message, value); @@ -99258,9 +102665,13 @@ var require_artifact = __commonJS({ while (reader.pos < end) { let [fieldNo, wireType] = reader.tag(); switch (fieldNo) { - case /* string signed_url */ + case /* bool ok */ 1: - message.signedUrl = reader.string(); + message.ok = reader.bool(); + break; + case /* string signed_upload_url */ + 2: + message.signedUploadUrl = reader.string(); break; default: let u = options.readUnknownField; @@ -99274,21 +102685,23 @@ var require_artifact = __commonJS({ return message; } internalBinaryWrite(message, writer, options) { - if (message.signedUrl !== "") - writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.signedUrl); + if (message.ok !== false) + writer.tag(1, runtime_1.WireType.Varint).bool(message.ok); + if (message.signedUploadUrl !== "") + writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.signedUploadUrl); let u = options.writeUnknownFields; if (u !== false) (u == true ? 
runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); return writer; } }; - exports2.GetSignedArtifactURLResponse = new GetSignedArtifactURLResponse$Type(); - var DeleteArtifactRequest$Type = class extends runtime_5.MessageType { + exports2.CreateArtifactResponse = new CreateArtifactResponse$Type(); + var FinalizeArtifactRequest$Type = class extends runtime_5.MessageType { static { - __name(this, "DeleteArtifactRequest$Type"); + __name(this, "FinalizeArtifactRequest$Type"); } constructor() { - super("github.actions.results.api.v1.DeleteArtifactRequest", [ + super("github.actions.results.api.v1.FinalizeArtifactRequest", [ { no: 1, name: "workflow_run_backend_id", @@ -99309,11 +102722,19 @@ var require_artifact = __commonJS({ kind: "scalar", T: 9 /*ScalarType.STRING*/ - } + }, + { + no: 4, + name: "size", + kind: "scalar", + T: 3 + /*ScalarType.INT64*/ + }, + { no: 5, name: "hash", kind: "message", T: /* @__PURE__ */ __name(() => wrappers_2.StringValue, "T") } ]); } create(value) { - const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", name: "" }; + const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", name: "", size: "0" }; globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); if (value !== void 0) (0, runtime_3.reflectionMergePartial)(this, message, value); @@ -99336,6 +102757,14 @@ var require_artifact = __commonJS({ 3: message.name = reader.string(); break; + case /* int64 size */ + 4: + message.size = reader.int64().toString(); + break; + case /* google.protobuf.StringValue hash */ + 5: + message.hash = wrappers_2.StringValue.internalBinaryRead(reader, reader.uint32(), options, message.hash); + break; default: let u = options.readUnknownField; if (u === "throw") @@ -99354,19 +102783,23 @@ var require_artifact = __commonJS({ writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.workflowJobRunBackendId); if (message.name !== "") writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.name); + if (message.size !== "0") + writer.tag(4, runtime_1.WireType.Varint).int64(message.size); + if (message.hash) + wrappers_2.StringValue.internalBinaryWrite(message.hash, writer.tag(5, runtime_1.WireType.LengthDelimited).fork(), options).join(); let u = options.writeUnknownFields; if (u !== false) (u == true ? 
runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); return writer; } }; - exports2.DeleteArtifactRequest = new DeleteArtifactRequest$Type(); - var DeleteArtifactResponse$Type = class extends runtime_5.MessageType { + exports2.FinalizeArtifactRequest = new FinalizeArtifactRequest$Type(); + var FinalizeArtifactResponse$Type = class extends runtime_5.MessageType { static { - __name(this, "DeleteArtifactResponse$Type"); + __name(this, "FinalizeArtifactResponse$Type"); } constructor() { - super("github.actions.results.api.v1.DeleteArtifactResponse", [ + super("github.actions.results.api.v1.FinalizeArtifactResponse", [ { no: 1, name: "ok", @@ -99425,1538 +102858,523 @@ var require_artifact = __commonJS({ return writer; } }; - exports2.DeleteArtifactResponse = new DeleteArtifactResponse$Type(); - exports2.ArtifactService = new runtime_rpc_1.ServiceType("github.actions.results.api.v1.ArtifactService", [ - { name: "CreateArtifact", options: {}, I: exports2.CreateArtifactRequest, O: exports2.CreateArtifactResponse }, - { name: "FinalizeArtifact", options: {}, I: exports2.FinalizeArtifactRequest, O: exports2.FinalizeArtifactResponse }, - { name: "ListArtifacts", options: {}, I: exports2.ListArtifactsRequest, O: exports2.ListArtifactsResponse }, - { name: "GetSignedArtifactURL", options: {}, I: exports2.GetSignedArtifactURLRequest, O: exports2.GetSignedArtifactURLResponse }, - { name: "DeleteArtifact", options: {}, I: exports2.DeleteArtifactRequest, O: exports2.DeleteArtifactResponse } - ]); - } -}); - -// ../node_modules/twirp-ts/build/twirp/context.js -var require_context2 = __commonJS({ - "../node_modules/twirp-ts/build/twirp/context.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - } -}); - -// ../node_modules/twirp-ts/build/twirp/hooks.js -var require_hooks = __commonJS({ - "../node_modules/twirp-ts/build/twirp/hooks.js"(exports2) { - "use strict"; - var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? value : new P(function(resolve) { - resolve(value); - }); - } - __name(adopt, "adopt"); - return new (P || (P = Promise))(function(resolve, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); - } - } - __name(fulfilled, "fulfilled"); - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); - } - } - __name(rejected, "rejected"); - function step(result) { - result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); - } - __name(step, "step"); - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); - }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.isHook = exports2.chainHooks = void 0; - function chainHooks(...hooks) { - if (hooks.length === 0) { - return null; - } - if (hooks.length === 1) { - return hooks[0]; - } - const serverHook = { - requestReceived(ctx) { - return __awaiter3(this, void 0, void 0, function* () { - for (const hook of hooks) { - if (!hook.requestReceived) { - continue; - } - yield hook.requestReceived(ctx); - } - }); - }, - requestPrepared(ctx) { - return __awaiter3(this, void 0, void 0, function* () { - for (const hook of hooks) { - if (!hook.requestPrepared) { - continue; - } - console.warn("hook requestPrepared is deprecated and will be removed in the next release. 
Please use responsePrepared instead."); - yield hook.requestPrepared(ctx); - } - }); - }, - responsePrepared(ctx) { - return __awaiter3(this, void 0, void 0, function* () { - for (const hook of hooks) { - if (!hook.responsePrepared) { - continue; - } - yield hook.responsePrepared(ctx); - } - }); - }, - requestSent(ctx) { - return __awaiter3(this, void 0, void 0, function* () { - for (const hook of hooks) { - if (!hook.requestSent) { - continue; - } - console.warn("hook requestSent is deprecated and will be removed in the next release. Please use responseSent instead."); - yield hook.requestSent(ctx); - } - }); - }, - responseSent(ctx) { - return __awaiter3(this, void 0, void 0, function* () { - for (const hook of hooks) { - if (!hook.responseSent) { - continue; - } - yield hook.responseSent(ctx); - } - }); - }, - requestRouted(ctx) { - return __awaiter3(this, void 0, void 0, function* () { - for (const hook of hooks) { - if (!hook.requestRouted) { - continue; - } - yield hook.requestRouted(ctx); - } - }); - }, - error(ctx, err) { - return __awaiter3(this, void 0, void 0, function* () { - for (const hook of hooks) { - if (!hook.error) { - continue; - } - yield hook.error(ctx, err); - } - }); - } - }; - return serverHook; - } - __name(chainHooks, "chainHooks"); - exports2.chainHooks = chainHooks; - function isHook(object) { - return "requestReceived" in object || "requestPrepared" in object || "requestSent" in object || "requestRouted" in object || "responsePrepared" in object || "responseSent" in object || "error" in object; - } - __name(isHook, "isHook"); - exports2.isHook = isHook; - } -}); - -// ../node_modules/twirp-ts/build/twirp/errors.js -var require_errors2 = __commonJS({ - "../node_modules/twirp-ts/build/twirp/errors.js"(exports2) { - "use strict"; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.isValidErrorCode = exports2.httpStatusFromErrorCode = exports2.TwirpErrorCode = exports2.BadRouteError = exports2.InternalServerErrorWith = exports2.InternalServerError = exports2.RequiredArgumentError = exports2.InvalidArgumentError = exports2.NotFoundError = exports2.TwirpError = void 0; - var TwirpError = class _TwirpError extends Error { - static { - __name(this, "TwirpError"); - } - constructor(code, msg) { - super(msg); - this.code = TwirpErrorCode.Internal; - this.meta = {}; - this.code = code; - this.msg = msg; - Object.setPrototypeOf(this, _TwirpError.prototype); - } - /** - * Adds a metadata kv to the error - * @param key - * @param value - */ - withMeta(key, value) { - this.meta[key] = value; - return this; - } - /** - * Returns a single metadata value - * return "" if not found - * @param key - */ - getMeta(key) { - return this.meta[key] || ""; - } - /** - * Add the original error cause - * @param err - * @param addMeta - */ - withCause(err, addMeta = false) { - this._originalCause = err; - if (addMeta) { - this.withMeta("cause", err.message); - } - return this; - } - cause() { - return this._originalCause; - } - /** - * Returns the error representation to JSON - */ - toJSON() { - try { - return JSON.stringify({ - code: this.code, - msg: this.msg, - meta: this.meta - }); - } catch (e) { - return `{"code": "internal", "msg": "There was an error but it could not be serialized into JSON"}`; - } - } - /** - * Create a twirp error from an object - * @param obj - */ - static fromObject(obj) { - const code = obj["code"] || TwirpErrorCode.Unknown; - const msg = obj["msg"] || "unknown"; - const error = new _TwirpError(code, msg); - if (obj["meta"]) { - 
Object.keys(obj["meta"]).forEach((key) => { - error.withMeta(key, obj["meta"][key]); - }); - } - return error; - } - }; - exports2.TwirpError = TwirpError; - var NotFoundError = class extends TwirpError { - static { - __name(this, "NotFoundError"); - } - constructor(msg) { - super(TwirpErrorCode.NotFound, msg); - } - }; - exports2.NotFoundError = NotFoundError; - var InvalidArgumentError = class extends TwirpError { - static { - __name(this, "InvalidArgumentError"); - } - constructor(argument, validationMsg) { - super(TwirpErrorCode.InvalidArgument, argument + " " + validationMsg); - this.withMeta("argument", argument); - } - }; - exports2.InvalidArgumentError = InvalidArgumentError; - var RequiredArgumentError = class extends InvalidArgumentError { - static { - __name(this, "RequiredArgumentError"); - } - constructor(argument) { - super(argument, "is required"); - } - }; - exports2.RequiredArgumentError = RequiredArgumentError; - var InternalServerError = class extends TwirpError { - static { - __name(this, "InternalServerError"); - } - constructor(msg) { - super(TwirpErrorCode.Internal, msg); - } - }; - exports2.InternalServerError = InternalServerError; - var InternalServerErrorWith = class extends InternalServerError { - static { - __name(this, "InternalServerErrorWith"); - } - constructor(err) { - super(err.message); - this.withMeta("cause", err.name); - this.withCause(err); - } - }; - exports2.InternalServerErrorWith = InternalServerErrorWith; - var BadRouteError = class extends TwirpError { - static { - __name(this, "BadRouteError"); - } - constructor(msg, method, url) { - super(TwirpErrorCode.BadRoute, msg); - this.withMeta("twirp_invalid_route", method + " " + url); - } - }; - exports2.BadRouteError = BadRouteError; - var TwirpErrorCode; - (function(TwirpErrorCode2) { - TwirpErrorCode2["Canceled"] = "canceled"; - TwirpErrorCode2["Unknown"] = "unknown"; - TwirpErrorCode2["InvalidArgument"] = "invalid_argument"; - TwirpErrorCode2["Malformed"] = "malformed"; - TwirpErrorCode2["DeadlineExceeded"] = "deadline_exceeded"; - TwirpErrorCode2["NotFound"] = "not_found"; - TwirpErrorCode2["BadRoute"] = "bad_route"; - TwirpErrorCode2["AlreadyExists"] = "already_exists"; - TwirpErrorCode2["PermissionDenied"] = "permission_denied"; - TwirpErrorCode2["Unauthenticated"] = "unauthenticated"; - TwirpErrorCode2["ResourceExhausted"] = "resource_exhausted"; - TwirpErrorCode2["FailedPrecondition"] = "failed_precondition"; - TwirpErrorCode2["Aborted"] = "aborted"; - TwirpErrorCode2["OutOfRange"] = "out_of_range"; - TwirpErrorCode2["Unimplemented"] = "unimplemented"; - TwirpErrorCode2["Internal"] = "internal"; - TwirpErrorCode2["Unavailable"] = "unavailable"; - TwirpErrorCode2["DataLoss"] = "data_loss"; - })(TwirpErrorCode = exports2.TwirpErrorCode || (exports2.TwirpErrorCode = {})); - function httpStatusFromErrorCode(code) { - switch (code) { - case TwirpErrorCode.Canceled: - return 408; - // RequestTimeout - case TwirpErrorCode.Unknown: - return 500; - // Internal Server Error - case TwirpErrorCode.InvalidArgument: - return 400; - // BadRequest - case TwirpErrorCode.Malformed: - return 400; - // BadRequest - case TwirpErrorCode.DeadlineExceeded: - return 408; - // RequestTimeout - case TwirpErrorCode.NotFound: - return 404; - // Not Found - case TwirpErrorCode.BadRoute: - return 404; - // Not Found - case TwirpErrorCode.AlreadyExists: - return 409; - // Conflict - case TwirpErrorCode.PermissionDenied: - return 403; - // Forbidden - case TwirpErrorCode.Unauthenticated: - return 401; - // Unauthorized - 
case TwirpErrorCode.ResourceExhausted: - return 429; - // Too Many Requests - case TwirpErrorCode.FailedPrecondition: - return 412; - // Precondition Failed - case TwirpErrorCode.Aborted: - return 409; - // Conflict - case TwirpErrorCode.OutOfRange: - return 400; - // Bad Request - case TwirpErrorCode.Unimplemented: - return 501; - // Not Implemented - case TwirpErrorCode.Internal: - return 500; - // Internal Server Error - case TwirpErrorCode.Unavailable: - return 503; - // Service Unavailable - case TwirpErrorCode.DataLoss: - return 500; - // Internal Server Error - default: - return 0; - } - } - __name(httpStatusFromErrorCode, "httpStatusFromErrorCode"); - exports2.httpStatusFromErrorCode = httpStatusFromErrorCode; - function isValidErrorCode(code) { - return httpStatusFromErrorCode(code) != 0; - } - __name(isValidErrorCode, "isValidErrorCode"); - exports2.isValidErrorCode = isValidErrorCode; - } -}); - -// ../node_modules/twirp-ts/build/twirp/request.js -var require_request3 = __commonJS({ - "../node_modules/twirp-ts/build/twirp/request.js"(exports2) { - "use strict"; - var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? value : new P(function(resolve) { - resolve(value); - }); - } - __name(adopt, "adopt"); - return new (P || (P = Promise))(function(resolve, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); - } - } - __name(fulfilled, "fulfilled"); - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); - } - } - __name(rejected, "rejected"); - function step(result) { - result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); - } - __name(step, "step"); - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); - }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.parseTwirpPath = exports2.getRequestData = exports2.validateRequest = exports2.getContentType = exports2.TwirpContentType = void 0; - var errors_1 = require_errors2(); - var TwirpContentType; - (function(TwirpContentType2) { - TwirpContentType2[TwirpContentType2["Protobuf"] = 0] = "Protobuf"; - TwirpContentType2[TwirpContentType2["JSON"] = 1] = "JSON"; - TwirpContentType2[TwirpContentType2["Unknown"] = 2] = "Unknown"; - })(TwirpContentType = exports2.TwirpContentType || (exports2.TwirpContentType = {})); - function getContentType(mimeType) { - switch (mimeType) { - case "application/protobuf": - return TwirpContentType.Protobuf; - case "application/json": - return TwirpContentType.JSON; - default: - return TwirpContentType.Unknown; - } - } - __name(getContentType, "getContentType"); - exports2.getContentType = getContentType; - function validateRequest(ctx, request, pathPrefix) { - if (request.method !== "POST") { - const msg = `unsupported method ${request.method} (only POST is allowed)`; - throw new errors_1.BadRouteError(msg, request.method || "", request.url || ""); - } - const path2 = parseTwirpPath(request.url || ""); - if (path2.pkgService !== (ctx.packageName ? ctx.packageName + "." 
: "") + ctx.serviceName) { - const msg = `no handler for path ${request.url}`; - throw new errors_1.BadRouteError(msg, request.method || "", request.url || ""); - } - if (path2.prefix !== pathPrefix) { - const msg = `invalid path prefix ${path2.prefix}, expected ${pathPrefix}, on path ${request.url}`; - throw new errors_1.BadRouteError(msg, request.method || "", request.url || ""); - } - const mimeContentType = request.headers["content-type"] || ""; - if (ctx.contentType === TwirpContentType.Unknown) { - const msg = `unexpected Content-Type: ${request.headers["content-type"]}`; - throw new errors_1.BadRouteError(msg, request.method || "", request.url || ""); - } - return Object.assign(Object.assign({}, path2), { mimeContentType, contentType: ctx.contentType }); - } - __name(validateRequest, "validateRequest"); - exports2.validateRequest = validateRequest; - function getRequestData(req) { - return new Promise((resolve, reject) => { - const reqWithRawBody = req; - if (reqWithRawBody.rawBody instanceof Buffer) { - resolve(reqWithRawBody.rawBody); - return; - } - const chunks = []; - req.on("data", (chunk) => chunks.push(chunk)); - req.on("end", () => __awaiter3(this, void 0, void 0, function* () { - const data = Buffer.concat(chunks); - resolve(data); - })); - req.on("error", (err) => { - if (req.aborted) { - reject(new errors_1.TwirpError(errors_1.TwirpErrorCode.DeadlineExceeded, "failed to read request: deadline exceeded")); - } else { - reject(new errors_1.TwirpError(errors_1.TwirpErrorCode.Malformed, err.message).withCause(err)); - } - }); - req.on("close", () => { - reject(new errors_1.TwirpError(errors_1.TwirpErrorCode.Canceled, "failed to read request: context canceled")); - }); - }); - } - __name(getRequestData, "getRequestData"); - exports2.getRequestData = getRequestData; - function parseTwirpPath(path2) { - const parts = path2.split("/"); - if (parts.length < 2) { - return { - pkgService: "", - method: "", - prefix: "" - }; - } - return { - method: parts[parts.length - 1], - pkgService: parts[parts.length - 2], - prefix: parts.slice(0, parts.length - 2).join("/") - }; - } - __name(parseTwirpPath, "parseTwirpPath"); - exports2.parseTwirpPath = parseTwirpPath; - } -}); - -// ../node_modules/twirp-ts/build/twirp/server.js -var require_server = __commonJS({ - "../node_modules/twirp-ts/build/twirp/server.js"(exports2) { - "use strict"; - var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? value : new P(function(resolve) { - resolve(value); - }); - } - __name(adopt, "adopt"); - return new (P || (P = Promise))(function(resolve, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); - } - } - __name(fulfilled, "fulfilled"); - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); - } - } - __name(rejected, "rejected"); - function step(result) { - result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); - } - __name(step, "step"); - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); - }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.writeError = exports2.TwirpServer = void 0; - var hooks_1 = require_hooks(); - var request_1 = require_request3(); - var errors_1 = require_errors2(); - var TwirpServer = class { + exports2.FinalizeArtifactResponse = new FinalizeArtifactResponse$Type(); + var ListArtifactsRequest$Type = class extends runtime_5.MessageType { static { - __name(this, "TwirpServer"); - } - constructor(options) { - this.pathPrefix = "/twirp"; - this.hooks = []; - this.interceptors = []; - this.packageName = options.packageName; - this.serviceName = options.serviceName; - this.methodList = options.methodList; - this.matchRoute = options.matchRoute; - this.service = options.service; - } - /** - * Returns the prefix for this server - */ - get prefix() { - return this.pathPrefix; - } - /** - * The http handler for twirp complaint endpoints - * @param options - */ - httpHandler(options) { - return (req, resp) => { - if ((options === null || options === void 0 ? void 0 : options.prefix) !== void 0) { - this.withPrefix(options.prefix); - } - return this._httpHandler(req, resp); - }; - } - /** - * Adds interceptors or hooks to the request stack - * @param middlewares - */ - use(...middlewares) { - middlewares.forEach((middleware) => { - if (hooks_1.isHook(middleware)) { - this.hooks.push(middleware); - return this; - } - this.interceptors.push(middleware); - }); - return this; - } - /** - * Adds a prefix to the service url path - * @param prefix - */ - withPrefix(prefix) { - if (prefix === false) { - this.pathPrefix = ""; - } else { - this.pathPrefix = prefix; - } - return this; - } - /** - * Returns the regex matching path for this twirp server - */ - matchingPath() { - const baseRegex = this.baseURI().replace(/\./g, "\\."); - return new RegExp(`${baseRegex}/(${this.methodList.join("|")})`); - } - /** - * Returns the base URI for this twirp server - */ - baseURI() { - return `${this.pathPrefix}/${this.packageName ? this.packageName + "." 
: ""}${this.serviceName}`; - } - /** - * Create a twirp context - * @param req - * @param res - * @private - */ - createContext(req, res) { - return { - packageName: this.packageName, - serviceName: this.serviceName, - methodName: "", - contentType: request_1.getContentType(req.headers["content-type"]), - req, - res - }; - } - /** - * Twrip server http handler implementation - * @param req - * @param resp - * @private - */ - _httpHandler(req, resp) { - return __awaiter3(this, void 0, void 0, function* () { - const ctx = this.createContext(req, resp); - try { - yield this.invokeHook("requestReceived", ctx); - const { method, mimeContentType } = request_1.validateRequest(ctx, req, this.pathPrefix || ""); - const handler = this.matchRoute(method, { - onMatch: /* @__PURE__ */ __name((ctx2) => { - return this.invokeHook("requestRouted", ctx2); - }, "onMatch"), - onNotFound: /* @__PURE__ */ __name(() => { - const msg = `no handler for path ${req.url}`; - throw new errors_1.BadRouteError(msg, req.method || "", req.url || ""); - }, "onNotFound") - }); - const body = yield request_1.getRequestData(req); - const response = yield handler(ctx, this.service, body, this.interceptors); - yield Promise.all([ - this.invokeHook("responsePrepared", ctx), - // keep backwards compatibility till next release - this.invokeHook("requestPrepared", ctx) - ]); - resp.statusCode = 200; - resp.setHeader("Content-Type", mimeContentType); - resp.end(response); - } catch (e) { - yield this.invokeHook("error", ctx, mustBeTwirpError(e)); - if (!resp.headersSent) { - writeError(resp, e); - } - } finally { - yield Promise.all([ - this.invokeHook("responseSent", ctx), - // keep backwards compatibility till next release - this.invokeHook("requestSent", ctx) - ]); - } - }); - } - /** - * Invoke a hook - * @param hookName - * @param ctx - * @param err - * @protected - */ - invokeHook(hookName, ctx, err) { - return __awaiter3(this, void 0, void 0, function* () { - if (this.hooks.length === 0) { - return; - } - const chainedHooks = hooks_1.chainHooks(...this.hooks); - const hook = chainedHooks === null || chainedHooks === void 0 ? void 0 : chainedHooks[hookName]; - if (hook) { - yield hook(ctx, err || new errors_1.InternalServerError("internal server error")); - } - }); - } - }; - exports2.TwirpServer = TwirpServer; - function writeError(res, error) { - const twirpError = mustBeTwirpError(error); - res.setHeader("Content-Type", "application/json"); - res.statusCode = errors_1.httpStatusFromErrorCode(twirpError.code); - res.end(twirpError.toJSON()); - } - __name(writeError, "writeError"); - exports2.writeError = writeError; - function mustBeTwirpError(err) { - if (err instanceof errors_1.TwirpError) { - return err; - } - return new errors_1.InternalServerErrorWith(err); - } - __name(mustBeTwirpError, "mustBeTwirpError"); - } -}); - -// ../node_modules/twirp-ts/build/twirp/interceptors.js -var require_interceptors = __commonJS({ - "../node_modules/twirp-ts/build/twirp/interceptors.js"(exports2) { - "use strict"; - var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? 
value : new P(function(resolve) { - resolve(value); - }); - } - __name(adopt, "adopt"); - return new (P || (P = Promise))(function(resolve, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); - } - } - __name(fulfilled, "fulfilled"); - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); - } - } - __name(rejected, "rejected"); - function step(result) { - result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); - } - __name(step, "step"); - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); - }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.chainInterceptors = void 0; - function chainInterceptors(...interceptors) { - if (interceptors.length === 0) { - return; - } - if (interceptors.length === 1) { - return interceptors[0]; - } - const first = interceptors[0]; - return (ctx, request, handler) => __awaiter3(this, void 0, void 0, function* () { - let next = handler; - for (let i = interceptors.length - 1; i > 0; i--) { - next = /* @__PURE__ */ ((next2) => (ctx2, typedRequest) => { - return interceptors[i](ctx2, typedRequest, next2); - })(next); - } - return first(ctx, request, next); - }); - } - __name(chainInterceptors, "chainInterceptors"); - exports2.chainInterceptors = chainInterceptors; - } -}); - -// ../node_modules/dot-object/index.js -var require_dot_object = __commonJS({ - "../node_modules/dot-object/index.js"(exports2, module2) { - "use strict"; - function _process(v, mod) { - var i; - var r; - if (typeof mod === "function") { - r = mod(v); - if (r !== void 0) { - v = r; - } - } else if (Array.isArray(mod)) { - for (i = 0; i < mod.length; i++) { - r = mod[i](v); - if (r !== void 0) { - v = r; - } - } - } - return v; - } - __name(_process, "_process"); - function parseKey(key, val) { - if (key[0] === "-" && Array.isArray(val) && /^-\d+$/.test(key)) { - return val.length + parseInt(key, 10); - } - return key; - } - __name(parseKey, "parseKey"); - function isIndex(k) { - return /^\d+$/.test(k); - } - __name(isIndex, "isIndex"); - function isObject(val) { - return Object.prototype.toString.call(val) === "[object Object]"; - } - __name(isObject, "isObject"); - function isArrayOrObject(val) { - return Object(val) === val; - } - __name(isArrayOrObject, "isArrayOrObject"); - function isEmptyObject(val) { - return Object.keys(val).length === 0; - } - __name(isEmptyObject, "isEmptyObject"); - var blacklist = ["__proto__", "prototype", "constructor"]; - var blacklistFilter = /* @__PURE__ */ __name(function(part) { - return blacklist.indexOf(part) === -1; - }, "blacklistFilter"); - function parsePath(path2, sep) { - if (path2.indexOf("[") >= 0) { - path2 = path2.replace(/\[/g, sep).replace(/]/g, ""); - } - var parts = path2.split(sep); - var check = parts.filter(blacklistFilter); - if (check.length !== parts.length) { - throw Error("Refusing to update blacklisted property " + path2); - } - return parts; - } - __name(parsePath, "parsePath"); - var hasOwnProperty = Object.prototype.hasOwnProperty; - function DotObject(separator, override, useArray, useBrackets) { - if (!(this instanceof DotObject)) { - return new DotObject(separator, override, useArray, useBrackets); - } - if (typeof override === "undefined") override = false; - if (typeof useArray === "undefined") useArray = true; - if (typeof useBrackets === "undefined") useBrackets = true; - this.separator = separator || "."; - this.override = override; - 
this.useArray = useArray; - this.useBrackets = useBrackets; - this.keepArray = false; - this.cleanup = []; - } - __name(DotObject, "DotObject"); - var dotDefault = new DotObject(".", false, true, true); - function wrap(method) { - return function() { - return dotDefault[method].apply(dotDefault, arguments); - }; - } - __name(wrap, "wrap"); - DotObject.prototype._fill = function(a, obj, v, mod) { - var k = a.shift(); - if (a.length > 0) { - obj[k] = obj[k] || (this.useArray && isIndex(a[0]) ? [] : {}); - if (!isArrayOrObject(obj[k])) { - if (this.override) { - obj[k] = {}; - } else { - if (!(isArrayOrObject(v) && isEmptyObject(v))) { - throw new Error( - "Trying to redefine `" + k + "` which is a " + typeof obj[k] - ); - } - return; - } - } - this._fill(a, obj[k], v, mod); - } else { - if (!this.override && isArrayOrObject(obj[k]) && !isEmptyObject(obj[k])) { - if (!(isArrayOrObject(v) && isEmptyObject(v))) { - throw new Error("Trying to redefine non-empty obj['" + k + "']"); - } - return; - } - obj[k] = _process(v, mod); - } - }; - DotObject.prototype.object = function(obj, mods) { - var self2 = this; - Object.keys(obj).forEach(function(k) { - var mod = mods === void 0 ? null : mods[k]; - var ok = parsePath(k, self2.separator).join(self2.separator); - if (ok.indexOf(self2.separator) !== -1) { - self2._fill(ok.split(self2.separator), obj, obj[k], mod); - delete obj[k]; - } else { - obj[k] = _process(obj[k], mod); - } - }); - return obj; - }; - DotObject.prototype.str = function(path2, v, obj, mod) { - var ok = parsePath(path2, this.separator).join(this.separator); - if (path2.indexOf(this.separator) !== -1) { - this._fill(ok.split(this.separator), obj, v, mod); - } else { - obj[path2] = _process(v, mod); - } - return obj; - }; - DotObject.prototype.pick = function(path2, obj, remove, reindexArray) { - var i; - var keys; - var val; - var key; - var cp; - keys = parsePath(path2, this.separator); - for (i = 0; i < keys.length; i++) { - key = parseKey(keys[i], obj); - if (obj && typeof obj === "object" && key in obj) { - if (i === keys.length - 1) { - if (remove) { - val = obj[key]; - if (reindexArray && Array.isArray(obj)) { - obj.splice(key, 1); - } else { - delete obj[key]; - } - if (Array.isArray(obj)) { - cp = keys.slice(0, -1).join("."); - if (this.cleanup.indexOf(cp) === -1) { - this.cleanup.push(cp); - } - } - return val; - } else { - return obj[key]; - } - } else { - obj = obj[key]; - } - } else { - return void 0; - } + __name(this, "ListArtifactsRequest$Type"); } - if (remove && Array.isArray(obj)) { - obj = obj.filter(function(n) { - return n !== void 0; - }); + constructor() { + super("github.actions.results.api.v1.ListArtifactsRequest", [ + { + no: 1, + name: "workflow_run_backend_id", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + }, + { + no: 2, + name: "workflow_job_run_backend_id", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + }, + { no: 3, name: "name_filter", kind: "message", T: /* @__PURE__ */ __name(() => wrappers_2.StringValue, "T") }, + { no: 4, name: "id_filter", kind: "message", T: /* @__PURE__ */ __name(() => wrappers_1.Int64Value, "T") } + ]); } - return obj; - }; - DotObject.prototype.delete = function(path2, obj) { - return this.remove(path2, obj, true); - }; - DotObject.prototype.remove = function(path2, obj, reindexArray) { - var i; - this.cleanup = []; - if (Array.isArray(path2)) { - for (i = 0; i < path2.length; i++) { - this.pick(path2[i], obj, true, reindexArray); - } - if (!reindexArray) { - this._cleanup(obj); - } - return obj; - } else { - return 
this.pick(path2, obj, true, reindexArray); + create(value) { + const message = { workflowRunBackendId: "", workflowJobRunBackendId: "" }; + globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== void 0) + (0, runtime_3.reflectionMergePartial)(this, message, value); + return message; } - }; - DotObject.prototype._cleanup = function(obj) { - var ret; - var i; - var keys; - var root; - if (this.cleanup.length) { - for (i = 0; i < this.cleanup.length; i++) { - keys = this.cleanup[i].split("."); - root = keys.splice(0, -1).join("."); - ret = root ? this.pick(root, obj) : obj; - ret = ret[keys[0]].filter(function(v) { - return v !== void 0; - }); - this.set(this.cleanup[i], ret, obj); + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string workflow_run_backend_id */ + 1: + message.workflowRunBackendId = reader.string(); + break; + case /* string workflow_job_run_backend_id */ + 2: + message.workflowJobRunBackendId = reader.string(); + break; + case /* google.protobuf.StringValue name_filter */ + 3: + message.nameFilter = wrappers_2.StringValue.internalBinaryRead(reader, reader.uint32(), options, message.nameFilter); + break; + case /* google.protobuf.Int64Value id_filter */ + 4: + message.idFilter = wrappers_1.Int64Value.internalBinaryRead(reader, reader.uint32(), options, message.idFilter); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } } - this.cleanup = []; + return message; } - }; - DotObject.prototype.del = DotObject.prototype.remove; - DotObject.prototype.move = function(source, target, obj, mods, merge) { - if (typeof mods === "function" || Array.isArray(mods)) { - this.set(target, _process(this.pick(source, obj, true), mods), obj, merge); - } else { - merge = mods; - this.set(target, this.pick(source, obj, true), obj, merge); + internalBinaryWrite(message, writer, options) { + if (message.workflowRunBackendId !== "") + writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.workflowRunBackendId); + if (message.workflowJobRunBackendId !== "") + writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.workflowJobRunBackendId); + if (message.nameFilter) + wrappers_2.StringValue.internalBinaryWrite(message.nameFilter, writer.tag(3, runtime_1.WireType.LengthDelimited).fork(), options).join(); + if (message.idFilter) + wrappers_1.Int64Value.internalBinaryWrite(message.idFilter, writer.tag(4, runtime_1.WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; } - return obj; }; - DotObject.prototype.transfer = function(source, target, obj1, obj2, mods, merge) { - if (typeof mods === "function" || Array.isArray(mods)) { - this.set( - target, - _process(this.pick(source, obj1, true), mods), - obj2, - merge - ); - } else { - merge = mods; - this.set(target, this.pick(source, obj1, true), obj2, merge); + exports2.ListArtifactsRequest = new ListArtifactsRequest$Type(); + var ListArtifactsResponse$Type = class extends runtime_5.MessageType { + static { + __name(this, "ListArtifactsResponse$Type"); } - return obj2; - }; - DotObject.prototype.copy = function(source, target, obj1, obj2, mods, merge) { - if (typeof mods === "function" || Array.isArray(mods)) { - this.set( - target, - _process( - // clone what is picked - JSON.parse(JSON.stringify(this.pick(source, obj1, false))), - mods - ), - obj2, - merge - ); - } else { - merge = mods; - this.set(target, this.pick(source, obj1, false), obj2, merge); + constructor() { + super("github.actions.results.api.v1.ListArtifactsResponse", [ + { no: 1, name: "artifacts", kind: "message", repeat: 1, T: /* @__PURE__ */ __name(() => exports2.ListArtifactsResponse_MonolithArtifact, "T") } + ]); } - return obj2; - }; - DotObject.prototype.set = function(path2, val, obj, merge) { - var i; - var k; - var keys; - var key; - if (typeof val === "undefined") { - return obj; + create(value) { + const message = { artifacts: [] }; + globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== void 0) + (0, runtime_3.reflectionMergePartial)(this, message, value); + return message; } - keys = parsePath(path2, this.separator); - for (i = 0; i < keys.length; i++) { - key = keys[i]; - if (i === keys.length - 1) { - if (merge && isObject(val) && isObject(obj[key])) { - for (k in val) { - if (hasOwnProperty.call(val, k)) { - obj[key][k] = val[k]; - } - } - } else if (merge && Array.isArray(obj[key]) && Array.isArray(val)) { - for (var j = 0; j < val.length; j++) { - obj[keys[i]].push(val[j]); - } - } else { - obj[key] = val; - } - } else if ( - // force the value to be an object - !hasOwnProperty.call(obj, key) || !isObject(obj[key]) && !Array.isArray(obj[key]) - ) { - if (/^\d+$/.test(keys[i + 1])) { - obj[key] = []; - } else { - obj[key] = {}; + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* repeated github.actions.results.api.v1.ListArtifactsResponse.MonolithArtifact artifacts */ + 1: + message.artifacts.push(exports2.ListArtifactsResponse_MonolithArtifact.internalBinaryRead(reader, reader.uint32(), options)); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
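// For orientation: the generated $Type classes above extend the protobuf-ts
// MessageType, so the exported singletons inherit create()/toBinary()/fromBinary(),
// and the binary pair drives the internalBinaryWrite/internalBinaryRead methods
// shown here. A minimal round-trip sketch, with hypothetical backend IDs:
//
//   const req = ListArtifactsRequest.create({
//     workflowRunBackendId: "run-id",
//     workflowJobRunBackendId: "job-id"
//   });
//   const bytes = ListArtifactsRequest.toBinary(req);      // Uint8Array wire format
//   const back = ListArtifactsRequest.fromBinary(bytes);   // deep-equals req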
runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); } } - obj = obj[key]; + return message; } - return obj; - }; - DotObject.prototype.transform = function(recipe, obj, tgt) { - obj = obj || {}; - tgt = tgt || {}; - Object.keys(recipe).forEach( - function(key) { - this.set(recipe[key], this.pick(key, obj), tgt); - }.bind(this) - ); - return tgt; - }; - DotObject.prototype.dot = function(obj, tgt, path2) { - tgt = tgt || {}; - path2 = path2 || []; - var isArray = Array.isArray(obj); - Object.keys(obj).forEach( - function(key) { - var index = isArray && this.useBrackets ? "[" + key + "]" : key; - if (isArrayOrObject(obj[key]) && (isObject(obj[key]) && !isEmptyObject(obj[key]) || Array.isArray(obj[key]) && !this.keepArray && obj[key].length !== 0)) { - if (isArray && this.useBrackets) { - var previousKey = path2[path2.length - 1] || ""; - return this.dot( - obj[key], - tgt, - path2.slice(0, -1).concat(previousKey + index) - ); - } else { - return this.dot(obj[key], tgt, path2.concat(index)); - } - } else { - if (isArray && this.useBrackets) { - tgt[path2.join(this.separator).concat("[" + key + "]")] = obj[key]; - } else { - tgt[path2.concat(index).join(this.separator)] = obj[key]; - } - } - }.bind(this) - ); - return tgt; - }; - DotObject.pick = wrap("pick"); - DotObject.move = wrap("move"); - DotObject.transfer = wrap("transfer"); - DotObject.transform = wrap("transform"); - DotObject.copy = wrap("copy"); - DotObject.object = wrap("object"); - DotObject.str = wrap("str"); - DotObject.set = wrap("set"); - DotObject.delete = wrap("delete"); - DotObject.del = DotObject.remove = wrap("remove"); - DotObject.dot = wrap("dot"); - ["override", "overwrite"].forEach(function(prop) { - Object.defineProperty(DotObject, prop, { - get: /* @__PURE__ */ __name(function() { - return dotDefault.override; - }, "get"), - set: /* @__PURE__ */ __name(function(val) { - dotDefault.override = !!val; - }, "set") - }); - }); - ["useArray", "keepArray", "useBrackets"].forEach(function(prop) { - Object.defineProperty(DotObject, prop, { - get: /* @__PURE__ */ __name(function() { - return dotDefault[prop]; - }, "get"), - set: /* @__PURE__ */ __name(function(val) { - dotDefault[prop] = val; - }, "set") - }); - }); - DotObject._process = _process; - module2.exports = DotObject; - } -}); - -// ../node_modules/twirp-ts/build/twirp/http.client.js -var require_http_client = __commonJS({ - "../node_modules/twirp-ts/build/twirp/http.client.js"(exports2) { - "use strict"; - var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: /* @__PURE__ */ __name(function() { - return m[k]; - }, "get") }); - } : function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - o[k2] = m[k]; - }); - var __setModuleDefault3 = exports2 && exports2.__setModuleDefault || (Object.create ? 
function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - } : function(o, v) { - o["default"] = v; - }); - var __importStar3 = exports2 && exports2.__importStar || function(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding3(result, mod, k); + internalBinaryWrite(message, writer, options) { + for (let i = 0; i < message.artifacts.length; i++) + exports2.ListArtifactsResponse_MonolithArtifact.internalBinaryWrite(message.artifacts[i], writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; } - __setModuleDefault3(result, mod); - return result; }; - var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? value : new P(function(resolve) { - resolve(value); - }); + exports2.ListArtifactsResponse = new ListArtifactsResponse$Type(); + var ListArtifactsResponse_MonolithArtifact$Type = class extends runtime_5.MessageType { + static { + __name(this, "ListArtifactsResponse_MonolithArtifact$Type"); } - __name(adopt, "adopt"); - return new (P || (P = Promise))(function(resolve, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); - } - } - __name(fulfilled, "fulfilled"); - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); + constructor() { + super("github.actions.results.api.v1.ListArtifactsResponse.MonolithArtifact", [ + { + no: 1, + name: "workflow_run_backend_id", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + }, + { + no: 2, + name: "workflow_job_run_backend_id", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + }, + { + no: 3, + name: "database_id", + kind: "scalar", + T: 3 + /*ScalarType.INT64*/ + }, + { + no: 4, + name: "name", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + }, + { + no: 5, + name: "size", + kind: "scalar", + T: 3 + /*ScalarType.INT64*/ + }, + { no: 6, name: "created_at", kind: "message", T: /* @__PURE__ */ __name(() => timestamp_1.Timestamp, "T") } + ]); + } + create(value) { + const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", databaseId: "0", name: "", size: "0" }; + globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== void 0) + (0, runtime_3.reflectionMergePartial)(this, message, value); + return message; + } + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? 
target : this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string workflow_run_backend_id */ + 1: + message.workflowRunBackendId = reader.string(); + break; + case /* string workflow_job_run_backend_id */ + 2: + message.workflowJobRunBackendId = reader.string(); + break; + case /* int64 database_id */ + 3: + message.databaseId = reader.int64().toString(); + break; + case /* string name */ + 4: + message.name = reader.string(); + break; + case /* int64 size */ + 5: + message.size = reader.int64().toString(); + break; + case /* google.protobuf.Timestamp created_at */ + 6: + message.createdAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.createdAt); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); } } - __name(rejected, "rejected"); - function step(result) { - result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); - } - __name(step, "step"); - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); + return message; + } + internalBinaryWrite(message, writer, options) { + if (message.workflowRunBackendId !== "") + writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.workflowRunBackendId); + if (message.workflowJobRunBackendId !== "") + writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.workflowJobRunBackendId); + if (message.databaseId !== "0") + writer.tag(3, runtime_1.WireType.Varint).int64(message.databaseId); + if (message.name !== "") + writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.name); + if (message.size !== "0") + writer.tag(5, runtime_1.WireType.Varint).int64(message.size); + if (message.createdAt) + timestamp_1.Timestamp.internalBinaryWrite(message.createdAt, writer.tag(6, runtime_1.WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.FetchRPC = exports2.wrapErrorResponseToTwirpError = exports2.NodeHttpRPC = void 0; - var http = __importStar3(require("http")); - var https = __importStar3(require("https")); - var url_1 = require("url"); - var errors_1 = require_errors2(); - var NodeHttpRPC = /* @__PURE__ */ __name((options) => ({ - request(service, method, contentType, data) { - let client; - return new Promise((resolve, rejected) => { - const responseChunks = []; - const requestData = contentType === "application/protobuf" ? 
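// Worth noting: the int64 fields above (database_id, size) are materialised as
// decimal strings, with "0" defaults and reads going through reader.int64().toString(),
// which sidesteps the 2^53 - 1 limit of JavaScript's Number. Callers convert
// explicitly when they need arithmetic, e.g.:
//
//   const artifact = ListArtifactsResponse_MonolithArtifact.create({
//     name: "logs",                 // hypothetical artifact
//     size: "9007199254740993"      // > Number.MAX_SAFE_INTEGER
//   });
//   const size = BigInt(artifact.size);  // 9007199254740993n, exact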
Buffer.from(data) : JSON.stringify(data); - const url = new url_1.URL(options.baseUrl); - const isHttps = url.protocol === "https:"; - if (isHttps) { - client = https; - } else { - client = http; + exports2.ListArtifactsResponse_MonolithArtifact = new ListArtifactsResponse_MonolithArtifact$Type(); + var GetSignedArtifactURLRequest$Type = class extends runtime_5.MessageType { + static { + __name(this, "GetSignedArtifactURLRequest$Type"); + } + constructor() { + super("github.actions.results.api.v1.GetSignedArtifactURLRequest", [ + { + no: 1, + name: "workflow_run_backend_id", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + }, + { + no: 2, + name: "workflow_job_run_backend_id", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + }, + { + no: 3, + name: "name", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ } - const prefix = url.pathname !== "/" ? url.pathname : ""; - const req = client.request(Object.assign(Object.assign({}, options ? options : {}), { method: "POST", protocol: url.protocol, host: url.hostname, port: url.port ? url.port : isHttps ? 443 : 80, path: `${prefix}/${service}/${method}`, headers: Object.assign(Object.assign({}, options.headers ? options.headers : {}), { "Content-Type": contentType, "Content-Length": contentType === "application/protobuf" ? Buffer.byteLength(requestData) : Buffer.from(requestData).byteLength }) }), (res) => { - res.on("data", (chunk) => responseChunks.push(chunk)); - res.on("end", () => { - const data2 = Buffer.concat(responseChunks); - if (res.statusCode != 200) { - rejected(wrapErrorResponseToTwirpError(data2.toString())); - } else { - if (contentType === "application/json") { - resolve(JSON.parse(data2.toString())); - } else { - resolve(data2); - } - } - }); - res.on("error", (err) => { - rejected(err); - }); - }).on("error", (err) => { - rejected(err); - }); - req.end(requestData); - }); + ]); } - }), "NodeHttpRPC"); - exports2.NodeHttpRPC = NodeHttpRPC; - function wrapErrorResponseToTwirpError(errorResponse) { - return errors_1.TwirpError.fromObject(JSON.parse(errorResponse)); - } - __name(wrapErrorResponseToTwirpError, "wrapErrorResponseToTwirpError"); - exports2.wrapErrorResponseToTwirpError = wrapErrorResponseToTwirpError; - var FetchRPC = /* @__PURE__ */ __name((options) => ({ - request(service, method, contentType, data) { - return __awaiter3(this, void 0, void 0, function* () { - const headers = new Headers(options.headers); - headers.set("content-type", contentType); - const response = yield fetch(`${options.baseUrl}/${service}/${method}`, Object.assign(Object.assign({}, options), { method: "POST", headers, body: data instanceof Uint8Array ? data : JSON.stringify(data) })); - if (response.status === 200) { - if (contentType === "application/json") { - return yield response.json(); - } - return new Uint8Array(yield response.arrayBuffer()); + create(value) { + const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", name: "" }; + globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== void 0) + (0, runtime_3.reflectionMergePartial)(this, message, value); + return message; + } + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? 
target : this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string workflow_run_backend_id */ + 1: + message.workflowRunBackendId = reader.string(); + break; + case /* string workflow_job_run_backend_id */ + 2: + message.workflowJobRunBackendId = reader.string(); + break; + case /* string name */ + 3: + message.name = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); } - throw errors_1.TwirpError.fromObject(yield response.json()); - }); + } + return message; } - }), "FetchRPC"); - exports2.FetchRPC = FetchRPC; - } -}); - -// ../node_modules/twirp-ts/build/twirp/gateway.js -var require_gateway = __commonJS({ - "../node_modules/twirp-ts/build/twirp/gateway.js"(exports2) { - "use strict"; - var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: /* @__PURE__ */ __name(function() { - return m[k]; - }, "get") }); - } : function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - o[k2] = m[k]; - }); - var __setModuleDefault3 = exports2 && exports2.__setModuleDefault || (Object.create ? function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - } : function(o, v) { - o["default"] = v; - }); - var __importStar3 = exports2 && exports2.__importStar || function(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding3(result, mod, k); + internalBinaryWrite(message, writer, options) { + if (message.workflowRunBackendId !== "") + writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.workflowRunBackendId); + if (message.workflowJobRunBackendId !== "") + writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.workflowJobRunBackendId); + if (message.name !== "") + writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.name); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; } - __setModuleDefault3(result, mod); - return result; }; - var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? 
value : new P(function(resolve) { - resolve(value); - }); + exports2.GetSignedArtifactURLRequest = new GetSignedArtifactURLRequest$Type(); + var GetSignedArtifactURLResponse$Type = class extends runtime_5.MessageType { + static { + __name(this, "GetSignedArtifactURLResponse$Type"); } - __name(adopt, "adopt"); - return new (P || (P = Promise))(function(resolve, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); + constructor() { + super("github.actions.results.api.v1.GetSignedArtifactURLResponse", [ + { + no: 1, + name: "signed_url", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ } - } - __name(fulfilled, "fulfilled"); - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); + ]); + } + create(value) { + const message = { signedUrl: "" }; + globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== void 0) + (0, runtime_3.reflectionMergePartial)(this, message, value); + return message; + } + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string signed_url */ + 1: + message.signedUrl = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); } } - __name(rejected, "rejected"); - function step(result) { - result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); - } - __name(step, "step"); - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); - }; - var __rest2 = exports2 && exports2.__rest || function(s, e) { - var t = {}; - for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) - t[p] = s[p]; - if (s != null && typeof Object.getOwnPropertySymbols === "function") - for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { - if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) - t[p[i]] = s[p[i]]; - } - return t; + return message; + } + internalBinaryWrite(message, writer, options) { + if (message.signedUrl !== "") + writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.signedUrl); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.Gateway = exports2.Pattern = void 0; - var querystring_1 = require("querystring"); - var dotObject = __importStar3(require_dot_object()); - var request_1 = require_request3(); - var errors_1 = require_errors2(); - var http_client_1 = require_http_client(); - var server_1 = require_server(); - var Pattern; - (function(Pattern2) { - Pattern2["POST"] = "post"; - Pattern2["GET"] = "get"; - Pattern2["PATCH"] = "patch"; - Pattern2["PUT"] = "put"; - Pattern2["DELETE"] = "delete"; - })(Pattern = exports2.Pattern || (exports2.Pattern = {})); - var Gateway = class { + exports2.GetSignedArtifactURLResponse = new GetSignedArtifactURLResponse$Type(); + var DeleteArtifactRequest$Type = class extends runtime_5.MessageType { static { - __name(this, "Gateway"); + __name(this, "DeleteArtifactRequest$Type"); } - constructor(routes) { - this.routes = routes; + constructor() { + super("github.actions.results.api.v1.DeleteArtifactRequest", [ + { + no: 1, + name: "workflow_run_backend_id", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + }, + { + no: 2, + name: "workflow_job_run_backend_id", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + }, + { + no: 3, + name: "name", + kind: "scalar", + T: 9 + /*ScalarType.STRING*/ + } + ]); } - /** - * Middleware that rewrite the current request - * to a Twirp compliant request - */ - twirpRewrite(prefix = "/twirp") { - return (req, resp, next) => { - this.rewrite(req, resp, prefix).then(() => next()).catch((e) => { - if (e instanceof errors_1.TwirpError) { - if (e.code !== errors_1.TwirpErrorCode.NotFound) { - server_1.writeError(resp, e); - } else { - next(); - } - } - }); - }; + create(value) { + const message = { workflowRunBackendId: "", workflowJobRunBackendId: "", name: "" }; + globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== void 0) + (0, runtime_3.reflectionMergePartial)(this, message, value); + return message; } - /** - * Rewrite an incoming request to a Twirp compliant request - * @param req - * @param resp - * @param prefix - */ - rewrite(req, resp, prefix = "/twirp") { - return __awaiter3(this, void 0, void 0, function* () { - const [match, route] = this.matchRoute(req); - const body = yield this.prepareTwirpBody(req, match, route); - const twirpUrl = `${prefix}/${route.packageName}.${route.serviceName}/${route.methodName}`; - req.url = twirpUrl; - req.originalUrl = twirpUrl; - req.method = "POST"; - req.headers["content-type"] = "application/json"; - req.rawBody = Buffer.from(JSON.stringify(body)); - if (route.responseBodyKey) { - const endFn = resp.end.bind(resp); - resp.end = function(chunk) { - if (resp.statusCode === 200) { - endFn(`{ "${route.responseBodyKey}": ${chunk} }`); - } else { - endFn(chunk); - } - }; + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? 
target : this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string workflow_run_backend_id */ + 1: + message.workflowRunBackendId = reader.string(); + break; + case /* string workflow_job_run_backend_id */ + 2: + message.workflowJobRunBackendId = reader.string(); + break; + case /* string name */ + 3: + message.name = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); } - }); + } + return message; } - /** - * Create a reverse proxy handler to - * proxy http requests to Twirp Compliant handlers - * @param httpClientOption - */ - reverseProxy(httpClientOption) { - const client = http_client_1.NodeHttpRPC(httpClientOption); - return (req, res) => __awaiter3(this, void 0, void 0, function* () { - try { - const [match, route] = this.matchRoute(req); - const body = yield this.prepareTwirpBody(req, match, route); - const response = yield client.request(`${route.packageName}.${route.serviceName}`, route.methodName, "application/json", body); - res.statusCode = 200; - res.setHeader("content-type", "application/json"); - let jsonResponse; - if (route.responseBodyKey) { - jsonResponse = JSON.stringify({ [route.responseBodyKey]: response }); - } else { - jsonResponse = JSON.stringify(response); - } - res.end(jsonResponse); - } catch (e) { - server_1.writeError(res, e); - } - }); + internalBinaryWrite(message, writer, options) { + if (message.workflowRunBackendId !== "") + writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.workflowRunBackendId); + if (message.workflowJobRunBackendId !== "") + writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.workflowJobRunBackendId); + if (message.name !== "") + writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.name); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; } - /** - * Prepares twirp body requests using http.google.annotions - * compliant spec - * - * @param req - * @param match - * @param route - * @protected - */ - prepareTwirpBody(req, match, route) { - return __awaiter3(this, void 0, void 0, function* () { - const _a = match.params, { query_string } = _a, params = __rest2(_a, ["query_string"]); - let requestBody = Object.assign({}, params); - if (query_string && route.bodyKey !== "*") { - const queryParams = this.parseQueryString(query_string); - requestBody = Object.assign(Object.assign({}, queryParams), requestBody); - } - let body = {}; - if (route.bodyKey) { - const data = yield request_1.getRequestData(req); - try { - const jsonBody = JSON.parse(data.toString() || "{}"); - if (route.bodyKey === "*") { - body = jsonBody; - } else { - body[route.bodyKey] = jsonBody; - } - } catch (e) { - const msg = "the json request could not be decoded"; - throw new errors_1.TwirpError(errors_1.TwirpErrorCode.Malformed, msg).withCause(e, true); - } + }; + exports2.DeleteArtifactRequest = new DeleteArtifactRequest$Type(); + var DeleteArtifactResponse$Type = class extends runtime_5.MessageType { + static { + __name(this, "DeleteArtifactResponse$Type"); + } + constructor() { + super("github.actions.results.api.v1.DeleteArtifactResponse", [ + { + no: 1, + name: "ok", + kind: "scalar", + T: 8 + /*ScalarType.BOOL*/ + }, + { + no: 2, + name: "artifact_id", + kind: "scalar", + T: 3 + /*ScalarType.INT64*/ } - return Object.assign(Object.assign({}, body), requestBody); - }); + ]); } - /** - * Matches a route - * @param req - */ - matchRoute(req) { - var _a; - const httpMethod = (_a = req.method) === null || _a === void 0 ? void 0 : _a.toLowerCase(); - if (!httpMethod) { - throw new errors_1.BadRouteError(`method not allowed`, req.method || "", req.url || ""); - } - const routes = this.routes[httpMethod]; - for (const route of routes) { - const match = route.matcher(req.url || "/"); - if (match) { - return [match, route]; + create(value) { + const message = { ok: false, artifactId: "0" }; + globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== void 0) + (0, runtime_3.reflectionMergePartial)(this, message, value); + return message; + } + internalBinaryRead(reader, length, options, target) { + let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* bool ok */ + 1: + message.ok = reader.bool(); + break; + case /* int64 artifact_id */ + 2: + message.artifactId = reader.int64().toString(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
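// For orientation: the ArtifactService definition just below maps each method onto
// the standard Twirp route "POST {baseUrl}/{package.Service}/{Method}", which is
// exactly the URL shape the NodeHttpRPC/FetchRPC transports earlier in this file
// construct. A minimal sketch of the JSON flavour (baseUrl and body values are
// hypothetical, and field naming follows the proto definitions above):
//
//   const res = await fetch(
//     `${baseUrl}/github.actions.results.api.v1.ArtifactService/ListArtifacts`,
//     {
//       method: "POST",
//       headers: { "content-type": "application/json" },
//       body: JSON.stringify({ workflow_run_backend_id: "run-id" })
//     }
//   );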
runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); } } - throw new errors_1.NotFoundError(`url ${req.url} not found`); + return message; } - /** - * Parse query string - * @param queryString - */ - parseQueryString(queryString) { - const queryParams = querystring_1.parse(queryString.replace("?", "")); - return dotObject.object(queryParams); + internalBinaryWrite(message, writer, options) { + if (message.ok !== false) + writer.tag(1, runtime_1.WireType.Varint).bool(message.ok); + if (message.artifactId !== "0") + writer.tag(2, runtime_1.WireType.Varint).int64(message.artifactId); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; } }; - exports2.Gateway = Gateway; - } -}); - -// ../node_modules/twirp-ts/build/twirp/index.js -var require_twirp = __commonJS({ - "../node_modules/twirp-ts/build/twirp/index.js"(exports2) { - "use strict"; - var __createBinding3 = exports2 && exports2.__createBinding || (Object.create ? function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: /* @__PURE__ */ __name(function() { - return m[k]; - }, "get") }); - } : function(o, m, k, k2) { - if (k2 === void 0) k2 = k; - o[k2] = m[k]; - }); - var __exportStar2 = exports2 && exports2.__exportStar || function(m, exports3) { - for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports3, p)) __createBinding3(exports3, m, p); - }; - Object.defineProperty(exports2, "__esModule", { value: true }); - exports2.TwirpContentType = void 0; - __exportStar2(require_context2(), exports2); - __exportStar2(require_server(), exports2); - __exportStar2(require_interceptors(), exports2); - __exportStar2(require_hooks(), exports2); - __exportStar2(require_errors2(), exports2); - __exportStar2(require_gateway(), exports2); - __exportStar2(require_http_client(), exports2); - var request_1 = require_request3(); - Object.defineProperty(exports2, "TwirpContentType", { enumerable: true, get: /* @__PURE__ */ __name(function() { - return request_1.TwirpContentType; - }, "get") }); + exports2.DeleteArtifactResponse = new DeleteArtifactResponse$Type(); + exports2.ArtifactService = new runtime_rpc_1.ServiceType("github.actions.results.api.v1.ArtifactService", [ + { name: "CreateArtifact", options: {}, I: exports2.CreateArtifactRequest, O: exports2.CreateArtifactResponse }, + { name: "FinalizeArtifact", options: {}, I: exports2.FinalizeArtifactRequest, O: exports2.FinalizeArtifactResponse }, + { name: "ListArtifacts", options: {}, I: exports2.ListArtifactsRequest, O: exports2.ListArtifactsResponse }, + { name: "GetSignedArtifactURL", options: {}, I: exports2.GetSignedArtifactURLRequest, O: exports2.GetSignedArtifactURLResponse }, + { name: "DeleteArtifact", options: {}, I: exports2.DeleteArtifactRequest, O: exports2.DeleteArtifactResponse } + ]); } }); @@ -101519,7 +103937,7 @@ var require_generated = __commonJS({ for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports3, p)) __createBinding3(exports3, m, p); }; Object.defineProperty(exports2, "__esModule", { value: true }); - __exportStar2(require_timestamp(), exports2); + __exportStar2(require_timestamp2(), exports2); __exportStar2(require_wrappers(), exports2); __exportStar2(require_artifact(), exports2); __exportStar2(require_artifact_twirp(), exports2); @@ -101652,7 +104070,7 @@ The following characters are not allowed in files that are 
uploaded due to limit }); // ../node_modules/@actions/artifact/package.json -var require_package = __commonJS({ +var require_package2 = __commonJS({ "../node_modules/@actions/artifact/package.json"(exports2, module2) { module2.exports = { name: "@actions/artifact", @@ -101722,12 +104140,12 @@ var require_package = __commonJS({ }); // ../node_modules/@actions/artifact/lib/internal/shared/user-agent.js -var require_user_agent = __commonJS({ +var require_user_agent2 = __commonJS({ "../node_modules/@actions/artifact/lib/internal/shared/user-agent.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getUserAgentString = void 0; - var packageJson = require_package(); + var packageJson = require_package2(); function getUserAgentString() { return `@actions/artifact-${packageJson.version}`; } @@ -101737,7 +104155,7 @@ var require_user_agent = __commonJS({ }); // ../node_modules/@actions/artifact/lib/internal/shared/errors.js -var require_errors3 = __commonJS({ +var require_errors4 = __commonJS({ "../node_modules/@actions/artifact/lib/internal/shared/errors.js"(exports2) { "use strict"; Object.defineProperty(exports2, "__esModule", { value: true }); @@ -101872,9 +104290,9 @@ var require_artifact_twirp_client = __commonJS({ var auth_1 = require_auth(); var core_1 = require_core(); var generated_1 = require_generated(); - var config_1 = require_config(); - var user_agent_1 = require_user_agent(); - var errors_1 = require_errors3(); + var config_1 = require_config2(); + var user_agent_1 = require_user_agent2(); + var errors_1 = require_errors4(); var ArtifactHttpClient = class { static { __name(this, "ArtifactHttpClient"); @@ -102198,7 +104616,7 @@ var require_util9 = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.getBackendIdsFromToken = void 0; var core2 = __importStar3(require_core()); - var config_1 = require_config(); + var config_1 = require_config2(); var jwt_decode_1 = __importDefault2(require_jwt_decode_cjs()); var InvalidJwtError = new Error("Failed to get backend IDs: The provided JWT token is invalid and/or missing claims"); function getBackendIdsFromToken() { @@ -102299,11 +104717,11 @@ var require_blob_upload = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.uploadZipToBlobStorage = void 0; var storage_blob_1 = require_dist4(); - var config_1 = require_config(); + var config_1 = require_config2(); var core2 = __importStar3(require_core()); - var crypto7 = __importStar3(require("crypto")); + var crypto4 = __importStar3(require("crypto")); var stream = __importStar3(require("stream")); - var errors_1 = require_errors3(); + var errors_1 = require_errors4(); function uploadZipToBlobStorage(authenticatedUploadURL, zipUploadStream) { return __awaiter3(this, void 0, void 0, function* () { let uploadByteCount = 0; @@ -102339,7 +104757,7 @@ var require_blob_upload = __commonJS({ }; let sha256Hash = void 0; const uploadStream = new stream.PassThrough(); - const hashStream = crypto7.createHash("sha256"); + const hashStream = crypto4.createHash("sha256"); zipUploadStream.pipe(uploadStream); zipUploadStream.pipe(hashStream).setEncoding("hex"); core2.info("Beginning upload of artifact content to blob storage"); @@ -109920,7 +112338,7 @@ var require_util10 = __commonJS({ }); // ../node_modules/readable-stream/lib/ours/errors.js -var require_errors4 = __commonJS({ +var require_errors5 = __commonJS({ "../node_modules/readable-stream/lib/ours/errors.js"(exports2, module2) { "use 
strict"; var { format, inspect, AggregateError: CustomAggregateError } = require_util10(); @@ -110270,7 +112688,7 @@ var require_validators = __commonJS({ var { hideStackFrames, codes: { ERR_SOCKET_BAD_PORT, ERR_INVALID_ARG_TYPE: ERR_INVALID_ARG_TYPE2, ERR_INVALID_ARG_VALUE, ERR_OUT_OF_RANGE, ERR_UNKNOWN_SIGNAL } - } = require_errors4(); + } = require_errors5(); var { normalizeEncoding } = require_util10(); var { isAsyncFunction, isArrayBufferView } = require_util10().types; var signals = {}; @@ -110773,7 +113191,7 @@ var require_utils7 = __commonJS({ var require_end_of_stream = __commonJS({ "../node_modules/readable-stream/lib/internal/streams/end-of-stream.js"(exports2, module2) { var process2 = require_process(); - var { AbortError, codes } = require_errors4(); + var { AbortError, codes } = require_errors5(); var { ERR_INVALID_ARG_TYPE: ERR_INVALID_ARG_TYPE2, ERR_STREAM_PREMATURE_CLOSE } = codes; var { kEmptyObject, once } = require_util10(); var { validateAbortSignal, validateFunction, validateObject, validateBoolean } = require_validators(); @@ -111033,7 +113451,7 @@ var require_destroy3 = __commonJS({ aggregateTwoErrors, codes: { ERR_MULTIPLE_CALLBACK }, AbortError - } = require_errors4(); + } = require_errors5(); var { Symbol: Symbol2 } = require_primordials(); var { kIsDestroyed, isDestroyed, isFinished, isServerRequest } = require_utils7(); var kDestroy = Symbol2("kDestroy"); @@ -111397,7 +113815,7 @@ var require_add_abort_signal = __commonJS({ "../node_modules/readable-stream/lib/internal/streams/add-abort-signal.js"(exports2, module2) { "use strict"; var { SymbolDispose } = require_primordials(); - var { AbortError, codes } = require_errors4(); + var { AbortError, codes } = require_errors5(); var { isNodeStream, isWebStream, kControllerErrorFunction } = require_utils7(); var eos = require_end_of_stream(); var { ERR_INVALID_ARG_TYPE: ERR_INVALID_ARG_TYPE2 } = codes; @@ -111607,7 +114025,7 @@ var require_state2 = __commonJS({ "use strict"; var { MathFloor, NumberIsInteger } = require_primordials(); var { validateInteger } = require_validators(); - var { ERR_INVALID_ARG_VALUE } = require_errors4().codes; + var { ERR_INVALID_ARG_VALUE } = require_errors5().codes; var defaultHighWaterMarkBytes = 16 * 1024; var defaultHighWaterMarkObjectMode = 16; function highWaterMarkFrom(options, isDuplex, duplexKey) { @@ -111654,7 +114072,7 @@ var require_from = __commonJS({ var process2 = require_process(); var { PromisePrototypeThen, SymbolAsyncIterator, SymbolIterator } = require_primordials(); var { Buffer: Buffer2 } = require("buffer"); - var { ERR_INVALID_ARG_TYPE: ERR_INVALID_ARG_TYPE2, ERR_STREAM_NULL_VALUES } = require_errors4().codes; + var { ERR_INVALID_ARG_TYPE: ERR_INVALID_ARG_TYPE2, ERR_STREAM_NULL_VALUES } = require_errors5().codes; function from(Readable, iterable, opts) { let iterator; if (typeof iterable === "string" || iterable instanceof Buffer2) { @@ -111786,7 +114204,7 @@ var require_readable4 = __commonJS({ ERR_STREAM_UNSHIFT_AFTER_END_EVENT }, AbortError - } = require_errors4(); + } = require_errors5(); var { validateObject } = require_validators(); var kPaused = Symbol2("kPaused"); var { StringDecoder } = require("string_decoder"); @@ -112778,7 +115196,7 @@ var require_writable = __commonJS({ ERR_STREAM_NULL_VALUES, ERR_STREAM_WRITE_AFTER_END, ERR_UNKNOWN_ENCODING - } = require_errors4().codes; + } = require_errors5().codes; var { errorOrDestroy } = destroyImpl; ObjectSetPrototypeOf(Writable.prototype, Stream.prototype); ObjectSetPrototypeOf(Writable, Stream); @@ -113405,7 
+115823,7 @@ var require_duplexify = __commonJS({ var { AbortError, codes: { ERR_INVALID_ARG_TYPE: ERR_INVALID_ARG_TYPE2, ERR_INVALID_RETURN_VALUE } - } = require_errors4(); + } = require_errors5(); var { destroyer } = require_destroy3(); var Duplex = require_duplex(); var Readable = require_readable4(); @@ -113863,7 +116281,7 @@ var require_transform = __commonJS({ "use strict"; var { ObjectSetPrototypeOf, Symbol: Symbol2 } = require_primordials(); module2.exports = Transform; - var { ERR_METHOD_NOT_IMPLEMENTED } = require_errors4().codes; + var { ERR_METHOD_NOT_IMPLEMENTED } = require_errors5().codes; var Duplex = require_duplex(); var { getHighWaterMark } = require_state2(); ObjectSetPrototypeOf(Transform.prototype, Duplex.prototype); @@ -114001,7 +116419,7 @@ var require_pipeline = __commonJS({ ERR_STREAM_PREMATURE_CLOSE }, AbortError - } = require_errors4(); + } = require_errors5(); var { validateFunction, validateAbortSignal } = require_validators(); var { isIterable, @@ -114443,7 +116861,7 @@ var require_compose = __commonJS({ var { AbortError, codes: { ERR_INVALID_ARG_VALUE, ERR_MISSING_ARGS } - } = require_errors4(); + } = require_errors5(); var eos = require_end_of_stream(); module2.exports = /* @__PURE__ */ __name(function compose(...streams) { if (streams.length === 0) { @@ -114626,7 +117044,7 @@ var require_operators = __commonJS({ var { codes: { ERR_INVALID_ARG_VALUE, ERR_INVALID_ARG_TYPE: ERR_INVALID_ARG_TYPE2, ERR_MISSING_ARGS, ERR_OUT_OF_RANGE }, AbortError - } = require_errors4(); + } = require_errors5(); var { validateAbortSignal, validateInteger, validateObject } = require_validators(); var kWeakHandler = require_primordials().Symbol("kWeak"); var kResistStopPropagation = require_primordials().Symbol("kResistStopPropagation"); @@ -115100,7 +117518,7 @@ var require_stream3 = __commonJS({ var { streamReturningOperators, promiseReturningOperators } = require_operators(); var { codes: { ERR_ILLEGAL_CONSTRUCTOR } - } = require_errors4(); + } = require_errors5(); var compose = require_compose(); var { setDefaultHighWaterMark, getDefaultHighWaterMark } = require_state2(); var { pipeline } = require_pipeline(); @@ -127584,7 +130002,7 @@ var require_zip2 = __commonJS({ var promises_1 = require("fs/promises"); var archiver = __importStar3(require_archiver()); var core2 = __importStar3(require_core()); - var config_1 = require_config(); + var config_1 = require_config2(); exports2.DEFAULT_COMPRESSION_LEVEL = 6; var ZipUploadStream = class extends stream.Transform { static { @@ -127732,7 +130150,7 @@ var require_upload_artifact = __commonJS({ var blob_upload_1 = require_blob_upload(); var zip_1 = require_zip2(); var generated_1 = require_generated(); - var errors_1 = require_errors3(); + var errors_1 = require_errors4(); function uploadArtifact(name, files, rootDirectory, options) { return __awaiter3(this, void 0, void 0, function* () { (0, path_and_artifact_name_validation_1.validateArtifactName)(name); @@ -128324,7 +130742,7 @@ var require_dist_node12 = __commonJS({ }); } __name(expand, "expand"); - function parse3(options) { + function parse2(options) { let method = options.method.toUpperCase(); let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}"); let headers = Object.assign({}, options.headers); @@ -128379,9 +130797,9 @@ var require_dist_node12 = __commonJS({ request: options.request } : null); } - __name(parse3, "parse"); + __name(parse2, "parse"); function endpointWithDefaults(defaults, route, options) { - return parse3(merge(defaults, route, options)); + return 
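// Housekeeping context: the require_errors4 -> require_errors5 churn in these hunks
// (and likewise parse3 -> parse2, crypto7 -> crypto4, version4 -> version3) looks like
// the bundler's collision numbering rather than a behaviour change. The __commonJS/
// __name helpers suggest an esbuild-style bundle, which suffixes each lazy module
// wrapper and local binding to keep duplicated names distinct; the dependency update
// shifted that numbering. Schematically, two same-named modules coexist as:
//
//   var require_errors4 = __commonJS({ ".../artifact/lib/internal/shared/errors.js"(exports2) { /* copy A */ } });
//   var require_errors5 = __commonJS({ ".../readable-stream/lib/ours/errors.js"(exports2, module2) { /* copy B */ } });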
parse2(merge(defaults, route, options)); } __name(endpointWithDefaults, "endpointWithDefaults"); function withDefaults(oldDefaults, newDefaults) { @@ -128391,7 +130809,7 @@ var require_dist_node12 = __commonJS({ DEFAULTS: DEFAULTS2, defaults: withDefaults.bind(null, DEFAULTS2), merge: merge.bind(null, DEFAULTS2), - parse: parse3 + parse: parse2 }); } __name(withDefaults, "withDefaults"); @@ -131109,7 +133527,7 @@ var require_binary = __commonJS({ }); return stream; }; - exports2.parse = /* @__PURE__ */ __name(function parse3(buffer) { + exports2.parse = /* @__PURE__ */ __name(function parse2(buffer) { var self2 = words(function(bytes, cb) { return function(name) { if (offset + bytes <= buffer.length) { @@ -132249,12 +134667,12 @@ var require_download_artifact = __commonJS({ var core2 = __importStar3(require_core()); var httpClient = __importStar3(require_lib()); var unzip_stream_1 = __importDefault2(require_unzip()); - var user_agent_1 = require_user_agent(); - var config_1 = require_config(); + var user_agent_1 = require_user_agent2(); + var config_1 = require_config2(); var artifact_twirp_client_1 = require_artifact_twirp_client(); var generated_1 = require_generated(); var util_1 = require_util9(); - var errors_1 = require_errors3(); + var errors_1 = require_errors4(); var scrubQueryParameters = /* @__PURE__ */ __name((url) => { const parsed = new URL(url); parsed.search = ""; @@ -133216,12 +135634,12 @@ var require_light = __commonJS({ } }; var Sync_1 = Sync; - var version4 = "2.19.5"; + var version3 = "2.19.5"; var version$1 = { - version: version4 + version: version3 }; var version$2 = /* @__PURE__ */ Object.freeze({ - version: version4, + version: version3, default: version$1 }); var require$$2 = /* @__PURE__ */ __name(() => console.log("You must import the full version of Bottleneck in order to use this feature."), "require$$2"); @@ -133981,10 +136399,10 @@ var require_get_artifact = __commonJS({ var retry_options_1 = require_retry_options(); var plugin_request_log_1 = require_dist_node20(); var util_1 = require_util9(); - var user_agent_1 = require_user_agent(); + var user_agent_1 = require_user_agent2(); var artifact_twirp_client_1 = require_artifact_twirp_client(); var generated_1 = require_generated(); - var errors_1 = require_errors3(); + var errors_1 = require_errors4(); function getArtifactPublic(artifactName, workflowRunId, repositoryOwner, repositoryName, token) { var _a; return __awaiter3(this, void 0, void 0, function* () { @@ -134102,7 +136520,7 @@ var require_delete_artifact = __commonJS({ exports2.deleteArtifactInternal = exports2.deleteArtifactPublic = void 0; var core_1 = require_core(); var github_1 = require_github2(); - var user_agent_1 = require_user_agent(); + var user_agent_1 = require_user_agent2(); var retry_options_1 = require_retry_options(); var utils_12 = require_utils9(); var plugin_request_log_1 = require_dist_node20(); @@ -134111,7 +136529,7 @@ var require_delete_artifact = __commonJS({ var util_1 = require_util9(); var generated_1 = require_generated(); var get_artifact_1 = require_get_artifact(); - var errors_1 = require_errors3(); + var errors_1 = require_errors4(); function deleteArtifactPublic(artifactName, workflowRunId, repositoryOwner, repositoryName, token) { var _a; return __awaiter3(this, void 0, void 0, function* () { @@ -134214,7 +136632,7 @@ var require_list_artifacts = __commonJS({ exports2.listArtifactsInternal = exports2.listArtifactsPublic = void 0; var core_1 = require_core(); var github_1 = require_github2(); - var user_agent_1 = 
require_user_agent(); + var user_agent_1 = require_user_agent2(); var retry_options_1 = require_retry_options(); var utils_12 = require_utils9(); var plugin_request_log_1 = require_dist_node20(); @@ -134381,13 +136799,13 @@ var require_client2 = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); exports2.DefaultArtifactClient = void 0; var core_1 = require_core(); - var config_1 = require_config(); + var config_1 = require_config2(); var upload_artifact_1 = require_upload_artifact(); var download_artifact_1 = require_download_artifact(); var delete_artifact_1 = require_delete_artifact(); var get_artifact_1 = require_get_artifact(); var list_artifacts_1 = require_list_artifacts(); - var errors_1 = require_errors3(); + var errors_1 = require_errors4(); var DefaultArtifactClient = class { static { __name(this, "DefaultArtifactClient"); @@ -134529,7 +136947,7 @@ var require_artifact2 = __commonJS({ Object.defineProperty(exports2, "__esModule", { value: true }); var client_1 = require_client2(); __exportStar2(require_interfaces(), exports2); - __exportStar2(require_errors3(), exports2); + __exportStar2(require_errors4(), exports2); __exportStar2(require_client2(), exports2); var client = new client_1.DefaultArtifactClient(); exports2.default = client; @@ -134558,15 +136976,25 @@ var require_output = __commonJS({ } : function(o, v) { o["default"] = v; }); - var __importStar3 = exports2 && exports2.__importStar || function(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding3(result, mod, k); - } - __setModuleDefault3(result, mod); - return result; - }; + var __importStar3 = exports2 && exports2.__importStar || /* @__PURE__ */ function() { + var ownKeys = /* @__PURE__ */ __name(function(o) { + ownKeys = Object.getOwnPropertyNames || function(o2) { + var ar = []; + for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }, "ownKeys"); + return function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding3(result, mod, k[i]); + } + __setModuleDefault3(result, mod); + return result; + }; + }(); var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? 
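// For orientation: __importStar emulates `import * as ns from "mod"` over CommonJS.
// ES-module-shaped exports pass through untouched; anything else has its properties
// copied onto a fresh namespace object whose `default` is the original module. The
// rewritten helper above snapshots keys via Object.getOwnPropertyNames (matching
// newer TypeScript helper emit) instead of a for..in walk. Behaviour sketch:
//
//   const ns = __importStar3(require("fs"));
//   ns.readFileSync;                // copied through from fs.readFileSync
//   ns.default === require("fs");   // true for a plain CommonJS module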
value : new P(function(resolve) { @@ -134644,14 +137072,14 @@ ${message} \`\`\``; } __name(wrapToDiffBlock, "wrapToDiffBlock"); - function getCoverageStats(c, threshold) { + function getCoverageStats(c) { if (c.totalLines === 0 && c.totalCoveredLines === 0) { return ""; } let stats = ""; if (c.totalLines !== 0) { let conclusion = `${c.totalCoverage}% total lines covered`; - if (c.totalCoverage < threshold) { + if (c.totalCoverage < c.totalCoverageThreshold) { conclusion = `- ${conclusion}`; } else { conclusion = `+ ${conclusion}`; @@ -134672,18 +137100,18 @@ ${c.freshLines} lines analyzed, ${c.freshCoveredLines} lines covered`; } __name(getCoverageStats, "getCoverageStats"); function getReportURL(resultsDir) { + var _a; let reportUrlFile = `${resultsDir}/${qodana_12.QODANA_OPEN_IN_IDE_NAME}`; if (fs2.existsSync(reportUrlFile)) { - const data = JSON.parse(fs2.readFileSync(reportUrlFile, { encoding: "utf8" })); - if (data && data.cloud && data.cloud.url) { + const rawData = fs2.readFileSync(reportUrlFile, { encoding: "utf8" }); + const data = JSON.parse(rawData); + if ((_a = data === null || data === void 0 ? void 0 : data.cloud) === null || _a === void 0 ? void 0 : _a.url) { return data.cloud.url; } } else { reportUrlFile = `${resultsDir}/${qodana_12.QODANA_REPORT_URL_NAME}`; if (fs2.existsSync(reportUrlFile)) { - return fs2.readFileSync(`${resultsDir}/${qodana_12.QODANA_REPORT_URL_NAME}`, { - encoding: "utf8" - }); + return fs2.readFileSync(reportUrlFile, { encoding: "utf8" }); } } return ""; @@ -134698,7 +137126,7 @@ ${c.freshLines} lines analyzed, ${c.freshCoveredLines} lines covered`; try { const problems = (0, annotations_1.parseSarif)(`${resultsDir}/${qodana_12.QODANA_SARIF_NAME}`, projectDir); const reportUrl = getReportURL(resultsDir); - const coverageInfo = getCoverageStats((0, qodana_12.getCoverageFromSarif)(`${resultsDir}/${qodana_12.QODANA_SHORT_SARIF_NAME}`), qodana_12.COVERAGE_THRESHOLD); + const coverageInfo = getCoverageStats((0, qodana_12.getCoverageFromSarif)(`${resultsDir}/${qodana_12.QODANA_SHORT_SARIF_NAME}`)); let licensesInfo = ""; let packages = 0; const licensesJson = `${resultsDir}/projectStructure/${qodana_12.QODANA_LICENSES_JSON}`; @@ -134843,15 +137271,25 @@ var require_annotations = __commonJS({ } : function(o, v) { o["default"] = v; }); - var __importStar3 = exports2 && exports2.__importStar || function(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding3(result, mod, k); - } - __setModuleDefault3(result, mod); - return result; - }; + var __importStar3 = exports2 && exports2.__importStar || /* @__PURE__ */ function() { + var ownKeys = /* @__PURE__ */ __name(function(o) { + ownKeys = Object.getOwnPropertyNames || function(o2) { + var ar = []; + for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }, "ownKeys"); + return function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding3(result, mod, k[i]); + } + __setModuleDefault3(result, mod); + return result; + }; + }(); var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? 
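// For orientation: the summary built above is wrapped by wrapToDiffBlock into a
// fenced ```diff block, so the "- "/"+ " prefixes chosen in getCoverageStats are
// purely presentational: GitHub renders the conclusion line red when totalCoverage
// falls below the threshold (now read from the parsed SARIF coverage object rather
// than passed in as the fixed COVERAGE_THRESHOLD constant) and green otherwise.
// Illustrative outcome, assuming totalCoverageThreshold = 50:
//
//   // totalCoverage = 30  ->  "- 30% total lines covered"   (red line in the diff block)
//   // totalCoverage = 70  ->  "+ 70% total lines covered"   (green line)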
value : new P(function(resolve) { @@ -135060,15 +137498,25 @@ var require_utils10 = __commonJS({ } : function(o, v) { o["default"] = v; }); - var __importStar3 = exports2 && exports2.__importStar || function(mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) { - for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding3(result, mod, k); - } - __setModuleDefault3(result, mod); - return result; - }; + var __importStar3 = exports2 && exports2.__importStar || /* @__PURE__ */ function() { + var ownKeys = /* @__PURE__ */ __name(function(o) { + ownKeys = Object.getOwnPropertyNames || function(o2) { + var ar = []; + for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }, "ownKeys"); + return function(mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) { + for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding3(result, mod, k[i]); + } + __setModuleDefault3(result, mod); + return result; + }; + }(); var __awaiter3 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function(resolve) { @@ -135120,7 +137568,7 @@ var require_utils10 = __commonJS({ exports2.updateComment = updateComment; exports2.putReaction = putReaction; exports2.publishGitHubCheck = publishGitHubCheck; - var cache = __importStar3(require_cache2()); + var cache = __importStar3(require_cache3()); var core2 = __importStar3(require_core()); var exec = __importStar3(require_exec()); var github2 = __importStar3(require_github()); @@ -135135,8 +137583,10 @@ var require_utils10 = __commonJS({ exports2.ANALYSIS_FINISHED_REACTION = "+1"; exports2.ANALYSIS_STARTED_REACTION = "eyes"; function getInputs() { + const rawArgs = core2.getInput("args"); + const argList = rawArgs ? 
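// Why the rawArgs guard here matters: String.prototype.split never yields an empty
// array, so the old unconditional form turned an unset "args" input into [""] and
// passed a single empty argument through to the CLI invocation:
//
//   "".split(",").map(s => s.trim());                          // [""]
//   "-i,frontend,--print-problems".split(",").map(s => s.trim());
//   // ["-i", "frontend", "--print-problems"]   (hypothetical input value)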
@@ -135159,21 +137609,18 @@ var require_utils10 = __commonJS({
     __name(getInputs, "getInputs");
     function getPrSha() {
       return __awaiter3(this, void 0, void 0, function* () {
+        const pr = github2.context.payload.pull_request;
         if (process.env.QODANA_PR_SHA) {
           return process.env.QODANA_PR_SHA;
         }
-        if (github2.context.payload.pull_request !== void 0) {
-          const output = yield gitOutput([
-            "merge-base",
-            github2.context.payload.pull_request.base.sha,
-            github2.context.payload.pull_request.head.sha
-          ], {
+        if (pr) {
+          const output = yield gitOutput(["merge-base", pr.base.sha, pr.head.sha], {
             ignoreReturnCode: true
           });
           if (output.exitCode === 0) {
             return output.stdout.trim();
           } else {
-            return github2.context.payload.pull_request.base.sha;
+            return pr.base.sha;
           }
         }
         return "";
@@ -135181,13 +137628,15 @@ var require_utils10 = __commonJS({
     }
     __name(getPrSha, "getPrSha");
     function getHeadSha() {
+      const c = github2.context;
+      const pr = c.payload.pull_request;
       if (process.env.QODANA_REVISION) {
         return process.env.QODANA_REVISION;
       }
-      if (github2.context.payload.pull_request !== void 0) {
-        return github2.context.payload.pull_request.head.sha;
+      if (pr) {
+        return pr.head.sha;
       }
-      return github2.context.sha;
+      return c.sha;
     }
     __name(getHeadSha, "getHeadSha");
     function qodana(inputs_1) {
@@ -135201,10 +137650,11 @@ var require_utils10 = __commonJS({
           }
         }
       }
-      return (yield exec.getExecOutput(qodana_12.EXECUTABLE, args, {
+      const exit = yield exec.getExecOutput(qodana_12.EXECUTABLE, args, {
         ignoreReturnCode: true,
         env: Object.assign(Object.assign({}, process.env), { QODANA_REVISION: getHeadSha(), NONINTERACTIVE: "1" })
-      })).exitCode;
+      });
+      return exit.exitCode;
     });
   }
   __name(qodana, "qodana");
@@ -135216,9 +137666,10 @@ var require_utils10 = __commonJS({
       }
       try {
         const c = github2.context;
+        const pr = c.payload.pull_request;
         let currentBranch = c.ref;
-        if (((_a = c.payload.pull_request) === null || _a === void 0 ? void 0 : _a.head.ref) !== void 0) {
-          currentBranch = c.payload.pull_request.head.ref;
+        if ((_a = pr === null || pr === void 0 ? void 0 : pr.head) === null || _a === void 0 ? void 0 : _a.ref) {
+          currentBranch = pr.head.ref;
         }
         const currentCommit = (yield exec.getExecOutput("git", ["rev-parse", "HEAD"])).stdout.trim();
         currentBranch = (0, qodana_12.validateBranchName)(currentBranch);
@@ -135321,7 +137772,7 @@ var require_utils10 = __commonJS({
       if (!execute) {
        return "";
       }
-      const restoreKeys = [additionalCacheKey];
+      const restoreKeys = [additionalCacheKey].filter((k) => k);
       try {
         const cacheKey = yield cache.restoreCache([cacheDir], primaryKey, restoreKeys);
         if (!cacheKey) {
@@ -135343,10 +137794,10 @@ var require_utils10 = __commonJS({
         core2.warning('Turn on "use-cache" option to use "cache-default-branch-only"');
       }
       if (useCaches && cacheDefaultBranchOnly) {
-        const currentBranch = github2.context.payload.ref;
+        const currentBranch = github2.context.payload.ref_name;
         const defaultBranch = (_a = github2.context.payload.repository) === null || _a === void 0 ? void 0 : _a.default_branch;
         core2.debug(`Current branch: ${currentBranch} | Default branch: ${defaultBranch}`);
-        return currentBranch === defaultBranch;
+        return currentBranch === `refs/heads/${defaultBranch}`;
       }
       return useCaches;
     }
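The isNeedToUploadCache rewrite above now compares the run's ref against `refs/heads/<default branch>` instead of the bare branch name. A standalone sketch of that gate, assuming (as the patch does) that the payload field carries the fully qualified ref:

function shouldUploadCache(
  useCaches: boolean,
  defaultBranchOnly: boolean,
  ref: string | undefined,
  defaultBranch: string | undefined
): boolean {
  if (useCaches && defaultBranchOnly) {
    // Upload only when the fully qualified ref points at the default branch.
    return ref === `refs/heads/${defaultBranch}`
  }
  return useCaches
}

// shouldUploadCache(true, true, 'refs/heads/main', 'main')   -> true
// shouldUploadCache(true, true, 'refs/heads/fix-42', 'main') -> false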
@@ -135363,8 +137814,7 @@ var require_utils10 = __commonJS({
     __name(getWorkflowRunUrl, "getWorkflowRunUrl");
     function postResultsToPRComments(toolName, content, postComment) {
       return __awaiter3(this, void 0, void 0, function* () {
-        var _a;
-        const pr = (_a = github2.context.payload.pull_request) !== null && _a !== void 0 ? _a : "";
+        const pr = github2.context.payload.pull_request;
         if (!postComment || !pr) {
           return;
         }
@@ -135429,9 +137879,12 @@ ${comment_tag_pattern}`;
     __name(updateComment, "updateComment");
     function putReaction(newReaction, oldReaction) {
       return __awaiter3(this, void 0, void 0, function* () {
-        var _a;
+        const pr = github2.context.payload.pull_request;
+        if (!pr) {
+          return;
+        }
         const client = github2.getOctokit(getInputs().githubToken);
-        const issue_number = (_a = github2.context.payload.pull_request) === null || _a === void 0 ? void 0 : _a.number;
+        const issue_number = pr.number;
         if (oldReaction !== "") {
           try {
             const { data: reactions } = yield client.rest.reactions.listForIssue(Object.assign(Object.assign({}, github2.context.repo), { issue_number }));
@@ -135454,9 +137907,11 @@ ${comment_tag_pattern}`;
     function publishGitHubCheck(failedByThreshold, name, output) {
       return __awaiter3(this, void 0, void 0, function* () {
         const conclusion = (0, annotations_1.getGitHubCheckConclusion)(output.annotations, failedByThreshold);
-        let sha = github2.context.sha;
-        if (github2.context.payload.pull_request) {
-          sha = github2.context.payload.pull_request.head.sha;
+        const c = github2.context;
+        const pr = c.payload.pull_request;
+        let sha = c.sha;
+        if (pr) {
+          sha = pr.head.sha;
         }
         const client = github2.getOctokit(getInputs().githubToken);
         const result = yield client.rest.checks.listForRef(Object.assign(Object.assign({}, github2.context.repo), { ref: sha }));
@@ -135502,7 +137957,7 @@ ${comment_tag_pattern}`;
     __name(git, "git");
     function gitOutput(args_1) {
       return __awaiter3(this, arguments, void 0, function* (args, options = {}) {
-        return yield exec.getExecOutput("git", args, options);
+        return exec.getExecOutput("git", args, options);
       });
     }
     __name(gitOutput, "gitOutput");
@@ -135551,15 +138006,25 @@ var __setModuleDefault2 = exports && exports.__setModuleDefault || (Object.creat
 } : function(o, v) {
   o["default"] = v;
 });
-var __importStar2 = exports && exports.__importStar || function(mod) {
-  if (mod && mod.__esModule) return mod;
-  var result = {};
-  if (mod != null) {
-    for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding2(result, mod, k);
-  }
-  __setModuleDefault2(result, mod);
-  return result;
-};
+var __importStar2 = exports && exports.__importStar || /* @__PURE__ */ function() {
+  var ownKeys = /* @__PURE__ */ __name(function(o) {
+    ownKeys = Object.getOwnPropertyNames || function(o2) {
+      var ar = [];
+      for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k;
+      return ar;
+    };
+    return ownKeys(o);
+  }, "ownKeys");
+  return function(mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) {
+      for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding2(result, mod, k[i]);
+    }
+    __setModuleDefault2(result, mod);
+    return result;
+  };
+}();
 var __awaiter2 = exports && exports.__awaiter || function(thisArg, _arguments, P, generator) {
   function adopt(value) {
     return value instanceof P ? value : new P(function(resolve) {
@@ -135639,7 +138104,7 @@ function main() {
   });
 }
 __name(main, "main");
-main();
+void main();
 /*! Bundled license information:
 
 undici/lib/fetch/body.js:
diff --git a/scan/package.json b/scan/package.json
index 2469ccba..c6936e25 100644
--- a/scan/package.json
+++ b/scan/package.json
@@ -5,7 +5,7 @@
   "license": "Apache-2.0",
   "main": "lib/main.js",
   "scripts": {
-    "lint": "prettier --write '**/*.ts' && eslint --fix **/*.ts",
+    "lint": "prettier --write '**/*.ts' && eslint --fix **/*.ts -c ../.github/linters/.eslintrc.yml",
     "build": "tsc --build .",
     "package": "../node_modules/.bin/esbuild lib/main.js --platform=node --keep-names --bundle --outfile=dist/index.js",
     "test": "jest --config jest.config.js",
@@ -17,8 +17,8 @@
   },
   "dependencies": {
     "@actions/artifact": "^2.1.11",
-    "@actions/cache": "^3.2.4",
-    "@actions/core": "^1.10.1",
+    "@actions/cache": "^4.0.0",
+    "@actions/core": "^1.11.1",
     "@actions/exec": "^1.1.0",
     "@actions/github": "^6.0.0",
     "@actions/tool-cache": "^2.0.1",
@@ -30,20 +30,19 @@
     "jszip": "^3.10.1"
   },
   "devDependencies": {
-    "@types/jest": "^29.5.12",
-    "@types/node": "^22.5.2",
-    "@typescript-eslint/parser": "^7.18.0",
-    "axios": "^1.7.7",
-    "esbuild": "0.23.1",
+    "@types/jest": "^29.5.14",
+    "@types/node": "^22.10.1",
+    "@typescript-eslint/parser": "^8.18.0",
+    "axios": "^1.7.9",
+    "esbuild": "0.24.0",
     "eslint": "^8.57.1",
-    "eslint-import-resolver-typescript": "^3.6.3",
-    "eslint-plugin-github": "^5.0.1",
-    "eslint-plugin-jest": "^28.8.2",
+    "eslint-import-resolver-typescript": "^3.7.0",
+    "eslint-plugin-jest": "^28.9.0",
     "jest": "latest",
     "js-yaml": "^4.1.0",
-    "nock": "^13.5.5",
-    "prettier": "3.3.3",
+    "nock": "^13.5.6",
+    "prettier": "3.4.2",
     "ts-jest": "latest",
-    "typescript": "^5.5.4"
+    "typescript": "^5.7.2"
   }
 }
diff --git a/scan/src/annotations.ts b/scan/src/annotations.ts
index a3bfdd25..49a99794 100644
--- a/scan/src/annotations.ts
+++ b/scan/src/annotations.ts
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-/* eslint-disable @typescript-eslint/no-non-null-assertion,github/array-foreach */
+/* eslint-disable @typescript-eslint/no-non-null-assertion */
 import * as core from '@actions/core'
 import {AnnotationProperties} from '@actions/core'
 import * as fs from 'fs'
@@ -144,7 +144,7 @@ function parseResult(
     : `${projectDir}/`
   return {
     message: result.message.markdown ?? result.message.text!,
-    title: rules.get(result.ruleId!)?.shortDescription!,
+    title: rules.get(result.ruleId!)?.shortDescription,
     path: pathPrefix + location.artifactLocation!.uri!,
     start_line: region?.startLine || 0,
     end_line: region?.endLine || region?.startLine || 1,
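The title fix above drops a non-null assertion that optional chaining had already made unsound: rules.get() returns undefined for an unknown ruleId, so `?.shortDescription!` claimed a string that may not exist. AnnotationProperties.title is optional in @actions/core, so letting undefined flow through is the honest typing. A reduced sketch (Rule here is an illustrative stand-in for the SARIF rule shape):

interface Rule {
  shortDescription?: string
}

function titleFor(rules: Map<string, Rule>, ruleId: string): string | undefined {
  // No `!` after the chain: a missing rule legitimately yields undefined.
  return rules.get(ruleId)?.shortDescription
}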
@@ -199,7 +199,9 @@ function parseRules(tool: Tool): Map<string, Rule> {
  * @returns GitHub Check Outputs with annotations are created for each result.
  */
 export function parseSarif(path: string, projectDir: string): Output {
-  const sarif: Log = JSON.parse(fs.readFileSync(path, {encoding: 'utf8'}))
+  const sarif: Log = JSON.parse(
+    fs.readFileSync(path, {encoding: 'utf8'})
+  ) as Log
   const run = sarif.runs[0]
   const rules = parseRules(run.tool)
   let title = 'No new problems found by '
diff --git a/scan/src/main.ts b/scan/src/main.ts
index 73da9c51..fbec2563 100644
--- a/scan/src/main.ts
+++ b/scan/src/main.ts
@@ -87,7 +87,7 @@ async function main(): Promise<void> {
     restoreCachesPromise
   ])
   const reservedCacheKey = await restoreCachesPromise
-  const exitCode = await qodana(inputs)
+  const exitCode = (await qodana(inputs)) as QodanaExitCode
   const canUploadCache =
     isNeedToUploadCache(inputs.useCaches, inputs.cacheDefaultBranchOnly) &&
     isExecutionSuccessful(exitCode)
@@ -125,5 +125,4 @@ async function main(): Promise<void> {
   }
 }
 
-// noinspection JSIgnoredPromiseFromCall
-main()
+void main()
diff --git a/scan/src/output.ts b/scan/src/output.ts
index 1834f533..3d4ad56c 100644
--- a/scan/src/output.ts
+++ b/scan/src/output.ts
@@ -19,7 +19,6 @@ import * as core from '@actions/core'
 import * as fs from 'fs'
 import {
   Coverage,
-  COVERAGE_THRESHOLD,
   getCoverageFromSarif,
   QODANA_LICENSES_JSON,
   QODANA_LICENSES_MD,
@@ -71,13 +70,27 @@ so that the action will upload the files as the job artifacts:
 const SUMMARY_PR_MODE = `💡 Qodana analysis was run in the pull request mode: only the changed files were checked`
 const DEPENDENCY_CHARS_LIMIT = 65000 // 65k chars is the GitHub limit for a comment
 
+interface CloudData {
+  url?: string
+}
+
+interface OpenInIDEData {
+  cloud?: CloudData
+}
+
+interface LicenseEntry {
+  name?: string
+  version?: string
+  license?: string
+}
+
 function wrapToDiffBlock(message: string): string {
   return `\`\`\`diff
 ${message}
 \`\`\``
 }
 
-export function getCoverageStats(c: Coverage, threshold: number): string {
+export function getCoverageStats(c: Coverage): string {
   if (c.totalLines === 0 && c.totalCoveredLines === 0) {
     return ''
   }
@@ -85,7 +98,7 @@ export function getCoverageStats(c: Coverage, threshold: number): string {
   let stats = ''
   if (c.totalLines !== 0) {
     let conclusion = `${c.totalCoverage}% total lines covered`
-    if (c.totalCoverage < threshold) {
+    if (c.totalCoverage < c.totalCoverageThreshold) {
       conclusion = `- ${conclusion}`
     } else {
       conclusion = `+ ${conclusion}`
@@ -112,16 +125,15 @@ ${c.freshLines} lines analyzed, ${c.freshCoveredLines} lines covered`
 export function getReportURL(resultsDir: string): string {
   let reportUrlFile = `${resultsDir}/${QODANA_OPEN_IN_IDE_NAME}`
   if (fs.existsSync(reportUrlFile)) {
-    const data = JSON.parse(fs.readFileSync(reportUrlFile, {encoding: 'utf8'}))
-    if (data && data.cloud && data.cloud.url) {
+    const rawData = fs.readFileSync(reportUrlFile, {encoding: 'utf8'})
+    const data = JSON.parse(rawData) as OpenInIDEData
+    if (data?.cloud?.url) {
       return data.cloud.url
     }
   } else {
     reportUrlFile = `${resultsDir}/${QODANA_REPORT_URL_NAME}`
     if (fs.existsSync(reportUrlFile)) {
-      return fs.readFileSync(`${resultsDir}/${QODANA_REPORT_URL_NAME}`, {
-        encoding: 'utf8'
-      })
+      return fs.readFileSync(reportUrlFile, {encoding: 'utf8'})
     }
   }
   return ''
 }
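The pattern in parseSarif and getReportURL above exists because JSON.parse returns `any`, which the recommended-type-checked ESLint preset flags at every property access; reading the file into a string first and asserting a narrow local interface keeps the one unsafe step visible and contained. A condensed sketch reusing the same OpenInIDEData shape (the file path is arbitrary here):

import * as fs from 'fs'

interface CloudData {
  url?: string
}

interface OpenInIDEData {
  cloud?: CloudData
}

function readCloudUrl(file: string): string {
  if (!fs.existsSync(file)) {
    return ''
  }
  const raw = fs.readFileSync(file, {encoding: 'utf8'})
  // Single, explicit assertion; everything after this is type-checked.
  const data = JSON.parse(raw) as OpenInIDEData
  return data?.cloud?.url ?? ''
}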
@@ -156,16 +168,16 @@ export async function publishOutput(
   )
   const reportUrl = getReportURL(resultsDir)
   const coverageInfo = getCoverageStats(
-    getCoverageFromSarif(`${resultsDir}/${QODANA_SHORT_SARIF_NAME}`),
-    COVERAGE_THRESHOLD
+    getCoverageFromSarif(`${resultsDir}/${QODANA_SHORT_SARIF_NAME}`)
   )
+
   let licensesInfo = ''
   let packages = 0
   const licensesJson = `${resultsDir}/projectStructure/${QODANA_LICENSES_JSON}`
   if (fs.existsSync(licensesJson)) {
     const licenses = JSON.parse(
       fs.readFileSync(licensesJson, {encoding: 'utf8'})
-    )
+    ) as LicenseEntry[]
     if (licenses.length > 0) {
       packages = licenses.length
       licensesInfo = fs.readFileSync(
diff --git a/scan/src/utils.ts b/scan/src/utils.ts
index 42359ecb..270501e7 100644
--- a/scan/src/utils.ts
+++ b/scan/src/utils.ts
@@ -17,15 +17,16 @@
 import * as cache from '@actions/cache'
 import * as core from '@actions/core'
 import * as exec from '@actions/exec'
+import {ExecOutput} from '@actions/exec'
 import * as github from '@actions/github'
 import * as tc from '@actions/tool-cache'
 import artifact from '@actions/artifact'
 import {GitHub} from '@actions/github/lib/utils'
 import {Conclusion, getGitHubCheckConclusion, Output} from './annotations'
 import {
+  BRANCH,
+  compressFolder,
   EXECUTABLE,
-  Inputs,
-  VERSION,
   getProcessArchName,
   getProcessPlatformName,
   getQodanaPullArgs,
@@ -33,45 +34,54 @@ import {
   getQodanaSha256,
   getQodanaSha256MismatchMessage,
   getQodanaUrl,
-  sha256sum,
-  PushFixesType,
+  Inputs,
+  isNativeMode,
   NONE,
   PULL_REQUEST,
-  BRANCH,
-  isNativeMode,
+  PushFixesType,
+  sha256sum,
   validateBranchName,
-  compressFolder
+  VERSION
 } from '../../common/qodana'
 import path from 'path'
 import * as fs from 'fs'
 import * as os from 'os'
 import {COMMIT_EMAIL, COMMIT_USER, prFixesBody} from './output'
-import {ExecOutput} from '@actions/exec'
 
 export const ANALYSIS_FINISHED_REACTION = '+1'
 export const ANALYSIS_STARTED_REACTION = 'eyes'
-const REACTIONS = [
-  '+1',
-  '-1',
-  'laugh',
-  'confused',
-  'heart',
-  'hooray',
-  'rocket',
-  'eyes'
-] as const
-type Reaction = (typeof REACTIONS)[number]
+
+type Reaction =
+  | '+1'
+  | '-1'
+  | 'laugh'
+  | 'confused'
+  | 'heart'
+  | 'hooray'
+  | 'rocket'
+  | 'eyes'
+
+interface PullRequestPayload {
+  number: number
+  head: {
+    sha: string
+    ref: string
+  }
+  base: {
+    sha: string
+    ref: string
+  }
+}
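The PullRequestPayload interface above replaces scattered assertions: @actions/github types context.payload.pull_request only loosely (a webhook-payload index signature), so each field access previously needed its own cast or optional chain. Narrowing once per function, as sketched below, lets the rest of the body use plain typed access:

import * as github from '@actions/github'

interface PullRequestPayload {
  number: number
  head: {sha: string; ref: string}
  base: {sha: string; ref: string}
}

function currentPr(): PullRequestPayload | undefined {
  // One assertion at the boundary instead of one per field access.
  return github.context.payload.pull_request as PullRequestPayload | undefined
}

// const pr = currentPr()
// if (!pr) return              // guard once...
// use(pr.head.sha, pr.number)  // ...then access fields without assertions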
 /**
  * The context for the action.
  * @returns The action inputs.
  */
 export function getInputs(): Inputs {
+  const rawArgs = core.getInput('args')
+  const argList = rawArgs ? rawArgs.split(',').map(arg => arg.trim()) : []
   return {
-    args: core
-      .getInput('args')
-      .split(',')
-      .map(arg => arg.trim()),
+    args: argList,
     resultsDir: core.getInput('results-dir'),
     cacheDir: core.getInput('cache-dir'),
     primaryCacheKey: core.getInput('primary-cache-key'),
@@ -92,45 +102,37 @@ export function getInputs(): Inputs {
 }
 
 async function getPrSha(): Promise<string> {
+  const pr = github.context.payload.pull_request as
+    | PullRequestPayload
+    | undefined
   if (process.env.QODANA_PR_SHA) {
     return process.env.QODANA_PR_SHA
   }
-  if (github.context.payload.pull_request !== undefined) {
-    const output = await gitOutput(
-      [
-        'merge-base',
-        github.context.payload.pull_request.base.sha,
-        github.context.payload.pull_request.head.sha
-      ],
-      {
-        ignoreReturnCode: true
-      }
-    )
+  if (pr) {
+    const output = await gitOutput(['merge-base', pr.base.sha, pr.head.sha], {
+      ignoreReturnCode: true
+    })
     if (output.exitCode === 0) {
       return output.stdout.trim()
     } else {
-      return github.context.payload.pull_request.base.sha
+      return pr.base.sha
     }
   }
   return ''
 }
 
 function getHeadSha(): string {
+  const c = github.context
+  const pr = c.payload.pull_request as PullRequestPayload | undefined
   if (process.env.QODANA_REVISION) {
     return process.env.QODANA_REVISION
   }
-  if (github.context.payload.pull_request !== undefined) {
-    return github.context.payload.pull_request.head.sha
+  if (pr) {
+    return pr.head.sha
   }
-  return github.context.sha
+  return c.sha
 }
 
-/**
- * Runs the qodana command with the given arguments.
- * @param inputs the action inputs.
- * @param args docker command arguments.
- * @returns The qodana command execution output.
- */
 export async function qodana(
   inputs: Inputs,
   args: string[] = []
@@ -144,16 +146,15 @@ export async function qodana(
     }
   }
 }
-  return (
-    await exec.getExecOutput(EXECUTABLE, args, {
-      ignoreReturnCode: true,
-      env: {
-        ...process.env,
-        QODANA_REVISION: getHeadSha(),
-        NONINTERACTIVE: '1'
-      }
-    })
-  ).exitCode
+  const exit = await exec.getExecOutput(EXECUTABLE, args, {
+    ignoreReturnCode: true,
+    env: {
+      ...process.env,
+      QODANA_REVISION: getHeadSha(),
+      NONINTERACTIVE: '1'
+    }
+  })
+  return exit.exitCode
 }
 
 export async function pushQuickFixes(
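What the getPrSha rewrite above computes, stripped of the context plumbing: the merge base of the PR's base and head SHAs, falling back to the base SHA when git cannot resolve one (for example, in a shallow checkout). A self-contained sketch with @actions/exec (prMergeBase is an illustrative helper name):

import * as exec from '@actions/exec'

async function prMergeBase(baseSha: string, headSha: string): Promise<string> {
  const output = await exec.getExecOutput(
    'git',
    ['merge-base', baseSha, headSha],
    {ignoreReturnCode: true} // a non-zero exit is expected and handled below
  )
  return output.exitCode === 0 ? output.stdout.trim() : baseSha
}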
@@ -165,10 +166,13 @@ export async function pushQuickFixes(
   }
   try {
     const c = github.context
+    const pr = c.payload.pull_request as PullRequestPayload | undefined
     let currentBranch = c.ref
-    if (c.payload.pull_request?.head.ref !== undefined) {
-      currentBranch = c.payload.pull_request.head.ref
+
+    if (pr?.head?.ref) {
+      currentBranch = pr.head.ref
     }
+
     const currentCommit = (
       await exec.getExecOutput('git', ['rev-parse', 'HEAD'])
     ).stdout.trim()
@@ -204,11 +208,6 @@ export async function pushQuickFixes(
   }
 }
 
-/**
- * Prepares the agent for qodana scan: install Qodana CLI and pull the linter.
- * @param args qodana arguments
- * @param useNightly whether to use a nightly version of Qodana CLI
- */
 export async function prepareAgent(
   args: string[],
   useNightly = false
@@ -319,7 +318,7 @@ export async function restoreCaches(
   if (!execute) {
     return ''
   }
-  const restoreKeys = [additionalCacheKey]
+  const restoreKeys = [additionalCacheKey].filter(k => k)
   try {
     const cacheKey = await cache.restoreCache(
       [cacheDir],
@@ -328,9 +327,7 @@ export async function restoreCaches(
     )
     if (!cacheKey) {
       core.info(
-        `No cache found for input keys: ${[primaryKey, ...restoreKeys].join(
-          ', '
-        )}.
+        `No cache found for input keys: ${[primaryKey, ...restoreKeys].join(', ')}.
 With cache the pipeline would be faster.`
       )
       return ''
@@ -338,9 +335,7 @@ export async function restoreCaches(
     return cacheKey
   } catch (error) {
     core.warning(
-      `Failed to restore cache with key ${primaryKey} – ${
-        (error as Error).message
-      }`
+      `Failed to restore cache with key ${primaryKey} – ${(error as Error).message}`
     )
   }
   return ''
@@ -360,12 +355,13 @@ export function isNeedToUploadCache(
   }
 
   if (useCaches && cacheDefaultBranchOnly) {
-    const currentBranch = github.context.payload.ref
-    const defaultBranch = github.context.payload.repository?.default_branch
+    const currentBranch = github.context.payload.ref_name as string
+    const defaultBranch = github.context.payload.repository
+      ?.default_branch as string
     core.debug(
       `Current branch: ${currentBranch} | Default branch: ${defaultBranch}`
     )
-    return currentBranch === defaultBranch
+    return currentBranch === `refs/heads/${defaultBranch}`
   }
 
   return useCaches
@@ -396,7 +392,9 @@ export async function postResultsToPRComments(
   content: string,
   postComment: boolean
 ): Promise<void> {
-  const pr = github.context.payload.pull_request ?? ''
+  const pr = github.context.payload.pull_request as
+    | PullRequestPayload
+    | undefined
   if (!postComment || !pr) {
     return
   }
@@ -497,9 +495,15 @@ export async function putReaction(
   newReaction: Reaction,
   oldReaction: string
 ): Promise<void> {
+  const pr = github.context.payload.pull_request as
+    | PullRequestPayload
+    | undefined
+  if (!pr) {
+    return
+  }
   const client = github.getOctokit(getInputs().githubToken)
+  const issue_number = pr.number
 
-  const issue_number = github.context.payload.pull_request?.number as number
   if (oldReaction !== '') {
     try {
       const {data: reactions} = await client.rest.reactions.listForIssue({
@@ -547,9 +551,11 @@ export async function publishGitHubCheck(
     output.annotations,
     failedByThreshold
   )
-  let sha = github.context.sha
-  if (github.context.payload.pull_request) {
-    sha = github.context.payload.pull_request.head.sha
+  const c = github.context
+  const pr = c.payload.pull_request as PullRequestPayload | undefined
+  let sha = c.sha
+  if (pr) {
+    sha = pr.head.sha
   }
   const client = github.getOctokit(getInputs().githubToken)
   const result = await client.rest.checks.listForRef({
@@ -624,7 +630,7 @@ async function gitOutput(
   args: string[],
   options: exec.ExecOptions = {}
 ): Promise<ExecOutput> {
-  return await exec.getExecOutput('git', args, options)
+  return exec.getExecOutput('git', args, options)
 }
 
 async function createPr(
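The `.filter(k => k)` above keeps an unset additional-cache-key input from reaching @actions/cache as an empty restore key, which is at best a useless key and at worst a validation failure. A reduced sketch of the restore call (restore is an illustrative wrapper, not the action's API):

import * as cache from '@actions/cache'

async function restore(
  cacheDir: string,
  primaryKey: string,
  additionalCacheKey: string
): Promise<string> {
  // Drop blanks so only real keys are offered as fallbacks.
  const restoreKeys = [additionalCacheKey].filter(k => k)
  const hit = await cache.restoreCache([cacheDir], primaryKey, restoreKeys)
  return hit ?? ''
}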
"camelcase": "off", - "@typescript-eslint/consistent-type-assertions": "error", - "@typescript-eslint/explicit-function-return-type": ["error", {"allowExpressions": true}], - "@typescript-eslint/func-call-spacing": ["error", "never"], - "@typescript-eslint/no-array-constructor": "error", - "@typescript-eslint/no-empty-interface": "error", - "@typescript-eslint/no-explicit-any": "error", - "@typescript-eslint/no-extraneous-class": "error", - "@typescript-eslint/no-for-in-array": "error", - "@typescript-eslint/no-inferrable-types": "error", - "@typescript-eslint/no-misused-new": "error", - "@typescript-eslint/no-namespace": "error", - "@typescript-eslint/no-non-null-assertion": "warn", - "@typescript-eslint/no-unnecessary-qualifier": "error", - "@typescript-eslint/no-unnecessary-type-assertion": "error", - "@typescript-eslint/no-useless-constructor": "error", - "@typescript-eslint/no-var-requires": "error", - "@typescript-eslint/prefer-for-of": "warn", - "@typescript-eslint/prefer-function-type": "warn", - "@typescript-eslint/prefer-includes": "error", - "@typescript-eslint/prefer-string-starts-ends-with": "error", - "@typescript-eslint/promise-function-async": "error", - "@typescript-eslint/require-array-sort-compare": "error", - "@typescript-eslint/restrict-plus-operands": "error", - "semi": "off", - "@typescript-eslint/semi": ["error", "never"], - "@typescript-eslint/type-annotation-spacing": "error", - "@typescript-eslint/unbound-method": "error" - }, - "env": { - "node": true, - "es6": true, - "jest/globals": true - } - } \ No newline at end of file diff --git a/vsts/QodanaScan/index.js b/vsts/QodanaScan/index.js index 12b0930b..009d875a 100644 --- a/vsts/QodanaScan/index.js +++ b/vsts/QodanaScan/index.js @@ -9899,6 +9899,7 @@ function getQodanaScanArgs(args, resultsDir, cacheDir) { return cliArgs; } function getCoverageFromSarif(sarifPath) { + var _a, _b, _c, _d; if (fs.existsSync(sarifPath)) { const sarifContents = JSON.parse( fs.readFileSync(sarifPath, { encoding: "utf8" }) @@ -9910,7 +9911,9 @@ function getCoverageFromSarif(sarifPath) { totalCoveredLines: sarifContents.runs[0].properties["coverage"]["totalCoveredLines"] || 0, freshCoverage: sarifContents.runs[0].properties["coverage"]["freshCoverage"] || 0, freshLines: sarifContents.runs[0].properties["coverage"]["freshLines"] || 0, - freshCoveredLines: sarifContents.runs[0].properties["coverage"]["freshCoveredLines"] || 0 + freshCoveredLines: sarifContents.runs[0].properties["coverage"]["freshCoveredLines"] || 0, + totalCoverageThreshold: ((_b = (_a = sarifContents.runs[0].properties["qodanaFailureConditions"]) == null ? void 0 : _a["testCoverageThresholds"]) == null ? void 0 : _b["totalCoverage"]) || COVERAGE_THRESHOLD, + freshCoverageThreshold: ((_d = (_c = sarifContents.runs[0].properties["qodanaFailureConditions"]) == null ? void 0 : _c["testCoverageThresholds"]) == null ? 
@@ -14628,15 +14633,25 @@ var require_utils3 = __commonJS({
   } : function(o, v) {
     o["default"] = v;
   });
-  var __importStar2 = exports2 && exports2.__importStar || function(mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) {
-      for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding2(result, mod, k);
-    }
-    __setModuleDefault2(result, mod);
-    return result;
-  };
+  var __importStar2 = exports2 && exports2.__importStar || /* @__PURE__ */ function() {
+    var ownKeys = function(o) {
+      ownKeys = Object.getOwnPropertyNames || function(o2) {
+        var ar = [];
+        for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k;
+        return ar;
+      };
+      return ownKeys(o);
+    };
+    return function(mod) {
+      if (mod && mod.__esModule) return mod;
+      var result = {};
+      if (mod != null) {
+        for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding2(result, mod, k[i]);
+      }
+      __setModuleDefault2(result, mod);
+      return result;
+    };
+  }();
   var __awaiter2 = exports2 && exports2.__awaiter || function(thisArg, _arguments, P, generator) {
     function adopt(value) {
       return value instanceof P ? value : new P(function(resolve) {
@@ -14707,7 +14722,7 @@ var require_utils3 = __commonJS({
       const inputs = getInputs();
       args = (0, qodana_12.getQodanaScanArgs)(inputs.args, inputs.resultsDir, inputs.cacheDir);
     }
-    return tl2.exec(qodana_12.EXECUTABLE, args, {
+    return yield tl2.execAsync(qodana_12.EXECUTABLE, args, {
       ignoreReturnCode: true,
       env: Object.assign(Object.assign({}, process.env), { NONINTERACTIVE: "1" })
     });
@@ -14756,19 +14771,17 @@ var require_utils3 = __commonJS({
     });
   }
   function uploadSarif(resultsDir, execute) {
-    return __awaiter2(this, void 0, void 0, function* () {
-      if (!execute) {
-        return;
-      }
-      try {
-        const parentDir = path2.dirname(resultsDir);
-        const qodanaSarif = path2.join(parentDir, "qodana.sarif");
-        tl2.cp(path2.join(resultsDir, "qodana.sarif.json"), qodanaSarif);
-        tl2.uploadArtifact("CodeAnalysisLogs", qodanaSarif, "CodeAnalysisLogs");
-      } catch (error) {
-        tl2.warning(`Failed to upload SARIF \u2013 ${error.message}`);
-      }
-    });
+    if (!execute) {
+      return;
+    }
+    try {
+      const parentDir = path2.dirname(resultsDir);
+      const qodanaSarif = path2.join(parentDir, "qodana.sarif");
+      tl2.cp(path2.join(resultsDir, "qodana.sarif.json"), qodanaSarif);
+      tl2.uploadArtifact("CodeAnalysisLogs", qodanaSarif, "CodeAnalysisLogs");
+    } catch (error) {
+      tl2.warning(`Failed to upload SARIF \u2013 ${error.message}`);
+    }
   }
 }
});
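Two separate cleanups above: the older tl.exec call is replaced by the promise-returning tl.execAsync, and uploadSarif, which awaits nothing, sheds its async wrapper entirely. A condensed TypeScript sketch of both shapes (identifiers follow the patch; runQodana and uploadSarifReport are illustrative names):

import * as tl from 'azure-pipelines-task-lib/task'
import * as path from 'path'

async function runQodana(executable: string, args: string[]): Promise<number> {
  // execAsync resolves with the tool's exit code.
  return tl.execAsync(executable, args, {
    ignoreReturnCode: true,
    env: {...process.env, NONINTERACTIVE: '1'}
  })
}

function uploadSarifReport(resultsDir: string): void {
  // Pure synchronous task-lib calls: nothing here needs await.
  const qodanaSarif = path.join(path.dirname(resultsDir), 'qodana.sarif')
  tl.cp(path.join(resultsDir, 'qodana.sarif.json'), qodanaSarif)
  tl.uploadArtifact('CodeAnalysisLogs', qodanaSarif, 'CodeAnalysisLogs')
}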
@@ -14792,15 +14805,25 @@ var __setModuleDefault = exports && exports.__setModuleDefault || (Object.create
 } : function(o, v) {
   o["default"] = v;
 });
-var __importStar = exports && exports.__importStar || function(mod) {
-  if (mod && mod.__esModule) return mod;
-  var result = {};
-  if (mod != null) {
-    for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-  }
-  __setModuleDefault(result, mod);
-  return result;
-};
+var __importStar = exports && exports.__importStar || /* @__PURE__ */ function() {
+  var ownKeys = function(o) {
+    ownKeys = Object.getOwnPropertyNames || function(o2) {
+      var ar = [];
+      for (var k in o2) if (Object.prototype.hasOwnProperty.call(o2, k)) ar[ar.length] = k;
+      return ar;
+    };
+    return ownKeys(o);
+  };
+  return function(mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) {
+      for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+    }
+    __setModuleDefault(result, mod);
+    return result;
+  };
+}();
 var __awaiter = exports && exports.__awaiter || function(thisArg, _arguments, P, generator) {
   function adopt(value) {
     return value instanceof P ? value : new P(function(resolve) {
@@ -14842,7 +14865,7 @@ function main() {
     yield (0, utils_1.prepareAgent)(inputs.args);
     const exitCode = yield (0, utils_1.qodana)();
     yield (0, utils_1.uploadArtifacts)(inputs.resultsDir, inputs.artifactName, inputs.uploadResult);
-    yield (0, utils_1.uploadSarif)(inputs.resultsDir, inputs.uploadSarif);
+    (0, utils_1.uploadSarif)(inputs.resultsDir, inputs.uploadSarif);
    if (!(0, qodana_1.isExecutionSuccessful)(exitCode)) {
      (0, utils_1.setFailed)(`qodana scan failed with exit code ${exitCode}`);
    } else if (exitCode === qodana_1.QodanaExitCode.FailThreshold) {
@@ -14853,4 +14876,4 @@ function main() {
   }
  });
}
-main();
+void main();
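The recurring `-main(); +void main();` change, here and in the TypeScript sources above, is about floating promises: main() returns a promise nobody awaits, and the `void` operator marks the discard as deliberate, which satisfies @typescript-eslint/no-floating-promises without the old // noinspection comment. Minimal form:

async function main(): Promise<void> {
  // entry point; errors are caught and reported inside
}

// main()      // unmarked call: flagged as a floating promise by the lint rule
void main()    // explicit: the returned promise is intentionally discarded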
diff --git a/vsts/package.json b/vsts/package.json
index 52bf64fa..ecc1e2b0 100644
--- a/vsts/package.json
+++ b/vsts/package.json
@@ -4,7 +4,7 @@
   "description": "Qodana for Azure Pipelines extension",
   "main": "azure.js",
   "scripts": {
-    "lint": "prettier --write '**/*.ts' && eslint --fix **/*.ts",
+    "lint": "prettier --write '**/*.ts' && eslint --fix **/*.ts -c ../.github/linters/.eslintrc.yml",
     "bump-dev": "jq '.version |= (split(\".\") | .[2] = (. [2]| tonumber + 1 | tostring) | join(\".\"))' vss-extension.dev.json > tmp.json && mv tmp.json vss-extension.dev.json\n",
     "package": "npm run bump-dev && cd QodanaScan && npm ci && cd .. && ../node_modules/.bin/esbuild lib/main.js --platform=node --bundle --outfile=QodanaScan/index.js --target=node16 --external:shelljs --external:azure-pipelines-task-lib",
     "readme": "cp ../.github/md/azure.md README.md && curl -sS https://raw.githubusercontent.com/JetBrains/Qodana/2022.2/topics/azure-pipelines.md >> README.md && cat ../.github/md/tracker.md >> README.md",
@@ -29,17 +29,16 @@
     "azure-pipelines-tool-lib": "^2.0.8"
   },
   "devDependencies": {
-    "@types/node": "^22.5.2",
-    "@typescript-eslint/parser": "^7.18.0",
-    "esbuild": "0.23.1",
+    "@types/node": "^22.10.1",
+    "@typescript-eslint/parser": "^8.18.0",
+    "esbuild": "0.24.0",
     "eslint": "^8.57.1",
-    "eslint-plugin-github": "^5.0.1",
-    "eslint-plugin-jest": "^28.8.2",
+    "eslint-plugin-jest": "^28.9.0",
     "jest": "^29.7.0",
     "js-yaml": "^4.1.0",
-    "prettier": "3.3.3",
+    "prettier": "3.4.2",
     "sync-request": "^6.1.0",
     "ts-jest": "^29.2.5",
-    "typescript": "^5.5.4"
+    "typescript": "^5.7.2"
   }
 }
diff --git a/vsts/src/main.ts b/vsts/src/main.ts
index 00bdd016..0d261ab8 100644
--- a/vsts/src/main.ts
+++ b/vsts/src/main.ts
@@ -40,13 +40,13 @@ async function main(): Promise<void> {
   tl.mkdirP(inputs.resultsDir)
   tl.mkdirP(inputs.cacheDir)
   await prepareAgent(inputs.args)
-  const exitCode = await qodana()
+  const exitCode = (await qodana()) as QodanaExitCode
   await uploadArtifacts(
     inputs.resultsDir,
     inputs.artifactName,
     inputs.uploadResult
   )
-  await uploadSarif(inputs.resultsDir, inputs.uploadSarif)
+  uploadSarif(inputs.resultsDir, inputs.uploadSarif)
   if (!isExecutionSuccessful(exitCode)) {
     setFailed(`qodana scan failed with exit code ${exitCode}`)
   } else if (exitCode === QodanaExitCode.FailThreshold) {
@@ -57,5 +57,4 @@ async function main(): Promise<void> {
   }
 }
 
-// noinspection JSIgnoredPromiseFromCall
-main()
+void main()
diff --git a/vsts/src/utils.ts b/vsts/src/utils.ts
index 538076ad..3c727d5d 100644
--- a/vsts/src/utils.ts
+++ b/vsts/src/utils.ts
@@ -17,9 +17,8 @@
 import * as tl from 'azure-pipelines-task-lib/task'
 import * as tool from 'azure-pipelines-tool-lib'
 import {
+  compressFolder,
   EXECUTABLE,
-  Inputs,
-  VERSION,
   getProcessArchName,
   getProcessPlatformName,
   getQodanaPullArgs,
@@ -27,9 +26,10 @@ import {
   getQodanaSha256,
   getQodanaSha256MismatchMessage,
   getQodanaUrl,
-  sha256sum,
+  Inputs,
   isNativeMode,
-  compressFolder
+  sha256sum,
+  VERSION
 } from '../../common/qodana'
 
 // eslint-disable-next-line @typescript-eslint/no-require-imports
@@ -78,7 +78,7 @@ export async function qodana(args: string[] = []): Promise<number> {
     const inputs = getInputs()
     args = getQodanaScanArgs(inputs.args, inputs.resultsDir, inputs.cacheDir)
   }
-  return tl.exec(EXECUTABLE, args, {
+  return await tl.execAsync(EXECUTABLE, args, {
     ignoreReturnCode: true,
     env: {
       ...process.env,
@@ -158,10 +158,7 @@ export async function uploadArtifacts(
  * @param resultsDir The path to upload a report from.
  * @param execute whether to execute promise or not.
  */
-export async function uploadSarif(
-  resultsDir: string,
-  execute: boolean
-): Promise<void> {
+export function uploadSarif(resultsDir: string, execute: boolean): void {
   if (!execute) {
     return
   }
diff --git a/vsts/vss-extension.dev.json b/vsts/vss-extension.dev.json
index 607b04f5..3a2b349d 100644
--- a/vsts/vss-extension.dev.json
+++ b/vsts/vss-extension.dev.json
@@ -2,7 +2,7 @@
   "manifestVersion": 1,
   "id": "qodana-dev",
   "name": "Qodana (Dev)",
-  "version": "2024.2.124",
+  "version": "2024.2.133",
   "publisher": "JetBrains",
   "targets": [
     {